//! fonts.rs: Google Fonts and Font Awesome local caching.
//!
//! Sibling modules: ffmpeg.rs (discovery, duration extraction, metadata
//! probing, download) and subtitles.rs (SRT-to-VTT conversion, sidecar
//! discovery, storage, extraction).
use once_cell::sync::Lazy;
|
|
use regex::Regex;
|
|
use serde_json::json;
|
|
use sha2::{Digest, Sha256};
|
|
use std::collections::HashMap;
|
|
use std::fs;
|
|
use std::path::Path;
|
|
use std::time::SystemTime;
|
|
|
|
use crate::state::{atomic_write_json, load_json_with_fallbacks, BACKUP_COUNT};
|
|
|
|
// ---------------------------------------------------------------------------
// Constants
// ---------------------------------------------------------------------------

/// Current version for Google Fonts metadata (fonts_meta.json).
/// Bumping this invalidates the on-disk cache (the cached-check in
/// `ensure_google_fonts_local` compares against it) and forces a re-download.
const GOOGLE_FONTS_META_VERSION: u64 = 7;

/// Current version for Font Awesome metadata (fa_meta.json).
/// Bumping this invalidates the on-disk cache (the cached-check in
/// `ensure_fontawesome_local` compares against it) and forces a re-download.
const FA_META_VERSION: u64 = 3;

/// User-Agent header value for HTTP requests.
/// Google Fonts API returns different CSS based on User-Agent; we want woff2.
const USER_AGENT: &str = "Mozilla/5.0";

/// Google Fonts CSS URLs, as (family name, css2 API URL) pairs.
/// The family name is only used for error messages.
const GOOGLE_FONT_URLS: &[(&str, &str)] = &[
    (
        "Sora",
        "https://fonts.googleapis.com/css2?family=Sora:wght@500;600;700;800&display=swap",
    ),
    (
        "Manrope",
        "https://fonts.googleapis.com/css2?family=Manrope:wght@400;500;600;700;800&display=swap",
    ),
    (
        "IBM Plex Mono",
        "https://fonts.googleapis.com/css2?family=IBM+Plex+Mono:wght@400;500;600&display=swap",
    ),
];

/// Font Awesome CSS URL.
const FA_CSS_URL: &str =
    "https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.5.2/css/all.min.css";

/// Base URL for resolving relative Font Awesome webfont URLs.
/// Note the trailing slash: relative paths are appended directly.
const FA_WEBFONTS_BASE: &str =
    "https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.5.2/webfonts/";

// ---------------------------------------------------------------------------
// Compiled regex patterns
// ---------------------------------------------------------------------------

/// Regex for extracting woff2 font URLs from Google Fonts CSS.
/// Capture group 1 is the raw (possibly quoted) contents of `url(...)`,
/// matched only when followed by `format('woff2')` / `format("woff2")`.
static GOOGLE_FONT_URL_RE: Lazy<Regex> =
    Lazy::new(|| Regex::new(r#"url\(([^)]+)\)\s*format\(['"]woff2['"]\)"#).unwrap());

/// Regex for extracting all url(...) references from Font Awesome CSS.
/// Capture group 1 is the raw contents of `url(...)`, including `data:` URIs.
static FA_URL_RE: Lazy<Regex> = Lazy::new(|| Regex::new(r"url\(([^)]+)\)").unwrap());
|
|
|
|
// ---------------------------------------------------------------------------
|
|
// 1. safe_filename_from_url
|
|
// ---------------------------------------------------------------------------
|
|
|
|
/// Generate a safe local filename from a URL using SHA-256 hash for uniqueness.
|
|
///
|
|
/// The filename is `{stem}-{hash}{suffix}` where `hash` is the first 10 hex
|
|
/// characters of the SHA-256 digest of the full URL. If the URL path has no
|
|
/// extension, `.woff2` is appended.
|
|
pub fn safe_filename_from_url(url: &str) -> String {
|
|
// Extract the last path component from the URL
|
|
let base = url
|
|
.split('?')
|
|
.next()
|
|
.unwrap_or(url)
|
|
.split('#')
|
|
.next()
|
|
.unwrap_or(url)
|
|
.rsplit('/')
|
|
.next()
|
|
.unwrap_or("font.woff2");
|
|
|
|
let base = if base.is_empty() { "font.woff2" } else { base };
|
|
|
|
// Ensure the base has an extension
|
|
let base = if !base.contains('.') {
|
|
format!("{}.woff2", base)
|
|
} else {
|
|
base.to_string()
|
|
};
|
|
|
|
// Split into stem and suffix
|
|
let (stem, suffix) = match base.rfind('.') {
|
|
Some(pos) => (&base[..pos], &base[pos..]),
|
|
None => (base.as_str(), ".woff2"),
|
|
};
|
|
|
|
// Compute SHA-256 hash of the full URL
|
|
let mut hasher = Sha256::new();
|
|
hasher.update(url.as_bytes());
|
|
let digest = format!("{:x}", hasher.finalize());
|
|
let url_hash = &digest[..10];
|
|
|
|
format!("{}-{}{}", stem, url_hash, suffix)
|
|
}
|
|
|
|
// ---------------------------------------------------------------------------
|
|
// 2. ensure_google_fonts_local
|
|
// ---------------------------------------------------------------------------
|
|
|
|
/// Download and cache Google Fonts (Sora, Manrope, IBM Plex Mono) locally.
|
|
///
|
|
/// The `fonts_dir` is the directory where `fonts.css`, `fonts_meta.json`, and
|
|
/// individual `.woff2` files are stored.
|
|
///
|
|
/// If already cached (version matches, ok=true, CSS file exists), this is a
|
|
/// no-op. Otherwise, downloads each font family's CSS from the Google Fonts
|
|
/// API, extracts woff2 URLs, downloads each font file, rewrites the CSS to
|
|
/// use local paths, and writes the combined CSS and metadata.
|
|
pub async fn ensure_google_fonts_local(fonts_dir: &Path) -> Result<(), String> {
|
|
fs::create_dir_all(fonts_dir).map_err(|e| format!("Failed to create fonts dir: {}", e))?;
|
|
|
|
let meta_path = fonts_dir.join("fonts_meta.json");
|
|
let css_path = fonts_dir.join("fonts.css");
|
|
|
|
// Check if already cached
|
|
if let Some(meta) = load_json_with_fallbacks(&meta_path, BACKUP_COUNT) {
|
|
if let Some(obj) = meta.as_object() {
|
|
let version_ok = obj
|
|
.get("version")
|
|
.and_then(|v| v.as_u64())
|
|
.map(|v| v == GOOGLE_FONTS_META_VERSION)
|
|
.unwrap_or(false);
|
|
let ok_flag = obj
|
|
.get("ok")
|
|
.and_then(|v| v.as_bool())
|
|
.unwrap_or(false);
|
|
|
|
if version_ok && ok_flag && css_path.exists() {
|
|
return Ok(());
|
|
}
|
|
}
|
|
}
|
|
|
|
let client = reqwest::Client::builder()
|
|
.user_agent(USER_AGENT)
|
|
.build()
|
|
.map_err(|e| format!("Failed to build HTTP client: {}", e))?;
|
|
|
|
let mut all_css_parts: Vec<String> = Vec::new();
|
|
let mut downloaded_files: Vec<String> = Vec::new();
|
|
let mut errors: Vec<String> = Vec::new();
|
|
|
|
for (family, css_url) in GOOGLE_FONT_URLS {
|
|
// Download the CSS for this font family
|
|
let css_text = match client.get(*css_url).send().await {
|
|
Ok(resp) => match resp.text().await {
|
|
Ok(text) => text,
|
|
Err(e) => {
|
|
errors.push(format!("Failed to read CSS for {}: {}", family, e));
|
|
continue;
|
|
}
|
|
},
|
|
Err(e) => {
|
|
errors.push(format!("Failed to download CSS for {}: {}", family, e));
|
|
continue;
|
|
}
|
|
};
|
|
|
|
// Find all woff2 url(...) references and download each font file
|
|
let mut rewritten_css = css_text.clone();
|
|
let mut replacements: Vec<(String, String)> = Vec::new();
|
|
|
|
for cap in GOOGLE_FONT_URL_RE.captures_iter(&css_text) {
|
|
let raw_url = cap[1].trim().trim_matches('\'').trim_matches('"');
|
|
|
|
let safe_name = safe_filename_from_url(raw_url);
|
|
let local_path = fonts_dir.join(&safe_name);
|
|
|
|
// Download the font file
|
|
match client.get(raw_url).send().await {
|
|
Ok(resp) => match resp.bytes().await {
|
|
Ok(bytes) => {
|
|
if let Err(e) = fs::write(&local_path, &bytes) {
|
|
errors.push(format!("Failed to write {}: {}", safe_name, e));
|
|
continue;
|
|
}
|
|
downloaded_files.push(safe_name.clone());
|
|
}
|
|
Err(e) => {
|
|
errors.push(format!("Failed to read bytes for {}: {}", safe_name, e));
|
|
continue;
|
|
}
|
|
},
|
|
Err(e) => {
|
|
errors.push(format!("Failed to download {}: {}", raw_url, e));
|
|
continue;
|
|
}
|
|
}
|
|
|
|
// Record the replacement: original url(...) content -> local path
|
|
let replacement_url = format!("/fonts/{}", safe_name);
|
|
replacements.push((cap[1].to_string(), replacement_url));
|
|
}
|
|
|
|
// Apply all URL replacements to the CSS
|
|
for (original, replacement) in &replacements {
|
|
let old = format!("url({}) format", original);
|
|
let new = format!("url({}) format", replacement);
|
|
rewritten_css = rewritten_css.replace(&old, &new);
|
|
}
|
|
|
|
all_css_parts.push(rewritten_css);
|
|
}
|
|
|
|
// Write combined CSS
|
|
let combined_css = all_css_parts.join("\n");
|
|
fs::write(&css_path, &combined_css)
|
|
.map_err(|e| format!("Failed to write fonts.css: {}", e))?;
|
|
|
|
// Write metadata
|
|
let timestamp = SystemTime::now()
|
|
.duration_since(SystemTime::UNIX_EPOCH)
|
|
.map(|d| d.as_secs())
|
|
.unwrap_or(0);
|
|
|
|
let ok = errors.is_empty();
|
|
let meta = json!({
|
|
"version": GOOGLE_FONTS_META_VERSION,
|
|
"ok": ok,
|
|
"timestamp": timestamp,
|
|
"downloaded": downloaded_files,
|
|
"errors": errors,
|
|
});
|
|
|
|
atomic_write_json(&meta_path, &meta, BACKUP_COUNT);
|
|
|
|
if ok {
|
|
Ok(())
|
|
} else {
|
|
Err(format!(
|
|
"Google Fonts download completed with errors: {}",
|
|
errors.join("; ")
|
|
))
|
|
}
|
|
}
|
|
|
|
// ---------------------------------------------------------------------------
|
|
// 3. ensure_fontawesome_local
|
|
// ---------------------------------------------------------------------------
|
|
|
|
/// Clean and resolve a Font Awesome URL reference.
|
|
///
|
|
/// Strips whitespace and quotes, resolves relative URLs against the FA
|
|
/// webfonts base URL. Returns the URL unchanged if it is a `data:` URI.
|
|
fn clean_fa_url(u: &str) -> String {
|
|
let u = u.trim().trim_matches('\'').trim_matches('"');
|
|
|
|
if u.starts_with("data:") {
|
|
return u.to_string();
|
|
}
|
|
if u.starts_with("//") {
|
|
return format!("https:{}", u);
|
|
}
|
|
if u.starts_with("http://") || u.starts_with("https://") {
|
|
return u.to_string();
|
|
}
|
|
|
|
// Relative URL: strip leading "./" and "../" then join with base
|
|
let cleaned = u
|
|
.trim_start_matches("./")
|
|
.replace("../", "");
|
|
format!("{}{}", FA_WEBFONTS_BASE, cleaned)
|
|
}
|
|
|
|
/// Download and cache Font Awesome 6.5.2 locally.
|
|
///
|
|
/// The `fa_dir` is the directory where `fa.css` and `fa_meta.json` live.
|
|
/// The `fa_dir/webfonts/` subdirectory holds individual webfont files.
|
|
///
|
|
/// If already cached (version matches, ok=true, CSS file exists), this is a
|
|
/// no-op. Otherwise, downloads the Font Awesome CSS, extracts all `url(...)`
|
|
/// references, downloads each font file (skipping `data:` URIs), rewrites
|
|
/// the CSS to use local paths, and writes the CSS and metadata.
|
|
pub async fn ensure_fontawesome_local(fa_dir: &Path) -> Result<(), String> {
|
|
fs::create_dir_all(fa_dir).map_err(|e| format!("Failed to create fa dir: {}", e))?;
|
|
|
|
let webfonts_dir = fa_dir.join("webfonts");
|
|
fs::create_dir_all(&webfonts_dir)
|
|
.map_err(|e| format!("Failed to create webfonts dir: {}", e))?;
|
|
|
|
let meta_path = fa_dir.join("fa_meta.json");
|
|
let css_path = fa_dir.join("fa.css");
|
|
|
|
// Check if already cached
|
|
if let Some(meta) = load_json_with_fallbacks(&meta_path, BACKUP_COUNT) {
|
|
if let Some(obj) = meta.as_object() {
|
|
let version_ok = obj
|
|
.get("version")
|
|
.and_then(|v| v.as_u64())
|
|
.map(|v| v == FA_META_VERSION)
|
|
.unwrap_or(false);
|
|
let ok_flag = obj
|
|
.get("ok")
|
|
.and_then(|v| v.as_bool())
|
|
.unwrap_or(false);
|
|
|
|
if version_ok && ok_flag && css_path.exists() {
|
|
return Ok(());
|
|
}
|
|
}
|
|
}
|
|
|
|
let client = reqwest::Client::builder()
|
|
.user_agent(USER_AGENT)
|
|
.build()
|
|
.map_err(|e| format!("Failed to build HTTP client: {}", e))?;
|
|
|
|
// Download the Font Awesome CSS
|
|
let css_text = client
|
|
.get(FA_CSS_URL)
|
|
.send()
|
|
.await
|
|
.map_err(|e| format!("Failed to download FA CSS: {}", e))?
|
|
.text()
|
|
.await
|
|
.map_err(|e| format!("Failed to read FA CSS: {}", e))?;
|
|
|
|
let mut downloaded_files: Vec<String> = Vec::new();
|
|
let mut errors: Vec<String> = Vec::new();
|
|
let mut replacements: HashMap<String, String> = HashMap::new();
|
|
|
|
for cap in FA_URL_RE.captures_iter(&css_text) {
|
|
let raw_url = &cap[1];
|
|
let resolved = clean_fa_url(raw_url);
|
|
|
|
// Skip data: URIs
|
|
if resolved.starts_with("data:") {
|
|
continue;
|
|
}
|
|
|
|
// Determine the filename from the resolved URL
|
|
let filename = resolved
|
|
.split('?')
|
|
.next()
|
|
.unwrap_or(&resolved)
|
|
.split('#')
|
|
.next()
|
|
.unwrap_or(&resolved)
|
|
.rsplit('/')
|
|
.next()
|
|
.unwrap_or("font.woff2")
|
|
.to_string();
|
|
|
|
if filename.is_empty() {
|
|
continue;
|
|
}
|
|
|
|
let local_path = webfonts_dir.join(&filename);
|
|
|
|
// Only download each file once
|
|
if !replacements.contains_key(raw_url) {
|
|
match client.get(&resolved).send().await {
|
|
Ok(resp) => match resp.bytes().await {
|
|
Ok(bytes) => {
|
|
if let Err(e) = fs::write(&local_path, &bytes) {
|
|
errors.push(format!("Failed to write {}: {}", filename, e));
|
|
continue;
|
|
}
|
|
downloaded_files.push(filename.clone());
|
|
}
|
|
Err(e) => {
|
|
errors.push(format!("Failed to read bytes for {}: {}", filename, e));
|
|
continue;
|
|
}
|
|
},
|
|
Err(e) => {
|
|
errors.push(format!("Failed to download {}: {}", resolved, e));
|
|
continue;
|
|
}
|
|
}
|
|
|
|
let replacement = format!("/fa/webfonts/{}", filename);
|
|
replacements.insert(raw_url.to_string(), replacement);
|
|
}
|
|
}
|
|
|
|
// Rewrite CSS with local paths
|
|
let mut rewritten_css = css_text.clone();
|
|
for (original, replacement) in &replacements {
|
|
let old = format!("url({})", original);
|
|
let new = format!("url({})", replacement);
|
|
rewritten_css = rewritten_css.replace(&old, &new);
|
|
}
|
|
|
|
// Write rewritten CSS
|
|
fs::write(&css_path, &rewritten_css)
|
|
.map_err(|e| format!("Failed to write fa.css: {}", e))?;
|
|
|
|
// Write metadata
|
|
let timestamp = SystemTime::now()
|
|
.duration_since(SystemTime::UNIX_EPOCH)
|
|
.map(|d| d.as_secs())
|
|
.unwrap_or(0);
|
|
|
|
let ok = errors.is_empty();
|
|
let meta = json!({
|
|
"version": FA_META_VERSION,
|
|
"ok": ok,
|
|
"timestamp": timestamp,
|
|
"downloaded": downloaded_files,
|
|
"errors": errors,
|
|
});
|
|
|
|
atomic_write_json(&meta_path, &meta, BACKUP_COUNT);
|
|
|
|
if ok {
|
|
Ok(())
|
|
} else {
|
|
Err(format!(
|
|
"Font Awesome download completed with errors: {}",
|
|
errors.join("; ")
|
|
))
|
|
}
|
|
}
|
|
|
|
// ===========================================================================
|
|
// Tests
|
|
// ===========================================================================
|
|
|
|
#[cfg(test)]
|
|
mod tests {
|
|
use super::*;
|
|
|
|
// -- safe_filename_from_url -----------------------------------------------
|
|
|
|
#[test]
|
|
fn test_safe_filename_from_url_basic() {
|
|
let url = "https://fonts.gstatic.com/s/sora/v12/abc123.woff2";
|
|
let result = safe_filename_from_url(url);
|
|
|
|
// Should contain the original stem
|
|
assert!(result.starts_with("abc123-"));
|
|
// Should end with .woff2
|
|
assert!(result.ends_with(".woff2"));
|
|
// Should contain a 10-char hash between stem and extension
|
|
let parts: Vec<&str> = result.rsplitn(2, '.').collect();
|
|
let before_ext = parts[1]; // "abc123-{hash}"
|
|
let hash_part = before_ext.rsplit('-').next().unwrap();
|
|
assert_eq!(hash_part.len(), 10);
|
|
}
|
|
|
|
#[test]
|
|
fn test_safe_filename_from_url_no_extension() {
|
|
let url = "https://example.com/fontfile";
|
|
let result = safe_filename_from_url(url);
|
|
|
|
// Should have .woff2 appended
|
|
assert!(result.ends_with(".woff2"));
|
|
assert!(result.starts_with("fontfile-"));
|
|
}
|
|
|
|
#[test]
|
|
fn test_safe_filename_from_url_deterministic() {
|
|
let url = "https://fonts.gstatic.com/s/sora/v12/abc.woff2";
|
|
let result1 = safe_filename_from_url(url);
|
|
let result2 = safe_filename_from_url(url);
|
|
assert_eq!(result1, result2);
|
|
}
|
|
|
|
#[test]
|
|
fn test_safe_filename_different_urls() {
|
|
let url1 = "https://fonts.gstatic.com/s/sora/v12/abc.woff2";
|
|
let url2 = "https://fonts.gstatic.com/s/manrope/v14/def.woff2";
|
|
let result1 = safe_filename_from_url(url1);
|
|
let result2 = safe_filename_from_url(url2);
|
|
assert_ne!(result1, result2);
|
|
}
|
|
|
|
// -- clean_fa_url ---------------------------------------------------------
|
|
|
|
#[test]
|
|
fn test_clean_fa_url_data() {
|
|
let result = clean_fa_url("data:font/woff2;base64,abc");
|
|
assert_eq!(result, "data:font/woff2;base64,abc");
|
|
}
|
|
|
|
#[test]
|
|
fn test_clean_fa_url_protocol_relative() {
|
|
let result = clean_fa_url("//example.com/font.woff2");
|
|
assert_eq!(result, "https://example.com/font.woff2");
|
|
}
|
|
|
|
#[test]
|
|
fn test_clean_fa_url_absolute() {
|
|
let result = clean_fa_url("https://example.com/font.woff2");
|
|
assert_eq!(result, "https://example.com/font.woff2");
|
|
}
|
|
|
|
#[test]
|
|
fn test_clean_fa_url_relative() {
|
|
let result = clean_fa_url("../webfonts/fa-solid-900.woff2");
|
|
assert_eq!(
|
|
result,
|
|
"https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.5.2/webfonts/webfonts/fa-solid-900.woff2"
|
|
);
|
|
}
|
|
|
|
#[test]
|
|
fn test_clean_fa_url_relative_dot_slash() {
|
|
let result = clean_fa_url("./webfonts/fa-solid-900.woff2");
|
|
assert_eq!(
|
|
result,
|
|
"https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.5.2/webfonts/webfonts/fa-solid-900.woff2"
|
|
);
|
|
}
|
|
|
|
#[test]
|
|
fn test_clean_fa_url_strips_quotes() {
|
|
let result = clean_fa_url("'https://example.com/font.woff2'");
|
|
assert_eq!(result, "https://example.com/font.woff2");
|
|
}
|
|
|
|
// -- Integration tests (require network) ----------------------------------
|
|
|
|
#[tokio::test]
|
|
#[ignore]
|
|
async fn test_google_fonts_download() {
|
|
let dir = tempfile::tempdir().unwrap();
|
|
let fonts_dir = dir.path().join("fonts");
|
|
|
|
let result = ensure_google_fonts_local(&fonts_dir).await;
|
|
assert!(result.is_ok(), "Google Fonts download failed: {:?}", result);
|
|
|
|
// Verify fonts.css was created
|
|
let css_path = fonts_dir.join("fonts.css");
|
|
assert!(css_path.exists(), "fonts.css should exist");
|
|
|
|
let css_content = fs::read_to_string(&css_path).unwrap();
|
|
assert!(!css_content.is_empty(), "fonts.css should not be empty");
|
|
// CSS should contain rewritten local paths
|
|
assert!(
|
|
css_content.contains("/fonts/"),
|
|
"CSS should contain /fonts/ local paths"
|
|
);
|
|
|
|
// Verify metadata was created
|
|
let meta_path = fonts_dir.join("fonts_meta.json");
|
|
assert!(meta_path.exists(), "fonts_meta.json should exist");
|
|
|
|
let meta = load_json_with_fallbacks(&meta_path, BACKUP_COUNT).unwrap();
|
|
assert_eq!(meta["version"], GOOGLE_FONTS_META_VERSION);
|
|
assert_eq!(meta["ok"], true);
|
|
assert!(
|
|
meta["downloaded"].as_array().unwrap().len() > 0,
|
|
"Should have downloaded at least one font file"
|
|
);
|
|
|
|
// Second call should be a no-op (cached)
|
|
let result2 = ensure_google_fonts_local(&fonts_dir).await;
|
|
assert!(result2.is_ok());
|
|
}
|
|
|
|
#[tokio::test]
|
|
#[ignore]
|
|
async fn test_fontawesome_download() {
|
|
let dir = tempfile::tempdir().unwrap();
|
|
let fa_dir = dir.path().join("fa");
|
|
|
|
let result = ensure_fontawesome_local(&fa_dir).await;
|
|
assert!(
|
|
result.is_ok(),
|
|
"Font Awesome download failed: {:?}",
|
|
result
|
|
);
|
|
|
|
// Verify fa.css was created
|
|
let css_path = fa_dir.join("fa.css");
|
|
assert!(css_path.exists(), "fa.css should exist");
|
|
|
|
let css_content = fs::read_to_string(&css_path).unwrap();
|
|
assert!(!css_content.is_empty(), "fa.css should not be empty");
|
|
// CSS should contain rewritten local paths
|
|
assert!(
|
|
css_content.contains("/fa/webfonts/"),
|
|
"CSS should contain /fa/webfonts/ local paths"
|
|
);
|
|
|
|
// Verify webfonts directory has files
|
|
let webfonts_dir = fa_dir.join("webfonts");
|
|
assert!(webfonts_dir.exists(), "webfonts dir should exist");
|
|
|
|
let webfont_files: Vec<_> = fs::read_dir(&webfonts_dir)
|
|
.unwrap()
|
|
.filter_map(|e| e.ok())
|
|
.collect();
|
|
assert!(
|
|
!webfont_files.is_empty(),
|
|
"Should have downloaded at least one webfont file"
|
|
);
|
|
|
|
// Verify metadata was created
|
|
let meta_path = fa_dir.join("fa_meta.json");
|
|
assert!(meta_path.exists(), "fa_meta.json should exist");
|
|
|
|
let meta = load_json_with_fallbacks(&meta_path, BACKUP_COUNT).unwrap();
|
|
assert_eq!(meta["version"], FA_META_VERSION);
|
|
assert_eq!(meta["ok"], true);
|
|
|
|
// Second call should be a no-op (cached)
|
|
let result2 = ensure_fontawesome_local(&fa_dir).await;
|
|
assert!(result2.is_ok());
|
|
}
|
|
}
|