Rename from TutorialDock to TutorialVault. Remove legacy Python app and scripts. Fix video playback, subtitles, metadata display, window state persistence, and auto-download of ffmpeg/ffprobe on first run. Bundle fonts via npm instead of runtime download.
1967 lines · 70 KiB · Rust
//! Video library management: folder scanning, progress tracking, playlists,
|
|
//! subtitle integration, and background duration scanning.
|
|
|
|
use serde::{Deserialize, Serialize};
|
|
use serde_json::{json, Value};
|
|
use std::collections::HashMap;
|
|
use std::fs;
|
|
use std::path::{Path, PathBuf};
|
|
use std::sync::atomic::{AtomicBool, Ordering};
|
|
use std::sync::Arc;
|
|
use std::time::SystemTime;
|
|
|
|
use crate::ffmpeg::{self, VideoMetadata};
|
|
use crate::state::{atomic_write_json, load_json_with_fallbacks, BACKUP_COUNT};
|
|
use crate::subtitles;
|
|
use crate::utils::{
|
|
clamp, compute_library_id, file_fingerprint, is_within_root, natural_key,
|
|
pretty_title_from_filename,
|
|
};
|
|
|
|
// ---------------------------------------------------------------------------
|
|
// Constants
|
|
// ---------------------------------------------------------------------------
|
|
|
|
/// Supported video file extensions (lower-case, with leading dot).
/// Compared against the dot-prefixed, lower-cased extension of each file
/// during folder scans.
pub const VIDEO_EXTS: &[&str] = &[
    ".mp4", ".m4v", ".mov", ".webm", ".mkv", ".avi", ".mpg", ".mpeg", ".m2ts", ".mts", ".ogv",
];

/// Current state file format version, written into every saved `LibraryState`.
const STATE_VERSION: u32 = 1;
|
|
|
|
// ---------------------------------------------------------------------------
|
|
// Structs
|
|
// ---------------------------------------------------------------------------
|
|
|
|
/// Reference to a stored subtitle file for a video.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SubtitleRef {
    /// Path to the stored VTT file, relative to the state directory
    /// (e.g. "subtitles/{fid}_{name}.vtt").
    pub vtt: String,
    /// Human-readable label for the subtitle track.
    pub label: String,
}
|
|
|
|
/// Per-video metadata: position, progress, notes, subtitle reference.
|
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
|
pub struct VideoMeta {
|
|
pub pos: f64,
|
|
pub watched: f64,
|
|
pub duration: Option<f64>,
|
|
pub finished: bool,
|
|
pub note: String,
|
|
pub last_open: u64,
|
|
pub subtitle: Option<SubtitleRef>,
|
|
}
|
|
|
|
impl Default for VideoMeta {
|
|
fn default() -> Self {
|
|
Self {
|
|
pos: 0.0,
|
|
watched: 0.0,
|
|
duration: None,
|
|
finished: false,
|
|
note: String::new(),
|
|
last_open: 0,
|
|
subtitle: None,
|
|
}
|
|
}
|
|
}
|
|
|
|
/// Serializable library state persisted to disk.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LibraryState {
    /// State file format version (see `STATE_VERSION`).
    pub version: u32,
    /// Stable id derived from the set of file fingerprints.
    pub library_id: String,
    /// Last opened root folder (absolute path).
    pub last_path: String,
    /// Unix timestamp (seconds) of the last save.
    pub updated_at: u64,
    /// Fingerprint of the currently selected video, if any.
    pub current_fid: Option<String>,
    /// Playback position of the current video, in seconds.
    pub current_time: f64,
    /// Folder-level volume; clamped to [0.0, 1.0] when a folder is opened.
    pub volume: f64,
    /// Folder-level autoplay preference.
    pub autoplay: bool,
    /// Folder-level playback rate; clamped to [0.25, 3.0] when a folder is opened.
    pub playback_rate: f64,
    /// Saved playlist ordering (fids).
    pub order_fids: Vec<String>,
    /// Per-video metadata keyed by fid.
    pub videos: HashMap<String, VideoMeta>,
}

impl Default for LibraryState {
    /// Empty state: no folder, full volume, normal rate, autoplay off.
    fn default() -> Self {
        Self {
            version: STATE_VERSION,
            library_id: String::new(),
            last_path: String::new(),
            updated_at: 0,
            current_fid: None,
            current_time: 0.0,
            volume: 1.0,
            autoplay: false,
            playback_rate: 1.0,
            order_fids: Vec::new(),
            videos: HashMap::new(),
        }
    }
}
|
|
|
|
/// In-memory representation of the video library.
pub struct Library {
    /// Canonicalized root folder; `None` until a folder is opened.
    pub root: Option<PathBuf>,
    /// Full paths of all videos, in playlist order.
    pub files: Vec<PathBuf>,
    /// File fingerprints (fids), parallel to `files`.
    pub fids: Vec<String>,
    /// Relative paths (forward-slash separated), parallel to `files`.
    pub relpaths: Vec<String>,
    /// relpath -> fid lookup.
    pub rel_to_fid: HashMap<String, String>,
    /// fid -> relpath lookup.
    pub fid_to_rel: HashMap<String, String>,
    /// Persisted state: progress, playlist order, folder settings.
    pub state: LibraryState,
    /// On-disk location of the state file, once a folder is opened.
    pub state_path: Option<PathBuf>,
    // ffmpeg tool paths (None until located).
    pub ffprobe: Option<PathBuf>,
    pub ffmpeg: Option<PathBuf>,
    // Cache of ffprobe results, keyed by fid.
    pub meta_cache: HashMap<String, VideoMetadata>,
    // Flag used to signal a running background duration scan to stop.
    scan_stop: Arc<AtomicBool>,
}
|
|
|
|
// ---------------------------------------------------------------------------
|
|
// Helper functions (private)
|
|
// ---------------------------------------------------------------------------
|
|
|
|
/// Seconds since the Unix epoch; 0 if the system clock reads before 1970.
fn now_secs() -> u64 {
    match SystemTime::now().duration_since(SystemTime::UNIX_EPOCH) {
        Ok(elapsed) => elapsed.as_secs(),
        Err(_) => 0,
    }
}
|
|
|
|
/// Natural-sort relative paths and return the corresponding fid list.
|
|
fn _sorted_default(relpaths: &[String], rel_to_fid: &HashMap<String, String>) -> Vec<String> {
|
|
let mut sorted_rels: Vec<String> = relpaths.to_vec();
|
|
sorted_rels.sort_by(|a, b| natural_key(a).cmp(&natural_key(b)));
|
|
sorted_rels
|
|
.iter()
|
|
.filter_map(|r| rel_to_fid.get(r).cloned())
|
|
.collect()
|
|
}
|
|
|
|
/// Apply a saved ordering to the full set of file IDs.
|
|
///
|
|
/// Files present in `order_fids` keep their saved order. Files not in the
|
|
/// saved order are appended at the end, naturally sorted by their relative
|
|
/// path.
|
|
fn _apply_saved_order(
|
|
all_fids: &[String],
|
|
fid_to_rel: &HashMap<String, String>,
|
|
order_fids: &[String],
|
|
) -> Vec<String> {
|
|
if order_fids.is_empty() {
|
|
// Fall back to natural sort.
|
|
let mut fids = all_fids.to_vec();
|
|
fids.sort_by(|a, b| {
|
|
let ra = fid_to_rel.get(a).cloned().unwrap_or_default();
|
|
let rb = fid_to_rel.get(b).cloned().unwrap_or_default();
|
|
natural_key(&ra).cmp(&natural_key(&rb))
|
|
});
|
|
return fids;
|
|
}
|
|
|
|
let all_set: std::collections::HashSet<&String> = all_fids.iter().collect();
|
|
|
|
// Saved order, keeping only fids that still exist.
|
|
let mut result: Vec<String> = order_fids
|
|
.iter()
|
|
.filter(|f| all_set.contains(f))
|
|
.cloned()
|
|
.collect();
|
|
|
|
let ordered_set: std::collections::HashSet<&String> = result.iter().collect();
|
|
|
|
// New files not in the saved order, naturally sorted.
|
|
let mut new_fids: Vec<String> = all_fids
|
|
.iter()
|
|
.filter(|f| !ordered_set.contains(f))
|
|
.cloned()
|
|
.collect();
|
|
new_fids.sort_by(|a, b| {
|
|
let ra = fid_to_rel.get(a).cloned().unwrap_or_default();
|
|
let rb = fid_to_rel.get(b).cloned().unwrap_or_default();
|
|
natural_key(&ra).cmp(&natural_key(&rb))
|
|
});
|
|
|
|
result.extend(new_fids);
|
|
result
|
|
}
|
|
|
|
/// Compute per-file tree display flags: depth, pipe characters, is_last, etc.
///
/// Depth is limited to 1 level (immediate parent folder only). The result
/// maps each relpath to a JSON object with keys `depth`, `pipes`,
/// `is_last`, and `has_prev_in_parent`.
///
/// NOTE: `is_last` is computed per group by counting how many members of
/// the same parent have been visited so far, so it relies on `rels` being
/// ordered such that each parent's files are contiguous (the scan order
/// produced by `_scan_recursive` satisfies this).
fn _tree_flags(rels: &[String]) -> HashMap<String, Value> {
    let mut result: HashMap<String, Value> = HashMap::new();

    // Group files by their immediate parent folder.
    // For a relpath like "subfolder/file.mp4", parent = "subfolder".
    // For "file.mp4", parent = "".
    let mut groups: HashMap<String, Vec<usize>> = HashMap::new();
    for (i, rel) in rels.iter().enumerate() {
        let parent = if let Some(pos) = rel.rfind('/') {
            rel[..pos].to_string()
        } else {
            String::new()
        };
        groups.entry(parent).or_default().push(i);
    }

    // Track which parents we have seen so far for has_prev_in_parent.
    let mut parent_seen_count: HashMap<String, usize> = HashMap::new();

    for (_i, rel) in rels.iter().enumerate() {
        // depth 0 = top level, depth 1 = inside a subfolder (max depth shown).
        let (parent, depth) = if let Some(pos) = rel.rfind('/') {
            (rel[..pos].to_string(), 1)
        } else {
            (String::new(), 0)
        };

        // Group is guaranteed to exist: it was built from the same rels above.
        let group = groups.get(&parent).unwrap();
        let position_in_group = parent_seen_count.entry(parent.clone()).or_insert(0);
        let is_last = *position_in_group + 1 == group.len();
        let has_prev_in_parent = *position_in_group > 0;
        *parent_seen_count.get_mut(&parent).unwrap() += 1;

        // Build pipe prefix for tree display (box-drawing characters).
        let pipes = if depth == 0 {
            String::new()
        } else {
            if is_last {
                "\u{2514}\u{2500} ".to_string() // "-- "
            } else {
                "\u{251C}\u{2500} ".to_string() // "|-- "
            }
        };

        result.insert(
            rel.clone(),
            json!({
                "depth": depth,
                "pipes": pipes,
                "is_last": is_last,
                "has_prev_in_parent": has_prev_in_parent,
            }),
        );
    }

    result
}
|
|
|
|
/// Recursively scan a directory for video files.
|
|
fn scan_video_files(dir: &Path) -> Vec<PathBuf> {
|
|
let mut results = Vec::new();
|
|
_scan_recursive(dir, &mut results);
|
|
results
|
|
}
|
|
|
|
fn _scan_recursive(dir: &Path, out: &mut Vec<PathBuf>) {
|
|
let entries = match fs::read_dir(dir) {
|
|
Ok(e) => e,
|
|
Err(_) => return,
|
|
};
|
|
|
|
let mut dirs: Vec<PathBuf> = Vec::new();
|
|
let mut files: Vec<PathBuf> = Vec::new();
|
|
|
|
for entry in entries.flatten() {
|
|
let path = entry.path();
|
|
if path.is_dir() {
|
|
dirs.push(path);
|
|
} else if path.is_file() {
|
|
if let Some(ext) = path.extension() {
|
|
let ext_lower = format!(".{}", ext.to_string_lossy().to_lowercase());
|
|
if VIDEO_EXTS.contains(&ext_lower.as_str()) {
|
|
files.push(path);
|
|
}
|
|
}
|
|
}
|
|
}
|
|
|
|
// Sort files naturally for deterministic ordering.
|
|
files.sort_by(|a, b| {
|
|
let na = a.file_name().unwrap_or_default().to_string_lossy();
|
|
let nb = b.file_name().unwrap_or_default().to_string_lossy();
|
|
natural_key(&na).cmp(&natural_key(&nb))
|
|
});
|
|
out.extend(files);
|
|
|
|
// Recurse into subdirectories in sorted order.
|
|
dirs.sort_by(|a, b| {
|
|
let na = a.file_name().unwrap_or_default().to_string_lossy();
|
|
let nb = b.file_name().unwrap_or_default().to_string_lossy();
|
|
natural_key(&na).cmp(&natural_key(&nb))
|
|
});
|
|
for d in &dirs {
|
|
_scan_recursive(d, out);
|
|
}
|
|
}
|
|
|
|
// ---------------------------------------------------------------------------
|
|
// Library implementation
|
|
// ---------------------------------------------------------------------------
|
|
|
|
impl Library {
|
|
// -----------------------------------------------------------------------
|
|
// new
|
|
// -----------------------------------------------------------------------
|
|
|
|
/// Create a new empty library with default state.
|
|
pub fn new() -> Library {
|
|
Library {
|
|
root: None,
|
|
files: Vec::new(),
|
|
fids: Vec::new(),
|
|
relpaths: Vec::new(),
|
|
rel_to_fid: HashMap::new(),
|
|
fid_to_rel: HashMap::new(),
|
|
state: LibraryState::default(),
|
|
state_path: None,
|
|
ffprobe: None,
|
|
ffmpeg: None,
|
|
meta_cache: HashMap::new(),
|
|
scan_stop: Arc::new(AtomicBool::new(false)),
|
|
}
|
|
}
|
|
|
|
// -----------------------------------------------------------------------
|
|
// set_root
|
|
// -----------------------------------------------------------------------
|
|
|
|
/// Open a folder as the video library root.
///
/// Scans for video files, computes fingerprints, loads or creates state,
/// merges with existing progress, and returns `get_library_info()`.
///
/// Errors if the folder cannot be resolved, is not a directory, or
/// contains no video files. On success all of `self`'s file lists and
/// mappings are replaced and the state is saved to disk.
pub fn set_root(&mut self, folder: &str, state_dir: &Path) -> Result<Value, String> {
    // 1. Resolve and verify folder.
    let folder_path = PathBuf::from(folder);
    let folder_path = folder_path
        .canonicalize()
        .map_err(|e| format!("Cannot resolve folder '{}': {}", folder, e))?;

    if !folder_path.is_dir() {
        return Err(format!("'{}' is not a directory", folder));
    }

    // Signal any running background scan to stop.
    self.scan_stop.store(true, Ordering::SeqCst);

    // 2. Recursively scan for video files.
    let found_files = scan_video_files(&folder_path);

    if found_files.is_empty() {
        return Err(format!("No video files found in '{}'", folder));
    }

    // 3. Compute fingerprints and build relpath<->fid mappings.
    let mut rel_to_fid: HashMap<String, String> = HashMap::new();
    let mut fid_to_rel: HashMap<String, String> = HashMap::new();
    let mut all_fids: Vec<String> = Vec::new();
    let mut all_relpaths: Vec<String> = Vec::new();

    for file in &found_files {
        // Relpaths always use forward slashes, even on Windows.
        let relpath = file
            .strip_prefix(&folder_path)
            .unwrap_or(file.as_path())
            .to_string_lossy()
            .replace('\\', "/");

        let fid = file_fingerprint(file);

        // Skip duplicates (same fingerprint from hardlinks, etc.).
        if fid_to_rel.contains_key(&fid) {
            continue;
        }

        rel_to_fid.insert(relpath.clone(), fid.clone());
        fid_to_rel.insert(fid.clone(), relpath.clone());
        all_fids.push(fid);
        all_relpaths.push(relpath);
    }

    // 4. Compute library ID from the fingerprint set.
    let library_id = compute_library_id(&all_fids);

    // 5. Load existing state or create baseline.
    let state_file = state_dir.join(format!("library_{}.json", library_id));
    let baseline = LibraryState {
        version: STATE_VERSION,
        library_id: library_id.clone(),
        last_path: folder_path.to_string_lossy().to_string(),
        updated_at: now_secs(),
        current_fid: None,
        current_time: 0.0,
        volume: 1.0,
        autoplay: false,
        playback_rate: 1.0,
        order_fids: Vec::new(),
        videos: HashMap::new(),
    };

    // Any load/deserialize/library-id mismatch falls back to the baseline.
    let mut state = if let Some(loaded_val) = load_json_with_fallbacks(&state_file, BACKUP_COUNT)
    {
        // Try to deserialize the loaded JSON into LibraryState.
        if let Ok(loaded_state) = serde_json::from_value::<LibraryState>(loaded_val.clone()) {
            // 6. Merge: only if same library_id.
            if loaded_state.library_id == library_id {
                self._merge_state(loaded_state, &baseline)
            } else {
                baseline
            }
        } else {
            baseline
        }
    } else {
        baseline
    };

    // 7. Update last_path to current folder.
    state.last_path = folder_path.to_string_lossy().to_string();

    // 8. Normalize video metadata: keep entries only for fids found on disk,
    //    creating defaults for new files.
    let existing_videos = state.videos.clone();
    let mut normalized: HashMap<String, VideoMeta> = HashMap::new();
    for fid in &all_fids {
        let meta = existing_videos.get(fid).cloned().unwrap_or_default();
        normalized.insert(fid.clone(), meta);
    }
    state.videos = normalized;

    // 9. Normalize settings to their valid ranges.
    state.volume = clamp(state.volume, 0.0, 1.0);
    state.playback_rate = clamp(state.playback_rate, 0.25, 3.0);

    // 10. Clean order_fids: drop fids no longer present.
    let valid_set: std::collections::HashSet<&String> = all_fids.iter().collect();
    state.order_fids.retain(|f| valid_set.contains(f));

    // 11. Apply saved ordering (new files appended, naturally sorted).
    let ordered_fids =
        _apply_saved_order(&all_fids, &fid_to_rel, &state.order_fids);
    state.order_fids = ordered_fids.clone();

    // 12. Build ordered file lists parallel to ordered_fids.
    let ordered_relpaths: Vec<String> = ordered_fids
        .iter()
        .filter_map(|fid| fid_to_rel.get(fid).cloned())
        .collect();
    let ordered_files: Vec<PathBuf> = ordered_relpaths
        .iter()
        .map(|rel| folder_path.join(rel.replace('/', std::path::MAIN_SEPARATOR_STR)))
        .collect();

    // 13. Validate current_fid; reset to the first video if it vanished.
    if let Some(ref cfid) = state.current_fid {
        if !fid_to_rel.contains_key(cfid) {
            state.current_fid = ordered_fids.first().cloned();
            state.current_time = 0.0;
        }
    }

    // Assign everything to self.
    self.root = Some(folder_path);
    self.files = ordered_files;
    self.fids = ordered_fids;
    self.relpaths = ordered_relpaths;
    self.rel_to_fid = rel_to_fid;
    self.fid_to_rel = fid_to_rel;
    self.state = state;
    self.state_path = Some(state_file);
    self.meta_cache.clear();
    // Fresh stop flag so a new background scan can run for this root.
    self.scan_stop = Arc::new(AtomicBool::new(false));

    // 14. Save state.
    self.save_state();

    // 15. Return library info.
    Ok(self.get_library_info())
}
|
|
|
|
/// Merge a loaded state with a baseline, preferring loaded values.
|
|
fn _merge_state(&self, loaded: LibraryState, baseline: &LibraryState) -> LibraryState {
|
|
LibraryState {
|
|
version: STATE_VERSION,
|
|
library_id: loaded.library_id,
|
|
last_path: baseline.last_path.clone(),
|
|
updated_at: loaded.updated_at,
|
|
current_fid: loaded.current_fid,
|
|
current_time: loaded.current_time,
|
|
volume: loaded.volume,
|
|
autoplay: loaded.autoplay,
|
|
playback_rate: loaded.playback_rate,
|
|
order_fids: loaded.order_fids,
|
|
videos: loaded.videos,
|
|
}
|
|
}
|
|
|
|
// -----------------------------------------------------------------------
|
|
// save_state
|
|
// -----------------------------------------------------------------------
|
|
|
|
/// Persist the current state to disk with backup rotation.
|
|
pub fn save_state(&mut self) {
|
|
self.state.updated_at = now_secs();
|
|
if let Some(ref path) = self.state_path {
|
|
if let Ok(val) = serde_json::to_value(&self.state) {
|
|
atomic_write_json(path, &val, BACKUP_COUNT);
|
|
}
|
|
}
|
|
}
|
|
|
|
// -----------------------------------------------------------------------
|
|
// get_library_info
|
|
// -----------------------------------------------------------------------
|
|
|
|
/// Build a comprehensive JSON object describing the library state.
///
/// Includes the root folder, per-video playlist items (with tree-display
/// flags, progress, and subtitle presence), aggregate stats from
/// `_folder_stats`, and the "next up" suggestion from `_compute_next_up`.
pub fn get_library_info(&self) -> Value {
    let folder = self
        .root
        .as_ref()
        .map(|p| {
            let s = p.to_string_lossy().to_string();
            // Strip Windows extended-length path prefix (\\?\) so the UI
            // shows a normal path.
            s.strip_prefix("\\\\?\\").unwrap_or(&s).to_string()
        })
        .unwrap_or_default();

    let count = self.fids.len();

    // Position of the current video in the ordered playlist, if any.
    let current_index = self
        .state
        .current_fid
        .as_ref()
        .and_then(|cfid| self.fids.iter().position(|f| f == cfid));

    // Build tree flags.
    let tree_flags = _tree_flags(&self.relpaths);

    // Check if any subdirectories exist.
    let has_subdirs = self.relpaths.iter().any(|r| r.contains('/'));

    // Build items array, one entry per video in playlist order.
    let mut items: Vec<Value> = Vec::new();
    for (i, fid) in self.fids.iter().enumerate() {
        let relpath = self.fid_to_rel.get(fid).cloned().unwrap_or_default();
        let filename = relpath.rsplit('/').next().unwrap_or(&relpath);
        let title = pretty_title_from_filename(filename);

        // Tree-display flags; defaults apply if the relpath is somehow
        // missing from the map.
        let flags = tree_flags.get(&relpath);
        let depth = flags
            .and_then(|f| f.get("depth"))
            .and_then(|v| v.as_u64())
            .unwrap_or(0);
        let pipes = flags
            .and_then(|f| f.get("pipes"))
            .and_then(|v| v.as_str())
            .unwrap_or("");
        let is_last = flags
            .and_then(|f| f.get("is_last"))
            .and_then(|v| v.as_bool())
            .unwrap_or(true);
        let has_prev_in_parent = flags
            .and_then(|f| f.get("has_prev_in_parent"))
            .and_then(|v| v.as_bool())
            .unwrap_or(false);

        // Per-video progress; missing metadata yields zeros/false.
        let meta = self.state.videos.get(fid);
        let pos = meta.map(|m| m.pos).unwrap_or(0.0);
        let watched = meta.map(|m| m.watched).unwrap_or(0.0);
        let duration = meta.and_then(|m| m.duration);
        let finished = meta.map(|m| m.finished).unwrap_or(false);
        let note_len = meta.map(|m| m.note.len()).unwrap_or(0);
        let last_open = meta.map(|m| m.last_open).unwrap_or(0);
        let has_sub = meta
            .map(|m| m.subtitle.is_some())
            .unwrap_or(false);

        items.push(json!({
            "index": i,
            "fid": fid,
            "name": filename,
            "title": title,
            "relpath": relpath,
            "depth": depth,
            "pipes": pipes,
            "is_last": is_last,
            "has_prev_in_parent": has_prev_in_parent,
            "pos": pos,
            "watched": watched,
            "duration": duration,
            "finished": finished,
            "note_len": note_len,
            "last_open": last_open,
            "has_sub": has_sub,
        }));
    }

    // Folder stats.
    let stats = self._folder_stats();

    // Next up: first unfinished video after current.
    let next_up = self._compute_next_up(current_index);

    json!({
        "ok": true,
        "folder": folder,
        "library_id": self.state.library_id,
        "count": count,
        "current_index": current_index,
        "current_fid": self.state.current_fid,
        "current_time": self.state.current_time,
        "folder_volume": self.state.volume,
        "folder_autoplay": self.state.autoplay,
        "folder_rate": self.state.playback_rate,
        "items": items,
        "has_subdirs": has_subdirs,
        "overall_progress": stats.get("overall_progress").cloned().unwrap_or(json!(0.0)),
        "durations_known": stats.get("durations_known").cloned().unwrap_or(json!(false)),
        "finished_count": stats.get("finished_count").cloned().unwrap_or(json!(0)),
        "remaining_count": stats.get("remaining_count").cloned().unwrap_or(json!(0)),
        "remaining_seconds_known": stats.get("remaining_seconds_known").cloned().unwrap_or(json!(0.0)),
        "top_folders": stats.get("top_folders").cloned().unwrap_or(json!([])),
        "next_up": next_up,
    })
}
|
|
|
|
/// Find the next unfinished video index after the current position.
|
|
fn _compute_next_up(&self, current_index: Option<usize>) -> Value {
|
|
let make_result = |i: usize| -> Value {
|
|
let fid = &self.fids[i];
|
|
let title = self.fid_to_rel.get(fid)
|
|
.map(|r| pretty_title_from_filename(r))
|
|
.unwrap_or_default();
|
|
json!({"index": i, "title": title})
|
|
};
|
|
|
|
let start = current_index.map(|i| i + 1).unwrap_or(0);
|
|
for i in start..self.fids.len() {
|
|
let fid = &self.fids[i];
|
|
let finished = self
|
|
.state
|
|
.videos
|
|
.get(fid)
|
|
.map(|m| m.finished)
|
|
.unwrap_or(false);
|
|
if !finished {
|
|
return make_result(i);
|
|
}
|
|
}
|
|
// Wrap around from beginning.
|
|
let end = current_index.unwrap_or(self.fids.len());
|
|
for i in 0..end.min(self.fids.len()) {
|
|
let fid = &self.fids[i];
|
|
let finished = self
|
|
.state
|
|
.videos
|
|
.get(fid)
|
|
.map(|m| m.finished)
|
|
.unwrap_or(false);
|
|
if !finished {
|
|
return make_result(i);
|
|
}
|
|
}
|
|
Value::Null
|
|
}
|
|
|
|
// -----------------------------------------------------------------------
|
|
// update_progress
|
|
// -----------------------------------------------------------------------
|
|
|
|
/// Update playback progress for a video at the given index.
///
/// The `finished` flag is sticky: once set, it remains true even if
/// `current_time` decreases. `watched` is a high-water mark and never
/// decreases. An out-of-range index is clamped to the last video.
/// Also updates the library-level current video/time and saves state.
pub fn update_progress(
    &mut self,
    index: usize,
    current_time: f64,
    duration: Option<f64>,
    playing: bool,
) -> Value {
    if self.fids.is_empty() {
        return json!({"ok": false, "error": "library is empty"});
    }

    // Clamp index into range (len >= 1 here, so len - 1 is safe).
    let index = index.min(self.fids.len() - 1);
    let fid = self.fids[index].clone();

    let meta = self
        .state
        .videos
        .entry(fid.clone())
        .or_insert_with(VideoMeta::default);

    meta.pos = current_time;

    // High-water mark.
    if current_time > meta.watched {
        meta.watched = current_time;
    }

    // Update duration if provided and positive.
    if let Some(d) = duration {
        if d > 0.0 {
            meta.duration = Some(d);
        }
    }

    // Finished is sticky: set when within 2 seconds of the end.
    let effective_duration = meta.duration.unwrap_or(0.0);
    let threshold = (effective_duration - 2.0).max(0.0);
    if effective_duration > 0.0 && current_time >= threshold {
        meta.finished = true;
    }
    // Note: if already finished, it stays finished.

    // Update last_open timestamp if actively playing.
    if playing {
        meta.last_open = now_secs();
    }

    // Update current tracking.
    self.state.current_fid = Some(fid);
    self.state.current_time = current_time;

    // Snapshot values before releasing the borrow on self.state.videos.
    let pos = meta.pos;
    let watched = meta.watched;
    let duration_val = meta.duration;
    let finished = meta.finished;

    self.save_state();

    json!({
        "ok": true,
        "index": index,
        "pos": pos,
        "watched": watched,
        "duration": duration_val,
        "finished": finished,
    })
}
|
|
|
|
// -----------------------------------------------------------------------
|
|
// set_current
|
|
// -----------------------------------------------------------------------
|
|
|
|
/// Set the currently active video by index and timecode.
|
|
pub fn set_current(&mut self, index: usize, timecode: f64) -> Value {
|
|
if self.fids.is_empty() {
|
|
return json!({"ok": false, "error": "library is empty"});
|
|
}
|
|
|
|
let index = index.min(self.fids.len() - 1);
|
|
let fid = self.fids[index].clone();
|
|
|
|
self.state.current_fid = Some(fid.clone());
|
|
self.state.current_time = timecode;
|
|
|
|
self.save_state();
|
|
|
|
json!({
|
|
"ok": true,
|
|
"index": index,
|
|
"fid": fid,
|
|
"current_time": timecode,
|
|
})
|
|
}
|
|
|
|
// -----------------------------------------------------------------------
|
|
// set_folder_volume
|
|
// -----------------------------------------------------------------------
|
|
|
|
/// Set the folder-level volume (clamped 0.0 to 1.0).
|
|
pub fn set_folder_volume(&mut self, volume: f64) -> Value {
|
|
self.state.volume = clamp(volume, 0.0, 1.0);
|
|
self.save_state();
|
|
json!({"ok": true, "folder_volume": self.state.volume})
|
|
}
|
|
|
|
// -----------------------------------------------------------------------
|
|
// set_folder_autoplay
|
|
// -----------------------------------------------------------------------
|
|
|
|
/// Set the folder-level autoplay preference.
|
|
pub fn set_folder_autoplay(&mut self, enabled: bool) -> Value {
|
|
self.state.autoplay = enabled;
|
|
self.save_state();
|
|
json!({"ok": true, "folder_autoplay": self.state.autoplay})
|
|
}
|
|
|
|
// -----------------------------------------------------------------------
|
|
// set_folder_rate
|
|
// -----------------------------------------------------------------------
|
|
|
|
/// Set the folder-level playback rate (clamped 0.25 to 3.0).
|
|
pub fn set_folder_rate(&mut self, rate: f64) -> Value {
|
|
self.state.playback_rate = clamp(rate, 0.25, 3.0);
|
|
self.save_state();
|
|
json!({"ok": true, "folder_rate": self.state.playback_rate})
|
|
}
|
|
|
|
// -----------------------------------------------------------------------
|
|
// set_order
|
|
// -----------------------------------------------------------------------
|
|
|
|
/// Reorder the playlist. `fids` contains the desired ordering; any fids
/// not in the list are appended at the end (naturally sorted by relative
/// path). Unknown fids in the request are dropped. Rebuilds the parallel
/// `fids`/`relpaths`/`files` lists, saves state, and returns the full
/// library info.
pub fn set_order(&mut self, fids: Vec<String>) -> Value {
    let valid_set: std::collections::HashSet<&String> = self.fids.iter().collect();

    // Keep only fids that actually exist.
    let mut ordered: Vec<String> = fids.into_iter().filter(|f| valid_set.contains(f)).collect();

    // Append remaining fids not in the new order.
    let ordered_set: std::collections::HashSet<&String> = ordered.iter().collect();
    let mut remaining: Vec<String> = self
        .fids
        .iter()
        .filter(|f| !ordered_set.contains(f))
        .cloned()
        .collect();
    remaining.sort_by(|a, b| {
        let ra = self.fid_to_rel.get(a).cloned().unwrap_or_default();
        let rb = self.fid_to_rel.get(b).cloned().unwrap_or_default();
        natural_key(&ra).cmp(&natural_key(&rb))
    });
    ordered.extend(remaining);

    // Rebuild parallel file lists from the new ordering.
    self.fids = ordered.clone();
    self.relpaths = ordered
        .iter()
        .filter_map(|fid| self.fid_to_rel.get(fid).cloned())
        .collect();
    if let Some(ref root) = self.root {
        self.files = self
            .relpaths
            .iter()
            .map(|rel| root.join(rel.replace('/', std::path::MAIN_SEPARATOR_STR)))
            .collect();
    }

    self.state.order_fids = ordered;
    self.save_state();

    self.get_library_info()
}
|
|
|
|
// -----------------------------------------------------------------------
|
|
// get_note / set_note
|
|
// -----------------------------------------------------------------------
|
|
|
|
/// Return the note for the given fid, or an empty string if not found.
|
|
pub fn get_note(&self, fid: &str) -> String {
|
|
self.state
|
|
.videos
|
|
.get(fid)
|
|
.map(|m| m.note.clone())
|
|
.unwrap_or_default()
|
|
}
|
|
|
|
/// Set a note for the given fid and save state.
|
|
pub fn set_note(&mut self, fid: &str, note: &str) -> Value {
|
|
if let Some(meta) = self.state.videos.get_mut(fid) {
|
|
meta.note = note.to_string();
|
|
} else {
|
|
let mut m = VideoMeta::default();
|
|
m.note = note.to_string();
|
|
self.state.videos.insert(fid.to_string(), m);
|
|
}
|
|
self.save_state();
|
|
json!({"ok": true, "fid": fid, "note_len": note.len()})
|
|
}
|
|
|
|
// -----------------------------------------------------------------------
|
|
// get_video_path
|
|
// -----------------------------------------------------------------------
|
|
|
|
/// Get the full file path for a video by index.
|
|
///
|
|
/// Performs bounds checking and path-traversal protection.
|
|
pub fn get_video_path(&self, index: usize) -> Result<PathBuf, String> {
|
|
if self.files.is_empty() {
|
|
return Err("No files in library".to_string());
|
|
}
|
|
if index >= self.files.len() {
|
|
return Err(format!(
|
|
"Index {} out of range (0..{})",
|
|
index,
|
|
self.files.len()
|
|
));
|
|
}
|
|
|
|
let path = &self.files[index];
|
|
|
|
// Path-traversal protection.
|
|
if let Some(ref root) = self.root {
|
|
if !is_within_root(root, path) {
|
|
return Err("Path traversal detected".to_string());
|
|
}
|
|
}
|
|
|
|
if !path.exists() {
|
|
return Err(format!("File not found: {}", path.display()));
|
|
}
|
|
|
|
Ok(path.clone())
|
|
}
|
|
|
|
// -----------------------------------------------------------------------
|
|
// get_current_video_metadata
|
|
// -----------------------------------------------------------------------
|
|
|
|
/// Return basic file metadata plus cached ffprobe metadata for the
/// current video.
///
/// Probes the file with ffprobe on first access (when a probe binary is
/// available) and caches the result by fid; later calls reuse the cache.
/// The returned object always has `basic` and `ffprobe_found`; `probe`
/// is present only when probe metadata is available.
pub fn get_current_video_metadata(&mut self) -> Value {
    let fid = match &self.state.current_fid {
        Some(f) => f.clone(),
        None => return json!({"ok": false, "error": "no current video"}),
    };

    let index = match self.fids.iter().position(|f| f == &fid) {
        Some(i) => i,
        None => return json!({"ok": false, "error": "current fid not in library"}),
    };

    let basic = self._basic_file_meta(&fid);

    // Probe if not cached. The ffprobe path is cloned so the immutable
    // borrow ends before mutating meta_cache.
    if !self.meta_cache.contains_key(&fid) {
        if let Some(ref ffprobe) = self.ffprobe.clone() {
            if index < self.files.len() {
                if let Some(meta) =
                    ffmpeg::ffprobe_video_metadata(&self.files[index], ffprobe)
                {
                    self.meta_cache.insert(fid.clone(), meta);
                }
            }
        }
    }

    let mut result = json!({
        "ok": true,
        "basic": basic,
        "ffprobe_found": self.ffprobe.is_some(),
    });

    // Attach probe data when available; `result` is an object by
    // construction, so as_object_mut cannot fail.
    if let Some(cached) = self.meta_cache.get(&fid) {
        if let Ok(meta_val) = serde_json::to_value(cached) {
            result
                .as_object_mut()
                .unwrap()
                .insert("probe".to_string(), meta_val);
        }
    }

    result
}
|
|
|
|
// -----------------------------------------------------------------------
|
|
// Subtitle methods
|
|
// -----------------------------------------------------------------------
|
|
|
|
/// Build the custom-protocol URL for a stored subtitle VTT path.
/// `vtt` looks like `"subtitles/{fid}_{name}.vtt"`; only the final path
/// component goes into the URL.
fn _sub_url(vtt: &str) -> String {
    let filename = match vtt.rfind('/') {
        Some(pos) => &vtt[pos + 1..],
        None => vtt,
    };
    format!("http://tutdock.localhost/sub/{}", filename)
}
|
|
|
|
/// Build a successful subtitle JSON response with `has`, `url`, and `label`.
|
|
fn _sub_response(vtt: &str, label: &str) -> Value {
|
|
json!({
|
|
"ok": true,
|
|
"has": true,
|
|
"url": Self::_sub_url(vtt),
|
|
"label": label,
|
|
})
|
|
}
|
|
|
|
/// Get subtitle for the current video.
///
/// Priority: stored subtitle -> sidecar file next to the video ->
/// embedded track extracted with ffmpeg. A sidecar or embedded hit is
/// stored into the state's subtitle reference and persisted, so later
/// calls take the fast stored-subtitle path. Returns
/// `{"ok": true, "has": false}` when no subtitle is found.
pub fn get_subtitle_for_current(&mut self, state_dir: &Path) -> Value {
    let fid = match &self.state.current_fid {
        Some(f) => f.clone(),
        None => return json!({"ok": false, "error": "no current video"}),
    };

    let index = match self.fids.iter().position(|f| f == &fid) {
        Some(i) => i,
        None => return json!({"ok": false, "error": "current fid not in library"}),
    };

    // 1. Check stored subtitle (only if the VTT file still exists on disk).
    if let Some(meta) = self.state.videos.get(&fid) {
        if let Some(ref sub_ref) = meta.subtitle {
            let vtt_path = state_dir.join(&sub_ref.vtt);
            if vtt_path.exists() {
                return Self::_sub_response(&sub_ref.vtt, &sub_ref.label);
            }
        }
    }

    // 2. Try sidecar subtitle file next to the video.
    if index < self.files.len() {
        if let Some(sidecar_path) = subtitles::auto_subtitle_sidecar(&self.files[index]) {
            let subs_dir = state_dir.join("subtitles");
            if let Some(stored) =
                subtitles::store_subtitle_for_fid(&fid, &sidecar_path, &subs_dir)
            {
                // Save reference so step 1 hits next time.
                if let Some(meta) = self.state.videos.get_mut(&fid) {
                    meta.subtitle = Some(SubtitleRef {
                        vtt: stored.vtt.clone(),
                        label: stored.label.clone(),
                    });
                }
                self.save_state();
                return Self::_sub_response(&stored.vtt, &stored.label);
            }
        }
    }

    // 3. Try embedded (first subtitle track); needs both ffmpeg (extract)
    //    and ffprobe (track discovery).
    if let Some(ref ffmpeg_path) = self.ffmpeg.clone() {
        if let Some(ref ffprobe_path) = self.ffprobe.clone() {
            if index < self.files.len() {
                if let Some(meta) =
                    ffmpeg::ffprobe_video_metadata(&self.files[index], ffprobe_path)
                {
                    if let Some(track) = meta.subtitle_tracks.first() {
                        let subs_dir = state_dir.join("subtitles");
                        if let Ok(stored) = subtitles::extract_embedded_subtitle(
                            &self.files[index],
                            track.index,
                            ffmpeg_path,
                            &subs_dir,
                            &fid,
                        ) {
                            // Save reference so step 1 hits next time.
                            if let Some(vmeta) = self.state.videos.get_mut(&fid) {
                                vmeta.subtitle = Some(SubtitleRef {
                                    vtt: stored.vtt.clone(),
                                    label: stored.label.clone(),
                                });
                            }
                            self.save_state();
                            return Self::_sub_response(&stored.vtt, &stored.label);
                        }
                    }
                }
            }
        }
    }

    // No subtitle available from any source.
    json!({"ok": true, "has": false})
}
|
|
|
|
/// Store a user-selected subtitle file for the current video.
|
|
pub fn set_subtitle_for_current(&mut self, file_path: &str, state_dir: &Path) -> Value {
|
|
let fid = match &self.state.current_fid {
|
|
Some(f) => f.clone(),
|
|
None => return json!({"ok": false, "error": "no current video"}),
|
|
};
|
|
|
|
let src = PathBuf::from(file_path);
|
|
if !src.is_file() {
|
|
return json!({"ok": false, "error": "subtitle file not found"});
|
|
}
|
|
|
|
let subs_dir = state_dir.join("subtitles");
|
|
match subtitles::store_subtitle_for_fid(&fid, &src, &subs_dir) {
|
|
Some(stored) => {
|
|
if let Some(meta) = self.state.videos.get_mut(&fid) {
|
|
meta.subtitle = Some(SubtitleRef {
|
|
vtt: stored.vtt.clone(),
|
|
label: stored.label.clone(),
|
|
});
|
|
}
|
|
self.save_state();
|
|
Self::_sub_response(&stored.vtt, &stored.label)
|
|
}
|
|
None => {
|
|
json!({"ok": false, "error": "unsupported subtitle format"})
|
|
}
|
|
}
|
|
}
|
|
|
|
/// Get list of embedded subtitle tracks using ffprobe.
|
|
pub fn get_embedded_subtitles(&self) -> Value {
|
|
let fid = match &self.state.current_fid {
|
|
Some(f) => f.clone(),
|
|
None => return json!({"ok": false, "error": "no current video"}),
|
|
};
|
|
|
|
let index = match self.fids.iter().position(|f| f == &fid) {
|
|
Some(i) => i,
|
|
None => return json!({"ok": false, "error": "current fid not in library"}),
|
|
};
|
|
|
|
if let Some(ref ffprobe_path) = self.ffprobe {
|
|
if index < self.files.len() {
|
|
if let Some(meta) =
|
|
ffmpeg::ffprobe_video_metadata(&self.files[index], ffprobe_path)
|
|
{
|
|
let tracks: Vec<Value> = meta
|
|
.subtitle_tracks
|
|
.iter()
|
|
.map(|t| {
|
|
json!({
|
|
"index": t.index,
|
|
"codec": t.codec,
|
|
"language": t.language,
|
|
"title": t.title,
|
|
})
|
|
})
|
|
.collect();
|
|
return json!({"ok": true, "tracks": tracks});
|
|
}
|
|
}
|
|
}
|
|
|
|
json!({"ok": true, "tracks": []})
|
|
}
|
|
|
|
/// Extract an embedded subtitle track by index.
|
|
pub fn extract_embedded_subtitle(
|
|
&mut self,
|
|
track_index: u32,
|
|
state_dir: &Path,
|
|
) -> Value {
|
|
let fid = match &self.state.current_fid {
|
|
Some(f) => f.clone(),
|
|
None => return json!({"ok": false, "error": "no current video"}),
|
|
};
|
|
|
|
let index = match self.fids.iter().position(|f| f == &fid) {
|
|
Some(i) => i,
|
|
None => return json!({"ok": false, "error": "current fid not in library"}),
|
|
};
|
|
|
|
let ffmpeg_path = match &self.ffmpeg {
|
|
Some(p) => p.clone(),
|
|
None => return json!({"ok": false, "error": "ffmpeg not available"}),
|
|
};
|
|
|
|
if index >= self.files.len() {
|
|
return json!({"ok": false, "error": "index out of range"});
|
|
}
|
|
|
|
let subs_dir = state_dir.join("subtitles");
|
|
match subtitles::extract_embedded_subtitle(
|
|
&self.files[index],
|
|
track_index,
|
|
&ffmpeg_path,
|
|
&subs_dir,
|
|
&fid,
|
|
) {
|
|
Ok(stored) => {
|
|
if let Some(meta) = self.state.videos.get_mut(&fid) {
|
|
meta.subtitle = Some(SubtitleRef {
|
|
vtt: stored.vtt.clone(),
|
|
label: stored.label.clone(),
|
|
});
|
|
}
|
|
self.save_state();
|
|
Self::_sub_response(&stored.vtt, &stored.label)
|
|
}
|
|
Err(e) => {
|
|
json!({"ok": false, "error": e})
|
|
}
|
|
}
|
|
}
|
|
|
|
/// Get all available sidecar and embedded subtitles for the current video.
///
/// Sidecar discovery uses normalized matching:
/// - Check all .srt/.vtt files in video's parent directory
/// - Match by: exact stem, normalized stem, stem with language suffix
/// - Extract language labels from filename suffixes
/// - Sort: English first, then alphabetical
/// - Deduplicate by label
///
/// Returns `{ok: true, sidecar: [...], embedded: [...]}`, or
/// `{ok: false, error}` when no video is selected / the fid is unknown.
pub fn get_available_subtitles(&self) -> Value {
    // Guard: need a selected video.
    let fid = match &self.state.current_fid {
        Some(f) => f.clone(),
        None => return json!({"ok": false, "error": "no current video"}),
    };

    // Guard: fid must map back to a library position.
    let index = match self.fids.iter().position(|f| f == &fid) {
        Some(i) => i,
        None => return json!({"ok": false, "error": "current fid not in library"}),
    };

    let mut sidecar_subs: Vec<Value> = Vec::new();

    if index < self.files.len() {
        let video_path = &self.files[index];
        if let Some(parent) = video_path.parent() {
            // Precompute the video stem in all three comparison forms.
            let video_stem = video_path
                .file_stem()
                .map(|s| s.to_string_lossy().to_string())
                .unwrap_or_default();
            let video_stem_lower = video_stem.to_lowercase();
            let video_stem_norm = _normalize_stem(&video_stem);

            // A read_dir failure (permissions, race) simply yields no sidecars.
            if let Ok(entries) = fs::read_dir(parent) {
                let mut found: Vec<(String, String, bool)> = Vec::new(); // (label, path, is_english)
                let mut seen_labels: std::collections::HashSet<String> =
                    std::collections::HashSet::new();

                for entry in entries.flatten() {
                    let path = entry.path();
                    if !path.is_file() {
                        continue;
                    }

                    let fname = match path.file_name() {
                        Some(n) => n.to_string_lossy().to_string(),
                        None => continue,
                    };
                    let fname_lower = fname.to_lowercase();

                    // Only consider files with a known subtitle extension.
                    let is_sub = subtitles::SUB_EXTS
                        .iter()
                        .any(|ext| fname_lower.ends_with(ext));
                    if !is_sub {
                        continue;
                    }

                    let sub_stem = match path.file_stem() {
                        Some(s) => s.to_string_lossy().to_string(),
                        None => continue,
                    };
                    let sub_stem_lower = sub_stem.to_lowercase();
                    let sub_stem_norm = _normalize_stem(&sub_stem);

                    // Check if this subtitle matches the video.
                    let mut matched = false;
                    let mut lang_label = String::new();
                    let mut is_english = false;

                    // Exact stem match.
                    if sub_stem_lower == video_stem_lower {
                        matched = true;
                        lang_label = "Default".to_string();
                    }

                    // Normalized match.
                    if !matched && sub_stem_norm == video_stem_norm {
                        matched = true;
                        lang_label = "Default".to_string();
                    }

                    // Language suffix match: "<video stem>.<lang>.srt" style.
                    if !matched {
                        if let Some(dot_pos) = sub_stem.rfind('.') {
                            let base = &sub_stem[..dot_pos];
                            let suffix = &sub_stem[dot_pos + 1..];
                            let base_lower = base.to_lowercase();
                            let base_norm = _normalize_stem(base);
                            let suffix_lower = suffix.to_lowercase();

                            if base_lower == video_stem_lower
                                || base_norm == video_stem_norm
                            {
                                matched = true;
                                lang_label = _language_label(&suffix_lower);
                                is_english = ["en", "eng", "english"]
                                    .contains(&suffix_lower.as_str());
                            }
                        }
                    }

                    // Deduplicate by label: first match with a given label wins.
                    if matched {
                        if seen_labels.insert(lang_label.clone()) {
                            found.push((
                                lang_label,
                                path.to_string_lossy().to_string(),
                                is_english,
                            ));
                        }
                    }
                }

                // Sort: English first, then alphabetical.
                found.sort_by(|a, b| {
                    if a.2 && !b.2 {
                        std::cmp::Ordering::Less
                    } else if !a.2 && b.2 {
                        std::cmp::Ordering::Greater
                    } else {
                        a.0.cmp(&b.0)
                    }
                });

                for (label, path, _) in &found {
                    // Report the format as the upper-cased file extension.
                    let ext = std::path::Path::new(path)
                        .extension()
                        .map(|e| e.to_string_lossy().to_uppercase())
                        .unwrap_or_default();
                    sidecar_subs.push(json!({
                        "type": "sidecar",
                        "label": label,
                        "path": path,
                        "format": ext,
                    }));
                }
            }
        }
    }

    // Embedded subtitles (requires ffprobe; failures yield an empty list).
    let mut embedded_subs: Vec<Value> = Vec::new();
    if let Some(ref ffprobe_path) = self.ffprobe {
        if index < self.files.len() {
            if let Some(meta) =
                ffmpeg::ffprobe_video_metadata(&self.files[index], ffprobe_path)
            {
                for track in &meta.subtitle_tracks {
                    // Label preference: track title > language name > index.
                    let label = if !track.title.is_empty() {
                        track.title.clone()
                    } else if !track.language.is_empty() {
                        _language_label(&track.language)
                    } else {
                        format!("Track {}", track.index)
                    };
                    embedded_subs.push(json!({
                        "type": "embedded",
                        "label": label,
                        "index": track.index,
                        "codec": track.codec,
                        "language": track.language,
                    }));
                }
            }
        }
    }

    json!({
        "ok": true,
        "sidecar": sidecar_subs,
        "embedded": embedded_subs,
    })
}
|
|
|
|
/// Load a specific sidecar subtitle file for the current video.
|
|
pub fn load_sidecar_subtitle(
|
|
&mut self,
|
|
file_path: &str,
|
|
state_dir: &Path,
|
|
) -> Value {
|
|
let fid = match &self.state.current_fid {
|
|
Some(f) => f.clone(),
|
|
None => return json!({"ok": false, "error": "no current video"}),
|
|
};
|
|
|
|
let src = PathBuf::from(file_path);
|
|
if !src.is_file() {
|
|
return json!({"ok": false, "error": "subtitle file not found"});
|
|
}
|
|
|
|
let subs_dir = state_dir.join("subtitles");
|
|
match subtitles::store_subtitle_for_fid(&fid, &src, &subs_dir) {
|
|
Some(stored) => {
|
|
if let Some(meta) = self.state.videos.get_mut(&fid) {
|
|
meta.subtitle = Some(SubtitleRef {
|
|
vtt: stored.vtt.clone(),
|
|
label: stored.label.clone(),
|
|
});
|
|
}
|
|
self.save_state();
|
|
Self::_sub_response(&stored.vtt, &stored.label)
|
|
}
|
|
None => {
|
|
json!({"ok": false, "error": "unsupported subtitle format or read error"})
|
|
}
|
|
}
|
|
}
|
|
|
|
// -----------------------------------------------------------------------
|
|
// reset_watch_progress
|
|
// -----------------------------------------------------------------------
|
|
|
|
/// Reset pos, watched, and finished for ALL videos.
|
|
///
|
|
/// Preserves notes, durations, and subtitle references.
|
|
pub fn reset_watch_progress(&mut self) -> Value {
|
|
for meta in self.state.videos.values_mut() {
|
|
meta.pos = 0.0;
|
|
meta.watched = 0.0;
|
|
meta.finished = false;
|
|
}
|
|
self.state.current_time = 0.0;
|
|
self.save_state();
|
|
json!({"ok": true})
|
|
}
|
|
|
|
// -----------------------------------------------------------------------
|
|
// Duration scanning
|
|
// -----------------------------------------------------------------------
|
|
|
|
/// Return a list of (fid, file_path) pairs where duration is unknown.
|
|
pub fn get_pending_scans(&self) -> Vec<(String, PathBuf)> {
|
|
let mut pending = Vec::new();
|
|
for (i, fid) in self.fids.iter().enumerate() {
|
|
let has_duration = self
|
|
.state
|
|
.videos
|
|
.get(fid)
|
|
.and_then(|m| m.duration)
|
|
.is_some();
|
|
if !has_duration && i < self.files.len() {
|
|
pending.push((fid.clone(), self.files[i].clone()));
|
|
}
|
|
}
|
|
pending
|
|
}
|
|
|
|
/// Apply a scanned duration to a video's metadata.
|
|
///
|
|
/// Also re-checks the finished status: if watched >= duration - 2,
|
|
/// mark finished.
|
|
pub fn apply_scanned_duration(&mut self, fid: &str, duration: f64) {
|
|
if duration <= 0.0 {
|
|
return;
|
|
}
|
|
let meta = self
|
|
.state
|
|
.videos
|
|
.entry(fid.to_string())
|
|
.or_insert_with(VideoMeta::default);
|
|
meta.duration = Some(duration);
|
|
|
|
// Re-check finished status with the new duration.
|
|
let threshold = (duration - 2.0).max(0.0);
|
|
if meta.watched >= threshold {
|
|
meta.finished = true;
|
|
}
|
|
|
|
self.save_state();
|
|
}
|
|
|
|
/// Start a background duration scan.
|
|
///
|
|
/// This method signals any previous scan to stop, then prepares the
|
|
/// pending list. The actual scanning is driven externally via
|
|
/// `get_pending_scans()` and `apply_scanned_duration()`.
|
|
pub fn start_duration_scan(&mut self) {
|
|
// Signal any running scan to stop.
|
|
self.scan_stop.store(true, Ordering::SeqCst);
|
|
// Create a fresh stop flag for any new scan cycle.
|
|
self.scan_stop = Arc::new(AtomicBool::new(false));
|
|
}
|
|
|
|
/// Check whether the background scan has been signalled to stop.
|
|
pub fn is_scan_stopped(&self) -> bool {
|
|
self.scan_stop.load(Ordering::SeqCst)
|
|
}
|
|
|
|
/// Get a reference to the scan stop flag for external use.
|
|
pub fn scan_stop_flag(&self) -> Arc<AtomicBool> {
|
|
Arc::clone(&self.scan_stop)
|
|
}
|
|
|
|
// -----------------------------------------------------------------------
|
|
// Private helpers
|
|
// -----------------------------------------------------------------------
|
|
|
|
/// Compute folder statistics: finished/remaining counts, progress, etc.
///
/// Returns a JSON object with:
/// - `finished_count` / `remaining_count`: videos marked finished vs. not
/// - `remaining_seconds_known`: unwatched seconds, summed only over videos
///   whose duration is known
/// - `overall_progress`: percentage by watched time when durations exist,
///   otherwise by finished count
/// - `durations_known`: true only if every video has a known duration
/// - `top_folders`: per top-level-folder totals (root-level files excluded)
fn _folder_stats(&self) -> Value {
    let total = self.fids.len();
    let mut finished_count: usize = 0;
    let mut total_duration: f64 = 0.0;
    let mut total_watched: f64 = 0.0;
    let mut all_durations_known = true;
    let mut remaining_seconds: f64 = 0.0;

    // Track top-level folder progress.
    let mut folder_totals: HashMap<String, (usize, usize)> = HashMap::new(); // (total, finished)

    for fid in &self.fids {
        // Videos without a metadata entry count as unwatched/unfinished.
        let meta = self.state.videos.get(fid);
        let finished = meta.map(|m| m.finished).unwrap_or(false);
        let duration = meta.and_then(|m| m.duration);
        let watched = meta.map(|m| m.watched).unwrap_or(0.0);

        if finished {
            finished_count += 1;
        }

        if let Some(d) = duration {
            total_duration += d;
            // Cap watched at the duration so over-reported progress
            // cannot push the percentage past 100%.
            total_watched += watched.min(d);
            if !finished {
                remaining_seconds += (d - watched).max(0.0);
            }
        } else {
            all_durations_known = false;
        }

        // Top-level folder grouping: everything before the first '/';
        // root-level files group under the empty string.
        let relpath = self.fid_to_rel.get(fid).cloned().unwrap_or_default();
        let top_folder = if let Some(pos) = relpath.find('/') {
            relpath[..pos].to_string()
        } else {
            String::new()
        };
        let entry = folder_totals.entry(top_folder).or_insert((0, 0));
        entry.0 += 1;
        if finished {
            entry.1 += 1;
        }
    }

    // Prefer time-based progress; fall back to count-based when no
    // durations are known at all.
    let overall_progress = if total > 0 {
        if total_duration > 0.0 {
            (total_watched / total_duration * 100.0).min(100.0)
        } else {
            (finished_count as f64 / total as f64) * 100.0
        }
    } else {
        0.0
    };

    let remaining_count = total - finished_count;

    // Build top_folders list (only if there are subdirs).
    let mut top_folders: Vec<Value> = Vec::new();
    let mut folder_names: Vec<String> = folder_totals.keys().cloned().collect();
    // Natural sort so "2" orders before "10".
    folder_names.sort_by(|a, b| natural_key(a).cmp(&natural_key(b)));
    for name in &folder_names {
        if name.is_empty() {
            continue; // Skip root-level files.
        }
        let (ft, ff) = folder_totals[name];
        top_folders.push(json!({
            "name": name,
            "total": ft,
            "finished": ff,
        }));
    }

    json!({
        "finished_count": finished_count,
        "remaining_count": remaining_count,
        "remaining_seconds_known": remaining_seconds,
        "overall_progress": overall_progress,
        "durations_known": all_durations_known,
        "top_folders": top_folders,
    })
}
|
|
|
|
/// Get basic file metadata (extension, size, mtime, folder).
|
|
fn _basic_file_meta(&self, fid: &str) -> Value {
|
|
let relpath = self.fid_to_rel.get(fid).cloned().unwrap_or_default();
|
|
let index = self.fids.iter().position(|f| f == fid);
|
|
|
|
if let Some(i) = index {
|
|
if i < self.files.len() {
|
|
let path = &self.files[i];
|
|
let ext = path
|
|
.extension()
|
|
.map(|e| e.to_string_lossy().to_string())
|
|
.unwrap_or_default();
|
|
|
|
let (size, mtime) = match fs::metadata(path) {
|
|
Ok(m) => {
|
|
let size = m.len();
|
|
let mtime = m
|
|
.modified()
|
|
.ok()
|
|
.and_then(|t| t.duration_since(SystemTime::UNIX_EPOCH).ok())
|
|
.map(|d| d.as_secs())
|
|
.unwrap_or(0);
|
|
(size, mtime)
|
|
}
|
|
Err(_) => (0, 0),
|
|
};
|
|
|
|
let folder = path
|
|
.parent()
|
|
.map(|p| {
|
|
let s = p.to_string_lossy().to_string();
|
|
s.strip_prefix("\\\\?\\").unwrap_or(&s).to_string()
|
|
})
|
|
.unwrap_or_default();
|
|
|
|
return json!({
|
|
"fid": fid,
|
|
"relpath": relpath,
|
|
"ext": ext,
|
|
"size": size,
|
|
"mtime": mtime,
|
|
"folder": folder,
|
|
});
|
|
}
|
|
}
|
|
|
|
json!({
|
|
"fid": fid,
|
|
"relpath": relpath,
|
|
"ext": "",
|
|
"size": 0,
|
|
"mtime": 0,
|
|
"folder": "",
|
|
})
|
|
}
|
|
}
|
|
|
|
// ---------------------------------------------------------------------------
|
|
// Module-level helpers
|
|
// ---------------------------------------------------------------------------
|
|
|
|
/// Normalize a stem for fuzzy matching (lowercase, collapse separators).
///
/// Runs of `-`, `_`, and whitespace are collapsed to a single space and
/// leading/trailing separators are dropped, e.g. `"My_Video--01"` becomes
/// `"my video 01"`.
fn _normalize_stem(s: &str) -> String {
    // Split on every separator character and rejoin with single spaces.
    // This replaces the previous per-call `regex::Regex::new(r"[-_\s]+")`
    // compile, which was pure overhead in the hot sidecar-matching loop.
    s.to_lowercase()
        .split(|c: char| c == '-' || c == '_' || c.is_whitespace())
        .filter(|token| !token.is_empty())
        .collect::<Vec<_>>()
        .join(" ")
}
|
|
|
|
/// Convert a language suffix to a human-readable label.
///
/// Known ISO 639-1/639-2 codes and English language names map to their
/// display name; anything else is returned lower-cased with the first
/// letter capitalized. An empty input yields an empty string.
fn _language_label(lang: &str) -> String {
    let key = lang.to_lowercase();

    // Known codes resolve through a static lookup.
    let known: Option<&'static str> = match key.as_str() {
        "en" | "eng" | "english" => Some("English"),
        "fr" | "fre" | "fra" | "french" => Some("French"),
        "de" | "deu" | "ger" | "german" => Some("German"),
        "es" | "spa" | "spanish" => Some("Spanish"),
        "it" | "ita" | "italian" => Some("Italian"),
        "pt" | "por" | "portuguese" => Some("Portuguese"),
        "ru" | "rus" | "russian" => Some("Russian"),
        "ja" | "jpn" | "japanese" => Some("Japanese"),
        "ko" | "kor" | "korean" => Some("Korean"),
        "zh" | "zho" | "chi" | "chinese" => Some("Chinese"),
        "ar" | "ara" | "arabic" => Some("Arabic"),
        "hi" | "hin" | "hindi" => Some("Hindi"),
        "nl" | "dut" | "nld" | "dutch" => Some("Dutch"),
        "sv" | "swe" | "swedish" => Some("Swedish"),
        "pl" | "pol" | "polish" => Some("Polish"),
        _ => None,
    };

    if let Some(name) = known {
        return name.to_string();
    }

    // Fallback: capitalize the first character of the lower-cased code.
    let mut chars = key.chars();
    match chars.next() {
        Some(first) => first.to_uppercase().chain(chars).collect(),
        None => String::new(),
    }
}
|
|
|
|
// ===========================================================================
|
|
// Tests
|
|
// ===========================================================================
|
|
|
|
// Unit tests for the library module. Filesystem-backed tests use tempfile
// temp dirs for both the media folder and the state directory, so nothing
// leaks between runs.
#[cfg(test)]
mod tests {
    use super::*;
    use std::fs;
    use tempfile::TempDir;

    // -----------------------------------------------------------------------
    // 1. test_new_library_is_empty
    // -----------------------------------------------------------------------

    // A freshly constructed Library has no root, no files, and default
    // playback settings (volume 1.0, rate 1.0, autoplay off).
    #[test]
    fn test_new_library_is_empty() {
        let lib = Library::new();
        assert!(lib.root.is_none());
        assert!(lib.files.is_empty());
        assert!(lib.fids.is_empty());
        assert!(lib.relpaths.is_empty());
        assert!(lib.rel_to_fid.is_empty());
        assert!(lib.fid_to_rel.is_empty());
        assert!(lib.state_path.is_none());
        assert!(lib.state.current_fid.is_none());
        assert_eq!(lib.state.library_id, "");
        assert_eq!(lib.state.volume, 1.0);
        assert_eq!(lib.state.playback_rate, 1.0);
        assert!(!lib.state.autoplay);
    }

    // -----------------------------------------------------------------------
    // 2. test_sorted_default
    // -----------------------------------------------------------------------

    // Default ordering is a natural sort: "video2" sorts before "video10".
    #[test]
    fn test_sorted_default() {
        let relpaths = vec![
            "video10.mp4".to_string(),
            "video2.mp4".to_string(),
            "video1.mp4".to_string(),
            "video20.mp4".to_string(),
        ];
        let mut rel_to_fid = HashMap::new();
        for (i, rel) in relpaths.iter().enumerate() {
            rel_to_fid.insert(rel.clone(), format!("fid{}", i));
        }

        let sorted = _sorted_default(&relpaths, &rel_to_fid);
        // Map the sorted fids back to relpaths for a readable assertion.
        let sorted_rels: Vec<String> = sorted
            .iter()
            .map(|fid| {
                rel_to_fid
                    .iter()
                    .find(|(_, v)| *v == fid)
                    .map(|(k, _)| k.clone())
                    .unwrap()
            })
            .collect();

        assert_eq!(
            sorted_rels,
            vec![
                "video1.mp4".to_string(),
                "video2.mp4".to_string(),
                "video10.mp4".to_string(),
                "video20.mp4".to_string(),
            ]
        );
    }

    // -----------------------------------------------------------------------
    // 3. test_apply_saved_order_with_valid_order
    // -----------------------------------------------------------------------

    // A saved order that covers every fid is reproduced verbatim.
    #[test]
    fn test_apply_saved_order_with_valid_order() {
        let all_fids = vec!["a".to_string(), "b".to_string(), "c".to_string()];
        let mut fid_to_rel = HashMap::new();
        fid_to_rel.insert("a".to_string(), "01.mp4".to_string());
        fid_to_rel.insert("b".to_string(), "02.mp4".to_string());
        fid_to_rel.insert("c".to_string(), "03.mp4".to_string());

        let order = vec!["c".to_string(), "a".to_string(), "b".to_string()];
        let result = _apply_saved_order(&all_fids, &fid_to_rel, &order);
        assert_eq!(result, vec!["c", "a", "b"]);
    }

    // -----------------------------------------------------------------------
    // 4. test_apply_saved_order_with_new_files
    // -----------------------------------------------------------------------

    // Fids missing from the saved order are appended after it, naturally
    // sorted by relpath.
    #[test]
    fn test_apply_saved_order_with_new_files() {
        let all_fids = vec![
            "a".to_string(),
            "b".to_string(),
            "c".to_string(),
            "d".to_string(),
        ];
        let mut fid_to_rel = HashMap::new();
        fid_to_rel.insert("a".to_string(), "01.mp4".to_string());
        fid_to_rel.insert("b".to_string(), "02.mp4".to_string());
        fid_to_rel.insert("c".to_string(), "03.mp4".to_string());
        fid_to_rel.insert("d".to_string(), "04.mp4".to_string());

        // Saved order only has a and c; b and d are "new".
        let order = vec!["c".to_string(), "a".to_string()];
        let result = _apply_saved_order(&all_fids, &fid_to_rel, &order);

        // c, a come first (saved order), then b, d (naturally sorted).
        assert_eq!(result, vec!["c", "a", "b", "d"]);
    }

    // -----------------------------------------------------------------------
    // 5. test_apply_saved_order_empty
    // -----------------------------------------------------------------------

    // An empty saved order falls back to the natural relpath sort.
    #[test]
    fn test_apply_saved_order_empty() {
        let all_fids = vec!["b".to_string(), "a".to_string(), "c".to_string()];
        let mut fid_to_rel = HashMap::new();
        fid_to_rel.insert("a".to_string(), "01_alpha.mp4".to_string());
        fid_to_rel.insert("b".to_string(), "02_beta.mp4".to_string());
        fid_to_rel.insert("c".to_string(), "03_gamma.mp4".to_string());

        let result = _apply_saved_order(&all_fids, &fid_to_rel, &[]);

        // Falls back to natural sort by relpath.
        assert_eq!(result, vec!["a", "b", "c"]);
    }

    // -----------------------------------------------------------------------
    // 6. test_tree_flags_flat
    // -----------------------------------------------------------------------

    // A flat directory yields depth 0, no pipe prefixes, and correct
    // first/last sibling markers.
    #[test]
    fn test_tree_flags_flat() {
        let rels = vec![
            "video1.mp4".to_string(),
            "video2.mp4".to_string(),
            "video3.mp4".to_string(),
        ];
        let flags = _tree_flags(&rels);

        for rel in &rels {
            let f = flags.get(rel).unwrap();
            assert_eq!(f["depth"], 0);
            assert_eq!(f["pipes"], "");
        }

        // Last file should have is_last = true.
        assert_eq!(flags["video3.mp4"]["is_last"], true);
        // First file should not have has_prev_in_parent.
        assert_eq!(flags["video1.mp4"]["has_prev_in_parent"], false);
        // Second file should have has_prev_in_parent.
        assert_eq!(flags["video2.mp4"]["has_prev_in_parent"], true);
    }

    // -----------------------------------------------------------------------
    // 7. test_tree_flags_nested
    // -----------------------------------------------------------------------

    // Nested paths get per-directory depth and sibling flags scoped to
    // their own parent directory.
    #[test]
    fn test_tree_flags_nested() {
        let rels = vec![
            "intro.mp4".to_string(),
            "chapter1/lesson1.mp4".to_string(),
            "chapter1/lesson2.mp4".to_string(),
            "chapter2/lesson1.mp4".to_string(),
        ];
        let flags = _tree_flags(&rels);

        // Root-level file.
        assert_eq!(flags["intro.mp4"]["depth"], 0);

        // Nested files.
        assert_eq!(flags["chapter1/lesson1.mp4"]["depth"], 1);
        assert_eq!(flags["chapter1/lesson2.mp4"]["depth"], 1);
        assert_eq!(flags["chapter2/lesson1.mp4"]["depth"], 1);

        // chapter1/lesson1 should not have prev in parent.
        assert_eq!(flags["chapter1/lesson1.mp4"]["has_prev_in_parent"], false);
        // chapter1/lesson2 should have prev in parent.
        assert_eq!(flags["chapter1/lesson2.mp4"]["has_prev_in_parent"], true);
        // chapter1/lesson2 is the last in chapter1.
        assert_eq!(flags["chapter1/lesson2.mp4"]["is_last"], true);
        // chapter2/lesson1 is the only (and last) in chapter2.
        assert_eq!(flags["chapter2/lesson1.mp4"]["is_last"], true);
    }

    // -----------------------------------------------------------------------
    // 8. test_set_root_with_video_files
    // -----------------------------------------------------------------------

    // set_root scans only video extensions, writes a state file, and
    // assigns a non-empty library id.
    #[test]
    fn test_set_root_with_video_files() {
        let dir = TempDir::new().unwrap();
        let state_dir = TempDir::new().unwrap();

        // Create dummy video files.
        fs::write(dir.path().join("video_a.mp4"), b"fake video a content").unwrap();
        fs::write(dir.path().join("video_b.mp4"), b"fake video b content").unwrap();
        fs::write(dir.path().join("video_c.mp4"), b"fake video c content").unwrap();

        // Also create a non-video file (should be ignored).
        fs::write(dir.path().join("readme.txt"), b"not a video").unwrap();

        let mut lib = Library::new();
        let folder_str = dir.path().to_string_lossy().to_string();
        let result = lib.set_root(&folder_str, state_dir.path());

        assert!(result.is_ok(), "set_root failed: {:?}", result.err());

        let info = result.unwrap();
        assert_eq!(info["ok"], true);
        assert_eq!(info["count"], 3);
        assert_eq!(lib.fids.len(), 3);
        assert_eq!(lib.relpaths.len(), 3);
        assert_eq!(lib.files.len(), 3);

        // All relpaths should be video files.
        for rel in &lib.relpaths {
            assert!(rel.ends_with(".mp4"), "unexpected relpath: {}", rel);
        }

        // State file should exist.
        assert!(lib.state_path.is_some());
        assert!(lib.state_path.as_ref().unwrap().exists());

        // Library ID should be non-empty.
        assert!(!lib.state.library_id.is_empty());
    }

    // -----------------------------------------------------------------------
    // 9. test_update_progress_finished_sticky
    // -----------------------------------------------------------------------

    // Once a video is marked finished, seeking backwards must not clear the
    // flag, and `watched` stays at its high-water mark.
    #[test]
    fn test_update_progress_finished_sticky() {
        let dir = TempDir::new().unwrap();
        let state_dir = TempDir::new().unwrap();

        fs::write(dir.path().join("video.mp4"), b"content").unwrap();

        let mut lib = Library::new();
        let folder_str = dir.path().to_string_lossy().to_string();
        lib.set_root(&folder_str, state_dir.path()).unwrap();

        // Set duration and advance to near the end to trigger finished.
        let result = lib.update_progress(0, 98.5, Some(100.0), true);
        assert_eq!(result["finished"], true);

        // Now go back to an earlier time -- finished should remain true.
        let result = lib.update_progress(0, 10.0, Some(100.0), true);
        assert_eq!(result["finished"], true);

        // Verify pos updated but finished stayed sticky.
        assert_eq!(result["pos"], 10.0);

        // Watched should still be the high-water mark.
        assert_eq!(result["watched"], 98.5);
    }

    // -----------------------------------------------------------------------
    // 10. test_reset_watch_progress
    // -----------------------------------------------------------------------

    // reset_watch_progress clears pos/watched/finished for every video but
    // must preserve notes and scanned durations.
    #[test]
    fn test_reset_watch_progress() {
        let dir = TempDir::new().unwrap();
        let state_dir = TempDir::new().unwrap();

        fs::write(dir.path().join("vid1.mp4"), b"video 1").unwrap();
        fs::write(dir.path().join("vid2.mp4"), b"video 2").unwrap();

        let mut lib = Library::new();
        let folder_str = dir.path().to_string_lossy().to_string();
        lib.set_root(&folder_str, state_dir.path()).unwrap();

        // Add some progress and notes.
        lib.update_progress(0, 50.0, Some(100.0), true);
        lib.update_progress(1, 99.0, Some(100.0), true);
        lib.set_note(&lib.fids[0].clone(), "Important lecture");

        // Verify pre-conditions.
        let fid0 = lib.fids[0].clone();
        let fid1 = lib.fids[1].clone();
        assert!(lib.state.videos[&fid0].watched > 0.0);
        assert!(lib.state.videos[&fid1].finished);
        assert!(!lib.state.videos[&fid0].note.is_empty());
        assert!(lib.state.videos[&fid0].duration.is_some());

        // Reset.
        let result = lib.reset_watch_progress();
        assert_eq!(result["ok"], true);

        // Progress should be reset.
        assert_eq!(lib.state.videos[&fid0].pos, 0.0);
        assert_eq!(lib.state.videos[&fid0].watched, 0.0);
        assert!(!lib.state.videos[&fid0].finished);
        assert_eq!(lib.state.videos[&fid1].pos, 0.0);
        assert_eq!(lib.state.videos[&fid1].watched, 0.0);
        assert!(!lib.state.videos[&fid1].finished);

        // Notes and durations should be preserved.
        assert_eq!(lib.state.videos[&fid0].note, "Important lecture");
        assert!(lib.state.videos[&fid0].duration.is_some());
        assert!(lib.state.videos[&fid1].duration.is_some());
    }
}
|