initial commit with full project

This commit is contained in:
2026-04-26 17:50:04 +03:00
commit 53044e7d40
68 changed files with 34115 additions and 0 deletions

5660
src-tauri/Cargo.lock generated Normal file

File diff suppressed because it is too large Load Diff

26
src-tauri/Cargo.toml Normal file
View File

@@ -0,0 +1,26 @@
[package]
name = "cinch"
version = "1.0.0"
edition = "2021"
[build-dependencies]
tauri-build = { version = "2", features = [] }
[features]
custom-protocol = ["tauri/custom-protocol"]
[dependencies]
tauri = { version = "2", features = ["devtools"] }
tauri-plugin-shell = "2"
tauri-plugin-dialog = "2"
tauri-plugin-process = "2"
tauri-plugin-fs = "2"
tauri-plugin-opener = "2"
serde = { version = "1", features = ["derive"] }
serde_json = "1"
tokio = { version = "1", features = ["full"] }
uuid = { version = "1", features = ["v4"] }
http = "1"
http-range = "0.1"
percent-encoding = "2"
tauri-plugin-window-state = "2.4.1"

3
src-tauri/build.rs Normal file
View File

@@ -0,0 +1,3 @@
fn main() {
    // Build-script entry point: generates the Tauri build artifacts
    // (resource embedding, manifest wiring) before the main crate compiles.
    tauri_build::build();
}

View File

@@ -0,0 +1,41 @@
{
"$schema": "https://raw.githubusercontent.com/tauri-apps/tauri/dev/crates/tauri-utils/schema.json",
"identifier": "default",
"description": "Default capabilities for Cinch",
"windows": ["main"],
"permissions": [
"core:default",
"shell:allow-open",
"shell:allow-execute",
"shell:allow-spawn",
"shell:allow-stdin-write",
"shell:allow-kill",
"dialog:allow-open",
"dialog:allow-save",
"fs:default",
"fs:allow-read",
"fs:allow-read-file",
"fs:allow-write",
"fs:allow-write-file",
"fs:allow-exists",
"fs:allow-mkdir",
"fs:allow-remove",
"fs:allow-rename",
{
"identifier": "fs:scope",
"allow": [
{ "path": "**" }
]
},
"process:allow-exit",
"process:allow-restart",
"opener:default",
{
"identifier": "opener:allow-open-path",
"allow": [
{ "path": "**" }
]
},
"opener:allow-reveal-item-in-dir"
]
}

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1 @@
{"default":{"identifier":"default","description":"Default capabilities for Cinch","local":true,"windows":["main"],"permissions":["core:default","shell:allow-open","shell:allow-execute","shell:allow-spawn","shell:allow-stdin-write","shell:allow-kill","dialog:allow-open","dialog:allow-save","fs:default","fs:allow-read","fs:allow-read-file","fs:allow-write","fs:allow-write-file","fs:allow-exists","fs:allow-mkdir","fs:allow-remove","fs:allow-rename",{"identifier":"fs:scope","allow":[{"path":"**"}]},"process:allow-exit","process:allow-restart","opener:default",{"identifier":"opener:allow-open-path","allow":[{"path":"**"}]},"opener:allow-reveal-item-in-dir"]}}

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

BIN
src-tauri/icons/icon.ico Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 279 KiB

BIN
src-tauri/icons/icon.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 116 KiB

View File

@@ -0,0 +1,165 @@
use tauri::State;
use crate::config;
use crate::ffmpeg::{discovery, probe};
use crate::types::*;
use crate::AppState;
// Step 1: Fast metadata only (instant)
/// Probe fast container metadata for `path` on a blocking worker thread,
/// logging the outcome to stderr before returning it to the frontend.
#[tauri::command]
pub async fn analyze_video(path: String, state: State<'_, AppState>) -> Result<VideoInfo, String> {
    // Resolve the configured ffmpeg binary, falling back to one on PATH.
    let binary = {
        let cfg = state.config.lock().map_err(|e| e.to_string())?;
        cfg.ffmpeg_path.clone()
    }
    .unwrap_or_else(|| "ffmpeg".into());
    let outcome = tokio::task::spawn_blocking(move || probe::probe_metadata(&path, &binary))
        .await
        .map_err(|e| e.to_string())?;
    match &outcome {
        Err(e) => eprintln!("[cinch-rs] analyze_video ERROR: {}", e),
        Ok(info) => eprintln!(
            "[cinch-rs] analyze_video OK: {}x{} {} {:.1}s",
            info.width, info.height, info.video_codec, info.duration
        ),
    }
    outcome
}
// Step 2: Keyframe extraction (can be slow on large files)
/// Extract keyframe timestamps for `path`; can be slow on large files,
/// hence the dedicated blocking task.
#[tauri::command]
pub async fn extract_keyframes(path: String, state: State<'_, AppState>) -> Result<Vec<f64>, String> {
    let binary = {
        let cfg = state.config.lock().map_err(|e| e.to_string())?;
        cfg.ffmpeg_path.clone()
    }
    .unwrap_or_else(|| "ffmpeg".into());
    let timestamps = tokio::task::spawn_blocking(move || probe::extract_keyframes_fast(&path, &binary))
        .await
        .map_err(|e| e.to_string())?;
    Ok(timestamps)
}
/// Generate up to `count` evenly spaced JPEG thumbnails for `path` and
/// return the filesystem paths of the frames ffmpeg actually produced
/// (may be fewer than `count` for very short clips).
#[tauri::command]
pub async fn generate_thumbnails(
    path: String,
    count: u32,
    duration: Option<f64>,
    state: State<'_, AppState>,
) -> Result<Vec<String>, String> {
    // Resolve the configured ffmpeg binary, falling back to one on PATH.
    let ffmpeg_path = {
        let cfg = state.config.lock().map_err(|e| e.to_string())?;
        cfg.ffmpeg_path.clone()
    };
    let ffmpeg = ffmpeg_path.clone().unwrap_or_else(|| "ffmpeg".into());
    // Duration is needed to space the thumbnails; probe it only when the
    // caller did not already supply it.
    let dur = match duration {
        Some(d) => d,
        None => {
            let info = tokio::task::spawn_blocking({
                let path = path.clone();
                let ffmpeg = ffmpeg.clone();
                move || probe::probe_metadata(&path, &ffmpeg)
            })
            .await
            .map_err(|e| e.to_string())??;
            info.duration
        }
    };
    // Cache directory keyed by a hash of the input path (FNV-1a — see
    // md5_simple), so re-opening the same file reuses the same directory.
    let hash = format!("{:x}", md5_simple(&path));
    let thumb_dir = config::ensure_temp_subdir(&format!("thumbs/{}", hash));
    let pattern = thumb_dir.join("thumb_%04d.jpg").to_string_lossy().to_string();
    let cmd = crate::ffmpeg::commands::build_thumbnail(&path, &pattern, count, dur);
    tokio::task::spawn_blocking(move || {
        crate::ffmpeg::runner::run_ffmpeg_silent(&ffmpeg, &cmd)
    })
    .await
    .map_err(|e| e.to_string())??;
    // Collect only frames that exist on disk; ffmpeg may emit fewer than
    // requested near the end of the clip.
    let mut paths = Vec::new();
    for i in 1..=count {
        let p = thumb_dir.join(format!("thumb_{:04}.jpg", i));
        if p.exists() {
            paths.push(p.to_string_lossy().to_string());
        }
    }
    Ok(paths)
}
/// Produce (or reuse) a cached MP4 preview for `path`, returning the
/// preview file's path. The cache key is a hash of the input path.
#[tauri::command]
pub async fn generate_preview(
    path: String,
    codec: Option<String>,
    state: State<'_, AppState>,
) -> Result<String, String> {
    let binary = {
        let cfg = state.config.lock().map_err(|e| e.to_string())?;
        cfg.ffmpeg_path.clone()
    }
    .unwrap_or_else(|| "ffmpeg".into());
    let hash = format!("{:x}", md5_simple(&path));
    let preview_file = config::ensure_temp_subdir(&format!("preview/{}", hash)).join("preview.mp4");
    let output = preview_file.to_string_lossy().to_string();
    // A preview from an earlier run can be handed back without re-encoding.
    if preview_file.exists() {
        return Ok(output);
    }
    let codec_hint = codec.unwrap_or_default();
    let cmd = crate::ffmpeg::commands::build_preview(&path, &output, &codec_hint);
    tokio::task::spawn_blocking(move || crate::ffmpeg::runner::run_ffmpeg_silent(&binary, &cmd))
        .await
        .map_err(|e| e.to_string())??;
    Ok(output)
}
/// Probe ffmpeg for hardware-encoder support and cache the result in app
/// state for later compress calls.
#[tauri::command]
pub async fn detect_hardware(state: State<'_, AppState>) -> Result<HardwareInfo, String> {
    let binary = {
        let cfg = state.config.lock().map_err(|e| e.to_string())?;
        cfg.ffmpeg_path.clone()
    }
    .unwrap_or_else(|| "ffmpeg".into());
    let detected = tokio::task::spawn_blocking(move || discovery::detect_hardware_encoders(&binary))
        .await
        .map_err(|e| e.to_string())?;
    *state.hw_info.lock().map_err(|e| e.to_string())? = Some(detected.clone());
    Ok(detected)
}
/// NOTE(review): despite the name, this is a 64-bit FNV-1a hash, not MD5.
/// It is only used to derive stable cache-directory names, so collision
/// resistance is not required.
fn md5_simple(input: &str) -> u64 {
    input.bytes().fold(0xcbf29ce484222325u64, |acc, byte| {
        (acc ^ u64::from(byte)).wrapping_mul(0x100000001b3)
    })
}

View File

@@ -0,0 +1,3 @@
pub mod analyze;
pub mod process;
pub mod utility;

View File

@@ -0,0 +1,312 @@
use std::fs;
use tauri::{AppHandle, State};
use crate::config;
use crate::ffmpeg::{commands, probe, runner};
use crate::recovery;
use crate::types::*;
use crate::AppState;
/// Compress `input` into `output` with the supplied settings (optionally
/// trimming first), driving progress events through `app`, and probe the
/// finished file so the frontend gets accurate result metadata.
#[tauri::command]
pub async fn compress(
    input: String,
    output: String,
    settings: CompressSettings,
    trim: Option<TrimRange>,
    app: AppHandle,
    state: State<'_, AppState>,
) -> Result<OutputInfo, String> {
    // Take ONE consistent snapshot of the config. The original code locked
    // the mutex twice back-to-back (once for ffmpeg_path, once for the full
    // clone), which could observe two different configs if save_config_cmd
    // ran in between.
    let app_config = {
        let cfg = state.config.lock().map_err(|e| e.to_string())?;
        cfg.clone()
    };
    let ffmpeg = app_config
        .ffmpeg_path
        .clone()
        .unwrap_or_else(|| "ffmpeg".into());
    // Hardware capabilities default to "none detected" when detection has
    // not run yet.
    let hw_info = {
        let hw = state.hw_info.lock().map_err(|e| e.to_string())?;
        hw.clone().unwrap_or(HardwareInfo {
            nvenc: false,
            qsv: false,
            amf: false,
            nvenc_codecs: Vec::new(),
            qsv_codecs: Vec::new(),
            amf_codecs: Vec::new(),
        })
    };
    let job_id = uuid::Uuid::new_v4().to_string();
    let jobs = state.jobs.clone();
    // Probe the source so we know the encode duration (for progress math)
    // and whether an audio stream exists at all.
    let info = {
        let input = input.clone();
        let ffmpeg = ffmpeg.clone();
        tokio::task::spawn_blocking(move || probe::probe_video(&input, &ffmpeg))
            .await
            .map_err(|e| e.to_string())??
    };
    let duration = match &trim {
        Some(t) => t.end - t.start,
        None => info.duration,
    };
    let has_audio = info.audio_codec.is_some();
    // Persist a recovery marker so an interrupted run can be reported on
    // the next launch; removed again after a successful encode.
    let settings_json = serde_json::to_string(&settings).unwrap_or_default();
    recovery::write_job_info(&config::temp_dir(), &input, &output, "compress", &settings_json);
    let (out_path, attempts) = {
        let input = input.clone();
        let output = output.clone();
        let settings = settings.clone();
        let trim = trim.clone();
        let ffmpeg = ffmpeg.clone();
        let hw = hw_info.clone();
        let cfg = app_config.clone();
        let jid = job_id.clone();
        let app = app.clone();
        let jobs = jobs.clone();
        tokio::task::spawn_blocking(move || {
            runner::run_compress_with_retry(
                &ffmpeg,
                &input,
                &output,
                &settings,
                trim.as_ref(),
                &hw,
                &cfg,
                &jid,
                duration,
                has_audio,
                &app,
                &jobs,
            )
        })
        .await
        .map_err(|e| e.to_string())??
    };
    recovery::delete_job_info(&config::temp_dir());
    // Probe the finished file so the UI can display the real output stats.
    let out_info = {
        let path = out_path.clone();
        let ffmpeg = ffmpeg.clone();
        tokio::task::spawn_blocking(move || probe::probe_video(&path, &ffmpeg))
            .await
            .map_err(|e| e.to_string())??
    };
    let file_size = fs::metadata(&out_path).map(|m| m.len()).unwrap_or(0);
    Ok(OutputInfo {
        path: out_path,
        file_size,
        duration: out_info.duration,
        width: out_info.width,
        height: out_info.height,
        video_codec: out_info.video_codec,
        video_bitrate: out_info.video_bitrate,
        audio_codec: out_info.audio_codec,
        audio_bitrate: out_info.audio_bitrate,
        attempts,
    })
}
/// Trim `input` to `range` and write the result to `output`.
/// With `smart_cut`, only the fragments before/after the nearest keyframes
/// are re-encoded (frame-accurate); otherwise the cut snaps to keyframes
/// via a plain stream copy.
#[tauri::command]
pub async fn trim(
    input: String,
    output: String,
    range: TrimRange,
    smart_cut: bool,
    strip_audio: Option<bool>,
    app: AppHandle,
    state: State<'_, AppState>,
) -> Result<OutputInfo, String> {
    let ffmpeg_path = {
        let cfg = state.config.lock().map_err(|e| e.to_string())?;
        cfg.ffmpeg_path.clone()
    };
    let ffmpeg = ffmpeg_path.unwrap_or_else(|| "ffmpeg".into());
    let job_id = uuid::Uuid::new_v4().to_string();
    let jobs = state.jobs.clone();
    // Recovery marker: lets the app report an interrupted trim on restart.
    let range_json = serde_json::to_string(&range).unwrap_or_default();
    recovery::write_job_info(&config::temp_dir(), &input, &output, "trim", &range_json);
    let do_strip = strip_audio.unwrap_or(false);
    if smart_cut {
        do_smart_cut(&input, &output, &range, &ffmpeg, &job_id, &app, &jobs, do_strip).await?;
    } else {
        // Keyframe-aligned stream copy: fast, no generational quality loss.
        let cmd = commands::build_trim_keyframe(&input, &output, &range, do_strip);
        let ffmpeg_c = ffmpeg.clone();
        let jid = job_id.clone();
        let duration = range.end - range.start;
        let app_c = app.clone();
        let jobs_c = jobs.clone();
        tokio::task::spawn_blocking(move || {
            runner::run_ffmpeg(&ffmpeg_c, &cmd, &jid, duration, "encoding", &app_c, &jobs_c)
        })
        .await
        .map_err(|e| e.to_string())??;
    }
    recovery::delete_job_info(&config::temp_dir());
    // Probe the result so the caller gets accurate post-trim metadata.
    let out_info = {
        let path = output.clone();
        let ffmpeg = ffmpeg.clone();
        tokio::task::spawn_blocking(move || probe::probe_video(&path, &ffmpeg))
            .await
            .map_err(|e| e.to_string())??
    };
    let file_size = fs::metadata(&output).map(|m| m.len()).unwrap_or(0);
    Ok(OutputInfo {
        path: output,
        file_size,
        duration: out_info.duration,
        width: out_info.width,
        height: out_info.height,
        video_codec: out_info.video_codec,
        video_bitrate: out_info.video_bitrate,
        audio_codec: out_info.audio_codec,
        audio_bitrate: out_info.audio_bitrate,
        attempts: 1,
    })
}
/// Frame-accurate "smart cut": re-encode only the short head (range.start
/// to the first keyframe at/after it) and tail (last keyframe at/before
/// range.end to range.end), stream-copy the large middle, then concat the
/// three parts. Falls back to a plain keyframe cut when the range is tiny
/// or no usable keyframe pair exists.
async fn do_smart_cut(
    input: &str,
    output: &str,
    range: &TrimRange,
    ffmpeg: &str,
    job_id: &str,
    app: &AppHandle,
    jobs: &runner::JobMap,
    strip_audio: bool,
) -> Result<(), String> {
    // Full probe: needed for the keyframe list and audio presence.
    let info = {
        let input = input.to_string();
        let ffmpeg = ffmpeg.to_string();
        tokio::task::spawn_blocking(move || probe::probe_video(&input, &ffmpeg))
            .await
            .map_err(|e| e.to_string())??
    };
    let keyframes = &info.keyframe_times;
    // First keyframe at or after the requested start...
    let first_kf = keyframes
        .iter()
        .find(|&&t| t >= range.start)
        .copied()
        .unwrap_or(range.start);
    // ...and last keyframe at or before the requested end.
    let last_kf = keyframes
        .iter()
        .rev()
        .find(|&&t| t <= range.end)
        .copied()
        .unwrap_or(range.end);
    let has_audio = info.audio_codec.is_some() && !strip_audio;
    // Degenerate cases (no keyframe span inside the range, or clips under
    // 5 s) aren't worth the 3-part pipeline: do a simple keyframe cut.
    if first_kf >= last_kf || (range.end - range.start) < 5.0 {
        let cmd = commands::build_trim_keyframe(input, output, range, strip_audio);
        let ffmpeg = ffmpeg.to_string();
        let jid = job_id.to_string();
        let dur = range.end - range.start;
        let app = app.clone();
        let jobs = jobs.clone();
        tokio::task::spawn_blocking(move || {
            runner::run_ffmpeg(&ffmpeg, &cmd, &jid, dur, "encoding", &app, &jobs)
        })
        .await
        .map_err(|e| e.to_string())??;
        return Ok(());
    }
    // Scratch files for the three segments plus the concat manifest.
    let work_dir = config::ensure_temp_subdir(&format!("smartcut/{}", job_id));
    let head_path = work_dir.join("head.mp4").to_string_lossy().to_string();
    let middle_path = work_dir.join("middle.mp4").to_string_lossy().to_string();
    let tail_path = work_dir.join("tail.mp4").to_string_lossy().to_string();
    let filelist_path = work_dir.join("filelist.txt").to_string_lossy().to_string();
    // head: re-encoded, range.start -> first keyframe
    {
        let cmd = commands::build_smart_cut_head(input, &head_path, range.start, first_kf, has_audio);
        let ffmpeg = ffmpeg.to_string();
        let jid = job_id.to_string();
        let dur = first_kf - range.start;
        let app = app.clone();
        let jobs = jobs.clone();
        tokio::task::spawn_blocking(move || {
            runner::run_ffmpeg(&ffmpeg, &cmd, &jid, dur, "encoding", &app, &jobs)
        })
        .await
        .map_err(|e| e.to_string())??;
    }
    // middle: stream copy between the two keyframes (fast, silent run —
    // no per-job progress reporting)
    {
        let cmd = commands::build_smart_cut_middle(input, &middle_path, first_kf, last_kf);
        let ffmpeg = ffmpeg.to_string();
        tokio::task::spawn_blocking(move || runner::run_ffmpeg_silent(&ffmpeg, &cmd))
            .await
            .map_err(|e| e.to_string())??;
    }
    // tail: re-encoded, last keyframe -> range.end
    {
        let cmd = commands::build_smart_cut_tail(input, &tail_path, last_kf, range.end, has_audio);
        let ffmpeg = ffmpeg.to_string();
        let jid = job_id.to_string();
        let dur = range.end - last_kf;
        let app = app.clone();
        let jobs = jobs.clone();
        tokio::task::spawn_blocking(move || {
            runner::run_ffmpeg(&ffmpeg, &cmd, &jid, dur, "encoding", &app, &jobs)
        })
        .await
        .map_err(|e| e.to_string())??;
    }
    // concat the three segments via ffmpeg's concat demuxer.
    // NOTE(review): paths are wrapped in single quotes but not escaped —
    // a path containing a ' would break the manifest; confirm inputs can't
    // contain quotes or add escaping.
    let filelist_content = format!(
        "file '{}'\nfile '{}'\nfile '{}'",
        head_path.replace('\\', "/"),
        middle_path.replace('\\', "/"),
        tail_path.replace('\\', "/"),
    );
    fs::write(&filelist_path, &filelist_content).map_err(|e| e.to_string())?;
    {
        let cmd = commands::build_concat(&filelist_path, output);
        let ffmpeg = ffmpeg.to_string();
        tokio::task::spawn_blocking(move || runner::run_ffmpeg_silent(&ffmpeg, &cmd))
            .await
            .map_err(|e| e.to_string())??;
    }
    // cleanup temp segments; best-effort.
    let _ = fs::remove_dir_all(&work_dir);
    Ok(())
}
/// Cancel a running ffmpeg job by id.
/// NOTE(review): the recovery marker is deleted even when cancellation
/// fails or when `job_id` doesn't match the job the marker was written
/// for — confirm this is intended.
#[tauri::command]
pub async fn cancel_job(job_id: String, state: State<'_, AppState>) -> Result<(), String> {
    let result = runner::cancel_job(&job_id, &state.jobs);
    recovery::delete_job_info(&config::temp_dir());
    result
}

View File

@@ -0,0 +1,422 @@
use std::path::PathBuf;
use std::process::Command;
#[cfg(windows)]
use std::os::windows::process::CommandExt;
use tauri::State;
use crate::config;
use crate::ffmpeg::discovery;
use crate::recovery;
use crate::types::*;
use crate::AppState;
/// Build the local HTTP streaming URL for `path`, percent-encoding every
/// non-alphanumeric byte of the file path.
#[tauri::command]
pub async fn get_stream_url_cmd(
    path: String,
    state: State<'_, AppState>,
) -> Result<String, String> {
    let port = *state.stream_port.lock().map_err(|e| e.to_string())?;
    let encoded =
        percent_encoding::percent_encode(path.as_bytes(), percent_encoding::NON_ALPHANUMERIC);
    Ok(format!("http://127.0.0.1:{}/{}", port, encoded))
}
/// Return the port the local media streaming server is bound to.
#[tauri::command]
pub async fn get_stream_port_cmd(
    state: State<'_, AppState>,
) -> Result<u16, String> {
    state
        .stream_port
        .lock()
        .map(|port| *port)
        .map_err(|e| e.to_string())
}
#[tauri::command]
pub async fn check_ffmpeg(state: State<'_, AppState>) -> Result<FFmpegStatus, String> {
let override_path = {
let cfg = state.config.lock().map_err(|e| e.to_string())?;
cfg.ffmpeg_path.clone()
};
let status = tokio::task::spawn_blocking(move || {
discovery::find_ffmpeg(override_path.as_deref())
})
.await
.map_err(|e| e.to_string())?;
if status.found {
if let Some(ref path) = status.path {
let mut cfg = state.config.lock().map_err(|e| e.to_string())?;
cfg.ffmpeg_path = Some(path.clone());
let _ = config::save_config(&cfg);
}
}
Ok(status)
}
/// Reveal `path` in the OS file manager; plain files open their parent
/// directory instead.
#[tauri::command]
pub async fn open_in_explorer(path: String) -> Result<(), String> {
    let p = PathBuf::from(&path);
    let dir = if p.is_dir() {
        p
    } else {
        p.parent().map(|d| d.to_path_buf()).unwrap_or(p)
    };
    // The original hard-coded Windows `explorer`, which silently fails on
    // other platforms; pick the platform's launcher instead. Behavior on
    // Windows is unchanged.
    #[cfg(target_os = "windows")]
    let launcher = "explorer";
    #[cfg(target_os = "macos")]
    let launcher = "open";
    #[cfg(not(any(target_os = "windows", target_os = "macos")))]
    let launcher = "xdg-open";
    std::process::Command::new(launcher)
        .arg(dir.to_string_lossy().to_string())
        .spawn()
        .map_err(|e| format!("Failed to open explorer: {}", e))?;
    Ok(())
}
/// Hand the frontend a snapshot of the current app configuration.
#[tauri::command]
pub async fn get_config(state: State<'_, AppState>) -> Result<AppConfig, String> {
    state
        .config
        .lock()
        .map(|cfg| cfg.clone())
        .map_err(|e| e.to_string())
}
/// Persist `new_config` to disk, then swap it into shared state.
/// Disk write happens first so a failed save leaves memory untouched.
#[tauri::command]
pub async fn save_config_cmd(
    new_config: AppConfig,
    state: State<'_, AppState>,
) -> Result<(), String> {
    config::save_config(&new_config)?;
    let mut current = state.config.lock().map_err(|e| e.to_string())?;
    *current = new_config;
    Ok(())
}
/// Suggest a default output path: `<dir>/<stem>_<suffix>_<timestamp>.<ext>`
/// where `<dir>` is the configured output directory, or the input's folder
/// when none is configured.
#[tauri::command]
pub async fn get_output_path(
    input: String,
    mode: String,
    container: String,
    state: State<'_, AppState>,
) -> Result<String, String> {
    let configured_dir = {
        let cfg = state.config.lock().map_err(|e| e.to_string())?;
        cfg.default_output_dir.clone()
    };
    let source = PathBuf::from(&input);
    let base = source
        .file_stem()
        .and_then(|s| s.to_str())
        .unwrap_or("output");
    // Containers outside the known set default to mp4.
    let ext = match container.to_lowercase().as_str() {
        "mkv" => "mkv",
        "webm" => "webm",
        "mov" => "mov",
        "avi" => "avi",
        "ts" => "ts",
        _ => "mp4",
    };
    let suffix = match mode.as_str() {
        "compress" => "compressed",
        "trim" => "trimmed",
        "trimcomp" => "trimcomp",
        _ => "output",
    };
    let filename = format!("{}_{}_{}.{}", base, suffix, chrono_free_timestamp(), ext);
    let target_dir = configured_dir
        .filter(|d| !d.is_empty())
        .map(PathBuf::from)
        .unwrap_or_else(|| {
            source
                .parent()
                .map(|p| p.to_path_buf())
                .unwrap_or_else(|| PathBuf::from("."))
        });
    Ok(target_dir.join(&filename).to_string_lossy().to_string())
}
/// Format "now" (UTC) as `YYYYMMDD_HHMM` without pulling in the `chrono`
/// crate. The civil-date arithmetic lives in `format_unix_timestamp` so it
/// can be tested without mocking the clock.
fn chrono_free_timestamp() -> String {
    use std::time::{SystemTime, UNIX_EPOCH};
    let secs = SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .unwrap_or_default()
        .as_secs();
    format_unix_timestamp(secs)
}

/// Convert a Unix timestamp (seconds since epoch, UTC) to `YYYYMMDD_HHMM`.
/// Seconds are deliberately dropped — the string is used in filenames where
/// minute precision is sufficient.
fn format_unix_timestamp(secs: u64) -> String {
    let days = secs / 86400;
    let time_of_day = secs % 86400;
    let hours = time_of_day / 3600;
    let minutes = (time_of_day % 3600) / 60;
    // Walk forward from 1970 a year at a time; the day counts involved are
    // small enough that the linear scan is effectively free.
    let mut y = 1970i64;
    let mut remaining = days as i64;
    loop {
        let days_in_year = if is_leap(y) { 366 } else { 365 };
        if remaining < days_in_year {
            break;
        }
        remaining -= days_in_year;
        y += 1;
    }
    let month_days = if is_leap(y) {
        [31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
    } else {
        [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
    };
    let mut m = 1u32;
    for &md in &month_days {
        if remaining < md {
            break;
        }
        remaining -= md;
        m += 1;
    }
    // `remaining` is zero-based day-of-month after the loop.
    let d = remaining + 1;
    format!("{}{:02}{:02}_{:02}{:02}", y, m, d, hours, minutes)
}

/// Gregorian leap-year rule: divisible by 4, except centuries, except
/// every 400th year.
fn is_leap(y: i64) -> bool {
    (y % 4 == 0 && y % 100 != 0) || y % 400 == 0
}
/// Startup initialization: locate ffmpeg (honoring a configured override),
/// persist the resolved path, and pre-detect hardware encoders into state.
#[tauri::command]
pub async fn init_app(state: State<'_, AppState>) -> Result<FFmpegStatus, String> {
    let override_path = {
        let cfg = state.config.lock().map_err(|e| e.to_string())?;
        cfg.ffmpeg_path.clone()
    };
    let status = tokio::task::spawn_blocking(move || {
        discovery::find_ffmpeg(override_path.as_deref())
    })
    .await
    .map_err(|e| e.to_string())?;
    if status.found {
        // Remember the resolved path so later commands don't search again.
        if let Some(ref path) = status.path {
            let mut cfg = state.config.lock().map_err(|e| e.to_string())?;
            cfg.ffmpeg_path = Some(path.clone());
            // Best-effort save; init shouldn't fail over a config write.
            let _ = config::save_config(&cfg);
        }
        // Detect hardware encoders up front and cache the result in state
        // so the first compress doesn't pay the detection cost.
        let ffmpeg = status.path.clone().unwrap_or_else(|| "ffmpeg".into());
        let hw = tokio::task::spawn_blocking(move || {
            discovery::detect_hardware_encoders(&ffmpeg)
        })
        .await
        .map_err(|e| e.to_string())?;
        let mut hw_state = state.hw_info.lock().map_err(|e| e.to_string())?;
        *hw_state = Some(hw);
    }
    Ok(status)
}
/// Download a static Windows ffmpeg build next to the executable, extract
/// it, install ffmpeg.exe/ffprobe.exe, and persist the new path in config.
/// Progress is emitted on the "ffmpeg-download-progress" event channel.
/// NOTE(review): this routine is Windows-specific (curl/tar bundled with
/// Windows 10+, CREATE_NO_WINDOW flags, win64 zip URL).
#[tauri::command]
pub async fn download_ffmpeg(app_handle: tauri::AppHandle, state: State<'_, AppState>) -> Result<FFmpegStatus, String> {
    use tauri::Emitter;
    let exe_dir = std::env::current_exe()
        .map_err(|e| e.to_string())?
        .parent()
        .ok_or("Could not find exe directory")?
        .to_path_buf();
    let zip_path = exe_dir.join("ffmpeg-download.zip");
    let extract_dir = exe_dir.join("ffmpeg-download");
    // Remove any stale zip from an earlier aborted download.
    let _ = std::fs::remove_file(&zip_path);
    let url = "https://github.com/BtbN/FFmpeg-Builds/releases/download/latest/ffmpeg-master-latest-win64-gpl.zip";
    // ~90MB estimate — only used to compute a progress percentage, never
    // to validate the download.
    let expected_size: u64 = 90_000_000;
    // emit starting
    let _ = app_handle.emit("ffmpeg-download-progress", serde_json::json!({
        "phase": "downloading", "percent": 0, "message": "Connecting..."
    }));
    // start curl download in background (curl ships with Windows 10+, gives us progress)
    let child = Command::new("curl")
        .args(["-L", "-o", &zip_path.to_string_lossy(), "--progress-bar", url])
        .creation_flags(0x08000000) // CREATE_NO_WINDOW
        .stderr(std::process::Stdio::piped())
        .spawn()
        .map_err(|_| {
            // fallback to PowerShell if curl not available
            "curl not found".to_string()
        });
    let use_curl = child.is_ok();
    // Shared flag telling the polling thread the download finished.
    let done_flag = std::sync::Arc::new(std::sync::atomic::AtomicBool::new(false));
    if use_curl {
        let mut child = child.unwrap();
        let zip_clone = zip_path.clone();
        let handle_clone = app_handle.clone();
        let flag = done_flag.clone();
        // Poll the growing zip's size every 500 ms and translate it into
        // percentage events (capped at 99% since expected_size is a guess).
        let poll_thread = std::thread::spawn(move || {
            while !flag.load(std::sync::atomic::Ordering::Relaxed) {
                std::thread::sleep(std::time::Duration::from_millis(500));
                if let Ok(meta) = std::fs::metadata(&zip_clone) {
                    let pct = ((meta.len() as f64 / expected_size as f64) * 100.0).min(99.0);
                    let mb = meta.len() as f64 / 1_048_576.0;
                    let _ = handle_clone.emit("ffmpeg-download-progress", serde_json::json!({
                        "phase": "downloading",
                        "percent": pct as u32,
                        "message": format!("{:.1} MB downloaded", mb)
                    }));
                }
            }
        });
        let status = child.wait().map_err(|e| format!("Download failed: {}", e))?;
        done_flag.store(true, std::sync::atomic::Ordering::Relaxed);
        let _ = poll_thread.join();
        if !status.success() {
            let _ = std::fs::remove_file(&zip_path);
            return Err("Download failed. Check your internet connection.".into());
        }
    } else {
        // PowerShell fallback: no incremental progress available.
        let _ = app_handle.emit("ffmpeg-download-progress", serde_json::json!({
            "phase": "downloading", "percent": 0, "message": "Downloading..."
        }));
        let dl_status = Command::new("powershell")
            .args([
                "-NoProfile", "-NonInteractive", "-Command",
                &format!(
                    "[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12; Invoke-WebRequest -Uri '{}' -OutFile '{}'",
                    url, zip_path.to_string_lossy()
                )
            ])
            .creation_flags(0x08000000)
            .status()
            .map_err(|e| format!("Download failed: {}", e))?;
        if !dl_status.success() {
            let _ = std::fs::remove_file(&zip_path);
            return Err("Download failed. Check your internet connection.".into());
        }
    }
    // extract - use tar (faster than PowerShell Expand-Archive, ships with Windows 10+)
    let _ = app_handle.emit("ffmpeg-download-progress", serde_json::json!({
        "phase": "extracting", "percent": 100, "message": "Extracting..."
    }));
    let ex_status = Command::new("tar")
        .args(["-xf", &zip_path.to_string_lossy(), "-C", &exe_dir.to_string_lossy()])
        .creation_flags(0x08000000)
        .status()
        .or_else(|_| {
            // fallback to PowerShell if tar fails
            Command::new("powershell")
                .args([
                    "-NoProfile", "-NonInteractive", "-Command",
                    &format!(
                        "Expand-Archive -Path '{}' -DestinationPath '{}' -Force",
                        zip_path.to_string_lossy(),
                        extract_dir.to_string_lossy()
                    )
                ])
                .creation_flags(0x08000000)
                .status()
        })
        .map_err(|e| format!("Extraction failed: {}", e))?;
    if !ex_status.success() {
        let _ = std::fs::remove_file(&zip_path);
        let _ = std::fs::remove_dir_all(&extract_dir);
        return Err("Could not extract the archive.".into());
    }
    let _ = app_handle.emit("ffmpeg-download-progress", serde_json::json!({
        "phase": "installing", "percent": 100, "message": "Installing..."
    }));
    // find ffmpeg.exe - tar extracts to exe_dir, PowerShell to extract_dir
    let mut ffmpeg_src = None;
    let mut ffprobe_src = None;
    for search_dir in [&exe_dir, &extract_dir] {
        if let Ok(entries) = std::fs::read_dir(search_dir) {
            for entry in entries.flatten() {
                let path = entry.path();
                // check nested bin/ directory (zip has one top-level folder)
                let bin_dir = path.join("bin");
                if bin_dir.is_dir() {
                    let ff = bin_dir.join("ffmpeg.exe");
                    let fp = bin_dir.join("ffprobe.exe");
                    if ff.exists() { ffmpeg_src = Some(ff); }
                    if fp.exists() { ffprobe_src = Some(fp); }
                    if ffmpeg_src.is_some() { break; }
                }
            }
        }
        if ffmpeg_src.is_some() { break; }
    }
    let ffmpeg_src = ffmpeg_src.ok_or("Could not find ffmpeg.exe in the download.")?;
    let ffprobe_src = ffprobe_src.ok_or("Could not find ffprobe.exe in the download.")?;
    // Install next to the app executable, where discovery looks first.
    let ffmpeg_dest = exe_dir.join("ffmpeg.exe");
    let ffprobe_dest = exe_dir.join("ffprobe.exe");
    std::fs::copy(&ffmpeg_src, &ffmpeg_dest).map_err(|e| format!("Could not copy ffmpeg: {}", e))?;
    std::fs::copy(&ffprobe_src, &ffprobe_dest).map_err(|e| format!("Could not copy ffprobe: {}", e))?;
    // cleanup zip and any extracted folders
    let _ = std::fs::remove_file(&zip_path);
    let _ = std::fs::remove_dir_all(&extract_dir);
    // also clean up the tar-extracted folder in exe_dir (matches ffmpeg-master-*)
    if let Ok(entries) = std::fs::read_dir(&exe_dir) {
        for entry in entries.flatten() {
            let name = entry.file_name().to_string_lossy().to_string();
            if name.starts_with("ffmpeg-master-") && entry.path().is_dir() {
                let _ = std::fs::remove_dir_all(entry.path());
            }
        }
    }
    // verify and update config
    let path_str = ffmpeg_dest.to_string_lossy().to_string();
    let version = discovery::parse_version(&PathBuf::from(&path_str));
    let status = FFmpegStatus {
        found: true,
        path: Some(path_str.clone()),
        version,
    };
    {
        let mut cfg = state.config.lock().map_err(|e| e.to_string())?;
        cfg.ffmpeg_path = Some(path_str);
        // Best-effort persist of the newly installed path.
        let _ = config::save_config(&cfg);
    }
    Ok(status)
}
/// Report a previously interrupted job if its recovery marker exists.
#[tauri::command]
pub async fn check_recovery() -> Result<Option<recovery::InterruptedJob>, String> {
    Ok(recovery::check_interrupted_job(&config::temp_dir()))
}
/// Purge orphaned temp files and the recovery marker after the user
/// dismisses (or finishes handling) an interrupted-job prompt.
#[tauri::command]
pub async fn cleanup_recovery() -> Result<(), String> {
    let temp_root = config::temp_dir();
    recovery::cleanup_orphaned_temps(&temp_root);
    recovery::delete_job_info(&temp_root);
    Ok(())
}

36
src-tauri/src/config.rs Normal file
View File

@@ -0,0 +1,36 @@
use std::fs;
use std::path::PathBuf;
use crate::types::AppConfig;
/// Location of `config.json`: next to the executable (portable install),
/// falling back to the current directory when the exe path is unknown.
fn config_path() -> PathBuf {
    let exe = std::env::current_exe().unwrap_or_default();
    let dir = exe
        .parent()
        .map(|p| p.to_path_buf())
        .unwrap_or_else(|| PathBuf::from("."));
    dir.join("config.json")
}
/// Read the config from disk; any failure (missing file, unreadable file,
/// invalid JSON) silently yields the default configuration.
pub fn load_config() -> AppConfig {
    fs::read_to_string(config_path())
        .ok()
        .and_then(|text| serde_json::from_str(&text).ok())
        .unwrap_or_default()
}
/// Serialize `config` as pretty JSON and write it next to the executable.
pub fn save_config(config: &AppConfig) -> Result<(), String> {
    let serialized = serde_json::to_string_pretty(config).map_err(|e| e.to_string())?;
    fs::write(config_path(), serialized).map_err(|e| format!("Failed to write config: {}", e))
}
/// Scratch directory (`temp/` next to the executable); created on demand,
/// creation errors deliberately ignored.
pub fn temp_dir() -> PathBuf {
    let exe = std::env::current_exe().unwrap_or_default();
    let dir = exe
        .parent()
        .map(|p| p.to_path_buf())
        .unwrap_or_else(|| PathBuf::from("."))
        .join("temp");
    let _ = fs::create_dir_all(&dir);
    dir
}
/// Create (if needed) and return `temp/<subdir>`.
pub fn ensure_temp_subdir(subdir: &str) -> PathBuf {
    let path = temp_dir().join(subdir);
    let _ = fs::create_dir_all(&path);
    path
}

View File

@@ -0,0 +1,453 @@
use crate::types::*;
/// Thin builder over an ffmpeg argument vector. Every command starts with
/// `-y` (overwrite existing output without prompting).
pub struct FfmpegCommand {
    pub args: Vec<String>,
}

impl FfmpegCommand {
    /// Fresh command seeded with the `-y` overwrite flag.
    fn new() -> Self {
        Self {
            args: vec!["-y".into()],
        }
    }

    /// Append one argument, returning `self` for chaining.
    fn arg(mut self, a: &str) -> Self {
        self.args.push(a.into());
        self
    }

    /// Append several arguments in order.
    fn args(mut self, a: &[&str]) -> Self {
        for item in a {
            self.args.push((*item).to_string());
        }
        self
    }
}
/// First pass of two-pass ABR encoding: video-only analysis pass whose
/// output goes to the OS null device (only the passlog file matters).
pub fn build_compress_pass1(
    input: &str,
    settings: &CompressSettings,
    bitrate_kbps: u32,
    passlog_prefix: &str,
) -> FfmpegCommand {
    let encoder = software_encoder(&settings.video_codec);
    let mut cmd = FfmpegCommand::new()
        .arg("-i").arg(input)
        .arg("-c:v").arg(&encoder)
        .arg("-preset").arg(&sw_preset(&settings.speed_preset, &settings.video_codec))
        .arg("-b:v").arg(&format!("{}k", bitrate_kbps))
        .arg("-pass").arg("1")
        .arg("-passlogfile").arg(passlog_prefix)
        .arg("-an")
        .arg("-f").arg("null");
    cmd = apply_resolution(cmd, &settings.resolution);
    // "NUL" only exists on Windows; on Unix the original would create a
    // literal file named "NUL". Use the platform's null device instead.
    cmd.arg(if cfg!(windows) { "NUL" } else { "/dev/null" })
}
/// Second pass of two-pass ABR encoding: consumes the pass-1 log and
/// writes the real output, with optional trim and audio handling.
pub fn build_compress_pass2(
    input: &str,
    output: &str,
    settings: &CompressSettings,
    bitrate_kbps: u32,
    passlog_prefix: &str,
    trim: Option<&TrimRange>,
    has_audio: bool,
) -> FfmpegCommand {
    let encoder = software_encoder(&settings.video_codec);
    let preset = sw_preset(&settings.speed_preset, &settings.video_codec);
    let mut cmd = FfmpegCommand::new();
    // Trim bounds go before -i for fast input seeking.
    if let Some(range) = trim {
        cmd = cmd
            .arg("-ss").arg(&range.start.to_string())
            .arg("-to").arg(&range.end.to_string());
    }
    cmd = cmd
        .arg("-i").arg(input)
        .arg("-c:v").arg(&encoder)
        .arg("-preset").arg(&preset)
        .arg("-b:v").arg(&format!("{}k", bitrate_kbps))
        .arg("-pass").arg("2")
        .arg("-passlogfile").arg(passlog_prefix);
    cmd = if has_audio {
        apply_audio(cmd, &settings.audio_codec, settings.audio_bitrate)
    } else {
        cmd.arg("-an")
    };
    cmd = apply_resolution(cmd, &settings.resolution);
    cmd = apply_container_flags(cmd, &settings.container);
    // Machine-readable progress on stdout for the runner's progress parser.
    cmd.arg("-progress").arg("pipe:1").arg(output)
}
/// Single-pass hardware-encoder compress with VBV rate constraints
/// (maxrate = 1.5x target, bufsize = 2x target).
pub fn build_compress_hw(
    input: &str,
    output: &str,
    settings: &CompressSettings,
    bitrate_kbps: u32,
    hw_encoder: &str,
    trim: Option<&TrimRange>,
    has_audio: bool,
) -> FfmpegCommand {
    let maxrate = (bitrate_kbps as f64 * 1.5) as u32;
    let bufsize = bitrate_kbps * 2;
    let mut cmd = FfmpegCommand::new();
    // Trim bounds go before -i for fast input seeking.
    if let Some(range) = trim {
        cmd = cmd
            .arg("-ss").arg(&range.start.to_string())
            .arg("-to").arg(&range.end.to_string());
    }
    cmd = cmd
        .arg("-i").arg(input)
        .arg("-c:v").arg(hw_encoder)
        .arg("-preset").arg(&hw_preset(&settings.speed_preset, hw_encoder))
        .arg("-b:v").arg(&format!("{}k", bitrate_kbps))
        .arg("-maxrate").arg(&format!("{}k", maxrate))
        .arg("-bufsize").arg(&format!("{}k", bufsize));
    cmd = if has_audio {
        apply_audio(cmd, &settings.audio_codec, settings.audio_bitrate)
    } else {
        cmd.arg("-an")
    };
    cmd = apply_resolution(cmd, &settings.resolution);
    cmd = apply_container_flags(cmd, &settings.container);
    cmd.arg("-progress").arg("pipe:1").arg(output)
}
/// Build a constant-quality encode. Hardware encoders (NVENC/AMF/QSV)
/// take the quality level via `-cq`; all software encoders (x264, x265,
/// SVT-AV1, ...) use `-crf`.
pub fn build_crf_command(
    input: &str,
    output: &str,
    settings: &CompressSettings,
    crf_value: u32,
    encoder: &str,
    trim: Option<&TrimRange>,
    has_audio: bool,
) -> FfmpegCommand {
    let mut cmd = FfmpegCommand::new();
    if let Some(t) = trim {
        cmd = cmd
            .arg("-ss").arg(&format!("{}", t.start))
            .arg("-to").arg(&format!("{}", t.end));
    }
    cmd = cmd
        .arg("-i").arg(input)
        .arg("-c:v").arg(encoder);
    // The original special-cased svtav1 into a branch byte-identical to
    // the default (-crf); collapsed to the single distinction that matters:
    // hardware encoders take -cq, software encoders take -crf.
    let quality_flag = if encoder.contains("nvenc") || encoder.contains("amf") || encoder.contains("qsv") {
        "-cq"
    } else {
        "-crf"
    };
    cmd = cmd.arg(quality_flag).arg(&crf_value.to_string());
    cmd = cmd.arg("-preset").arg(&resolve_preset(&settings.speed_preset, encoder));
    if has_audio {
        cmd = apply_audio(cmd, &settings.audio_codec, settings.audio_bitrate);
    } else {
        cmd = cmd.arg("-an");
    }
    cmd = apply_resolution(cmd, &settings.resolution);
    cmd = apply_container_flags(cmd, &settings.container);
    cmd.arg("-progress").arg("pipe:1").arg(output)
}
/// Single-pass average-bitrate encode (no VBV constraints), with optional
/// trim and audio handling.
pub fn build_bitrate_command(
    input: &str,
    output: &str,
    settings: &CompressSettings,
    bitrate_kbps: u32,
    encoder: &str,
    trim: Option<&TrimRange>,
    has_audio: bool,
) -> FfmpegCommand {
    let mut cmd = FfmpegCommand::new();
    // Trim bounds go before -i for fast input seeking.
    if let Some(range) = trim {
        cmd = cmd
            .arg("-ss").arg(&range.start.to_string())
            .arg("-to").arg(&range.end.to_string());
    }
    cmd = cmd
        .arg("-i").arg(input)
        .arg("-c:v").arg(encoder)
        .arg("-b:v").arg(&format!("{}k", bitrate_kbps))
        .arg("-preset").arg(&resolve_preset(&settings.speed_preset, encoder));
    cmd = if has_audio {
        apply_audio(cmd, &settings.audio_codec, settings.audio_bitrate)
    } else {
        cmd.arg("-an")
    };
    cmd = apply_resolution(cmd, &settings.resolution);
    cmd = apply_container_flags(cmd, &settings.container);
    cmd.arg("-progress").arg("pipe:1").arg(output)
}
/// Lossless trim: stream-copy between `-ss`/`-to` (cuts snap to keyframes),
/// normalizing timestamps so players don't see negative PTS.
pub fn build_trim_keyframe(
    input: &str,
    output: &str,
    range: &TrimRange,
    strip_audio: bool,
) -> FfmpegCommand {
    let mut cmd = FfmpegCommand::new()
        .arg("-ss").arg(&range.start.to_string())
        .arg("-to").arg(&range.end.to_string())
        .arg("-i").arg(input)
        .arg("-c").arg("copy")
        .arg("-avoid_negative_ts").arg("make_zero");
    if strip_audio {
        cmd = cmd.arg("-an");
    }
    cmd.arg(output)
}
/// Smart-cut head segment: re-encode from `start` up to the first keyframe
/// with near-lossless x264 (CRF 18), AAC audio when present.
pub fn build_smart_cut_head(
    input: &str,
    output: &str,
    start: f64,
    first_keyframe: f64,
    has_audio: bool,
) -> FfmpegCommand {
    let base = FfmpegCommand::new()
        .arg("-ss").arg(&start.to_string())
        .arg("-to").arg(&first_keyframe.to_string())
        .arg("-i").arg(input)
        .arg("-c:v").arg("libx264")
        .arg("-crf").arg("18");
    let with_audio = if has_audio {
        base.arg("-c:a").arg("aac")
    } else {
        base.arg("-an")
    };
    with_audio.arg("-progress").arg("pipe:1").arg(output)
}
/// Builds the stream-copied "middle" segment of a smart cut: the
/// keyframe-aligned span between `first_keyframe` and `last_keyframe`,
/// copied without re-encoding.
pub fn build_smart_cut_middle(
    input: &str,
    output: &str,
    first_keyframe: f64,
    last_keyframe: f64,
) -> FfmpegCommand {
    let mut cmd = FfmpegCommand::new();
    cmd = cmd.arg("-ss").arg(&first_keyframe.to_string());
    cmd = cmd.arg("-to").arg(&last_keyframe.to_string());
    cmd = cmd.arg("-i").arg(input);
    cmd = cmd.arg("-c").arg("copy");
    // shift timestamps so the copied segment starts at zero
    cmd = cmd.arg("-avoid_negative_ts").arg("make_zero");
    cmd.arg(output)
}
/// Builds the re-encoded "tail" segment of a smart cut: the span from the
/// `last_keyframe` to the requested `end`, encoded with libx264 at CRF 18.
pub fn build_smart_cut_tail(
    input: &str,
    output: &str,
    last_keyframe: f64,
    end: f64,
    has_audio: bool,
) -> FfmpegCommand {
    let base = FfmpegCommand::new()
        .arg("-ss").arg(&last_keyframe.to_string())
        .arg("-to").arg(&end.to_string())
        .arg("-i").arg(input)
        .arg("-c:v").arg("libx264")
        .arg("-crf").arg("18");
    let with_audio = if has_audio {
        base.arg("-c:a").arg("aac")
    } else {
        base.arg("-an")
    };
    with_audio.arg("-progress").arg("pipe:1").arg(output)
}
/// Builds a stream-copy concat of the segments listed in `filelist`
/// (ffmpeg concat-demuxer file list format).
pub fn build_concat(filelist: &str, output: &str) -> FfmpegCommand {
    let mut cmd = FfmpegCommand::new();
    cmd = cmd.arg("-f").arg("concat");
    // -safe 0 permits absolute paths inside the list file
    cmd = cmd.arg("-safe").arg("0");
    cmd = cmd.arg("-i").arg(filelist);
    cmd = cmd.arg("-c").arg("copy");
    cmd.arg(output)
}
/// Builds a command that extracts roughly `count` small thumbnails spread
/// across the clip, written to `output_pattern` at JPEG quality 8 and scaled
/// to 240px width.
///
/// The fps filter takes one frame every `interval` seconds. The interval is
/// clamped to a positive floor because a zero/negative `duration` — or one so
/// small that `{:.2}` prints "0.00" — would produce "fps=1/0.00", a division
/// by zero that makes ffmpeg reject the filter graph.
pub fn build_thumbnail(
    input: &str,
    output_pattern: &str,
    count: u32,
    duration: f64,
) -> FfmpegCommand {
    let interval = if count > 1 {
        duration / (count as f64)
    } else {
        // single thumbnail request: sample at half the duration
        duration / 2.0
    };
    // guard against zero intervals (see doc comment above)
    let interval = interval.max(0.01);
    FfmpegCommand::new()
        .arg("-i").arg(input)
        .args(&["-vf", &format!("fps=1/{:.2},scale=240:-1", interval)])
        .arg("-q:v").arg("8")
        .arg(output_pattern)
}
/// Builds the preview command: sources already in h264/h265/hevc are remuxed
/// with stream copy (instant, no re-encoding); anything else gets a fast,
/// low-quality 30-second h264 transcode at 640px width.
pub fn build_preview(input: &str, output: &str, codec: &str) -> FfmpegCommand {
    let mut cmd = FfmpegCommand::new().arg("-i").arg(input);
    let codec_lc = codec.to_lowercase();
    let copyable = matches!(codec_lc.as_str(), "h264" | "h265" | "hevc");
    if copyable {
        // already in a compatible codec: remux only
        cmd = cmd.arg("-c:v").arg("copy")
            .arg("-c:a").arg("copy");
    } else {
        // transcode only the first 30 seconds into a small preview proxy
        cmd = cmd.arg("-t").arg("30")
            .arg("-c:v").arg("libx264")
            .arg("-preset").arg("ultrafast")
            .arg("-crf").arg("28")
            .args(&["-vf", "scale=640:-2"])
            .arg("-c:a").arg("aac")
            .arg("-b:a").arg("64k");
    }
    // +faststart moves the moov atom to the front for immediate playback
    cmd.args(&["-movflags", "+faststart"]).arg(output)
}
/// Computes the video bitrate (kbps) needed to land a `target_mb`-megabyte
/// file for a clip of `duration` seconds, after reserving room for the audio
/// track at `audio_bitrate_kbps`.
///
/// Returns at least 10 kbps. A non-positive `duration` also yields the floor
/// instead of dividing by zero (which would overflow the `as u32` cast to
/// `u32::MAX` via an infinite intermediate).
pub fn calculate_bitrate(target_mb: f64, duration: f64, audio_bitrate_kbps: u32) -> u32 {
    if duration <= 0.0 {
        return 10;
    }
    let target_bits = target_mb * 1024.0 * 1024.0 * 8.0;
    let audio_bits = audio_bitrate_kbps as f64 * 1000.0 * duration;
    let video_bits = target_bits - audio_bits;
    (video_bits / duration / 1000.0).max(10.0) as u32
}
/// Picks the concrete ffmpeg encoder name for the requested codec family.
///
/// `ForceCPU` always uses the software encoder; `ForceGPU` and `Auto` both
/// prefer a detected hardware encoder and fall back to software when none
/// matches the codec.
pub fn select_encoder(
    codec: &VideoCodec,
    hw_accel: &HwAccelMode,
    hw_info: &HardwareInfo,
) -> String {
    match hw_accel {
        HwAccelMode::ForceCPU => software_encoder(codec),
        HwAccelMode::ForceGPU | HwAccelMode::Auto => {
            hw_encoder(codec, hw_info).unwrap_or_else(|| software_encoder(codec))
        }
    }
}
/// Returns true when the encoder name belongs to a hardware family
/// (NVIDIA NVENC, Intel QSV, or AMD AMF).
pub fn is_hw_encoder(encoder: &str) -> bool {
    ["nvenc", "qsv", "amf"].iter().any(|tag| encoder.contains(tag))
}
/// Maps a codec family to its software (CPU) encoder name.
fn software_encoder(codec: &VideoCodec) -> String {
    let name = match codec {
        VideoCodec::H264 => "libx264",
        VideoCodec::HEVC => "libx265",
        VideoCodec::AV1 => "libsvtav1",
    };
    name.to_string()
}
/// Finds a detected hardware encoder matching the codec family, or None.
///
/// Vendors are searched in preference order: NVENC first, then AMF, then QSV.
fn hw_encoder(codec: &VideoCodec, hw: &HardwareInfo) -> Option<String> {
    let target = match codec {
        VideoCodec::H264 => "h264",
        VideoCodec::HEVC => "hevc",
        VideoCodec::AV1 => "av1",
    };
    [&hw.nvenc_codecs, &hw.amf_codecs, &hw.qsv_codecs]
        .into_iter()
        .flatten()
        .find(|name| name.starts_with(target))
        .cloned()
}
/// Appends the audio-encoding arguments for the chosen codec, or `-an`
/// to drop the audio track entirely.
fn apply_audio(cmd: FfmpegCommand, codec: &AudioCodec, bitrate: u32) -> FfmpegCommand {
    let bitrate_arg = format!("{}k", bitrate);
    match codec {
        AudioCodec::None => cmd.arg("-an"),
        AudioCodec::AAC => cmd.arg("-c:a").arg("aac").arg("-b:a").arg(&bitrate_arg),
        AudioCodec::Opus => cmd.arg("-c:a").arg("libopus").arg("-b:a").arg(&bitrate_arg),
    }
}
/// Appends a scale filter for the target resolution; `Original` is a no-op.
///
/// The `-2` width keeps the aspect ratio while forcing an even dimension.
fn apply_resolution(cmd: FfmpegCommand, res: &Resolution) -> FfmpegCommand {
    let filter = match res {
        Resolution::Original => return cmd,
        Resolution::P720 => "scale=-2:720".to_string(),
        Resolution::P1080 => "scale=-2:1080".to_string(),
        Resolution::P1440 => "scale=-2:1440".to_string(),
        Resolution::P4K => "scale=-2:2160".to_string(),
        Resolution::Custom { width, height } => format!("scale={}:{}", width, height),
    };
    cmd.args(&["-vf", &filter])
}
/// Appends container-specific muxer flags.
fn apply_container_flags(cmd: FfmpegCommand, container: &Container) -> FfmpegCommand {
    match container {
        // no special flags needed for these containers
        Container::MKV | Container::WebM | Container::AVI | Container::TS => cmd,
        // +faststart relocates the moov atom so playback can start immediately
        Container::MP4 | Container::MOV => cmd.args(&["-movflags", "+faststart"]),
    }
}
/// Resolves the software-encoder preset: an empty or "medium" selection maps
/// to a per-codec default (SVT-AV1 uses numeric presets, hence "6").
fn sw_preset(preset: &str, codec: &VideoCodec) -> String {
    match preset {
        "" | "medium" => match codec {
            VideoCodec::AV1 => "6".into(),
            _ => "medium".into(),
        },
        other => other.to_string(),
    }
}
/// Resolves the hardware-encoder preset: an empty or "medium" selection maps
/// to "p5" for NVENC (which uses p1-p7 preset names) and "medium" otherwise.
fn hw_preset(preset: &str, encoder: &str) -> String {
    match preset {
        "" | "medium" => {
            if encoder.contains("nvenc") {
                "p5".into()
            } else {
                "medium".into()
            }
        }
        other => other.to_string(),
    }
}
/// Resolves the user's preset choice for the selected encoder, mapping the
/// empty/"medium" default to encoder-appropriate values (NVENC "p5",
/// SVT-AV1 numeric "6", "medium" elsewhere).
fn resolve_preset(preset: &str, encoder: &str) -> String {
    if is_hw_encoder(encoder) {
        return hw_preset(preset, encoder);
    }
    match preset {
        "" | "medium" => {
            if encoder == "libsvtav1" {
                "6".into()
            } else {
                "medium".into()
            }
        }
        other => other.to_string(),
    }
}

View File

@@ -0,0 +1,190 @@
use std::path::PathBuf;
use std::process::Command;
use crate::types::{FFmpegStatus, HardwareInfo};
/// Locates a working ffmpeg binary.
///
/// Resolution order: an explicit user-configured path (must exist and report
/// a version), then whatever "ffmpeg" resolves to on PATH, then a set of
/// well-known Windows install locations. Returns a not-found status when all
/// candidates fail.
pub fn find_ffmpeg(override_path: Option<&str>) -> FFmpegStatus {
    // 1) explicit override wins when it exists and actually runs
    if let Some(p) = override_path {
        let candidate = PathBuf::from(p);
        if candidate.exists() {
            if let Some(version) = parse_version(&candidate) {
                return FFmpegStatus {
                    found: true,
                    path: Some(p.to_string()),
                    version: Some(version),
                };
            }
        }
    }
    // 2) the PATH lookup
    if let Some(status) = try_from_path("ffmpeg") {
        return status;
    }
    // 3) well-known install directories
    for dir in get_search_paths() {
        let bin = dir.join("ffmpeg.exe");
        if !bin.exists() {
            continue;
        }
        if let Some(status) = try_from_path(bin.to_str().unwrap_or_default()) {
            return status;
        }
    }
    FFmpegStatus {
        found: false,
        path: None,
        version: None,
    }
}
/// Returns candidate directories that may contain ffmpeg.exe, in search order:
/// the conventional C:\ffmpeg locations, per-user and Program Files installs,
/// and finally the application's own directory and an "ffmpeg" folder beside it.
fn get_search_paths() -> Vec<PathBuf> {
    let mut paths = Vec::new();
    paths.push(PathBuf::from(r"C:\ffmpeg\bin"));
    paths.push(PathBuf::from(r"C:\ffmpeg"));
    for var in ["LOCALAPPDATA", "PROGRAMFILES"] {
        if let Ok(root) = std::env::var(var) {
            let base = PathBuf::from(&root).join("ffmpeg");
            paths.push(base.clone());
            paths.push(base.join("bin"));
        }
    }
    let exe = std::env::current_exe().unwrap_or_default();
    if let Some(parent) = exe.parent() {
        paths.push(parent.to_path_buf());
        paths.push(parent.join("ffmpeg"));
        paths.push(parent.join("ffmpeg").join("bin"));
    }
    paths
}
/// Resolves `cmd` to a concrete binary and probes its version; None when the
/// path can't be resolved or the binary doesn't report an ffmpeg version.
fn try_from_path(cmd: &str) -> Option<FFmpegStatus> {
    let path = resolve_path(cmd)?;
    parse_version(&path).map(|ver| FFmpegStatus {
        found: true,
        path: Some(path.to_string_lossy().to_string()),
        version: Some(ver),
    })
}
/// Turns a command name into a concrete path: absolute existing paths pass
/// through; otherwise the Windows `where` utility performs the PATH lookup
/// and the first hit is used.
fn resolve_path(cmd: &str) -> Option<PathBuf> {
    let direct = PathBuf::from(cmd);
    if direct.is_absolute() && direct.exists() {
        return Some(direct);
    }
    let output = Command::new("where").arg(cmd).output().ok()?;
    if !output.status.success() {
        return None;
    }
    let stdout = String::from_utf8_lossy(&output.stdout);
    let first_line = stdout.lines().next()?;
    let resolved = PathBuf::from(first_line.trim());
    resolved.exists().then_some(resolved)
}
/// Runs `<ffmpeg_path> -version` and extracts a clean version string
/// (e.g. "7.1" from "ffmpeg version 7.1 Copyright ..." or "N" from a git
/// build banner). Returns None when the binary can't be executed or the
/// first line doesn't look like ffmpeg's banner.
///
/// Takes `&Path` instead of `&PathBuf` (clippy `ptr_arg`); existing callers
/// passing `&PathBuf` continue to work through deref coercion.
pub fn parse_version(ffmpeg_path: &std::path::Path) -> Option<String> {
    let output = Command::new(ffmpeg_path).arg("-version").output().ok()?;
    if !output.status.success() {
        return None;
    }
    let stdout = String::from_utf8_lossy(&output.stdout);
    let first_line = stdout.lines().next()?;
    // "ffmpeg version 7.1 Copyright ..." or "ffmpeg version N-123456-g..."
    let parts: Vec<&str> = first_line.split_whitespace().collect();
    if parts.len() >= 3 && parts[0] == "ffmpeg" && parts[1] == "version" {
        let ver = parts[2];
        // keep only the part before the first dash (drops git-hash suffixes)
        let clean = ver.split('-').next().unwrap_or(ver);
        Some(clean.to_string())
    } else {
        None
    }
}
/// Derives the ffprobe binary path from the ffmpeg path: looks for
/// `ffprobe.exe` then `ffprobe` beside the ffmpeg binary, falling back to
/// the bare "ffprobe" name (PATH lookup) when neither exists.
pub fn ffprobe_path(ffmpeg_path: &str) -> String {
    let ffmpeg = PathBuf::from(ffmpeg_path);
    if let Some(dir) = ffmpeg.parent() {
        for name in ["ffprobe.exe", "ffprobe"] {
            let candidate = dir.join(name);
            if candidate.exists() {
                eprintln!("[cinch-rs] ffprobe_path: found {}", candidate.display());
                return candidate.to_string_lossy().to_string();
            }
        }
    }
    eprintln!("[cinch-rs] ffprobe_path: falling back to 'ffprobe'");
    "ffprobe".to_string()
}
/// Scans `ffmpeg -encoders` output for hardware encoder families
/// (NVENC / QSV / AMF) and records which concrete encoder names exist.
/// Returns an all-false `HardwareInfo` when ffmpeg cannot be executed.
pub fn detect_hardware_encoders(ffmpeg_path: &str) -> HardwareInfo {
    let mut info = HardwareInfo {
        nvenc: false,
        qsv: false,
        amf: false,
        nvenc_codecs: Vec::new(),
        qsv_codecs: Vec::new(),
        amf_codecs: Vec::new(),
    };
    let output = match Command::new(ffmpeg_path)
        .args(["-hide_banner", "-encoders"])
        .output()
    {
        Ok(o) => o,
        Err(_) => return info,
    };
    let stdout = String::from_utf8_lossy(&output.stdout);
    for line in stdout.lines() {
        let trimmed = line.trim();
        // the name extraction is family-agnostic, so compute it once per line
        let name = extract_encoder_name(trimmed);
        if name.is_empty() {
            continue;
        }
        if trimmed.contains("nvenc") {
            info.nvenc = true;
            info.nvenc_codecs.push(name.clone());
        }
        if trimmed.contains("qsv") {
            info.qsv = true;
            info.qsv_codecs.push(name.clone());
        }
        if trimmed.contains("amf") {
            info.amf = true;
            info.amf_codecs.push(name);
        }
    }
    info
}
/// Pulls the encoder id out of an `ffmpeg -encoders` listing row, e.g.
/// " V..... h264_nvenc NVIDIA NVENC H.264 encoder" -> "h264_nvenc".
/// Returns an empty string unless the id itself names a hardware family.
fn extract_encoder_name(line: &str) -> String {
    // the second whitespace-separated token is the encoder id
    let name = match line.split_whitespace().nth(1) {
        Some(n) => n,
        None => return String::new(),
    };
    let is_hw = name.contains("nvenc") || name.contains("qsv") || name.contains("amf");
    if is_hw {
        name.to_string()
    } else {
        String::new()
    }
}

View File

@@ -0,0 +1,4 @@
//! FFmpeg integration layer: command construction, binary discovery,
//! media probing, and process execution with progress reporting.
pub mod commands; // builds FfmpegCommand argument lists for each operation
pub mod discovery; // locates ffmpeg/ffprobe and detects hardware encoders
pub mod probe; // ffprobe-based metadata and keyframe extraction
pub mod runner; // spawns ffmpeg, parses -progress output, manages jobs

View File

@@ -0,0 +1,162 @@
use std::process::Command;
#[cfg(windows)]
use std::os::windows::process::CommandExt;
use crate::ffmpeg::discovery::ffprobe_path;
use crate::types::VideoInfo;
/// Step 1: fast metadata probe — one `ffprobe -show_format -show_streams`
/// call with no packet scanning, so it returns nearly instantly.
///
/// Returns a `VideoInfo` with `keyframe_times` left empty (filled later by
/// `extract_keyframes_fast`), or a user-facing error string when ffprobe
/// fails or the file has no video stream.
pub fn probe_metadata(input: &str, ffmpeg_path: &str) -> Result<VideoInfo, String> {
    let probe = ffprobe_path(ffmpeg_path);
    eprintln!("[cinch-rs] probe_metadata start: {} using {}", input, probe);
    let mut cmd = Command::new(&probe);
    cmd.args([
        "-v", "quiet",
        "-print_format", "json",
        "-show_format",
        "-show_streams",
        input,
    ]);
    // CREATE_NO_WINDOW: hide the console window on Windows. The call must be
    // cfg-gated to match the #[cfg(windows)] CommandExt import — an ungated
    // call would fail to compile on any other target.
    #[cfg(windows)]
    cmd.creation_flags(0x08000000);
    let output = cmd
        .output()
        .map_err(|e| format!("Failed to run ffprobe: {}", e))?;
    eprintln!("[cinch-rs] probe_metadata ffprobe returned, status: {}", output.status);
    if !output.status.success() {
        let stderr = String::from_utf8_lossy(&output.stderr);
        return Err(format!(
            "Couldn't read this file. It may be corrupted or not a video. {}",
            stderr.trim()
        ));
    }
    let stdout = String::from_utf8_lossy(&output.stdout);
    let json: serde_json::Value =
        serde_json::from_str(&stdout).map_err(|e| format!("Failed to parse ffprobe output: {}", e))?;
    let streams = json["streams"].as_array().ok_or("No streams found")?;
    let video_stream = streams
        .iter()
        .find(|s| s["codec_type"].as_str() == Some("video"))
        .ok_or("This file has no video stream.")?;
    let width = video_stream["width"].as_u64().unwrap_or(0) as u32;
    let height = video_stream["height"].as_u64().unwrap_or(0) as u32;
    let video_codec = video_stream["codec_name"]
        .as_str()
        .unwrap_or("unknown")
        .to_string();
    // ffprobe emits bit_rate as a JSON string, hence the string parse
    let video_bitrate = video_stream["bit_rate"]
        .as_str()
        .and_then(|s| s.parse::<u64>().ok())
        .unwrap_or(0);
    let frame_rate = parse_frame_rate(
        video_stream["r_frame_rate"].as_str().unwrap_or("0/1"),
    );
    let avg_frame_rate = parse_frame_rate(
        video_stream["avg_frame_rate"].as_str().unwrap_or("0/1"),
    );
    // nominal vs. average rates differing by more than 0.5 is treated as VFR
    let is_vfr = (frame_rate - avg_frame_rate).abs() > 0.5;
    let format = &json["format"];
    let duration = format["duration"]
        .as_str()
        .and_then(|s| s.parse::<f64>().ok())
        .unwrap_or(0.0);
    let file_size = format["size"]
        .as_str()
        .and_then(|s| s.parse::<u64>().ok())
        .unwrap_or(0);
    let container = format["format_name"]
        .as_str()
        .unwrap_or("unknown")
        .to_string();
    // audio comes from the same probe output — no extra ffprobe invocation
    let audio_stream = streams
        .iter()
        .find(|s| s["codec_type"].as_str() == Some("audio"));
    let audio_codec = audio_stream
        .and_then(|s| s["codec_name"].as_str())
        .map(|s| s.to_string());
    let audio_bitrate = audio_stream
        .and_then(|s| s["bit_rate"].as_str())
        .and_then(|s| s.parse::<u64>().ok());
    let audio_channels = audio_stream
        .and_then(|s| s["channels"].as_u64())
        .map(|c| c as u32);
    Ok(VideoInfo {
        path: input.to_string(),
        file_size,
        duration,
        width,
        height,
        video_codec,
        video_bitrate,
        frame_rate,
        is_vfr,
        audio_codec,
        audio_bitrate,
        audio_channels,
        keyframe_times: Vec::new(), // filled in step 2
        container,
    })
}
// Step 2: Keyframe extraction - uses -skip_frame nokey for speed
pub fn extract_keyframes_fast(input: &str, ffmpeg_path: &str) -> Vec<f64> {
let probe = ffprobe_path(ffmpeg_path);
// use -skip_frame nokey to only decode keyframes, MUCH faster than reading all packets
let output = Command::new(&probe)
.args([
"-v", "quiet",
"-select_streams", "v:0",
"-skip_frame", "nokey",
"-show_entries", "frame=pts_time",
"-of", "csv=p=0",
input,
])
.creation_flags(0x08000000)
.output();
let output = match output {
Ok(o) if o.status.success() => o,
_ => return Vec::new(),
};
let stdout = String::from_utf8_lossy(&output.stdout);
let mut times: Vec<f64> = stdout
.lines()
.filter_map(|line| line.trim().parse::<f64>().ok())
.collect();
times.sort_by(|a, b| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal));
times.dedup();
times
}
/// Legacy convenience wrapper: runs both probe steps (metadata + keyframes)
/// in one call.
pub fn probe_video(input: &str, ffmpeg_path: &str) -> Result<VideoInfo, String> {
    probe_metadata(input, ffmpeg_path).map(|mut info| {
        info.keyframe_times = extract_keyframes_fast(input, ffmpeg_path);
        info
    })
}
/// Parses ffprobe's rational frame-rate strings ("30000/1001"); a plain
/// number is accepted as-is, and anything unparsable (including a zero
/// denominator) yields 0.0.
fn parse_frame_rate(s: &str) -> f64 {
    let parts: Vec<&str> = s.split('/').collect();
    if let [num_s, den_s] = parts[..] {
        let num: f64 = num_s.parse().unwrap_or(0.0);
        let den: f64 = den_s.parse().unwrap_or(1.0);
        if den > 0.0 {
            return num / den;
        }
    }
    s.parse().unwrap_or(0.0)
}

View File

@@ -0,0 +1,303 @@
use std::collections::HashMap;
use std::io::{BufRead, BufReader};
use std::process::{Child, Command, Stdio};
use std::sync::{Arc, Mutex};
use std::time::Instant;
#[cfg(windows)]
use std::os::windows::process::CommandExt;
use tauri::{Emitter, AppHandle};
use crate::config;
use crate::ffmpeg::commands::{self, FfmpegCommand};
use crate::types::*;
/// Win32 process-creation flag CREATE_NO_WINDOW: spawn children without
/// flashing a console window.
const CREATE_NO_WINDOW: u32 = 0x08000000;
/// Shared registry of running ffmpeg child processes, keyed by job id.
pub type JobMap = Arc<Mutex<HashMap<String, Child>>>;

/// Creates an empty job registry, ready to be cloned across threads.
pub fn new_job_map() -> JobMap {
    JobMap::default()
}
/// Outcome of a completed (not cancelled) ffmpeg run.
pub struct RunResult {
    // true when the ffmpeg process exited with a success status
    pub success: bool,
}
/// Spawns the prepared command; on Windows the CREATE_NO_WINDOW flag is set
/// first so no console window pops up.
fn spawn_hidden(cmd: &mut Command) -> std::io::Result<Child> {
    #[cfg(windows)]
    cmd.creation_flags(CREATE_NO_WINDOW);
    cmd.spawn()
}
/// Runs ffmpeg and streams progress events ("progress") to the frontend.
///
/// The child is registered in `jobs` under `job_id` so `cancel_job` can kill
/// it; if the entry has been removed by the time the stdout pipe closes, the
/// run is reported as cancelled. Progress is parsed from the key=value pairs
/// ffmpeg writes on stdout because of `-progress pipe:1`; percent/ETA are
/// derived from `out_time_us` against `total_duration`.
///
/// Returns `RunResult { success }` from the exit status, or an error string
/// when spawning/waiting fails or the job was cancelled.
pub fn run_ffmpeg(
    ffmpeg_path: &str,
    cmd: &FfmpegCommand,
    job_id: &str,
    total_duration: f64,
    phase: &str,
    app: &AppHandle,
    jobs: &JobMap,
) -> Result<RunResult, String> {
    let mut proc = Command::new(ffmpeg_path);
    // stderr goes to null rather than an unread pipe: ffmpeg logs heavily to
    // stderr, and a pipe nobody drains fills its OS buffer and deadlocks the
    // encode once ffmpeg blocks on writing to it.
    proc.args(&cmd.args)
        .stdout(Stdio::piped())
        .stderr(Stdio::null());
    let mut child = spawn_hidden(&mut proc)
        .map_err(|e| format!("Failed to spawn ffmpeg: {}", e))?;
    // take stdout before the child is moved into the job map
    let stdout = child.stdout.take();
    {
        let mut map = jobs.lock().map_err(|e| e.to_string())?;
        map.insert(job_id.to_string(), child);
    }
    let start = Instant::now();
    // accumulates fields until a "progress" line marks the snapshot complete
    let mut last_progress = ProgressEvent {
        job_id: job_id.to_string(),
        percent: 0.0,
        fps: 0.0,
        bitrate: String::new(),
        size_current: 0,
        time_elapsed: 0.0,
        eta_seconds: 0.0,
        phase: phase.to_string(),
        message: None,
    };
    if let Some(out) = stdout {
        let reader = BufReader::new(out);
        for line in reader.lines() {
            let line = match line {
                Ok(l) => l,
                Err(_) => continue,
            };
            // -progress output is "key=value" per line
            let parts: Vec<&str> = line.splitn(2, '=').collect();
            if parts.len() != 2 {
                continue;
            }
            let key = parts[0].trim();
            let val = parts[1].trim();
            match key {
                "out_time_us" => {
                    if let Ok(us) = val.parse::<f64>() {
                        let secs = us / 1_000_000.0;
                        let elapsed = start.elapsed().as_secs_f64();
                        last_progress.time_elapsed = elapsed;
                        if total_duration > 0.0 {
                            let pct = (secs / total_duration * 100.0).min(100.0).max(0.0);
                            last_progress.percent = pct;
                            if pct > 0.0 {
                                // linear extrapolation of the remaining time
                                let remaining = elapsed / pct * (100.0 - pct);
                                last_progress.eta_seconds = remaining;
                            }
                        }
                    }
                }
                "fps" => {
                    last_progress.fps = val.parse::<f64>().unwrap_or(0.0);
                }
                "bitrate" => {
                    last_progress.bitrate = val.to_string();
                }
                "total_size" => {
                    last_progress.size_current = val.parse::<u64>().unwrap_or(0);
                }
                "progress" => {
                    // "progress" terminates each snapshot; emit it, and emit a
                    // final 100%/"done" event when ffmpeg reports "end"
                    let _ = app.emit("progress", &last_progress);
                    if val == "end" {
                        last_progress.percent = 100.0;
                        last_progress.phase = "done".into();
                        let _ = app.emit("progress", &last_progress);
                    }
                }
                _ => {}
            }
        }
    }
    // Reap the child; if cancel_job already removed it, surface cancellation.
    let status = {
        let mut map = jobs.lock().map_err(|e| e.to_string())?;
        if let Some(mut child) = map.remove(job_id) {
            child.wait().map_err(|e| e.to_string())?
        } else {
            return Err("Job was cancelled".into());
        }
    };
    Ok(RunResult {
        success: status.success(),
    })
}
/// Runs ffmpeg to completion without progress reporting (used for e.g. the
/// first pass of a two-pass encode).
///
/// `wait_with_output` drains the stderr pipe, so a chatty ffmpeg cannot
/// deadlock here. On failure the last non-empty stderr line is included in
/// the error so the caller gets ffmpeg's own diagnostic instead of a bare
/// "failed" message.
pub fn run_ffmpeg_silent(
    ffmpeg_path: &str,
    cmd: &FfmpegCommand,
) -> Result<(), String> {
    let mut proc = Command::new(ffmpeg_path);
    proc.args(&cmd.args)
        .stdout(Stdio::null())
        .stderr(Stdio::piped());
    let child = spawn_hidden(&mut proc)
        .map_err(|e| format!("Failed to spawn ffmpeg: {}", e))?;
    let output = child.wait_with_output().map_err(|e| e.to_string())?;
    if output.status.success() {
        return Ok(());
    }
    let stderr = String::from_utf8_lossy(&output.stderr);
    let last_line = stderr
        .lines()
        .rev()
        .find(|l| !l.trim().is_empty())
        .unwrap_or("")
        .trim();
    if last_line.is_empty() {
        Err("FFmpeg process failed".into())
    } else {
        Err(format!("FFmpeg process failed: {}", last_line))
    }
}
/// Kills and reaps the ffmpeg process registered under `job_id`, removing it
/// from the job map. Errors when no such job is active.
pub fn cancel_job(job_id: &str, jobs: &JobMap) -> Result<(), String> {
    let mut map = jobs.lock().map_err(|e| e.to_string())?;
    match map.remove(job_id) {
        Some(mut child) => {
            // best-effort kill; wait() reaps the process so it can't linger
            let _ = child.kill();
            let _ = child.wait();
            Ok(())
        }
        None => Err("No active job found".into()),
    }
}
/// Runs a compression job according to `settings.strategy`.
///
/// - `CRF` / `TargetBitrate`: a single encode pass.
/// - `TargetSize`: derives a video bitrate from the size budget (minus the
///   audio allocation), encodes — single-pass for hardware encoders,
///   two-pass for software — then measures the result and re-encodes at a
///   reduced bitrate while the file overshoots the target by more than
///   `app_config.retry_threshold_percent`, up to `max_retry_attempts`.
///
/// Returns `(output_path, attempts_used)`. For `TargetSize`, the final
/// attempt's output is returned even if it still overshoots the budget.
pub fn run_compress_with_retry(
    ffmpeg_path: &str,
    input: &str,
    output: &str,
    settings: &CompressSettings,
    trim: Option<&TrimRange>,
    hw_info: &HardwareInfo,
    app_config: &AppConfig,
    job_id: &str,
    total_duration: f64,
    has_audio: bool,
    app: &AppHandle,
    jobs: &JobMap,
) -> Result<(String, u32), String> {
    let encoder = commands::select_encoder(&settings.video_codec, &settings.hw_accel, hw_info);
    let is_hw = commands::is_hw_encoder(&encoder);
    match &settings.strategy {
        SizingStrategy::CRF { value } => {
            let cmd = commands::build_crf_command(
                input, output, settings, *value, &encoder, trim, has_audio,
            );
            let result = run_ffmpeg(ffmpeg_path, &cmd, job_id, total_duration, "encoding", app, jobs)?;
            if !result.success {
                return Err("Encoding failed".into());
            }
            Ok((output.to_string(), 1))
        }
        SizingStrategy::TargetBitrate { kbps } => {
            let cmd = commands::build_bitrate_command(
                input, output, settings, *kbps, &encoder, trim, has_audio,
            );
            let result = run_ffmpeg(ffmpeg_path, &cmd, job_id, total_duration, "encoding", app, jobs)?;
            if !result.success {
                return Err("Encoding failed".into());
            }
            Ok((output.to_string(), 1))
        }
        SizingStrategy::TargetSize { mb } => {
            // the audio allocation is subtracted from the size budget
            let audio_kbps = match settings.audio_codec {
                AudioCodec::None => 0,
                _ => settings.audio_bitrate,
            };
            let mut bitrate_kbps = commands::calculate_bitrate(*mb, total_duration, audio_kbps);
            let target_bytes = (*mb * 1024.0 * 1024.0) as u64;
            let max_attempts = app_config.max_retry_attempts;
            let threshold = app_config.retry_threshold_percent;
            for attempt in 1..=max_attempts {
                let phase = if attempt == 1 { "encoding" } else { "retrying" };
                if is_hw {
                    // hardware encoders: single-pass bitrate-targeted encode
                    let cmd = commands::build_compress_hw(
                        input, output, settings, bitrate_kbps, &encoder, trim, has_audio,
                    );
                    let result = run_ffmpeg(ffmpeg_path, &cmd, job_id, total_duration, phase, app, jobs)?;
                    if !result.success {
                        return Err("Encoding failed".into());
                    }
                } else {
                    // software encoders: two-pass encode with a per-job
                    // passlog directory to avoid clashes between jobs
                    let passlog_dir = config::ensure_temp_subdir(&format!("passlog/{}", job_id));
                    let passlog_prefix = passlog_dir.join("ffmpeg2pass").to_string_lossy().to_string();
                    let pass1 = commands::build_compress_pass1(
                        input, settings, bitrate_kbps, &passlog_prefix,
                    );
                    // pass 1 produces no progress output, so emit a synthetic
                    // "analyzing" event to keep the UI informed
                    let _ = app.emit("progress", &ProgressEvent {
                        job_id: job_id.to_string(),
                        percent: 0.0,
                        fps: 0.0,
                        bitrate: String::new(),
                        size_current: 0,
                        time_elapsed: 0.0,
                        eta_seconds: 0.0,
                        phase: "analyzing".into(),
                        message: Some(format!("Pass 1 of 2 (attempt {})", attempt)),
                    });
                    run_ffmpeg_silent(ffmpeg_path, &pass1)?;
                    let pass2 = commands::build_compress_pass2(
                        input, output, settings, bitrate_kbps, &passlog_prefix, trim, has_audio,
                    );
                    let result = run_ffmpeg(ffmpeg_path, &pass2, job_id, total_duration, phase, app, jobs)?;
                    if !result.success {
                        return Err("Encoding failed".into());
                    }
                }
                // measure the result; 0 on metadata failure forces acceptance
                let output_size = std::fs::metadata(output)
                    .map(|m| m.len())
                    .unwrap_or(0);
                if output_size <= target_bytes {
                    return Ok((output.to_string(), attempt));
                }
                let overshoot_pct = ((output_size as f64 - target_bytes as f64) / target_bytes as f64) * 100.0;
                // small overshoots within the configured tolerance are accepted
                if overshoot_pct <= threshold {
                    return Ok((output.to_string(), attempt));
                }
                if attempt < max_attempts {
                    let _ = app.emit("compress-retry", &serde_json::json!({
                        "job_id": job_id,
                        "attempt": attempt + 1,
                        "reason": format!(
                            "Output {:.1}MB exceeds {:.1}MB target by {:.1}%",
                            output_size as f64 / 1024.0 / 1024.0,
                            mb,
                            overshoot_pct
                        ),
                        "adjusted_bitrate": bitrate_kbps
                    }));
                    // scale the bitrate down proportionally, with a 5% safety
                    // margin, clamped to a 10 kbps floor
                    let ratio = target_bytes as f64 / output_size as f64;
                    bitrate_kbps = ((bitrate_kbps as f64) * ratio * 0.95) as u32;
                    bitrate_kbps = bitrate_kbps.max(10);
                }
            }
            // all attempts exhausted: return the last output anyway
            Ok((output.to_string(), max_attempts))
        }
    }
}

153
src-tauri/src/lib.rs Normal file
View File

@@ -0,0 +1,153 @@
use std::path::PathBuf;
use std::sync::Mutex;
use tauri::Manager;
mod commands;
mod config;
mod ffmpeg;
mod recovery;
mod stream_server;
mod types;
use types::*;
/// Global Tauri-managed state shared by all command handlers.
pub struct AppState {
    // persisted user configuration, loaded at startup
    pub config: Mutex<AppConfig>,
    // None until hardware-encoder detection has run
    pub hw_info: Mutex<Option<HardwareInfo>>,
    // registry of running ffmpeg child processes, keyed by job id
    pub jobs: ffmpeg::runner::JobMap,
    // port of the local media stream server (0 when it failed to start)
    pub stream_port: Mutex<u16>,
}
/// Window geometry persisted to a dotfile beside the executable and restored
/// on the next launch.
#[derive(Debug, serde::Serialize, serde::Deserialize)]
struct SavedWindowState {
    width: u32,
    height: u32,
    x: i32,
    y: i32,
    // when true, position/size are ignored on restore and the window is maximized
    maximized: bool,
}
/// Directory containing the running executable, falling back to the system
/// temp directory when the exe path cannot be determined.
fn exe_dir() -> PathBuf {
    let exe = std::env::current_exe().ok();
    match exe.as_deref().and_then(|p| p.parent()) {
        Some(dir) => dir.to_path_buf(),
        None => std::env::temp_dir(),
    }
}
/// Location of the persisted window geometry: a dotfile stored next to the
/// executable rather than in a config directory.
fn window_state_path() -> PathBuf {
    exe_dir().join(".window-state")
}
/// Reads the saved window geometry, returning None when the state file is
/// missing, unreadable, or not valid JSON.
fn load_window_state() -> Option<SavedWindowState> {
    let path = window_state_path();
    if !path.exists() {
        return None;
    }
    std::fs::read_to_string(&path)
        .ok()
        .and_then(|content| serde_json::from_str(&content).ok())
}
/// Persists the window geometry as pretty-printed JSON; serialization and
/// write errors are ignored (best-effort persistence).
fn save_window_state(state: &SavedWindowState) {
    if let Ok(json) = serde_json::to_string_pretty(state) {
        let _ = std::fs::write(window_state_path(), json);
    }
}
/// Captures the window's current size/position/maximized flag and persists
/// it; silently does nothing when either geometry query fails.
fn do_save(window: &tauri::Window) {
    let Ok(size) = window.inner_size() else { return };
    let Ok(pos) = window.outer_position() else { return };
    let maximized = window.is_maximized().unwrap_or(false);
    save_window_state(&SavedWindowState {
        width: size.width,
        height: size.height,
        x: pos.x,
        y: pos.y,
        maximized,
    });
}
/// Application entry point: loads persisted config, starts the loopback
/// media-stream server, registers Tauri plugins/commands/state, restores the
/// saved window geometry, and runs the event loop until the app exits.
#[cfg_attr(mobile, tauri::mobile_entry_point)]
pub fn run() {
    let cfg = config::load_config();
    // Port 0 signals "streaming unavailable" to the frontend when the local
    // HTTP server fails to start.
    let stream_port = match tauri::async_runtime::block_on(stream_server::start()) {
        Ok(port) => port,
        Err(e) => {
            eprintln!("Failed to start stream server: {}", e);
            0
        }
    };
    tauri::Builder::default()
        .plugin(tauri_plugin_shell::init())
        .plugin(tauri_plugin_dialog::init())
        .plugin(tauri_plugin_process::init())
        .plugin(tauri_plugin_fs::init())
        .plugin(tauri_plugin_opener::init())
        .manage(AppState {
            config: Mutex::new(cfg),
            hw_info: Mutex::new(None),
            jobs: ffmpeg::runner::new_job_map(),
            stream_port: Mutex::new(stream_port),
        })
        .invoke_handler(tauri::generate_handler![
            commands::analyze::analyze_video,
            commands::analyze::extract_keyframes,
            commands::analyze::generate_thumbnails,
            commands::analyze::generate_preview,
            commands::analyze::detect_hardware,
            commands::process::compress,
            commands::process::trim,
            commands::process::cancel_job,
            commands::utility::check_ffmpeg,
            commands::utility::open_in_explorer,
            commands::utility::get_config,
            commands::utility::save_config_cmd,
            commands::utility::get_output_path,
            commands::utility::init_app,
            commands::utility::download_ffmpeg,
            commands::utility::check_recovery,
            commands::utility::cleanup_recovery,
            commands::utility::get_stream_url_cmd,
            commands::utility::get_stream_port_cmd,
        ])
        .setup(|app| {
            // Restore the saved geometry shortly after the window exists;
            // the short delay lets the window finish initializing first.
            if let Some(window) = app.get_webview_window("main") {
                let window = window.clone();
                tauri::async_runtime::spawn(async move {
                    tokio::time::sleep(tokio::time::Duration::from_millis(100)).await;
                    if let Some(state) = load_window_state() {
                        // apply size/position only for non-maximized saves
                        if !state.maximized {
                            let _ = window.set_size(tauri::Size::Physical(tauri::PhysicalSize::new(
                                state.width,
                                state.height,
                            )));
                            let _ = window.set_position(tauri::Position::Physical(tauri::PhysicalPosition::new(
                                state.x,
                                state.y,
                            )));
                        }
                        if state.maximized {
                            let _ = window.maximize();
                        }
                    } else {
                        // first run: center the default-sized window
                        let _ = window.center();
                    }
                });
            }
            Ok(())
        })
        .on_window_event(|window, event| {
            // persist geometry on every resize/move (best-effort)
            match event {
                tauri::WindowEvent::Resized(_) | tauri::WindowEvent::Moved(_) => {
                    do_save(window);
                }
                _ => {}
            }
        })
        .run(tauri::generate_context!())
        .expect("failed to run cinch");
}

5
src-tauri/src/main.rs Normal file
View File

@@ -0,0 +1,5 @@
// In release builds on Windows, link against the GUI subsystem so launching
// the app does not open a console window.
#![cfg_attr(not(debug_assertions), windows_subsystem = "windows")]
/// Thin binary entry point; all setup lives in the library crate's `run`.
fn main() {
    cinch::run();
}

59
src-tauri/src/recovery.rs Normal file
View File

@@ -0,0 +1,59 @@
use std::fs;
use std::path::Path;
use serde::{Deserialize, Serialize};
/// File name (inside the temp dir) that records the most recent in-flight job.
const JOB_FILE: &str = "last_job.json";
/// Snapshot of an in-progress job, persisted so an interrupted run can be
/// detected (and its temp files cleaned up) on the next launch.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct InterruptedJob {
    pub input_path: String,
    pub output_path: String,
    // free-form operation label supplied by the caller (e.g. compress/trim)
    pub mode: String,
    // the job's settings, already serialized to JSON by the caller
    pub settings_json: String,
}
/// Records the job about to run into `<temp_dir>/last_job.json` so a crash
/// mid-job can be detected on the next launch. Serialization and write
/// failures are ignored — recovery is best-effort.
pub fn write_job_info(
    temp_dir: &Path,
    input: &str,
    output: &str,
    mode: &str,
    settings_json: &str,
) {
    let record = InterruptedJob {
        input_path: input.to_string(),
        output_path: output.to_string(),
        mode: mode.to_string(),
        settings_json: settings_json.to_string(),
    };
    if let Ok(json) = serde_json::to_string_pretty(&record) {
        let _ = fs::write(temp_dir.join(JOB_FILE), json);
    }
}
/// Removes the interrupted-job marker; errors (e.g. the file never existed)
/// are ignored.
pub fn delete_job_info(temp_dir: &Path) {
    let _ = fs::remove_file(temp_dir.join(JOB_FILE));
}
/// Returns the recorded interrupted job, if the marker file exists and parses.
pub fn check_interrupted_job(temp_dir: &Path) -> Option<InterruptedJob> {
    fs::read_to_string(temp_dir.join(JOB_FILE))
        .ok()
        .and_then(|contents| serde_json::from_str(&contents).ok())
}
/// Deletes everything directly inside `temp_dir` (files and subtrees),
/// leaving the directory itself in place. All errors — including the
/// directory not existing — are ignored.
pub fn cleanup_orphaned_temps(temp_dir: &Path) {
    let Ok(entries) = fs::read_dir(temp_dir) else {
        return;
    };
    for entry in entries.flatten() {
        let path = entry.path();
        let _ = if path.is_dir() {
            fs::remove_dir_all(&path)
        } else {
            fs::remove_file(&path)
        };
    }
}

View File

@@ -0,0 +1,167 @@
use tokio::io::{AsyncReadExt, AsyncWriteExt};
use tokio::net::{TcpListener, TcpStream};
// Maximum body size for a request WITHOUT a Range header; range responses
// serve exactly the requested span and are not capped by this value.
const MAX_CHUNK: u64 = 1024 * 1024; // 1MB max per response
/// Binds the local media-stream server to an OS-assigned loopback port,
/// spawns the accept loop in the background, and returns the port.
pub async fn start() -> Result<u16, std::io::Error> {
    let listener = TcpListener::bind("127.0.0.1:0").await?;
    let port = listener.local_addr()?.port();
    tokio::spawn(async move {
        loop {
            // accept errors are transient: just keep looping
            if let Ok((stream, _)) = listener.accept().await {
                tokio::spawn(handle_client(stream));
            }
        }
    });
    Ok(port)
}
/// Handles one HTTP connection: parses a minimal GET request by hand,
/// percent-decodes the URI into a filesystem path, extracts an optional
/// Range header, and writes exactly one response before the connection
/// closes (Connection: close semantics).
///
/// NOTE(review): the decoded request path is passed verbatim to serve_file,
/// so any local process that can reach this loopback port can read any file
/// readable by the app — confirm that is acceptable for this app's threat
/// model.
async fn handle_client(mut stream: TcpStream) {
    // read headers until \r\n\r\n (up to 64KB)
    let mut buf = Vec::with_capacity(4096);
    let mut temp = [0u8; 4096];
    loop {
        let n = match stream.read(&mut temp).await {
            Ok(0) => return,
            Ok(n) => n,
            Err(_) => return,
        };
        buf.extend_from_slice(&temp[..n]);
        if buf.windows(4).any(|w| w == b"\r\n\r\n") {
            break;
        }
        if buf.len() > 65536 {
            return; // headers too large
        }
    }
    let request = String::from_utf8_lossy(&buf);
    let mut lines = request.lines();
    let request_line = match lines.next() {
        Some(line) => line,
        None => return,
    };
    // request line: "GET /<percent-encoded path> HTTP/1.1"
    let parts: Vec<&str> = request_line.split_whitespace().collect();
    if parts.len() < 2 || parts[0] != "GET" {
        return;
    }
    // strip the leading '/' and percent-decode the remainder into a path
    let uri_path = percent_encoding::percent_decode_str(&parts[1][1..])
        .decode_utf8_lossy()
        .to_string();
    // scan the remaining header lines for a Range header (case-insensitive)
    let mut range_header = None;
    for line in lines {
        if line.is_empty() {
            break;
        }
        let lower = line.to_lowercase();
        if lower.starts_with("range:") {
            range_header = Some(line[6..].trim().to_string());
        }
    }
    // serve the file, or a plain-text 404 when it cannot be opened
    let response = match serve_file(&uri_path, range_header).await {
        Some(data) => data,
        None => {
            let body = b"Not Found";
            let header = format!(
                "HTTP/1.1 404 Not Found\r\nContent-Length: {}\r\nAccess-Control-Allow-Origin: *\r\nConnection: close\r\n\r\n",
                body.len()
            );
            let mut resp = header.into_bytes();
            resp.extend_from_slice(body);
            resp
        }
    };
    let _ = stream.write_all(&response).await;
    let _ = stream.flush().await;
}
/// Reads `path` from disk and builds a complete HTTP response (headers+body).
///
/// With a Range header that parses: returns a 206 carrying the first
/// requested range. Without one: returns a 200 carrying at most MAX_CHUNK
/// bytes from the start, advertising Accept-Ranges so the client follows up
/// with range requests. Returns None when the file cannot be opened (mapped
/// to a 404 by the caller).
///
/// NOTE(review): range responses are not capped by MAX_CHUNK, and the
/// `file_len - 1` arithmetic assumes a successfully parsed range implies a
/// non-empty file — confirm http_range rejects ranges when size is 0.
async fn serve_file(path: &str, range_header: Option<String>) -> Option<Vec<u8>> {
    use std::io::{Read, Seek, SeekFrom};
    let mut file = std::fs::File::open(path).ok()?;
    // determine the file length by seeking to the end and back
    let file_len = {
        let pos = file.stream_position().ok()?;
        let len = file.seek(SeekFrom::End(0)).ok()?;
        file.seek(SeekFrom::Start(pos)).ok()?;
        len
    };
    // guess the MIME type from the extension, defaulting to video/mp4
    let ext = path.rsplit('.').next().unwrap_or("mp4").to_lowercase();
    let mime = match ext.as_str() {
        "webm" => "video/webm",
        "mkv" => "video/x-matroska",
        "avi" => "video/x-msvideo",
        "mov" => "video/quicktime",
        "flv" => "video/x-flv",
        "ts" | "m2ts" => "video/mp2t",
        "wmv" => "video/x-ms-wmv",
        "mpg" | "mpeg" => "video/mpeg",
        "jpg" | "jpeg" => "image/jpeg",
        "png" => "image/png",
        _ => "video/mp4",
    };
    if let Some(range_str) = range_header {
        if let Ok(ranges) = http_range::HttpRange::parse(&range_str, file_len) {
            // only the first range of a multi-range request is honored
            if let Some(r) = ranges.first() {
                let start = r.start;
                let end = (start + r.length - 1).min(file_len - 1);
                let chunk_len = end + 1 - start;
                file.seek(SeekFrom::Start(start)).ok()?;
                let mut body = Vec::with_capacity(chunk_len as usize);
                file.take(chunk_len).read_to_end(&mut body).ok()?;
                let header = format!(
                    "HTTP/1.1 206 Partial Content\r\n\
                    Content-Type: {}\r\n\
                    Content-Length: {}\r\n\
                    Content-Range: bytes {}-{}/{}\r\n\
                    Accept-Ranges: bytes\r\n\
                    Access-Control-Allow-Origin: *\r\n\
                    Connection: close\r\n\r\n",
                    mime,
                    body.len(),
                    start,
                    end,
                    file_len
                );
                let mut response = header.into_bytes();
                response.extend(body);
                return Some(response);
            }
        }
    }
    // no (valid) range: serve up to MAX_CHUNK bytes from the start
    let chunk_len = MAX_CHUNK.min(file_len);
    file.seek(SeekFrom::Start(0)).ok()?;
    let mut body = Vec::with_capacity(chunk_len as usize);
    file.take(chunk_len).read_to_end(&mut body).ok()?;
    let header = format!(
        "HTTP/1.1 200 OK\r\n\
        Content-Type: {}\r\n\
        Content-Length: {}\r\n\
        Accept-Ranges: bytes\r\n\
        Access-Control-Allow-Origin: *\r\n\
        Connection: close\r\n\r\n",
        mime,
        body.len()
    );
    let mut response = header.into_bytes();
    response.extend(body);
    Some(response)
}

204
src-tauri/src/types.rs Normal file
View File

@@ -0,0 +1,204 @@
use serde::{Deserialize, Serialize};
/// Metadata of a source video as gathered by ffprobe (see ffmpeg::probe).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct VideoInfo {
    pub path: String,
    // container size on disk in bytes (ffprobe format.size)
    pub file_size: u64,
    // duration in seconds (ffprobe format.duration)
    pub duration: f64,
    pub width: u32,
    pub height: u32,
    pub video_codec: String,
    // video stream bit_rate; 0 when ffprobe doesn't report one
    pub video_bitrate: u64,
    // nominal rate parsed from r_frame_rate
    pub frame_rate: f64,
    // true when nominal and average rates differ by more than 0.5
    pub is_vfr: bool,
    pub audio_codec: Option<String>,
    pub audio_bitrate: Option<u64>,
    pub audio_channels: Option<u32>,
    // keyframe timestamps in seconds; empty until keyframe extraction runs
    pub keyframe_times: Vec<f64>,
    pub container: String,
}
/// User-selected encoding options for a compression job.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CompressSettings {
    pub strategy: SizingStrategy,
    pub video_codec: VideoCodec,
    pub audio_codec: AudioCodec,
    // audio bitrate in kbps
    pub audio_bitrate: u32,
    pub container: Container,
    pub resolution: Resolution,
    // encoder preset name; "" or "medium" means "use the default"
    pub speed_preset: String,
    pub hw_accel: HwAccelMode,
}
/// How the output size is controlled; serialized with a "type" tag so the
/// frontend can send discriminated objects.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "type")]
pub enum SizingStrategy {
    /// Hit a target file size in megabytes (bitrate computed, with retries).
    TargetSize { mb: f64 },
    /// Encode at an explicit video bitrate in kbps.
    TargetBitrate { kbps: u32 },
    /// Constant-quality encode at the given CRF value.
    CRF { value: u32 },
}
/// Trim window, in seconds from the start of the clip.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TrimRange {
    pub start: f64,
    pub end: f64,
}
/// Target video codec family; mapped to a concrete encoder at run time.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum VideoCodec {
    H264,
    HEVC,
    AV1,
}
/// Output audio handling; `None` drops the audio track entirely.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub enum AudioCodec {
    AAC,
    Opus,
    None,
}
/// Output container format.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum Container {
    MP4,
    MKV,
    WebM,
    MOV,
    AVI,
    TS,
}
/// Hardware-acceleration policy for encoder selection.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum HwAccelMode {
    Auto,
    ForceGPU,
    ForceCPU,
}
/// Output resolution; "type"-tagged for the frontend, with fixed heights
/// (aspect preserved) or a fully custom width/height.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "type")]
pub enum Resolution {
    Original,
    P720,
    P1080,
    P1440,
    P4K,
    Custom { width: u32, height: u32 },
}
/// Result of a finished job, probed from the written output file.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OutputInfo {
    pub path: String,
    pub file_size: u64,
    pub duration: f64,
    pub width: u32,
    pub height: u32,
    pub video_codec: String,
    pub video_bitrate: u64,
    pub audio_codec: Option<String>,
    pub audio_bitrate: Option<u64>,
    // number of encode attempts used (TargetSize jobs may retry)
    pub attempts: u32,
}
/// Hardware encoder families ffmpeg advertises, plus the concrete encoder
/// names found for each family.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct HardwareInfo {
    pub nvenc: bool,
    pub qsv: bool,
    pub amf: bool,
    pub nvenc_codecs: Vec<String>,
    pub qsv_codecs: Vec<String>,
    pub amf_codecs: Vec<String>,
}
/// Result of locating the ffmpeg binary.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FFmpegStatus {
    pub found: bool,
    pub path: Option<String>,
    pub version: Option<String>,
}
/// Progress payload emitted to the frontend as the "progress" event while
/// an ffmpeg job runs.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProgressEvent {
    pub job_id: String,
    // 0.0..=100.0
    pub percent: f64,
    pub fps: f64,
    // ffmpeg's bitrate string, verbatim
    pub bitrate: String,
    // bytes written so far
    pub size_current: u64,
    pub time_elapsed: f64,
    pub eta_seconds: f64,
    // e.g. "encoding", "retrying", "analyzing", "done"
    pub phase: String,
    pub message: Option<String>,
}
/// Persisted application configuration. Fields added after the first release
/// carry serde defaults so older config files keep deserializing.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AppConfig {
    pub theme: String,
    pub ui_zoom: u32,
    // user override for the ffmpeg binary location
    pub ffmpeg_path: Option<String>,
    pub default_output_dir: Option<String>,
    pub last_compress_settings: Option<CompressSettings>,
    // quick-pick target sizes in MB
    pub default_presets: Vec<u32>,
    pub auto_retry: bool,
    // accepted overshoot above the target size, in percent
    pub retry_threshold_percent: f64,
    pub max_retry_attempts: u32,
    pub show_ffmpeg_log: bool,
    pub remember_window_position: bool,
    pub window_position: Option<WindowPosition>,
    #[serde(default = "default_target_size")]
    pub default_target_size: u32,
    #[serde(default)]
    pub default_smart_cut: bool,
    #[serde(default = "default_naming_pattern")]
    pub naming_pattern: String,
    #[serde(default)]
    pub last_open_dir: Option<String>,
    #[serde(default)]
    pub last_save_dir: Option<String>,
    #[serde(default = "default_preview_volume")]
    pub preview_volume: f32,
}
// serde default: target size in MB
fn default_target_size() -> u32 {
    8
}
// serde default: output naming pattern with {name}/{mode}/{timestamp} tokens
fn default_naming_pattern() -> String {
    "{name}_{mode}_{timestamp}".into()
}
// serde default: full preview volume
fn default_preview_volume() -> f32 {
    1.0
}
/// Window geometry stored inside the app config.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct WindowPosition {
    pub x: i32,
    pub y: i32,
    pub width: u32,
    pub height: u32,
}
// First-run defaults; the literal values here should stay in sync with the
// serde default_* helper functions above.
impl Default for AppConfig {
    fn default() -> Self {
        Self {
            theme: "system".into(),
            ui_zoom: 100,
            ffmpeg_path: None,
            default_output_dir: None,
            last_compress_settings: None,
            default_presets: vec![8, 25, 50, 100],
            auto_retry: true,
            retry_threshold_percent: 2.0,
            max_retry_attempts: 3,
            show_ffmpeg_log: false,
            remember_window_position: true,
            window_position: None,
            default_target_size: 8,
            default_smart_cut: false,
            naming_pattern: "{name}_{mode}_{timestamp}".into(),
            last_open_dir: None,
            last_save_dir: None,
            preview_volume: 1.0,
        }
    }
}

41
src-tauri/tauri.conf.json Normal file
View File

@@ -0,0 +1,41 @@
{
"$schema": "https://raw.githubusercontent.com/tauri-apps/tauri/dev/crates/tauri-config-schema/schema.json",
"productName": "Cinch",
"version": "1.0.0",
"identifier": "com.cinch.app",
"build": {
"frontendDist": "../build",
"devUrl": "http://localhost:1420",
"beforeDevCommand": "npm run dev",
"beforeBuildCommand": "npm run build"
},
"app": {
"withGlobalTauri": false,
"security": {
"csp": null
},
"windows": [
{
"title": "Cinch",
"width": 960,
"height": 700,
"minWidth": 700,
"minHeight": 500,
"resizable": true,
"decorations": false,
"transparent": false
}
]
},
"bundle": {
"icon": [
"icons/icon.ico",
"icons/icon.png"
]
},
"plugins": {
"shell": {
"open": true
}
}
}