Feat: Integrated Local LLM (Llama 3.2 1B) for Intelligent Correction -- New Core: Added LLMEngine utilizing llama-cpp-python for local private text post-processing. -- Forensic Protocol: Engineered strict system prompts to prevent LLM refusals, censorship, or assistant chatter. -- Three Modes: Grammar, Standard, Rewrite. -- Start/Stop Logic: Consolidated conflicting recording methods. -- Hotkeys: Added dedicated F9 (Correct) vs F8 (Transcribe). -- UI: Updated Settings. -- Build: Updated portable_build.py. -- Docs: Updated README.

This commit is contained in:
Your Name
2026-01-31 01:02:24 +02:00
parent 3137770742
commit baa5e2e69e
10 changed files with 601 additions and 61 deletions

View File

@@ -245,18 +245,38 @@ class Bootstrapper:
req_file = self.source_path / "requirements.txt"
# Use --prefer-binary to avoid building from source on Windows if possible
# Use --no-warn-script-location to reduce noise
# CRITICAL: Force --only-binary for llama-cpp-python to prevent picking new source-only versions
cmd = [
str(self.python_path / "python.exe"), "-m", "pip", "install",
"--prefer-binary",
"--only-binary", "llama-cpp-python",
"--extra-index-url", "https://abetlen.github.io/llama-cpp-python/whl/cpu",
"-r", str(req_file)
]
process = subprocess.Popen(
[str(self.python_path / "python.exe"), "-m", "pip", "install", "-r", str(req_file)],
cmd,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
stderr=subprocess.STDOUT, # Merge stderr into stdout
text=True,
cwd=str(self.python_path),
creationflags=subprocess.CREATE_NO_WINDOW
)
output_buffer = []
for line in process.stdout:
if self.ui: self.ui.set_detail(line.strip()[:60])
process.wait()
line_stripped = line.strip()
if self.ui: self.ui.set_detail(line_stripped[:60])
output_buffer.append(line_stripped)
log(line_stripped)
return_code = process.wait()
if return_code != 0:
err_msg = "\n".join(output_buffer[-15:]) # Show last 15 lines
raise RuntimeError(f"Pip install failed (Exit code {return_code}):\n{err_msg}")
def refresh_app_source(self):
"""
@@ -348,8 +368,22 @@ class Bootstrapper:
return False
def check_dependencies(self):
    """Check whether the critical runtime dependencies are importable.

    Launches the embedded interpreter in a subprocess and attempts to
    import the core third-party libraries (faster-whisper, llama-cpp,
    PySide6). Probing in a subprocess — rather than importing here —
    verifies the *embedded* runtime environment, not the bootstrapper's
    own interpreter.

    Returns:
        bool: True if the embedded python is present and every core
        import succeeds; False if the interpreter is missing, an import
        fails (non-zero exit), or the python.exe binary is not found.
    """
    # No embedded interpreter yet -> dependencies cannot be installed.
    if not self.is_python_ready():
        return False
    try:
        # Probe the imports inside the embedded runtime environment.
        subprocess.check_call(
            [
                str(self.python_path / "python.exe"),
                "-c",
                "import faster_whisper; import llama_cpp; import PySide6",
            ],
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
            cwd=str(self.python_path),
            # Windows-only flag: prevents a console window from flashing.
            creationflags=subprocess.CREATE_NO_WINDOW,
        )
        return True
    except (subprocess.CalledProcessError, FileNotFoundError):
        # Import probe failed or python.exe vanished -> needs (re)install.
        return False
def setup_and_run(self):
"""Full setup/update and run flow."""
@@ -359,10 +393,16 @@ class Bootstrapper:
self.download_python()
self._fix_pth_file() # Ensure pth is fixed immediately after download
self.install_pip()
self.install_packages()
# self.install_packages() # We'll do this in the dependency check step now
# Always refresh source to ensure we have the latest bundled code
self.refresh_app_source()
# 2. Check and Install Dependencies
# We do this AFTER refreshing source so we have the latest requirements.txt
if not self.check_dependencies():
log("Dependencies missing or incomplete. Installing...")
self.install_packages()
# Launch
if self.run_app():