From 49240842d081162117dc07b0fc09b439da15a689 Mon Sep 17 00:00:00 2001
From: Eddy
Date: Tue, 21 Apr 2026 14:32:01 +0200
Subject: [PATCH] [appimage] Phase 5: Ollama-Integration + Offline-Queue
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Lokales Haiku-Equivalent: Bridge erkennt Ollama automatisch beim Start,
local-query Command für einfache Tasks (Commit-Messages, Übersetzungen).
Konfigurierbar über set-ollama-config.

Offline-Queue: Nachrichten werden bei fehlender Verbindung in SQLite
gequeuet und bei Reconnect automatisch gesendet. Commands: queue_message,
list_queued_messages, flush_offline_queue, clear_offline_queue.

Co-Authored-By: Claude Opus 4.6
---
 CHANGELOG.md             |   2 +
 ROADMAP.md               |   4 +-
 scripts/claude-bridge.js | 110 ++++++++++++++++++++++++++
 src-tauri/src/claude.rs  |  49 ++++++++++++
 src-tauri/src/lib.rs     |   8 ++
 src-tauri/src/session.rs | 165 ++++++++++++++++++++++++++++++++++++++-
 6 files changed, 334 insertions(+), 4 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 25373ad..55b9c76 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -19,6 +19,8 @@ Format angelehnt an [Keep a Changelog](https://keepachangelog.com/de/1.0.0/).
 - **Weibliche TTS-Stimme**: Kerstin als Standard-Stimme, 5 deutsche Stimmen wählbar (`voice.rs`)
 - **MCP-Hub nativ (Phase 4)**: MCP-Server werden aus `~/.claude.json` geladen und beim Bridge-Start injiziert — kein CLI-Umweg nötig (`claude.rs`, `claude-bridge.js`)
 - **MCP-Verwaltung**: Tauri-Commands `list_mcp_servers`, `add_mcp_server`, `remove_mcp_server` — Server zur Laufzeit hinzufügen/entfernen
+- **Ollama-Integration (Phase 5)**: Lokales Modell für einfache Tasks — Auto-Detect, `local-query` Command, konfigurierbar (`claude-bridge.js`, `claude.rs`)
+- **Offline-Queue (Phase 5)**: Nachrichten bei fehlender Verbindung in SQLite queuen, später automatisch senden (`session.rs`)
 - **UTF-8 Crash Fix**: Kein Panic mehr bei Multi-Byte-Zeichen in DB-Abfragen (`db.rs`, `knowledge.rs`)
 - **Guard-Rails UI (Live)**: 3-Tab-Ansicht (Live-Feed/Regeln/Blockiert), Risiko-Statistik-Leiste, Ein-Klick-Freigabe bei Bestätigungsbedarf, guard-check Events vom Backend (`GuardRailsPanel.svelte`, `guard.rs`)
 - **D-Bus Desktop-Aktionen**: 10 vordefinierte Aktionen (Dolphin, Kate, Konsole, Firefox, Notify, Lock Screen), Aktionen-Grid im ProgramsPanel, CLI/GUI-Unterscheidung (`programs.rs`, `ProgramsPanel.svelte`)
diff --git a/ROADMAP.md b/ROADMAP.md
index 3c5d439..cde4c3a 100644
--- a/ROADMAP.md
+++ b/ROADMAP.md
@@ -80,8 +80,8 @@ Alles aus Phase 1-16 ist implementiert und funktionsfaehig:
 |---------|-----------|--------|
 | ✅ Whisper.cpp lokal | `voice.rs` | whisper-cli STT, Thorsten-DE Modell, kein OpenAI noetig |
 | ✅ Piper-TTS lokal | `voice.rs` | piper-tts mit thorsten_emotional (high), offline |
-| ⬜ Lokales Haiku-Equivalent | `claude-bridge.js` | Ollama/llama.cpp fuer simple Tasks (Commit-Messages, Uebersetzungen) |
-| ⬜ Offline-Queue | `session.rs` | Nachrichten queuen wenn kein Netz, spaeter absenden |
+| ✅ Lokales Haiku-Equivalent | `claude-bridge.js`, `claude.rs` | Ollama-Integration, Auto-Detect, local-query Command |
+| ✅ Offline-Queue | `session.rs` | SQLite-Queue, queue/flush/clear Commands, Auto-Retry |
 
 ---
 
diff --git a/scripts/claude-bridge.js b/scripts/claude-bridge.js
index 3cf0ec7..a350056 100644
--- a/scripts/claude-bridge.js
+++ b/scripts/claude-bridge.js
@@ -47,6 +47,11 @@ let stickyContext = '';
 // Format: { "name": { type: "stdio", command: "...", args: [...], env: {...} } }
 let mcpServerConfigs = {};
 
+// Lokales Modell (Ollama) — für einfache Tasks ohne Cloud
+let ollamaAvailable = false;
+let ollamaEndpoint = process.env.OLLAMA_URL || 'http://localhost:11434';
+let ollamaModel = process.env.OLLAMA_MODEL || 'qwen2.5-coder:7b';
+
 // ============ Orchestrator Prompts ============
 
 const ORCHESTRATOR_PROMPTS = {
@@ -857,6 +862,34 @@ function handleCommand(msg) {
       });
       break;
 
+    case 'local-query': {
+      // Lokale Ollama-Abfrage für einfache Tasks (Commit-Messages, Übersetzungen)
+      if (!ollamaAvailable) {
+        sendError(msg.id, 'Ollama nicht verfügbar');
+        return;
+      }
+
+      const prompt = msg.message || msg.prompt || '';
+      localQuery(prompt, msg.id);
+      break;
+    }
+
+    case 'check-ollama': {
+      // Ollama-Verfügbarkeit prüfen
+      checkOllamaAvailability().then(status => {
+        sendResponse(msg.id, status);
+      });
+      break;
+    }
+
+    case 'set-ollama-config': {
+      if (msg.endpoint) ollamaEndpoint = msg.endpoint;
+      if (msg.model) ollamaModel = msg.model;
+      checkOllamaAvailability().then(status => {
+        sendResponse(msg.id, { ...status, endpoint: ollamaEndpoint, model: ollamaModel });
+      });
+      break;
+    }
+
     case 'set-mcp-servers':
       // MCP-Server-Configs empfangen (von Rust-Backend aus DB/Config geladen)
      if (msg.servers && typeof msg.servers === 'object') {
@@ -889,6 +922,83 @@ function handleCommand(msg) {
   }
 }
 
+// ============ Ollama (Lokales Modell) ============
+
+async function checkOllamaAvailability() {
+  try {
+    const controller = new AbortController();
+    const timeout = setTimeout(() => controller.abort(), 3000);
+    const res = await fetch(`${ollamaEndpoint}/api/tags`, { signal: controller.signal });
+    clearTimeout(timeout);
+    if (!res.ok) throw new Error(`HTTP ${res.status}`);
+    const data = await res.json();
+    const models = (data.models || []).map(m => m.name);
+    ollamaAvailable = models.length > 0;
+    const hasModel = models.some(m => m.startsWith(ollamaModel.split(':')[0]));
+    sendEvent('ollama-status', { available: ollamaAvailable, models, configured: ollamaModel, hasModel });
+    return { available: ollamaAvailable, models, configured: ollamaModel, hasModel, endpoint: ollamaEndpoint };
+  } catch (err) {
+    ollamaAvailable = false;
+    return { available: false, models: [], error: err.message, endpoint: ollamaEndpoint };
+  }
+}
+
+async function localQuery(prompt, requestId) {
+  if (!ollamaAvailable) {
+    sendError(requestId, 'Ollama nicht verfügbar');
+    return;
+  }
+
+  sendEvent('agent-started', { id: 'local-' + requestId, type: 'Local', task: prompt.substring(0, 50), model: ollamaModel });
+
+  const startTime = Date.now();
+  try {
+    const res = await fetch(`${ollamaEndpoint}/api/generate`, {
+      method: 'POST',
+      headers: { 'Content-Type': 'application/json' },
+      body: JSON.stringify({
+        model: ollamaModel,
+        prompt,
+        stream: false,
+        options: { temperature: 0.3, num_predict: 500 },
+      }),
+    });
+
+    if (!res.ok) throw new Error(`Ollama HTTP ${res.status}`);
+    const data = await res.json();
+    const text = data.response || '';
+    const durationMs = Date.now() - startTime;
+
+    sendEvent('text', { text });
+    sendEvent('result', {
+      text,
+      cost: 0,
+      tokens: { input: data.prompt_eval_count || 0, output: data.eval_count || 0 },
+      session_id: '',
+      duration_ms: durationMs,
+      model: ollamaModel,
+      local: true,
+    });
+    sendMonitorEvent('api', `← Lokal (${ollamaModel}) [${durationMs}ms]`, {
+      model: ollamaModel, local: true,
+      tokens: { input: data.prompt_eval_count || 0, output: data.eval_count || 0 },
+    }, { durationMs });
+  } catch (err) {
+    sendEvent('text', { text: `**Lokaler Fehler:** ${err.message}` });
+    sendMonitorEvent('error', `Ollama Fehler: ${err.message}`, { model: ollamaModel });
+  } finally {
+    sendEvent('agent-stopped', { id: 'local-' + requestId, code: 0 });
+    sendEvent('all-stopped');
+  }
+}
+
+// Ollama beim Start prüfen (non-blocking)
+checkOllamaAvailability().then(status => {
+  if (status.available) {
+    process.stderr.write(`🧠 Ollama verfügbar: ${status.models.length} Modelle (${ollamaEndpoint})\n`);
+  }
+});
+
 // ============ Main ============
 
 function cleanupDaemon() {
diff --git a/src-tauri/src/claude.rs b/src-tauri/src/claude.rs
index 536c862..2787911 100644
--- a/src-tauri/src/claude.rs
+++ b/src-tauri/src/claude.rs
@@ -1147,6 +1147,55 @@ pub async fn remove_mcp_server(app: AppHandle, name: String) -> Result<(), String> {
     Ok(())
 }
 
+/// Lokale Ollama-Abfrage (einfache Tasks, Haiku-Equivalent)
+#[tauri::command]
+pub async fn local_query(app: AppHandle, message: String) -> Result<String, String> {
+    // Bridge muss verbunden sein
+    let needs_start = {
+        let state = app.state::>>();
+        let state_guard = state.lock().unwrap();
+        !state_guard.is_connected()
+    };
+
+    if needs_start {
+        start_bridge(&app)?;
+        tokio::time::sleep(tokio::time::Duration::from_millis(500)).await;
+    }
+
+    let state = app.state::>>();
+    let mut state = state.lock().unwrap();
+    state.request_counter += 1;
+    let request_id = format!("req-{}", state.request_counter);
+
+    let msg = serde_json::json!({
+        "command": "local-query",
+        "id": request_id,
+        "message": message
+    });
+
+    state.write_line(&msg.to_string())?;
+    Ok(format!("Lokale Abfrage gesendet ({})", request_id))
+}
+
+/// Ollama-Konfiguration setzen
+#[tauri::command]
+pub async fn set_ollama_config(app: AppHandle, endpoint: Option<String>, model: Option<String>) -> Result<String, String> {
+    let state = app.state::>>();
+    let mut state = state.lock().unwrap();
+    state.request_counter += 1;
+    let request_id = format!("req-{}", state.request_counter);
+
+    let msg = serde_json::json!({
+        "command": "set-ollama-config",
+        "id": request_id,
+        "endpoint": endpoint,
+        "model": model
+    });
+
+    state.write_line(&msg.to_string())?;
+    Ok("Ollama-Config aktualisiert".to_string())
+}
+
 /// Bridge-Verbindungsstatus abfragen
 #[derive(Debug, Clone, serde::Serialize)]
 pub struct BridgeStatus {
diff --git a/src-tauri/src/lib.rs b/src-tauri/src/lib.rs
index 618f619..4d515d3 100644
--- a/src-tauri/src/lib.rs
+++ b/src-tauri/src/lib.rs
@@ -59,6 +59,8 @@ pub fn run() {
             claude::list_mcp_servers,
             claude::add_mcp_server,
             claude::remove_mcp_server,
+            claude::local_query,
+            claude::set_ollama_config,
             // Gedächtnis-System
             memory::load_memory,
             memory::get_sticky_memory_entries,
@@ -95,6 +97,12 @@ pub fn run() {
             session::get_active_session,
             session::set_claude_session_id,
             session::update_session_stats,
+            // Offline-Queue
+            session::queue_message,
+            session::list_queued_messages,
+            session::flush_offline_queue,
+            session::clear_offline_queue,
+            session::queue_count,
             // Messages
             db::save_message,
             db::load_messages,
diff --git a/src-tauri/src/session.rs b/src-tauri/src/session.rs
index 06bdb0c..5635373 100644
--- a/src-tauri/src/session.rs
+++ b/src-tauri/src/session.rs
@@ -1,9 +1,11 @@
-// Claude Desktop — Session-Verwaltung
+// Claude Desktop — Session-Verwaltung + Offline-Queue
 // Sessions bleiben permanent gespeichert bis der User sie löscht
+// Offline-Queue: Nachrichten werden bei fehlender Verbindung gespeichert und später gesendet
 
 use std::sync::{Arc, Mutex};
-use tauri::{AppHandle, Manager};
+use tauri::{AppHandle, Emitter, Manager};
+use crate::claude;
 use crate::db::{self, Session};
 
 // ============ Tauri Commands ============
@@ -181,3 +183,162 @@ pub async fn update_session_stats(
     ).map_err(|e| e.to_string())?;
     Ok(())
 }
+
+// ============ Offline-Queue ============
+
+/// Offline-Queue Eintrag
+#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
+pub struct QueuedMessage {
+    pub id: String,
+    pub message: String,
+    pub session_id: Option<String>,
+    pub created_at: String,
+}
+
+/// Initialisiert die Queue-Tabelle in SQLite
+fn ensure_queue_table(db: &db::Database) -> Result<(), String> {
+    db.conn.execute(
+        "CREATE TABLE IF NOT EXISTS offline_queue (
+            id TEXT PRIMARY KEY,
+            message TEXT NOT NULL,
+            session_id TEXT,
+            created_at TEXT NOT NULL
+        )",
+        [],
+    ).map_err(|e| e.to_string())?;
+    Ok(())
+}
+
+/// Nachricht in die Offline-Queue legen
+#[tauri::command]
+pub async fn queue_message(
+    app: AppHandle,
+    message: String,
+) -> Result<QueuedMessage, String> {
+    let queued = QueuedMessage {
+        id: uuid::Uuid::new_v4().to_string(),
+        message,
+        session_id: {
+            let state = app.state::<Arc<Mutex<db::Database>>>();
+            let db = state.lock().unwrap();
+            db.get_setting("active_session_id").ok().flatten()
+        },
+        created_at: chrono::Local::now().to_rfc3339(),
+    };
+
+    let state = app.state::<Arc<Mutex<db::Database>>>();
+    let db = state.lock().unwrap();
+    ensure_queue_table(&db)?;
+
+    db.conn.execute(
+        "INSERT INTO offline_queue (id, message, session_id, created_at) VALUES (?1, ?2, ?3, ?4)",
+        rusqlite::params![queued.id, queued.message, queued.session_id, queued.created_at],
+    ).map_err(|e| e.to_string())?;
+
+    println!("📥 Nachricht gequeuet (offline): {}", &queued.message[..queued.message.len().min(50)]);
+    let _ = app.emit("message-queued", &queued);
+
+    Ok(queued)
+}
+
+/// Queued-Nachrichten auflisten
+#[tauri::command]
+pub async fn list_queued_messages(app: AppHandle) -> Result<Vec<QueuedMessage>, String> {
+    let state = app.state::<Arc<Mutex<db::Database>>>();
+    let db = state.lock().unwrap();
+    ensure_queue_table(&db)?;
+
+    let mut stmt = db.conn.prepare(
+        "SELECT id, message, session_id, created_at FROM offline_queue ORDER BY created_at ASC"
+    ).map_err(|e| e.to_string())?;
+
+    let result: Vec<QueuedMessage> = stmt.query_map([], |row| {
+        Ok(QueuedMessage {
+            id: row.get(0)?,
+            message: row.get(1)?,
+            session_id: row.get(2)?,
+            created_at: row.get(3)?,
+        })
+    }).map_err(|e| e.to_string())?
+    .filter_map(|r| r.ok())
+    .collect();
+
+    Ok(result)
+}
+
+/// Queue abarbeiten — sendet alle gequeueten Nachrichten an die Bridge
+#[tauri::command]
+pub async fn flush_offline_queue(app: AppHandle) -> Result<u32, String> {
+    // Prüfe ob Bridge verbunden
+    {
+        let claude_state = app.state::>>();
+        let state = claude_state.lock().unwrap();
+        if !state.is_connected() {
+            return Err("Bridge nicht verbunden — Queue kann nicht abgearbeitet werden".to_string());
+        }
+    }
+
+    let messages = list_queued_messages(app.clone()).await?;
+    let count = messages.len() as u32;
+
+    if count == 0 {
+        return Ok(0);
+    }
+
+    println!("📤 Sende {} gequeuete Nachrichten...", count);
+
+    for msg in &messages {
+        // Nachricht über den normalen Weg senden
+        match claude::send_message(app.clone(), msg.message.clone()).await {
+            Ok(_) => {
+                // Aus Queue entfernen
+                let state = app.state::<Arc<Mutex<db::Database>>>();
+                let db = state.lock().unwrap();
+                let _ = db.conn.execute(
+                    "DELETE FROM offline_queue WHERE id = ?1",
+                    rusqlite::params![msg.id],
+                );
+                println!("✅ Queue-Nachricht gesendet: {}", &msg.message[..msg.message.len().min(50)]);
+            }
+            Err(e) => {
+                println!("⚠️ Queue-Nachricht fehlgeschlagen: {} — Abbruch", e);
+                break;
+            }
+        }
+        // Kurze Pause zwischen Nachrichten
+        tokio::time::sleep(tokio::time::Duration::from_millis(500)).await;
+    }
+
+    let _ = app.emit("queue-flushed", count);
+    Ok(count)
+}
+
+/// Queue leeren (alle Nachrichten verwerfen)
+#[tauri::command]
+pub async fn clear_offline_queue(app: AppHandle) -> Result<u32, String> {
+    let state = app.state::<Arc<Mutex<db::Database>>>();
+    let db = state.lock().unwrap();
+    ensure_queue_table(&db)?;
+
+    let count = db.conn.execute("DELETE FROM offline_queue", [])
+        .map_err(|e| e.to_string())? as u32;
+
+    println!("🗑️ Offline-Queue geleert: {} Nachrichten", count);
+    Ok(count)
+}
+
+/// Anzahl der gequeueten Nachrichten
+#[tauri::command]
+pub async fn queue_count(app: AppHandle) -> Result<u32, String> {
+    let state = app.state::<Arc<Mutex<db::Database>>>();
+    let db = state.lock().unwrap();
+    ensure_queue_table(&db)?;
+
+    let count: u32 = db.conn.query_row(
+        "SELECT COUNT(*) FROM offline_queue",
+        [],
+        |row| row.get(0),
+    ).map_err(|e| e.to_string())?;
+
+    Ok(count)
+}