about summary refs log tree commit diff stats homepage
diff options
context:
space:
mode:
author	HsiangNianian <i@jyunko.cn>	2026-01-16 14:16:42 +0800
committer	HsiangNianian <i@jyunko.cn>	2026-01-16 14:18:21 +0800
commitd0e7da7ec7745be3d34efe3949c0592f6723136a (patch)
tree78fcd73118f5f001d70670cb30b87d973ab7cee4
parentf1eaa8fcc90c3fec0bcedc1ce060f1dcfea5bcd6 (diff)
downloadDropOut-d0e7da7ec7745be3d34efe3949c0592f6723136a.tar.gz
DropOut-d0e7da7ec7745be3d34efe3949c0592f6723136a.zip
feat: implement assistant state management and messaging functionality
Added a new module for managing the assistant's state, including message handling, health checks, and streaming responses. Introduced interfaces for message structure and generation statistics. Enhanced the logs state by removing unnecessary listener setup during initialization. Updated the types to include assistant configuration options.
-rw-r--r--	ui/src/stores/assistant.svelte.ts	166
-rw-r--r--	ui/src/stores/logs.svelte.ts	8
-rw-r--r--	ui/src/types/index.ts	28
3 files changed, 199 insertions, 3 deletions
diff --git a/ui/src/stores/assistant.svelte.ts b/ui/src/stores/assistant.svelte.ts
new file mode 100644
index 0000000..a3f47ea
--- /dev/null
+++ b/ui/src/stores/assistant.svelte.ts
@@ -0,0 +1,166 @@
+import { invoke } from "@tauri-apps/api/core";
+import { listen, type UnlistenFn } from "@tauri-apps/api/event";
+
+export interface GenerationStats {
+ total_duration: number;
+ load_duration: number;
+ prompt_eval_count: number;
+ prompt_eval_duration: number;
+ eval_count: number;
+ eval_duration: number;
+}
+
+export interface Message {
+ role: "user" | "assistant" | "system";
+ content: string;
+ stats?: GenerationStats;
+}
+
+interface StreamChunk {
+ content: string;
+ done: boolean;
+ stats?: GenerationStats;
+}
+
+// Module-level state using $state
+let messages = $state<Message[]>([]);
+let isProcessing = $state(false);
+let isProviderHealthy = $state(false);
+let streamingContent = "";
+let initialized = false;
+let streamUnlisten: UnlistenFn | null = null;
+
+async function init() {
+ if (initialized) return;
+ initialized = true;
+ await checkHealth();
+}
+
+async function checkHealth() {
+ try {
+ isProviderHealthy = await invoke("assistant_check_health");
+ } catch (e) {
+ console.error("Failed to check provider health:", e);
+ isProviderHealthy = false;
+ }
+}
+
+function finishStreaming() {
+ isProcessing = false;
+ streamingContent = "";
+ if (streamUnlisten) {
+ streamUnlisten();
+ streamUnlisten = null;
+ }
+}
+
+async function sendMessage(
+ content: string,
+ isEnabled: boolean,
+ provider: string,
+ endpoint: string,
+) {
+ if (!content.trim()) return;
+ if (!isEnabled) {
+ messages = [
+ ...messages,
+ {
+ role: "assistant",
+ content: "Assistant is disabled. Enable it in Settings > AI Assistant.",
+ },
+ ];
+ return;
+ }
+
+ // Add user message
+ messages = [...messages, { role: "user", content }];
+ isProcessing = true;
+ streamingContent = "";
+
+ // Add empty assistant message for streaming
+ messages = [...messages, { role: "assistant", content: "" }];
+
+ try {
+ // Set up stream listener
+ streamUnlisten = await listen<StreamChunk>("assistant-stream", (event) => {
+ const chunk = event.payload;
+
+ if (chunk.content) {
+ streamingContent += chunk.content;
+ // Update the last message (assistant's response)
+ const lastIdx = messages.length - 1;
+ if (lastIdx >= 0 && messages[lastIdx].role === "assistant") {
+ messages[lastIdx] = {
+ ...messages[lastIdx],
+ content: streamingContent,
+ };
+ // Trigger reactivity
+ messages = [...messages];
+ }
+ }
+
+ if (chunk.done) {
+ if (chunk.stats) {
+ const lastIdx = messages.length - 1;
+ if (lastIdx >= 0 && messages[lastIdx].role === "assistant") {
+ messages[lastIdx] = {
+ ...messages[lastIdx],
+ stats: chunk.stats,
+ };
+ messages = [...messages];
+ }
+ }
+ finishStreaming();
+ }
+ });
+
+ // Start streaming chat
+ await invoke<string>("assistant_chat_stream", {
+ messages: messages.slice(0, -1), // Exclude the empty assistant message
+ });
+ } catch (e) {
+ console.error("Failed to send message:", e);
+ const errorMessage = e instanceof Error ? e.message : String(e);
+
+ let helpText = "";
+ if (provider === "ollama") {
+ helpText = `\n\nPlease ensure Ollama is running at ${endpoint}.`;
+ } else if (provider === "openai") {
+ helpText = "\n\nPlease check your OpenAI API key in Settings.";
+ }
+
+ // Update the last message with error
+ const lastIdx = messages.length - 1;
+ if (lastIdx >= 0 && messages[lastIdx].role === "assistant") {
+ messages[lastIdx] = {
+ role: "assistant",
+ content: `Error: ${errorMessage}${helpText}`,
+ };
+ messages = [...messages];
+ }
+
+ finishStreaming();
+ }
+}
+
+function clearHistory() {
+ messages = [];
+ streamingContent = "";
+}
+
+// Export as an object with getters for reactive access
+export const assistantState = {
+ get messages() {
+ return messages;
+ },
+ get isProcessing() {
+ return isProcessing;
+ },
+ get isProviderHealthy() {
+ return isProviderHealthy;
+ },
+ init,
+ checkHealth,
+ sendMessage,
+ clearHistory,
+};
diff --git a/ui/src/stores/logs.svelte.ts b/ui/src/stores/logs.svelte.ts
index 5df9abc..c9d4acc 100644
--- a/ui/src/stores/logs.svelte.ts
+++ b/ui/src/stores/logs.svelte.ts
@@ -39,7 +39,6 @@ export class LogsState {
constructor() {
this.addLog("info", "Launcher", "Logs initialized");
- this.setupListeners();
}
addLog(level: LogEntry["level"], source: string, message: string) {
@@ -95,7 +94,12 @@ export class LogsState {
.join("\n");
}
- private async setupListeners() {
+ private initialized = false;
+
+ async init() {
+ if (this.initialized) return;
+ this.initialized = true;
+
// General Launcher Logs
await listen<string>("launcher-log", (e) => {
this.addLog("info", "Launcher", e.payload);
diff --git a/ui/src/types/index.ts b/ui/src/types/index.ts
index 83e7f9e..6471869 100644
--- a/ui/src/types/index.ts
+++ b/ui/src/types/index.ts
@@ -1,4 +1,4 @@
-export type ViewType = "home" | "versions" | "settings";
+export type ViewType = "home" | "versions" | "settings" | "guide";
export interface Version {
id: string;
@@ -26,6 +26,31 @@ export interface DeviceCodeResponse {
message?: string;
}
+export interface AssistantConfig {
+ enabled: boolean;
+ llm_provider: "ollama" | "openai";
+ // Ollama settings
+ ollama_endpoint: string;
+ ollama_model: string;
+ // OpenAI settings
+ openai_api_key?: string;
+ openai_endpoint: string;
+ openai_model: string;
+ // Common settings
+ system_prompt: string;
+ response_language: string;
+ // TTS settings
+ tts_enabled: boolean;
+ tts_provider: string;
+}
+
+export interface ModelInfo {
+ id: string;
+ name: string;
+ size?: string;
+ details?: string;
+}
+
export interface LauncherConfig {
min_memory: number;
max_memory: number;
@@ -40,6 +65,7 @@ export interface LauncherConfig {
theme: string;
log_upload_service: "paste.rs" | "pastebin.com";
pastebin_api_key?: string;
+ assistant: AssistantConfig;
}
export interface JavaInstallation {