aboutsummaryrefslogtreecommitdiffstatshomepage
path: root/packages/ui/src/stores/assistant.svelte.ts
diff options
context:
space:
mode:
author苏向夜 <46275354+fu050409@users.noreply.github.com>2026-02-25 02:06:07 +0800
committerGitHub <noreply@github.com>2026-02-25 02:06:07 +0800
commit78ac61904d78d558d092eff08c9f261cbdb187e8 (patch)
tree96f68d1f1554ee3a0532793afaaa52b0c73dcbec /packages/ui/src/stores/assistant.svelte.ts
parent8ff3af6cb908fd824b512379dd21ed4f595ab6bb (diff)
parent329734b23957b84cde2af459fa61c7385fb5b5f1 (diff)
downloadDropOut-78ac61904d78d558d092eff08c9f261cbdb187e8.tar.gz
DropOut-78ac61904d78d558d092eff08c9f261cbdb187e8.zip
feat(ui): partial react rewrite (#77)
## Summary by Sourcery Export backend data structures to TypeScript for the new React-based UI and update CI to build additional targets. New Features: - Generate TypeScript definitions for core backend structs and enums used by the UI. - We now use our own Azure app (_DropOut_) to complete the authorization process. Enhancements: - Annotate existing Rust models with ts-rs metadata to control exported TypeScript shapes, including tagged enums and opaque JSON fields. Build: - Add ts-rs as a dependency for generating TypeScript bindings from Rust types. CI: - Extend the Semifold CI workflow to run on the dev branch and build additional Linux musl and Windows GNU targets, using cross where needed.
Diffstat (limited to 'packages/ui/src/stores/assistant.svelte.ts')
-rw-r--r--packages/ui/src/stores/assistant.svelte.ts166
1 file changed, 0 insertions, 166 deletions
diff --git a/packages/ui/src/stores/assistant.svelte.ts b/packages/ui/src/stores/assistant.svelte.ts
deleted file mode 100644
index a3f47ea..0000000
--- a/packages/ui/src/stores/assistant.svelte.ts
+++ /dev/null
@@ -1,166 +0,0 @@
-import { invoke } from "@tauri-apps/api/core";
-import { listen, type UnlistenFn } from "@tauri-apps/api/event";
-
-// Per-response generation metrics attached to a completed assistant message.
-// Field names mirror the backend payload (snake_case); durations are raw
-// numbers whose unit the backend defines — presumably nanoseconds, as in
-// Ollama's API — TODO confirm against the Rust side.
-export interface GenerationStats {
- total_duration: number;
- load_duration: number;
- prompt_eval_count: number;
- prompt_eval_duration: number;
- eval_count: number;
- eval_duration: number;
-}
-
-// One chat-history entry. `stats` is only present on assistant messages
-// whose stream completed with stats attached (see sendMessage).
-export interface Message {
- role: "user" | "assistant" | "system";
- content: string;
- stats?: GenerationStats;
-}
-
-// Payload of each "assistant-stream" Tauri event: an incremental content
-// delta, a `done` flag for the final chunk, and optional final stats.
-interface StreamChunk {
- content: string;
- done: boolean;
- stats?: GenerationStats;
-}
-
-// Module-level state using $state
-// Reactive (Svelte 5 runes) — exposed to consumers via assistantState getters.
-let messages = $state<Message[]>([]);
-let isProcessing = $state(false);
-let isProviderHealthy = $state(false);
-// Non-reactive internals: accumulated stream text, one-shot init guard, and
-// the unsubscribe handle for the active Tauri event listener (null when idle).
-let streamingContent = "";
-let initialized = false;
-let streamUnlisten: UnlistenFn | null = null;
-
-// Idempotent startup hook: runs the provider health check exactly once.
-// Note: the guard is set before awaiting, so concurrent callers won't
-// trigger a second health check.
-async function init() {
- if (initialized) return;
- initialized = true;
- await checkHealth();
-}
-
-// Queries the backend "assistant_check_health" command and mirrors the
-// result into isProviderHealthy. Any invoke failure is logged and treated
-// as unhealthy rather than thrown.
-async function checkHealth() {
- try {
- isProviderHealthy = await invoke("assistant_check_health");
- } catch (e) {
- console.error("Failed to check provider health:", e);
- isProviderHealthy = false;
- }
-}
-
-// Tears down an in-flight stream: clears the processing flag and buffer,
-// and unsubscribes the Tauri event listener so stale chunks can't mutate
-// a later conversation. Safe to call when no stream is active.
-function finishStreaming() {
- isProcessing = false;
- streamingContent = "";
- if (streamUnlisten) {
- streamUnlisten();
- streamUnlisten = null;
- }
-}
-
-// Sends a user message and streams the assistant reply.
-//
-// Flow: append the user message and an empty assistant placeholder, subscribe
-// to "assistant-stream" events, then invoke "assistant_chat_stream" with the
-// history (minus the placeholder). Each chunk grows the placeholder in place;
-// the final chunk (done=true) may attach stats and ends the stream.
-//
-// `provider`/`endpoint` are only used to tailor the error help text; they are
-// not forwarded to the backend — presumably the backend reads its own
-// provider config. `isEnabled` short-circuits with an inline notice message.
-async function sendMessage(
- content: string,
- isEnabled: boolean,
- provider: string,
- endpoint: string,
-) {
- if (!content.trim()) return;
- if (!isEnabled) {
- messages = [
- ...messages,
- {
- role: "assistant",
- content: "Assistant is disabled. Enable it in Settings > AI Assistant.",
- },
- ];
- return;
- }
-
- // Add user message
- messages = [...messages, { role: "user", content }];
- isProcessing = true;
- streamingContent = "";
-
- // Add empty assistant message for streaming
- messages = [...messages, { role: "assistant", content: "" }];
-
- try {
- // Set up stream listener BEFORE starting the backend stream, so no
- // early chunks are lost.
- streamUnlisten = await listen<StreamChunk>("assistant-stream", (event) => {
- const chunk = event.payload;
-
- if (chunk.content) {
- streamingContent += chunk.content;
- // Update the last message (assistant's response)
- const lastIdx = messages.length - 1;
- if (lastIdx >= 0 && messages[lastIdx].role === "assistant") {
- messages[lastIdx] = {
- ...messages[lastIdx],
- content: streamingContent,
- };
- // Trigger reactivity
- messages = [...messages];
- }
- }
-
- if (chunk.done) {
- // Final chunk: attach stats (if any) to the assistant message,
- // then tear down the listener and clear the processing flag.
- if (chunk.stats) {
- const lastIdx = messages.length - 1;
- if (lastIdx >= 0 && messages[lastIdx].role === "assistant") {
- messages[lastIdx] = {
- ...messages[lastIdx],
- stats: chunk.stats,
- };
- messages = [...messages];
- }
- }
- finishStreaming();
- }
- });
-
- // Start streaming chat
- await invoke<string>("assistant_chat_stream", {
- messages: messages.slice(0, -1), // Exclude the empty assistant message
- });
- } catch (e) {
- console.error("Failed to send message:", e);
- const errorMessage = e instanceof Error ? e.message : String(e);
-
- // Provider-specific troubleshooting hint appended to the error text.
- let helpText = "";
- if (provider === "ollama") {
- helpText = `\n\nPlease ensure Ollama is running at ${endpoint}.`;
- } else if (provider === "openai") {
- helpText = "\n\nPlease check your OpenAI API key in Settings.";
- }
-
- // Update the last message with error
- const lastIdx = messages.length - 1;
- if (lastIdx >= 0 && messages[lastIdx].role === "assistant") {
- messages[lastIdx] = {
- role: "assistant",
- content: `Error: ${errorMessage}${helpText}`,
- };
- messages = [...messages];
- }
-
- finishStreaming();
- }
-}
-
-// Resets the conversation and the stream buffer. Does NOT unsubscribe an
-// active listener or clear isProcessing — callers presumably only invoke
-// this while idle; confirm before calling mid-stream.
-function clearHistory() {
- messages = [];
- streamingContent = "";
-}
-
-// Export as an object with getters for reactive access
-export const assistantState = {
- get messages() {
- return messages;
- },
- get isProcessing() {
- return isProcessing;
- },
- get isProviderHealthy() {
- return isProviderHealthy;
- },
- init,
- checkHealth,
- sendMessage,
- clearHistory,
-};