aboutsummaryrefslogtreecommitdiffstatshomepage
path: root/packages/ui/src/models/assistant-store.ts.bk
diff options
context:
space:
mode:
authorNtskwK <natsukawa247@outlook.com>2026-03-30 16:49:08 +0800
committerNtskwK <natsukawa247@outlook.com>2026-03-30 16:49:08 +0800
commit878d66f9add4e4026a26ae2fa2a1226b5259154d (patch)
treeaf78680c8d4f357843ab336bdac6e56a622de3c7 /packages/ui/src/models/assistant-store.ts.bk
parentf8b4bcb3bdc8f11323103081ef8c05b06159d684 (diff)
parentc4dc0676d794bca2613be282867d369328ebf073 (diff)
downloadDropOut-878d66f9add4e4026a26ae2fa2a1226b5259154d.tar.gz
DropOut-878d66f9add4e4026a26ae2fa2a1226b5259154d.zip
Merge branch 'main' of https://github.com/HydroRoll-Team/DropOut
Diffstat (limited to 'packages/ui/src/models/assistant-store.ts.bk')
-rw-r--r--packages/ui/src/models/assistant-store.ts.bk201
1 file changed, 201 insertions, 0 deletions
diff --git a/packages/ui/src/models/assistant-store.ts.bk b/packages/ui/src/models/assistant-store.ts.bk
new file mode 100644
index 0000000..180031b
--- /dev/null
+++ b/packages/ui/src/models/assistant-store.ts.bk
@@ -0,0 +1,201 @@
+import { invoke } from "@tauri-apps/api/core";
+import { listen, type UnlistenFn } from "@tauri-apps/api/event";
+import { create } from "zustand";
+import type { GenerationStats, StreamChunk } from "@/types/bindings/assistant";
+
/**
 * A single chat-transcript entry rendered in the assistant UI.
 */
export interface Message {
  /** Who produced this entry: the user, the assistant, or a system prompt. */
  role: "user" | "assistant" | "system";
  /** The message text; appended to incrementally while streaming. */
  content: string;
  /** Backend generation metrics, attached to assistant replies when the stream completes. */
  stats?: GenerationStats;
}
+
/**
 * Shape of the zustand store backing the AI-assistant chat panel:
 * conversation state plus the actions that drive the Tauri backend.
 */
interface AssistantState {
  // State
  /** Full conversation transcript, oldest first. */
  messages: Message[];
  /** True while a streaming chat request is in flight. */
  isProcessing: boolean;
  /** Provider reachability: `undefined` = not yet checked. */
  isProviderHealthy: boolean | undefined;
  /** Accumulated text of the assistant reply currently being streamed. */
  streamingContent: string;
  /** Guards `init` so it only runs once. */
  initialized: boolean;
  /** Unsubscribe handle for the active "assistant-stream" event listener, if any. */
  streamUnlisten: UnlistenFn | null;

  // Actions
  /** One-time startup hook; triggers the first health check. */
  init: () => Promise<void>;
  /** Queries the backend for provider reachability and records the result. */
  checkHealth: () => Promise<void>;
  /**
   * Sends a user message and streams the assistant reply into the transcript.
   * `provider`/`endpoint` are only used to build a helpful error message.
   */
  sendMessage: (
    content: string,
    isEnabled: boolean,
    provider: string,
    endpoint: string,
  ) => Promise<void>;
  /** Clears streaming flags and detaches the stream event listener. */
  finishStreaming: () => void;
  /** Empties the transcript and any in-progress streamed text. */
  clearHistory: () => void;
  setMessages: (messages: Message[]) => void;
  setIsProcessing: (isProcessing: boolean) => void;
  setIsProviderHealthy: (isProviderHealthy: boolean | undefined) => void;
  setStreamingContent: (streamingContent: string) => void;
}
+
+export const useAssistantStore = create<AssistantState>((set, get) => ({
+ // Initial state
+ messages: [],
+ isProcessing: false,
+ isProviderHealthy: false,
+ streamingContent: "",
+ initialized: false,
+ streamUnlisten: null,
+
+ // Actions
+ init: async () => {
+ const { initialized } = get();
+ if (initialized) return;
+ set({ initialized: true });
+ await get().checkHealth();
+ },
+
+ checkHealth: async () => {
+ try {
+ const isHealthy = await invoke<boolean>("assistant_check_health");
+ set({ isProviderHealthy: isHealthy });
+ } catch (e) {
+ console.error("Failed to check provider health:", e);
+ set({ isProviderHealthy: false });
+ }
+ },
+
+ finishStreaming: () => {
+ const { streamUnlisten } = get();
+ set({ isProcessing: false, streamingContent: "" });
+
+ if (streamUnlisten) {
+ streamUnlisten();
+ set({ streamUnlisten: null });
+ }
+ },
+
+ sendMessage: async (content, isEnabled, provider, endpoint) => {
+ if (!content.trim()) return;
+
+ const { messages } = get();
+
+ if (!isEnabled) {
+ const newMessage: Message = {
+ role: "assistant",
+ content: "Assistant is disabled. Enable it in Settings > AI Assistant.",
+ };
+ set({ messages: [...messages, { role: "user", content }, newMessage] });
+ return;
+ }
+
+ // Add user message
+ const userMessage: Message = { role: "user", content };
+ const updatedMessages = [...messages, userMessage];
+ set({
+ messages: updatedMessages,
+ isProcessing: true,
+ streamingContent: "",
+ });
+
+ // Add empty assistant message for streaming
+ const assistantMessage: Message = { role: "assistant", content: "" };
+ const withAssistantMessage = [...updatedMessages, assistantMessage];
+ set({ messages: withAssistantMessage });
+
+ try {
+ // Set up stream listener
+ const unlisten = await listen<StreamChunk>(
+ "assistant-stream",
+ (event) => {
+ const chunk = event.payload;
+ const currentState = get();
+
+ if (chunk.content) {
+ const newStreamingContent =
+ currentState.streamingContent + chunk.content;
+ const currentMessages = [...currentState.messages];
+ const lastIdx = currentMessages.length - 1;
+
+ if (lastIdx >= 0 && currentMessages[lastIdx].role === "assistant") {
+ currentMessages[lastIdx] = {
+ ...currentMessages[lastIdx],
+ content: newStreamingContent,
+ };
+ set({
+ streamingContent: newStreamingContent,
+ messages: currentMessages,
+ });
+ }
+ }
+
+ if (chunk.done) {
+ const finalMessages = [...currentState.messages];
+ const lastIdx = finalMessages.length - 1;
+
+ if (
+ chunk.stats &&
+ lastIdx >= 0 &&
+ finalMessages[lastIdx].role === "assistant"
+ ) {
+ finalMessages[lastIdx] = {
+ ...finalMessages[lastIdx],
+ stats: chunk.stats,
+ };
+ set({ messages: finalMessages });
+ }
+
+ get().finishStreaming();
+ }
+ },
+ );
+
+ set({ streamUnlisten: unlisten });
+
+ // Start streaming chat
+ await invoke<string>("assistant_chat_stream", {
+ messages: withAssistantMessage.slice(0, -1), // Exclude the empty assistant message
+ });
+ } catch (e) {
+ console.error("Failed to send message:", e);
+ const errorMessage = e instanceof Error ? e.message : String(e);
+
+ let helpText = "";
+ if (provider === "ollama") {
+ helpText = `\n\nPlease ensure Ollama is running at ${endpoint}.`;
+ } else if (provider === "openai") {
+ helpText = "\n\nPlease check your OpenAI API key in Settings.";
+ }
+
+ // Update the last message with error
+ const currentMessages = [...get().messages];
+ const lastIdx = currentMessages.length - 1;
+ if (lastIdx >= 0 && currentMessages[lastIdx].role === "assistant") {
+ currentMessages[lastIdx] = {
+ role: "assistant",
+ content: `Error: ${errorMessage}${helpText}`,
+ };
+ set({ messages: currentMessages });
+ }
+
+ get().finishStreaming();
+ }
+ },
+
+ clearHistory: () => {
+ set({ messages: [], streamingContent: "" });
+ },
+
+ setMessages: (messages) => {
+ set({ messages });
+ },
+
+ setIsProcessing: (isProcessing) => {
+ set({ isProcessing });
+ },
+
+ setIsProviderHealthy: (isProviderHealthy) => {
+ set({ isProviderHealthy });
+ },
+
+ setStreamingContent: (streamingContent) => {
+ set({ streamingContent });
+ },
+}));