aboutsummaryrefslogtreecommitdiffstatshomepage
path: root/ui/src/stores/assistant.svelte.ts
diff options
context:
space:
mode:
author苏向夜 <46275354+fu050409@users.noreply.github.com>2026-01-19 14:33:07 +0800
committerGitHub <noreply@github.com>2026-01-19 14:33:07 +0800
commit49545e67ce1ab4ec86248ac6edb07ec89c282eec (patch)
tree50f5fc3ae156cc853660a1aa1556c0bced9054b4 /ui/src/stores/assistant.svelte.ts
parent887e415314014c3da7db3048fa0e724f3078c5cb (diff)
parent91d4590dff7ed3dbce5929926c718ac93aad056a (diff)
downloadDropOut-49545e67ce1ab4ec86248ac6edb07ec89c282eec.tar.gz
DropOut-49545e67ce1ab4ec86248ac6edb07ec89c282eec.zip
chore(ui): refactor workspace to monorepo (#70)
## Summary by Sourcery Refactor the UI project structure into a pnpm monorepo packages layout and align tooling and automation with the new paths. Enhancements: - Reorganize the UI app from the root ui directory into packages/ui within a pnpm workspace. - Update pnpm workspace configuration to include all packages under packages/*. - Adjust paths in changeset configuration so the @dropout/ui package resolves from packages/ui. Build: - Update pre-commit configuration paths and arguments to reflect the new UI location and normalize hook argument formatting. - Update Dependabot configuration so npm updates target /packages/ui instead of /ui. CI: - Update GitHub Actions workflows to watch packages/** instead of ui/** and to run frontend tasks from packages/ui. - Update pnpm cache dependency paths in workflows to use the root pnpm-lock.yaml. - Simplify frontend install steps in test workflows to run from the repository root. Chores: - Add a new index.html under packages/ui and remove the old ui/index.html to match the new project layout.
Diffstat (limited to 'ui/src/stores/assistant.svelte.ts')
-rw-r--r--ui/src/stores/assistant.svelte.ts166
1 file changed, 0 insertions, 166 deletions
diff --git a/ui/src/stores/assistant.svelte.ts b/ui/src/stores/assistant.svelte.ts
deleted file mode 100644
index a3f47ea..0000000
--- a/ui/src/stores/assistant.svelte.ts
+++ /dev/null
@@ -1,166 +0,0 @@
-import { invoke } from "@tauri-apps/api/core";
-import { listen, type UnlistenFn } from "@tauri-apps/api/event";
-
/**
 * Timing/throughput statistics reported by the backend when a generation
 * completes (delivered on the final stream chunk). Field names are
 * snake_case to match the serialized backend payload verbatim.
 * NOTE(review): duration units (ns vs ms) are set by the backend and are
 * not visible here — confirm before formatting for display.
 */
export interface GenerationStats {
  total_duration: number;
  load_duration: number;
  prompt_eval_count: number;
  prompt_eval_duration: number;
  eval_count: number;
  eval_duration: number;
}
-
/** A single chat message in the conversation history. */
export interface Message {
  // Who produced the message; mirrors the common chat-API role convention.
  role: "user" | "assistant" | "system";
  content: string;
  // Present only on assistant messages, once the stream reports `done`
  // with stats attached (see sendMessage's chunk handler).
  stats?: GenerationStats;
}
-
/** Payload of each "assistant-stream" Tauri event emitted by the backend. */
interface StreamChunk {
  // Incremental text to append; may be empty (e.g. on the final chunk).
  content: string;
  // True on the terminal chunk of a response.
  done: boolean;
  // Generation statistics; only observed alongside done === true.
  stats?: GenerationStats;
}
-
// Module-level state using Svelte 5 $state runes. Reactive values are
// exposed read-only through the `assistantState` getters below.
let messages = $state<Message[]>([]); // full conversation history
let isProcessing = $state(false); // true while a response is streaming
let isProviderHealthy = $state(false); // last result of checkHealth()
let streamingContent = ""; // accumulator for the in-flight reply (not reactive)
let initialized = false; // one-shot guard for init()
let streamUnlisten: UnlistenFn | null = null; // teardown for the active stream listener
-
-async function init() {
- if (initialized) return;
- initialized = true;
- await checkHealth();
-}
-
-async function checkHealth() {
- try {
- isProviderHealthy = await invoke("assistant_check_health");
- } catch (e) {
- console.error("Failed to check provider health:", e);
- isProviderHealthy = false;
- }
-}
-
-function finishStreaming() {
- isProcessing = false;
- streamingContent = "";
- if (streamUnlisten) {
- streamUnlisten();
- streamUnlisten = null;
- }
-}
-
/**
 * Send a user message and stream the assistant's reply into the history.
 *
 * Flow: append the user message, append an empty assistant placeholder,
 * subscribe to "assistant-stream" events that fill the placeholder chunk
 * by chunk, then invoke the backend command that starts the stream.
 *
 * @param content   User-entered text; blank/whitespace-only input is ignored.
 * @param isEnabled When false, a canned "assistant disabled" reply is added
 *                  and no backend call is made.
 * @param provider  Provider id ("ollama" / "openai") — used only to pick
 *                  the help text shown on failure.
 * @param endpoint  Provider endpoint — interpolated into the Ollama help text.
 *
 * NOTE(review): if a stream were still in flight, this overwrites
 * `streamUnlisten` without calling the old unlisten — possible listener
 * leak; confirm callers gate on `isProcessing` before sending.
 */
async function sendMessage(
  content: string,
  isEnabled: boolean,
  provider: string,
  endpoint: string,
) {
  if (!content.trim()) return;
  if (!isEnabled) {
    messages = [
      ...messages,
      {
        role: "assistant",
        content: "Assistant is disabled. Enable it in Settings > AI Assistant.",
      },
    ];
    return;
  }

  // Add user message
  messages = [...messages, { role: "user", content }];
  isProcessing = true;
  streamingContent = "";

  // Add empty assistant message for streaming; chunks below overwrite it.
  messages = [...messages, { role: "assistant", content: "" }];

  try {
    // Set up stream listener before starting the stream so no chunk is missed.
    streamUnlisten = await listen<StreamChunk>("assistant-stream", (event) => {
      const chunk = event.payload;

      if (chunk.content) {
        streamingContent += chunk.content;
        // Update the last message (assistant's response)
        const lastIdx = messages.length - 1;
        if (lastIdx >= 0 && messages[lastIdx].role === "assistant") {
          messages[lastIdx] = {
            ...messages[lastIdx],
            content: streamingContent,
          };
          // Trigger reactivity: reassign so $state sees a new array reference.
          messages = [...messages];
        }
      }

      if (chunk.done) {
        // Attach generation stats (if provided) to the finished reply.
        if (chunk.stats) {
          const lastIdx = messages.length - 1;
          if (lastIdx >= 0 && messages[lastIdx].role === "assistant") {
            messages[lastIdx] = {
              ...messages[lastIdx],
              stats: chunk.stats,
            };
            messages = [...messages];
          }
        }
        finishStreaming();
      }
    });

    // Start streaming chat
    await invoke<string>("assistant_chat_stream", {
      messages: messages.slice(0, -1), // Exclude the empty assistant message
    });
  } catch (e) {
    console.error("Failed to send message:", e);
    const errorMessage = e instanceof Error ? e.message : String(e);

    // Provider-specific troubleshooting hint appended to the error message.
    let helpText = "";
    if (provider === "ollama") {
      helpText = `\n\nPlease ensure Ollama is running at ${endpoint}.`;
    } else if (provider === "openai") {
      helpText = "\n\nPlease check your OpenAI API key in Settings.";
    }

    // Update the last message with error
    const lastIdx = messages.length - 1;
    if (lastIdx >= 0 && messages[lastIdx].role === "assistant") {
      messages[lastIdx] = {
        role: "assistant",
        content: `Error: ${errorMessage}${helpText}`,
      };
      messages = [...messages];
    }

    finishStreaming();
  }
}
-
-function clearHistory() {
- messages = [];
- streamingContent = "";
-}
-
// Export as an object with getters for reactive access: consumers read
// `assistantState.messages` etc. through getters so Svelte's $state
// reactivity is preserved, while the underlying bindings stay private
// and cannot be reassigned from outside this module.
export const assistantState = {
  get messages() {
    return messages;
  },
  get isProcessing() {
    return isProcessing;
  },
  get isProviderHealthy() {
    return isProviderHealthy;
  },
  init,
  checkHealth,
  sendMessage,
  clearHistory,
};