diff options
| author | 2026-01-16 14:18:04 +0800 | |
|---|---|---|
| committer | 2026-01-16 14:18:22 +0800 | |
| commit | 73ddf24b04bf94ee7fa76974e1af55eb94112b93 (patch) | |
| tree | 421bd2e8af7e720ed5b2fb23e92601cfeecd25ad /src-tauri/src | |
| parent | a38e61c30798efa3ab2231f99537828be5d5637b (diff) | |
| download | DropOut-73ddf24b04bf94ee7fa76974e1af55eb94112b93.tar.gz DropOut-73ddf24b04bf94ee7fa76974e1af55eb94112b93.zip | |
feat: integrate AI assistant functionality and configuration management
Implemented new commands for managing the AI assistant, including health checks, chat interactions, and model listings for both Ollama and OpenAI. Enhanced the configuration system to support raw JSON editing and added a dedicated AssistantConfig structure for better management of assistant settings. This update significantly improves the user experience by providing comprehensive control over AI interactions and configurations.
Diffstat (limited to 'src-tauri/src')
| -rw-r--r-- | src-tauri/src/core/account_storage.rs | 2 | ||||
| -rw-r--r-- | src-tauri/src/core/assistant.rs | 694 | ||||
| -rw-r--r-- | src-tauri/src/core/auth.rs | 7 | ||||
| -rw-r--r-- | src-tauri/src/core/config.rs | 40 | ||||
| -rw-r--r-- | src-tauri/src/core/downloader.rs | 33 | ||||
| -rw-r--r-- | src-tauri/src/core/fabric.rs | 18 | ||||
| -rw-r--r-- | src-tauri/src/core/forge.rs | 69 | ||||
| -rw-r--r-- | src-tauri/src/core/java.rs | 53 | ||||
| -rw-r--r-- | src-tauri/src/core/manifest.rs | 9 | ||||
| -rw-r--r-- | src-tauri/src/core/maven.rs | 5 | ||||
| -rw-r--r-- | src-tauri/src/core/mod.rs | 1 | ||||
| -rw-r--r-- | src-tauri/src/core/version_merge.rs | 2 | ||||
| -rw-r--r-- | src-tauri/src/main.rs | 168 | ||||
| -rw-r--r-- | src-tauri/src/utils/mod.rs | 2 |
14 files changed, 998 insertions(+), 105 deletions(-)
diff --git a/src-tauri/src/core/account_storage.rs b/src-tauri/src/core/account_storage.rs index 569df7b..8998206 100644 --- a/src-tauri/src/core/account_storage.rs +++ b/src-tauri/src/core/account_storage.rs @@ -138,6 +138,7 @@ impl AccountStorage { } } + #[allow(dead_code)] pub fn set_active_account(&self, uuid: &str) -> Result<(), String> { let mut store = self.load(); if store.accounts.iter().any(|a| a.id() == uuid) { @@ -148,6 +149,7 @@ impl AccountStorage { } } + #[allow(dead_code)] pub fn get_all_accounts(&self) -> Vec<StoredAccount> { self.load().accounts } diff --git a/src-tauri/src/core/assistant.rs b/src-tauri/src/core/assistant.rs new file mode 100644 index 0000000..9a8f7bf --- /dev/null +++ b/src-tauri/src/core/assistant.rs @@ -0,0 +1,694 @@ +use super::config::AssistantConfig; +use futures::StreamExt; +use serde::{Deserialize, Serialize}; +use std::collections::VecDeque; +use std::sync::{Arc, Mutex}; +use tauri::{Emitter, Window}; + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct Message { + pub role: String, + pub content: String, +} + +#[derive(Debug, Serialize)] +pub struct OllamaChatRequest { + pub model: String, + pub messages: Vec<Message>, + pub stream: bool, +} + +#[derive(Debug, Deserialize)] +#[allow(dead_code)] +pub struct OllamaChatResponse { + pub model: String, + pub created_at: String, + pub message: Message, + pub done: bool, +} + +// Ollama model list response structures +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct OllamaModelDetails { + pub format: Option<String>, + pub family: Option<String>, + pub parameter_size: Option<String>, + pub quantization_level: Option<String>, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct OllamaModel { + pub name: String, + pub modified_at: Option<String>, + pub size: Option<u64>, + pub digest: Option<String>, + pub details: Option<OllamaModelDetails>, +} + +#[derive(Debug, Deserialize)] +pub struct OllamaTagsResponse { + pub models: Vec<OllamaModel>, +} + 
+// Simplified model info for frontend +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ModelInfo { + pub id: String, + pub name: String, + pub size: Option<String>, + pub details: Option<String>, +} + +#[derive(Debug, Serialize)] +pub struct OpenAIChatRequest { + pub model: String, + pub messages: Vec<Message>, + pub stream: bool, +} + +#[derive(Debug, Deserialize)] +#[allow(dead_code)] +pub struct OpenAIChoice { + pub index: i32, + pub message: Message, + pub finish_reason: Option<String>, +} + +#[derive(Debug, Deserialize)] +#[allow(dead_code)] +pub struct OpenAIChatResponse { + pub id: String, + pub object: String, + pub created: i64, + pub model: String, + pub choices: Vec<OpenAIChoice>, +} + +// OpenAI models list response +#[derive(Debug, Deserialize)] +#[allow(dead_code)] +pub struct OpenAIModelData { + pub id: String, + pub object: String, + pub created: Option<i64>, + pub owned_by: Option<String>, +} + +#[derive(Debug, Deserialize)] +#[allow(dead_code)] +pub struct OpenAIModelsResponse { + pub object: String, + pub data: Vec<OpenAIModelData>, +} + +// Streaming response structures +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct GenerationStats { + pub total_duration: u64, + pub load_duration: u64, + pub prompt_eval_count: u64, + pub prompt_eval_duration: u64, + pub eval_count: u64, + pub eval_duration: u64, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct StreamChunk { + pub content: String, + pub done: bool, + pub stats: Option<GenerationStats>, +} + +// Ollama streaming response (each line is a JSON object) +#[derive(Debug, Deserialize)] +#[allow(dead_code)] +pub struct OllamaStreamResponse { + pub model: Option<String>, + pub created_at: Option<String>, + pub message: Option<Message>, + pub done: bool, + pub total_duration: Option<u64>, + pub load_duration: Option<u64>, + pub prompt_eval_count: Option<u64>, + pub prompt_eval_duration: Option<u64>, + pub eval_count: Option<u64>, + pub eval_duration: Option<u64>, 
+} + +// OpenAI streaming response +#[derive(Debug, Deserialize)] +#[allow(dead_code)] +pub struct OpenAIStreamDelta { + pub role: Option<String>, + pub content: Option<String>, +} + +#[derive(Debug, Deserialize)] +#[allow(dead_code)] +pub struct OpenAIStreamChoice { + pub index: i32, + pub delta: OpenAIStreamDelta, + pub finish_reason: Option<String>, +} + +#[derive(Debug, Deserialize)] +#[allow(dead_code)] +pub struct OpenAIStreamResponse { + pub id: Option<String>, + pub object: Option<String>, + pub created: Option<i64>, + pub model: Option<String>, + pub choices: Vec<OpenAIStreamChoice>, +} + +#[derive(Clone)] +pub struct GameAssistant { + client: reqwest::Client, + pub log_buffer: VecDeque<String>, + pub max_log_lines: usize, +} + +impl GameAssistant { + pub fn new() -> Self { + Self { + client: reqwest::Client::new(), + log_buffer: VecDeque::new(), + max_log_lines: 100, + } + } + + pub fn add_log(&mut self, line: String) { + if self.log_buffer.len() >= self.max_log_lines { + self.log_buffer.pop_front(); + } + self.log_buffer.push_back(line); + } + + pub fn get_log_context(&self) -> String { + self.log_buffer + .iter() + .cloned() + .collect::<Vec<_>>() + .join("\n") + } + + pub async fn check_health(&self, config: &AssistantConfig) -> bool { + if config.llm_provider == "ollama" { + match self + .client + .get(format!("{}/api/tags", config.ollama_endpoint)) + .send() + .await + { + Ok(res) => res.status().is_success(), + Err(_) => false, + } + } else if config.llm_provider == "openai" { + // For OpenAI, just check if API key is set + config.openai_api_key.is_some() && !config.openai_api_key.as_ref().unwrap().is_empty() + } else { + false + } + } + + pub async fn chat( + &self, + mut messages: Vec<Message>, + config: &AssistantConfig, + ) -> Result<Message, String> { + // Inject system prompt and log context + if !messages.iter().any(|m| m.role == "system") { + let context = self.get_log_context(); + let mut system_content = config.system_prompt.clone(); + + 
// Add language instruction if not auto + if config.response_language != "auto" { + system_content = format!("{}\n\nIMPORTANT: Respond in {}. Do not include Pinyin or English translations unless explicitly requested.", system_content, config.response_language); + } + + // Add log context if available + if !context.is_empty() { + system_content = format!( + "{}\n\nRecent game logs:\n```\n{}\n```", + system_content, context + ); + } + + messages.insert( + 0, + Message { + role: "system".to_string(), + content: system_content, + }, + ); + } + + if config.llm_provider == "ollama" { + self.chat_ollama(messages, config).await + } else if config.llm_provider == "openai" { + self.chat_openai(messages, config).await + } else { + Err(format!("Unknown LLM provider: {}", config.llm_provider)) + } + } + + async fn chat_ollama( + &self, + messages: Vec<Message>, + config: &AssistantConfig, + ) -> Result<Message, String> { + let request = OllamaChatRequest { + model: config.ollama_model.clone(), + messages, + stream: false, + }; + + let response = self + .client + .post(format!("{}/api/chat", config.ollama_endpoint)) + .json(&request) + .send() + .await + .map_err(|e| format!("Ollama request failed: {}", e))?; + + if !response.status().is_success() { + return Err(format!("Ollama API returned error: {}", response.status())); + } + + let chat_response: OllamaChatResponse = response + .json() + .await + .map_err(|e| format!("Failed to parse Ollama response: {}", e))?; + + Ok(chat_response.message) + } + + async fn chat_openai( + &self, + messages: Vec<Message>, + config: &AssistantConfig, + ) -> Result<Message, String> { + let api_key = config + .openai_api_key + .as_ref() + .ok_or("OpenAI API key not configured")?; + + let request = OpenAIChatRequest { + model: config.openai_model.clone(), + messages, + stream: false, + }; + + let response = self + .client + .post(format!("{}/chat/completions", config.openai_endpoint)) + .header("Authorization", format!("Bearer {}", api_key)) + 
.header("Content-Type", "application/json") + .json(&request) + .send() + .await + .map_err(|e| format!("OpenAI request failed: {}", e))?; + + if !response.status().is_success() { + let status = response.status(); + let error_text = response.text().await.unwrap_or_default(); + return Err(format!("OpenAI API error ({}): {}", status, error_text)); + } + + let chat_response: OpenAIChatResponse = response + .json() + .await + .map_err(|e| format!("Failed to parse OpenAI response: {}", e))?; + + chat_response + .choices + .into_iter() + .next() + .map(|c| c.message) + .ok_or_else(|| "No response from OpenAI".to_string()) + } + + pub async fn list_ollama_models(&self, endpoint: &str) -> Result<Vec<ModelInfo>, String> { + let response = self + .client + .get(format!("{}/api/tags", endpoint)) + .send() + .await + .map_err(|e| format!("Failed to connect to Ollama: {}", e))?; + + if !response.status().is_success() { + return Err(format!("Ollama API error: {}", response.status())); + } + + let tags_response: OllamaTagsResponse = response + .json() + .await + .map_err(|e| format!("Failed to parse Ollama response: {}", e))?; + + let models: Vec<ModelInfo> = tags_response + .models + .into_iter() + .map(|m| { + let size_str = m.size.map(format_size); + let details_str = m.details.map(|d| { + let mut parts = Vec::new(); + if let Some(family) = d.family { + parts.push(family); + } + if let Some(params) = d.parameter_size { + parts.push(params); + } + if let Some(quant) = d.quantization_level { + parts.push(quant); + } + parts.join(" / ") + }); + + ModelInfo { + id: m.name.clone(), + name: m.name, + size: size_str, + details: details_str, + } + }) + .collect(); + + Ok(models) + } + + pub async fn list_openai_models( + &self, + config: &AssistantConfig, + ) -> Result<Vec<ModelInfo>, String> { + let api_key = config + .openai_api_key + .as_ref() + .ok_or("OpenAI API key not configured")?; + + let response = self + .client + .get(format!("{}/models", config.openai_endpoint)) + 
.header("Authorization", format!("Bearer {}", api_key)) + .send() + .await + .map_err(|e| format!("Failed to connect to OpenAI: {}", e))?; + + if !response.status().is_success() { + let status = response.status(); + let error_text = response.text().await.unwrap_or_default(); + return Err(format!("OpenAI API error ({}): {}", status, error_text)); + } + + let models_response: OpenAIModelsResponse = response + .json() + .await + .map_err(|e| format!("Failed to parse OpenAI response: {}", e))?; + + // Filter to only show chat models (gpt-*) + let models: Vec<ModelInfo> = models_response + .data + .into_iter() + .filter(|m| { + m.id.starts_with("gpt-") || m.id.starts_with("o1") || m.id.contains("turbo") + }) + .map(|m| ModelInfo { + id: m.id.clone(), + name: m.id, + size: None, + details: m.owned_by, + }) + .collect(); + + Ok(models) + } + + // Streaming chat methods + pub async fn chat_stream( + &self, + mut messages: Vec<Message>, + config: &AssistantConfig, + window: &Window, + ) -> Result<String, String> { + // Inject system prompt and log context + if !messages.iter().any(|m| m.role == "system") { + let context = self.get_log_context(); + let mut system_content = config.system_prompt.clone(); + + if config.response_language != "auto" { + system_content = format!("{}\n\nIMPORTANT: Respond in {}. 
Do not include Pinyin or English translations unless explicitly requested.", system_content, config.response_language); + } + + if !context.is_empty() { + system_content = format!( + "{}\n\nRecent game logs:\n```\n{}\n```", + system_content, context + ); + } + + messages.insert( + 0, + Message { + role: "system".to_string(), + content: system_content, + }, + ); + } + + if config.llm_provider == "ollama" { + self.chat_stream_ollama(messages, config, window).await + } else if config.llm_provider == "openai" { + self.chat_stream_openai(messages, config, window).await + } else { + Err(format!("Unknown LLM provider: {}", config.llm_provider)) + } + } + + async fn chat_stream_ollama( + &self, + messages: Vec<Message>, + config: &AssistantConfig, + window: &Window, + ) -> Result<String, String> { + let request = OllamaChatRequest { + model: config.ollama_model.clone(), + messages, + stream: true, + }; + + let response = self + .client + .post(format!("{}/api/chat", config.ollama_endpoint)) + .json(&request) + .send() + .await + .map_err(|e| format!("Ollama request failed: {}", e))?; + + if !response.status().is_success() { + return Err(format!("Ollama API returned error: {}", response.status())); + } + + let mut full_content = String::new(); + let mut stream = response.bytes_stream(); + + while let Some(chunk_result) = stream.next().await { + match chunk_result { + Ok(chunk) => { + let text = String::from_utf8_lossy(&chunk); + // Ollama returns newline-delimited JSON + for line in text.lines() { + if line.trim().is_empty() { + continue; + } + if let Ok(stream_response) = + serde_json::from_str::<OllamaStreamResponse>(line) + { + if let Some(msg) = stream_response.message { + full_content.push_str(&msg.content); + let _ = window.emit( + "assistant-stream", + StreamChunk { + content: msg.content, + done: stream_response.done, + stats: None, + }, + ); + } + if stream_response.done { + let stats = if let ( + Some(total), + Some(load), + Some(prompt_cnt), + Some(prompt_dur), + 
Some(eval_cnt), + Some(eval_dur), + ) = ( + stream_response.total_duration, + stream_response.load_duration, + stream_response.prompt_eval_count, + stream_response.prompt_eval_duration, + stream_response.eval_count, + stream_response.eval_duration, + ) { + Some(GenerationStats { + total_duration: total, + load_duration: load, + prompt_eval_count: prompt_cnt, + prompt_eval_duration: prompt_dur, + eval_count: eval_cnt, + eval_duration: eval_dur, + }) + } else { + None + }; + + let _ = window.emit( + "assistant-stream", + StreamChunk { + content: String::new(), + done: true, + stats, + }, + ); + } + } + } + } + Err(e) => { + return Err(format!("Stream error: {}", e)); + } + } + } + + Ok(full_content) + } + + async fn chat_stream_openai( + &self, + messages: Vec<Message>, + config: &AssistantConfig, + window: &Window, + ) -> Result<String, String> { + let api_key = config + .openai_api_key + .as_ref() + .ok_or("OpenAI API key not configured")?; + + let request = OpenAIChatRequest { + model: config.openai_model.clone(), + messages, + stream: true, + }; + + let response = self + .client + .post(format!("{}/chat/completions", config.openai_endpoint)) + .header("Authorization", format!("Bearer {}", api_key)) + .header("Content-Type", "application/json") + .json(&request) + .send() + .await + .map_err(|e| format!("OpenAI request failed: {}", e))?; + + if !response.status().is_success() { + let status = response.status(); + let error_text = response.text().await.unwrap_or_default(); + return Err(format!("OpenAI API error ({}): {}", status, error_text)); + } + + let mut full_content = String::new(); + let mut stream = response.bytes_stream(); + let mut buffer = String::new(); + + while let Some(chunk_result) = stream.next().await { + match chunk_result { + Ok(chunk) => { + buffer.push_str(&String::from_utf8_lossy(&chunk)); + + // Process complete lines + while let Some(pos) = buffer.find('\n') { + let line = buffer[..pos].to_string(); + buffer = buffer[pos + 1..].to_string(); 
+ + let line = line.trim(); + if line.is_empty() || line == "data: [DONE]" { + if line == "data: [DONE]" { + let _ = window.emit( + "assistant-stream", + StreamChunk { + content: String::new(), + done: true, + stats: None, + }, + ); + } + continue; + } + + if let Some(data) = line.strip_prefix("data: ") { + if let Ok(stream_response) = + serde_json::from_str::<OpenAIStreamResponse>(data) + { + if let Some(choice) = stream_response.choices.first() { + if let Some(content) = &choice.delta.content { + full_content.push_str(content); + let _ = window.emit( + "assistant-stream", + StreamChunk { + content: content.clone(), + done: false, + stats: None, + }, + ); + } + if choice.finish_reason.is_some() { + let _ = window.emit( + "assistant-stream", + StreamChunk { + content: String::new(), + done: true, + stats: None, + }, + ); + } + } + } + } + } + } + Err(e) => { + return Err(format!("Stream error: {}", e)); + } + } + } + + Ok(full_content) + } +} + +fn format_size(bytes: u64) -> String { + const KB: u64 = 1024; + const MB: u64 = KB * 1024; + const GB: u64 = MB * 1024; + + if bytes >= GB { + format!("{:.1} GB", bytes as f64 / GB as f64) + } else if bytes >= MB { + format!("{:.1} MB", bytes as f64 / MB as f64) + } else if bytes >= KB { + format!("{:.1} KB", bytes as f64 / KB as f64) + } else { + format!("{} B", bytes) + } +} + +pub struct AssistantState { + pub assistant: Arc<Mutex<GameAssistant>>, +} + +impl AssistantState { + pub fn new() -> Self { + Self { + assistant: Arc::new(Mutex::new(GameAssistant::new())), + } + } +} diff --git a/src-tauri/src/core/auth.rs b/src-tauri/src/core/auth.rs index 5f01a58..ac5904c 100644 --- a/src-tauri/src/core/auth.rs +++ b/src-tauri/src/core/auth.rs @@ -136,6 +136,7 @@ pub async fn refresh_microsoft_token(refresh_token: &str) -> Result<TokenRespons } /// Check if a Microsoft account token is expired or about to expire +#[allow(dead_code)] pub fn is_token_expired(expires_at: i64) -> bool { let now = std::time::SystemTime::now() 
.duration_since(std::time::UNIX_EPOCH) @@ -430,17 +431,21 @@ pub async fn fetch_profile(mc_access_token: &str) -> Result<MinecraftProfile, St // 7. Check Game Ownership #[derive(Debug, Serialize, Deserialize)] +#[allow(dead_code)] pub struct Entitlement { pub name: String, } #[derive(Debug, Serialize, Deserialize)] +#[allow(dead_code)] pub struct EntitlementsResponse { pub items: Vec<Entitlement>, pub signature: Option<String>, - pub keyId: Option<String>, + #[serde(rename = "keyId")] + pub key_id: Option<String>, } +#[allow(dead_code)] pub async fn check_ownership(mc_access_token: &str) -> Result<bool, String> { let client = get_client(); let url = "https://api.minecraftservices.com/entitlements/mcstore"; diff --git a/src-tauri/src/core/config.rs b/src-tauri/src/core/config.rs index 43c8145..4c4acad 100644 --- a/src-tauri/src/core/config.rs +++ b/src-tauri/src/core/config.rs @@ -6,6 +6,44 @@ use tauri::{AppHandle, Manager}; #[derive(Debug, Clone, Serialize, Deserialize)] #[serde(default)] +pub struct AssistantConfig { + pub enabled: bool, + pub llm_provider: String, // "ollama" or "openai" + // Ollama settings + pub ollama_endpoint: String, + pub ollama_model: String, + // OpenAI settings + pub openai_api_key: Option<String>, + pub openai_endpoint: String, + pub openai_model: String, + // Common settings + pub system_prompt: String, + pub response_language: String, + // TTS settings + pub tts_enabled: bool, + pub tts_provider: String, // "disabled", "piper", "edge" +} + +impl Default for AssistantConfig { + fn default() -> Self { + Self { + enabled: true, + llm_provider: "ollama".to_string(), + ollama_endpoint: "http://localhost:11434".to_string(), + ollama_model: "llama3".to_string(), + openai_api_key: None, + openai_endpoint: "https://api.openai.com/v1".to_string(), + openai_model: "gpt-3.5-turbo".to_string(), + system_prompt: "You are a helpful Minecraft expert assistant. 
You help players with game issues, mod installation, performance optimization, and gameplay tips. Analyze any game logs provided and give concise, actionable advice.".to_string(), + response_language: "auto".to_string(), + tts_enabled: false, + tts_provider: "disabled".to_string(), + } + } +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(default)] pub struct LauncherConfig { pub min_memory: u32, // in MB pub max_memory: u32, // in MB @@ -20,6 +58,7 @@ pub struct LauncherConfig { pub theme: String, pub log_upload_service: String, // "paste.rs" or "pastebin.com" pub pastebin_api_key: Option<String>, + pub assistant: AssistantConfig, } impl Default for LauncherConfig { @@ -38,6 +77,7 @@ impl Default for LauncherConfig { theme: "dark".to_string(), log_upload_service: "paste.rs".to_string(), pastebin_api_key: None, + assistant: AssistantConfig::default(), } } } diff --git a/src-tauri/src/core/downloader.rs b/src-tauri/src/core/downloader.rs index bf6334f..9c6b7f0 100644 --- a/src-tauri/src/core/downloader.rs +++ b/src-tauri/src/core/downloader.rs @@ -111,9 +111,8 @@ impl DownloadQueue { /// Remove a completed or cancelled download pub fn remove(&mut self, major_version: u32, image_type: &str) { - self.pending_downloads.retain(|d| { - !(d.major_version == major_version && d.image_type == image_type) - }); + self.pending_downloads + .retain(|d| !(d.major_version == major_version && d.image_type == image_type)); } } @@ -174,7 +173,8 @@ pub async fn download_with_resume( let content = tokio::fs::read_to_string(&meta_path) .await .map_err(|e| e.to_string())?; - serde_json::from_str(&content).unwrap_or_else(|_| create_new_metadata(url, &file_name, total_size, checksum)) + serde_json::from_str(&content) + .unwrap_or_else(|_| create_new_metadata(url, &file_name, total_size, checksum)) } else { create_new_metadata(url, &file_name, total_size, checksum) }; @@ -191,6 +191,7 @@ pub async fn download_with_resume( .create(true) .write(true) .read(true) + 
.truncate(false) .open(&part_path) .await .map_err(|e| format!("Failed to open part file: {}", e))?; @@ -220,9 +221,7 @@ pub async fn download_with_resume( let segment_end = segment.end; let app_handle = app_handle.clone(); let file_name = file_name.clone(); - let total_size = total_size; let last_progress_bytes = last_progress_bytes.clone(); - let start_time = start_time.clone(); let handle = tokio::spawn(async move { let _permit = semaphore.acquire().await.unwrap(); @@ -240,7 +239,9 @@ pub async fn download_with_resume( .await .map_err(|e| format!("Request failed: {}", e))?; - if !response.status().is_success() && response.status() != reqwest::StatusCode::PARTIAL_CONTENT { + if !response.status().is_success() + && response.status() != reqwest::StatusCode::PARTIAL_CONTENT + { return Err(format!("Server returned error: {}", response.status())); } @@ -319,7 +320,8 @@ pub async fn download_with_resume( if e.contains("cancelled") { // Save progress for resume metadata.downloaded_bytes = progress.load(Ordering::Relaxed); - let meta_content = serde_json::to_string_pretty(&metadata).map_err(|e| e.to_string())?; + let meta_content = + serde_json::to_string_pretty(&metadata).map_err(|e| e.to_string())?; tokio::fs::write(&meta_path, meta_content).await.ok(); return Err(e); } @@ -357,7 +359,7 @@ pub async fn download_with_resume( let data = tokio::fs::read(&part_path) .await .map_err(|e| format!("Failed to read file for verification: {}", e))?; - + if !verify_checksum(&data, Some(expected), None) { // Checksum failed, delete files and retry tokio::fs::remove_file(&part_path).await.ok(); @@ -378,7 +380,12 @@ pub async fn download_with_resume( } /// Create new download metadata with segments -fn create_new_metadata(url: &str, file_name: &str, total_size: u64, checksum: Option<&str>) -> DownloadMetadata { +fn create_new_metadata( + url: &str, + file_name: &str, + total_size: u64, + checksum: Option<&str>, +) -> DownloadMetadata { let segment_count = 
get_segment_count(total_size); let segment_size = total_size / segment_count as u64; let mut segments = Vec::new(); @@ -559,11 +566,7 @@ pub async fn download_files( if task.sha256.is_some() || task.sha1.is_some() { if let Ok(data) = tokio::fs::read(&task.path).await { - if verify_checksum( - &data, - task.sha256.as_deref(), - task.sha1.as_deref(), - ) { + if verify_checksum(&data, task.sha256.as_deref(), task.sha1.as_deref()) { // Already valid, skip download let skipped_size = tokio::fs::metadata(&task.path) .await diff --git a/src-tauri/src/core/fabric.rs b/src-tauri/src/core/fabric.rs index 3e4d50d..32790c7 100644 --- a/src-tauri/src/core/fabric.rs +++ b/src-tauri/src/core/fabric.rs @@ -67,13 +67,11 @@ pub struct FabricLibrary { #[derive(Debug, Deserialize, Serialize, Clone)] #[serde(untagged)] pub enum FabricMainClass { - Structured { - client: String, - server: String, - }, + Structured { client: String, server: String }, Simple(String), } +#[allow(dead_code)] impl FabricMainClass { pub fn client(&self) -> &str { match self { @@ -81,7 +79,7 @@ impl FabricMainClass { FabricMainClass::Simple(s) => s, } } - + pub fn server(&self) -> &str { match self { FabricMainClass::Structured { server, .. } => server, @@ -200,7 +198,7 @@ pub fn generate_version_id(game_version: &str, loader_version: &str) -> String { /// # Returns /// Information about the installed version. pub async fn install_fabric( - game_dir: &PathBuf, + game_dir: &std::path::Path, game_version: &str, loader_version: &str, ) -> Result<InstalledFabricVersion, Box<dyn Error + Send + Sync>> { @@ -240,7 +238,11 @@ pub async fn install_fabric( /// /// # Returns /// `true` if the version JSON exists, `false` otherwise. 
-pub fn is_fabric_installed(game_dir: &PathBuf, game_version: &str, loader_version: &str) -> bool { +pub fn is_fabric_installed( + game_dir: &std::path::Path, + game_version: &str, + loader_version: &str, +) -> bool { let version_id = generate_version_id(game_version, loader_version); let json_path = game_dir .join("versions") @@ -257,7 +259,7 @@ pub fn is_fabric_installed(game_dir: &PathBuf, game_version: &str, loader_versio /// # Returns /// A list of installed Fabric version IDs. pub async fn list_installed_fabric_versions( - game_dir: &PathBuf, + game_dir: &std::path::Path, ) -> Result<Vec<String>, Box<dyn Error + Send + Sync>> { let versions_dir = game_dir.join("versions"); let mut installed = Vec::new(); diff --git a/src-tauri/src/core/forge.rs b/src-tauri/src/core/forge.rs index e69b296..c8bd6e4 100644 --- a/src-tauri/src/core/forge.rs +++ b/src-tauri/src/core/forge.rs @@ -43,6 +43,7 @@ pub struct InstalledForgeVersion { /// Forge installer manifest structure (from version.json inside installer JAR) #[derive(Debug, Deserialize)] +#[allow(dead_code)] struct ForgeInstallerManifest { id: Option<String>, #[serde(rename = "inheritsFrom")] @@ -183,30 +184,30 @@ async fn fetch_forge_installer_manifest( forge_version: &str, ) -> Result<ForgeInstallerManifest, Box<dyn Error + Send + Sync>> { let forge_full = format!("{}-{}", game_version, forge_version); - + // Download the installer JAR to extract version.json let installer_url = format!( "{}net/minecraftforge/forge/{}/forge-{}-installer.jar", FORGE_MAVEN_URL, forge_full, forge_full ); - + println!("Fetching Forge installer from: {}", installer_url); - + let response = reqwest::get(&installer_url).await?; if !response.status().is_success() { return Err(format!("Failed to download Forge installer: {}", response.status()).into()); } - + let bytes = response.bytes().await?; - + // Extract version.json from the JAR (which is a ZIP file) let cursor = std::io::Cursor::new(bytes.as_ref()); let mut archive = 
zip::ZipArchive::new(cursor)?; - + // Look for version.json in the archive let version_json = archive.by_name("version.json")?; let manifest: ForgeInstallerManifest = serde_json::from_reader(version_json)?; - + Ok(manifest) } @@ -224,7 +225,7 @@ async fn fetch_forge_installer_manifest( /// # Returns /// Information about the installed version. pub async fn install_forge( - game_dir: &PathBuf, + game_dir: &std::path::Path, game_version: &str, forge_version: &str, ) -> Result<InstalledForgeVersion, Box<dyn Error + Send + Sync>> { @@ -234,7 +235,8 @@ pub async fn install_forge( let manifest = fetch_forge_installer_manifest(game_version, forge_version).await?; // Create version JSON from the manifest - let version_json = create_forge_version_json_from_manifest(game_version, forge_version, &manifest)?; + let version_json = + create_forge_version_json_from_manifest(game_version, forge_version, &manifest)?; // Create the version directory let version_dir = game_dir.join("versions").join(&version_id); @@ -275,20 +277,20 @@ pub async fn run_forge_installer( "{}net/minecraftforge/forge/{}-{}/forge-{}-{}-installer.jar", FORGE_MAVEN_URL, game_version, forge_version, game_version, forge_version ); - + let installer_path = game_dir.join("forge-installer.jar"); - + // Download installer let client = reqwest::Client::new(); let response = client.get(&installer_url).send().await?; - + if !response.status().is_success() { return Err(format!("Failed to download Forge installer: {}", response.status()).into()); } - + let bytes = response.bytes().await?; tokio::fs::write(&installer_path, &bytes).await?; - + // Run the installer in headless mode // The installer accepts --installClient <path> to install to a specific directory let output = tokio::process::Command::new(java_path) @@ -298,19 +300,20 @@ pub async fn run_forge_installer( .arg(game_dir) .output() .await?; - + // Clean up installer let _ = tokio::fs::remove_file(&installer_path).await; - + if !output.status.success() { let 
stderr = String::from_utf8_lossy(&output.stderr); let stdout = String::from_utf8_lossy(&output.stdout); return Err(format!( "Forge installer failed:\nstdout: {}\nstderr: {}", stdout, stderr - ).into()); + ) + .into()); } - + Ok(()) } @@ -332,13 +335,14 @@ fn create_forge_version_json_from_manifest( }); // Convert libraries to JSON format, preserving download info - let lib_entries: Vec<serde_json::Value> = manifest.libraries + let lib_entries: Vec<serde_json::Value> = manifest + .libraries .iter() .map(|lib| { let mut entry = serde_json::json!({ "name": lib.name }); - + // Add URL if present if let Some(url) = &lib.url { entry["url"] = serde_json::Value::String(url.clone()); @@ -346,19 +350,22 @@ fn create_forge_version_json_from_manifest( // Default to Forge Maven for Forge libraries entry["url"] = serde_json::Value::String(FORGE_MAVEN_URL.to_string()); } - + // Add downloads if present if let Some(downloads) = &lib.downloads { if let Some(artifact) = &downloads.artifact { let mut artifact_json = serde_json::Map::new(); if let Some(path) = &artifact.path { - artifact_json.insert("path".to_string(), serde_json::Value::String(path.clone())); + artifact_json + .insert("path".to_string(), serde_json::Value::String(path.clone())); } if let Some(url) = &artifact.url { - artifact_json.insert("url".to_string(), serde_json::Value::String(url.clone())); + artifact_json + .insert("url".to_string(), serde_json::Value::String(url.clone())); } if let Some(sha1) = &artifact.sha1 { - artifact_json.insert("sha1".to_string(), serde_json::Value::String(sha1.clone())); + artifact_json + .insert("sha1".to_string(), serde_json::Value::String(sha1.clone())); } if !artifact_json.is_empty() { entry["downloads"] = serde_json::json!({ @@ -367,7 +374,7 @@ fn create_forge_version_json_from_manifest( } } } - + entry }) .collect(); @@ -377,7 +384,7 @@ fn create_forge_version_json_from_manifest( "game": [], "jvm": [] }); - + if let Some(args) = &manifest.arguments { if let Some(game_args) = 
&args.game { arguments["game"] = serde_json::Value::Array(game_args.clone()); @@ -461,7 +468,12 @@ fn is_modern_forge(game_version: &str) -> bool { /// /// # Returns /// `true` if the version JSON exists, `false` otherwise. -pub fn is_forge_installed(game_dir: &PathBuf, game_version: &str, forge_version: &str) -> bool { +#[allow(dead_code)] +pub fn is_forge_installed( + game_dir: &std::path::Path, + game_version: &str, + forge_version: &str, +) -> bool { let version_id = generate_version_id(game_version, forge_version); let json_path = game_dir .join("versions") @@ -477,8 +489,9 @@ pub fn is_forge_installed(game_dir: &PathBuf, game_version: &str, forge_version: /// /// # Returns /// A list of installed Forge version IDs. +#[allow(dead_code)] pub async fn list_installed_forge_versions( - game_dir: &PathBuf, + game_dir: &std::path::Path, ) -> Result<Vec<String>, Box<dyn Error + Send + Sync>> { let versions_dir = game_dir.join("versions"); let mut installed = Vec::new(); diff --git a/src-tauri/src/core/java.rs b/src-tauri/src/core/java.rs index 8341138..ac52da6 100644 --- a/src-tauri/src/core/java.rs +++ b/src-tauri/src/core/java.rs @@ -5,7 +5,7 @@ use tauri::AppHandle; use tauri::Emitter; use tauri::Manager; -use crate::core::downloader::{self, JavaDownloadProgress, DownloadQueue, PendingJavaDownload}; +use crate::core::downloader::{self, DownloadQueue, JavaDownloadProgress, PendingJavaDownload}; use crate::utils::zip; const ADOPTIUM_API_BASE: &str = "https://api.adoptium.net/v3"; @@ -58,7 +58,7 @@ pub struct JavaReleaseInfo { } /// Java catalog containing all available versions -#[derive(Debug, Clone, Serialize, Deserialize)] +#[derive(Debug, Clone, Serialize, Deserialize, Default)] pub struct JavaCatalog { pub releases: Vec<JavaReleaseInfo>, pub available_major_versions: Vec<u32>, @@ -66,17 +66,6 @@ pub struct JavaCatalog { pub cached_at: u64, } -impl Default for JavaCatalog { - fn default() -> Self { - Self { - releases: Vec::new(), - available_major_versions: 
Vec::new(), - lts_versions: Vec::new(), - cached_at: 0, - } - } -} - /// Adoptium `/v3/assets/latest/{version}/hotspot` API response structures #[derive(Debug, Clone, Deserialize)] pub struct AdoptiumAsset { @@ -86,6 +75,7 @@ pub struct AdoptiumAsset { } #[derive(Debug, Clone, Deserialize)] +#[allow(dead_code)] pub struct AdoptiumBinary { pub os: String, pub architecture: String, @@ -104,6 +94,7 @@ pub struct AdoptiumPackage { } #[derive(Debug, Clone, Deserialize)] +#[allow(dead_code)] pub struct AdoptiumVersionData { pub major: u32, pub minor: u32, @@ -114,6 +105,7 @@ pub struct AdoptiumVersionData { /// Adoptium available releases response #[derive(Debug, Clone, Deserialize)] +#[allow(dead_code)] pub struct AvailableReleases { pub available_releases: Vec<u32>, pub available_lts_releases: Vec<u32>, @@ -231,6 +223,7 @@ pub fn save_catalog_cache(app_handle: &AppHandle, catalog: &JavaCatalog) -> Resu } /// Clear Java catalog cache +#[allow(dead_code)] pub fn clear_catalog_cache(app_handle: &AppHandle) -> Result<(), String> { let cache_path = get_catalog_cache_path(app_handle); if cache_path.exists() { @@ -240,7 +233,10 @@ pub fn clear_catalog_cache(app_handle: &AppHandle) -> Result<(), String> { } /// Fetch complete Java catalog from Adoptium API with platform availability check -pub async fn fetch_java_catalog(app_handle: &AppHandle, force_refresh: bool) -> Result<JavaCatalog, String> { +pub async fn fetch_java_catalog( + app_handle: &AppHandle, + force_refresh: bool, +) -> Result<JavaCatalog, String> { // Check cache first unless force refresh if !force_refresh { if let Some(cached) = load_cached_catalog(app_handle) { @@ -294,7 +290,9 @@ pub async fn fetch_java_catalog(app_handle: &AppHandle, force_refresh: bool) -> file_size: asset.binary.package.size, checksum: asset.binary.package.checksum, download_url: asset.binary.package.link, - is_lts: available.available_lts_releases.contains(major_version), + is_lts: available + .available_lts_releases + 
.contains(major_version), is_available: true, architecture: asset.binary.architecture.clone(), }); @@ -547,7 +545,11 @@ pub async fn download_and_install_java( // Linux/Windows: jdk-xxx/bin/java let java_home = version_dir.join(&top_level_dir); let java_bin = if cfg!(target_os = "macos") { - java_home.join("Contents").join("Home").join("bin").join("java") + java_home + .join("Contents") + .join("Home") + .join("bin") + .join("java") } else if cfg!(windows) { java_home.join("bin").join("java.exe") } else { @@ -885,7 +887,7 @@ pub fn detect_all_java_installations(app_handle: &AppHandle) -> Vec<JavaInstalla installations } -//// Find the java executable in a directory using a limited-depth search +/// Find the java executable in a directory using a limited-depth search fn find_java_executable(dir: &PathBuf) -> Option<PathBuf> { let bin_name = if cfg!(windows) { "java.exe" } else { "java" }; @@ -918,7 +920,11 @@ fn find_java_executable(dir: &PathBuf) -> Option<PathBuf> { // macOS: nested/Contents/Home/bin/java #[cfg(target_os = "macos")] { - let macos_nested = path.join("Contents").join("Home").join("bin").join(bin_name); + let macos_nested = path + .join("Contents") + .join("Home") + .join("bin") + .join(bin_name); if macos_nested.exists() { return Some(macos_nested); } @@ -931,7 +937,9 @@ fn find_java_executable(dir: &PathBuf) -> Option<PathBuf> { } /// Resume pending Java downloads from queue -pub async fn resume_pending_downloads(app_handle: &AppHandle) -> Result<Vec<JavaInstallation>, String> { +pub async fn resume_pending_downloads( + app_handle: &AppHandle, +) -> Result<Vec<JavaInstallation>, String> { let queue = DownloadQueue::load(app_handle); let mut installed = Vec::new(); @@ -978,7 +986,12 @@ pub fn get_pending_downloads(app_handle: &AppHandle) -> Vec<PendingJavaDownload> } /// Clear a specific pending download -pub fn clear_pending_download(app_handle: &AppHandle, major_version: u32, image_type: &str) -> Result<(), String> { +#[allow(dead_code)] +pub fn 
clear_pending_download( + app_handle: &AppHandle, + major_version: u32, + image_type: &str, +) -> Result<(), String> { let mut queue = DownloadQueue::load(app_handle); queue.remove(major_version, image_type); queue.save(app_handle) diff --git a/src-tauri/src/core/manifest.rs b/src-tauri/src/core/manifest.rs index bae87c9..d92ae58 100644 --- a/src-tauri/src/core/manifest.rs +++ b/src-tauri/src/core/manifest.rs @@ -45,7 +45,7 @@ pub async fn fetch_version_manifest() -> Result<VersionManifest, Box<dyn Error + /// # Returns /// The parsed `GameVersion` if found, or an error if not found. pub async fn load_local_version( - game_dir: &PathBuf, + game_dir: &std::path::Path, version_id: &str, ) -> Result<GameVersion, Box<dyn Error + Send + Sync>> { let json_path = game_dir @@ -102,7 +102,7 @@ pub async fn fetch_vanilla_version( /// # Returns /// A fully resolved `GameVersion` ready for launching. pub async fn load_version( - game_dir: &PathBuf, + game_dir: &std::path::Path, version_id: &str, ) -> Result<GameVersion, Box<dyn Error + Send + Sync>> { // Try loading from local first @@ -138,7 +138,7 @@ pub async fn load_version( /// # Returns /// The path where the JSON was saved. pub async fn save_local_version( - game_dir: &PathBuf, + game_dir: &std::path::Path, version: &GameVersion, ) -> Result<PathBuf, Box<dyn Error + Send + Sync>> { let version_dir = game_dir.join("versions").join(&version.id); @@ -158,8 +158,9 @@ pub async fn save_local_version( /// /// # Returns /// A list of version IDs found in the versions directory. 
+#[allow(dead_code)] pub async fn list_local_versions( - game_dir: &PathBuf, + game_dir: &std::path::Path, ) -> Result<Vec<String>, Box<dyn Error + Send + Sync>> { let versions_dir = game_dir.join("versions"); let mut versions = Vec::new(); diff --git a/src-tauri/src/core/maven.rs b/src-tauri/src/core/maven.rs index 8c89768..760e68b 100644 --- a/src-tauri/src/core/maven.rs +++ b/src-tauri/src/core/maven.rs @@ -8,6 +8,7 @@ use std::path::PathBuf; /// Known Maven repository URLs for mod loaders +#[allow(dead_code)] pub const MAVEN_CENTRAL: &str = "https://repo1.maven.org/maven2/"; pub const FABRIC_MAVEN: &str = "https://maven.fabricmc.net/"; pub const FORGE_MAVEN: &str = "https://maven.minecraftforge.net/"; @@ -114,7 +115,7 @@ impl MavenCoordinate { /// /// # Returns /// The full path where the library should be stored - pub fn to_local_path(&self, libraries_dir: &PathBuf) -> PathBuf { + pub fn to_local_path(&self, libraries_dir: &std::path::Path) -> PathBuf { let rel_path = self.to_path(); libraries_dir.join(rel_path.replace('/', std::path::MAIN_SEPARATOR_STR)) } @@ -183,7 +184,7 @@ pub fn resolve_library_url( /// /// # Returns /// The path where the library should be stored -pub fn get_library_path(name: &str, libraries_dir: &PathBuf) -> Option<PathBuf> { +pub fn get_library_path(name: &str, libraries_dir: &std::path::Path) -> Option<PathBuf> { let coord = MavenCoordinate::parse(name)?; Some(coord.to_local_path(libraries_dir)) } diff --git a/src-tauri/src/core/mod.rs b/src-tauri/src/core/mod.rs index 3c09a76..7ad6ef9 100644 --- a/src-tauri/src/core/mod.rs +++ b/src-tauri/src/core/mod.rs @@ -1,4 +1,5 @@ pub mod account_storage; +pub mod assistant; pub mod auth; pub mod config; pub mod downloader; diff --git a/src-tauri/src/core/version_merge.rs b/src-tauri/src/core/version_merge.rs index fe6b3cd..098d271 100644 --- a/src-tauri/src/core/version_merge.rs +++ b/src-tauri/src/core/version_merge.rs @@ -101,6 +101,7 @@ fn merge_json_arrays( /// /// # Returns /// `true` if 
the version has an `inheritsFrom` field that needs resolution. +#[allow(dead_code)] pub fn needs_inheritance_resolution(version: &GameVersion) -> bool { version.inherits_from.is_some() } @@ -116,6 +117,7 @@ pub fn needs_inheritance_resolution(version: &GameVersion) -> bool { /// /// # Returns /// A fully merged `GameVersion` with all inheritance resolved. +#[allow(dead_code)] pub async fn resolve_inheritance<F, Fut>( version: GameVersion, version_loader: F, diff --git a/src-tauri/src/main.rs b/src-tauri/src/main.rs index b59ae31..6ea6ece 100644 --- a/src-tauri/src/main.rs +++ b/src-tauri/src/main.rs @@ -1,12 +1,12 @@ // Prevents additional console window on Windows in release, DO NOT REMOVE!! #![cfg_attr(not(debug_assertions), windows_subsystem = "windows")] +use serde::Serialize; use std::process::Stdio; use std::sync::Mutex; use tauri::{Emitter, Manager, State, Window}; // Added Emitter use tokio::io::{AsyncBufReadExt, BufReader}; -use tokio::process::Command; -use serde::Serialize; // Added Serialize +use tokio::process::Command; // Added Serialize #[cfg(target_os = "windows")] use std::os::windows::process::CommandExt; @@ -67,6 +67,7 @@ async fn start_game( window: Window, auth_state: State<'_, core::auth::AccountState>, config_state: State<'_, core::config::ConfigState>, + assistant_state: State<'_, core::assistant::AssistantState>, version_id: String, ) -> Result<String, String> { emit_log!( @@ -83,10 +84,7 @@ async fn start_game( .clone() .ok_or("No active account found. 
Please login first.")?; - emit_log!( - window, - format!("Account found: {}", account.username()) - ); + emit_log!(window, format!("Account found: {}", account.username())); let config = config_state.config.lock().unwrap().clone(); emit_log!(window, format!("Java path: {}", config.java_path)); @@ -119,10 +117,11 @@ async fn start_game( // First, load the local version to get the original inheritsFrom value // (before merge clears it) - let original_inherits_from = match core::manifest::load_local_version(&game_dir, &version_id).await { - Ok(local_version) => local_version.inherits_from.clone(), - Err(_) => None, - }; + let original_inherits_from = + match core::manifest::load_local_version(&game_dir, &version_id).await { + Ok(local_version) => local_version.inherits_from.clone(), + Err(_) => None, + }; let version_details = core::manifest::load_version(&game_dir, &version_id) .await @@ -138,8 +137,7 @@ async fn start_game( // Determine the actual minecraft version for client.jar // (for modded versions, this is the parent vanilla version) - let minecraft_version = original_inherits_from - .unwrap_or_else(|| version_id.clone()); + let minecraft_version = original_inherits_from.unwrap_or_else(|| version_id.clone()); // 2. 
Prepare download tasks emit_log!(window, "Preparing download tasks...".to_string()); @@ -573,9 +571,11 @@ async fn start_game( ); let window_rx = window.clone(); + let assistant_arc = assistant_state.assistant.clone(); tokio::spawn(async move { let mut reader = BufReader::new(stdout).lines(); while let Ok(Some(line)) = reader.next_line().await { + assistant_arc.lock().unwrap().add_log(line.clone()); let _ = window_rx.emit("game-stdout", line); } // Emit log when stdout stream ends (game closing) @@ -583,10 +583,12 @@ async fn start_game( }); let window_rx_err = window.clone(); + let assistant_arc_err = assistant_state.assistant.clone(); let window_exit = window.clone(); tokio::spawn(async move { let mut reader = BufReader::new(stderr).lines(); while let Ok(Some(line)) = reader.next_line().await { + assistant_arc_err.lock().unwrap().add_log(line.clone()); let _ = window_rx_err.emit("game-stderr", line); } // Emit log when stderr stream ends @@ -700,10 +702,18 @@ async fn check_version_installed(window: Window, version_id: String) -> Result<b // For modded versions, check the parent vanilla version let minecraft_version = if version_id.starts_with("fabric-loader-") { // Format: fabric-loader-X.X.X-1.20.4 - version_id.split('-').last().unwrap_or(&version_id).to_string() + version_id + .split('-') + .next_back() + .unwrap_or(&version_id) + .to_string() } else if version_id.contains("-forge-") { // Format: 1.20.4-forge-49.0.38 - version_id.split("-forge-").next().unwrap_or(&version_id).to_string() + version_id + .split("-forge-") + .next() + .unwrap_or(&version_id) + .to_string() } else { version_id.clone() }; @@ -749,21 +759,24 @@ async fn install_version( ); // First, try to fetch the vanilla version from Mojang and save it locally - let version_details = match core::manifest::load_local_version(&game_dir, &version_id).await { + let _version_details = match core::manifest::load_local_version(&game_dir, &version_id).await { Ok(v) => v, Err(_) => { // Not found locally, 
fetch from Mojang - emit_log!(window, format!("Fetching version {} from Mojang...", version_id)); + emit_log!( + window, + format!("Fetching version {} from Mojang...", version_id) + ); let fetched = core::manifest::fetch_vanilla_version(&version_id) .await .map_err(|e| e.to_string())?; - + // Save the version JSON locally emit_log!(window, format!("Saving version JSON...")); core::manifest::save_local_version(&game_dir, &fetched) .await .map_err(|e| e.to_string())?; - + fetched } }; @@ -1056,6 +1069,38 @@ async fn save_settings( } #[tauri::command] +async fn get_config_path(state: State<'_, core::config::ConfigState>) -> Result<String, String> { + Ok(state.file_path.to_string_lossy().to_string()) +} + +#[tauri::command] +async fn read_raw_config(state: State<'_, core::config::ConfigState>) -> Result<String, String> { + tokio::fs::read_to_string(&state.file_path) + .await + .map_err(|e| e.to_string()) +} + +#[tauri::command] +async fn save_raw_config( + state: State<'_, core::config::ConfigState>, + content: String, +) -> Result<(), String> { + // Validate JSON + let new_config: core::config::LauncherConfig = + serde_json::from_str(&content).map_err(|e| format!("Invalid JSON: {}", e))?; + + // Save to file + tokio::fs::write(&state.file_path, &content) + .await + .map_err(|e| e.to_string())?; + + // Update in-memory state + *state.config.lock().unwrap() = new_config; + + Ok(()) +} + +#[tauri::command] async fn start_microsoft_login() -> Result<core::auth::DeviceCodeResponse, String> { core::auth::start_device_flow().await } @@ -1166,7 +1211,9 @@ async fn refresh_account( /// Detect Java installations on the system #[tauri::command] -async fn detect_java(app_handle: tauri::AppHandle) -> Result<Vec<core::java::JavaInstallation>, String> { +async fn detect_java( + app_handle: tauri::AppHandle, +) -> Result<Vec<core::java::JavaInstallation>, String> { Ok(core::java::detect_all_java_installations(&app_handle)) } @@ -1484,11 +1531,13 @@ async fn install_forge( 
config.java_path.clone() } else { // Try to find a suitable Java installation - let javas = core::java::detect_all_java_installations(&app_handle); + let javas = core::java::detect_all_java_installations(app_handle); if let Some(java) = javas.first() { java.path.clone() } else { - return Err("No Java installation found. Please configure Java in settings.".to_string()); + return Err( + "No Java installation found. Please configure Java in settings.".to_string(), + ); } }; let java_path = std::path::PathBuf::from(&java_path_str); @@ -1500,7 +1549,10 @@ async fn install_forge( .await .map_err(|e| format!("Forge installer failed: {}", e))?; - emit_log!(window, "Forge installer completed, creating version profile...".to_string()); + emit_log!( + window, + "Forge installer completed, creating version profile...".to_string() + ); // Now create the version JSON let result = core::forge::install_forge(&game_dir, &game_version, &forge_version) @@ -1547,7 +1599,7 @@ async fn get_github_releases() -> Result<Vec<GithubRelease>, String> { r["name"].as_str(), r["published_at"].as_str(), r["body"].as_str(), - r["html_url"].as_str() + r["html_url"].as_str(), ) { result.push(GithubRelease { tag_name: tag.to_string(), @@ -1589,8 +1641,7 @@ async fn upload_to_pastebin( match service.as_str() { "pastebin.com" => { - let api_key = api_key - .ok_or("Pastebin API Key not configured in settings")?; + let api_key = api_key.ok_or("Pastebin API Key not configured in settings")?; let res = client .post("https://pastebin.com/api/api_post.php") @@ -1636,6 +1687,60 @@ async fn upload_to_pastebin( } } +#[tauri::command] +async fn assistant_check_health( + assistant_state: State<'_, core::assistant::AssistantState>, + config_state: State<'_, core::config::ConfigState>, +) -> Result<bool, String> { + let assistant = assistant_state.assistant.lock().unwrap().clone(); + let config = config_state.config.lock().unwrap().clone(); + Ok(assistant.check_health(&config.assistant).await) +} + 
+#[tauri::command] +async fn assistant_chat( + assistant_state: State<'_, core::assistant::AssistantState>, + config_state: State<'_, core::config::ConfigState>, + messages: Vec<core::assistant::Message>, +) -> Result<core::assistant::Message, String> { + let assistant = assistant_state.assistant.lock().unwrap().clone(); + let config = config_state.config.lock().unwrap().clone(); + assistant.chat(messages, &config.assistant).await +} + +#[tauri::command] +async fn list_ollama_models( + assistant_state: State<'_, core::assistant::AssistantState>, + endpoint: String, +) -> Result<Vec<core::assistant::ModelInfo>, String> { + let assistant = assistant_state.assistant.lock().unwrap().clone(); + assistant.list_ollama_models(&endpoint).await +} + +#[tauri::command] +async fn list_openai_models( + assistant_state: State<'_, core::assistant::AssistantState>, + config_state: State<'_, core::config::ConfigState>, +) -> Result<Vec<core::assistant::ModelInfo>, String> { + let assistant = assistant_state.assistant.lock().unwrap().clone(); + let config = config_state.config.lock().unwrap().clone(); + assistant.list_openai_models(&config.assistant).await +} + +#[tauri::command] +async fn assistant_chat_stream( + window: tauri::Window, + assistant_state: State<'_, core::assistant::AssistantState>, + config_state: State<'_, core::config::ConfigState>, + messages: Vec<core::assistant::Message>, +) -> Result<String, String> { + let assistant = assistant_state.assistant.lock().unwrap().clone(); + let config = config_state.config.lock().unwrap().clone(); + assistant + .chat_stream(messages, &config.assistant, &window) + .await +} + fn main() { tauri::Builder::default() .plugin(tauri_plugin_fs::init()) @@ -1643,6 +1748,7 @@ fn main() { .plugin(tauri_plugin_shell::init()) .manage(core::auth::AccountState::new()) .manage(MsRefreshTokenState::new()) + .manage(core::assistant::AssistantState::new()) .setup(|app| { let config_state = core::config::ConfigState::new(app.handle()); 
app.manage(config_state); @@ -1666,7 +1772,7 @@ fn main() { } // Check for pending Java downloads and notify frontend - let pending = core::java::get_pending_downloads(&app.app_handle()); + let pending = core::java::get_pending_downloads(app.app_handle()); if !pending.is_empty() { println!("[Startup] Found {} pending Java download(s)", pending.len()); let _ = app.emit("pending-java-downloads", pending.len()); @@ -1685,6 +1791,9 @@ fn main() { logout, get_settings, save_settings, + get_config_path, + read_raw_config, + save_raw_config, start_microsoft_login, complete_microsoft_login, refresh_account, @@ -1711,7 +1820,12 @@ fn main() { get_forge_versions_for_game, install_forge, get_github_releases, - upload_to_pastebin + upload_to_pastebin, + assistant_check_health, + assistant_chat, + assistant_chat_stream, + list_ollama_models, + list_openai_models ]) .run(tauri::generate_context!()) .expect("error while running tauri application"); diff --git a/src-tauri/src/utils/mod.rs b/src-tauri/src/utils/mod.rs index c0aed36..00b9087 100644 --- a/src-tauri/src/utils/mod.rs +++ b/src-tauri/src/utils/mod.rs @@ -1,6 +1,7 @@ pub mod zip; // File system related utility functions +#[allow(dead_code)] pub mod file_utils { use std::fs; use std::io::{self, Write}; @@ -16,6 +17,7 @@ pub mod file_utils { } // Configuration parsing utilities +#[allow(dead_code)] pub mod config_parser { use std::collections::HashMap; |