aboutsummaryrefslogtreecommitdiffstatshomepage
path: root/src-tauri/src/core/downloader.rs
blob: 0ba9aecd61681e50aaa5477d4a6a1a760e6f135d (plain) (blame)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
use std::path::PathBuf;
use serde::{Serialize, Deserialize};
use tauri::{Emitter, Window};
use futures::StreamExt;
use tokio::io::AsyncWriteExt;
use std::sync::Arc;
use tokio::sync::Semaphore;

/// A single file to fetch: where from, where to, and an optional integrity hash.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DownloadTask {
    /// Source URL to download from.
    pub url: String,
    /// Destination path on disk; parent directories are created as needed.
    pub path: PathBuf,
    /// Expected SHA-1 hex digest. When set and the file already exists with a
    /// matching hash, the download is skipped.
    pub sha1: Option<String>,
}

/// Payload emitted to the frontend on the "download-progress" event.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProgressEvent {
    /// File name (last path component) of the task being reported.
    pub file: String,
    /// Bytes received so far (0 for non-download states such as "Verifying").
    pub downloaded: u64,
    /// Total size from Content-Length, or 0 when unknown / not applicable.
    pub total: u64,
    pub status: String, // "Downloading", "Verifying", "Skipped", "Finished", "Error"
}

/// Downloads all `tasks` concurrently (at most 10 at a time), emitting Tauri
/// events so the frontend can render progress:
///
/// * `"download-start"`    — once, with the total number of tasks.
/// * `"download-progress"` — per task, with a [`ProgressEvent`] payload.
/// * `"download-complete"` — once, after every task has finished or failed.
///
/// Files that already exist with a matching SHA-1 are skipped. Returns `Err`
/// with a joined description of every per-task failure, `Ok(())` otherwise.
pub async fn download_files(window: Window, tasks: Vec<DownloadTask>) -> Result<(), String> {
    let client = reqwest::Client::new();
    // Max 10 concurrent downloads. buffer_unordered(10) below already bounds
    // concurrency; the semaphore keeps the limit enforced even if the buffer
    // size is changed later.
    let semaphore = Arc::new(Semaphore::new(10));

    // Notify start (total files)
    let _ = window.emit("download-start", tasks.len());

    let tasks_stream = futures::stream::iter(tasks).map(|task| {
        let client = client.clone();
        let window = window.clone();
        let semaphore = semaphore.clone();

        async move {
            let _permit = semaphore
                .acquire()
                .await
                .map_err(|e| format!("Semaphore closed: {}", e))?;
            // Fall back to the URL if the path has no final component, rather
            // than panicking on unwrap().
            let file_name = task
                .path
                .file_name()
                .map(|n| n.to_string_lossy().to_string())
                .unwrap_or_else(|| task.url.clone());

            // 1. If the file already exists and its SHA-1 matches, skip it.
            if task.path.exists() {
                let _ = window.emit("download-progress", ProgressEvent {
                    file: file_name.clone(),
                    downloaded: 0,
                    total: 0,
                    status: "Verifying".into(),
                });

                if let Some(expected_sha1) = &task.sha1 {
                    if let Ok(data) = tokio::fs::read(&task.path).await {
                        use sha1::Digest;
                        let mut hasher = sha1::Sha1::new();
                        hasher.update(&data);
                        let actual = hex::encode(hasher.finalize());
                        // Hex digests are case-insensitive; manifests vary.
                        if actual.eq_ignore_ascii_case(expected_sha1) {
                            let _ = window.emit("download-progress", ProgressEvent {
                                file: file_name.clone(),
                                downloaded: 0,
                                total: 0,
                                status: "Skipped".into(),
                            });
                            return Ok(());
                        }
                        // Hash mismatch: fall through and re-download.
                    }
                }
            }

            // 2. Download. Surface directory-creation failures instead of
            //    ignoring them and failing later at File::create.
            if let Some(parent) = task.path.parent() {
                tokio::fs::create_dir_all(parent)
                    .await
                    .map_err(|e| format!("Create dir error: {}", e))?;
            }

            // `resp` must be mutable: Response::chunk() takes &mut self.
            let mut resp = client
                .get(&task.url)
                .send()
                .await
                .map_err(|e| format!("Request error: {}", e))?;

            // Reject non-2xx responses; otherwise an HTML error page would be
            // written to disk as if it were the requested file.
            if !resp.status().is_success() {
                return Err(format!("HTTP {} for {}", resp.status(), task.url));
            }

            let total_size = resp.content_length().unwrap_or(0);
            let mut file = tokio::fs::File::create(&task.path)
                .await
                .map_err(|e| format!("Create file error: {}", e))?;

            // Stream the body chunk-by-chunk via Response::chunk(), which does
            // not require reqwest's `stream` feature (unlike bytes_stream()).
            let mut downloaded: u64 = 0;
            loop {
                match resp.chunk().await {
                    Ok(Some(chunk)) => {
                        file.write_all(&chunk)
                            .await
                            .map_err(|e| format!("Write error: {}", e))?;
                        downloaded += chunk.len() as u64;
                        let _ = window.emit("download-progress", ProgressEvent {
                            file: file_name.clone(),
                            downloaded,
                            total: total_size,
                            status: "Downloading".into(),
                        });
                    }
                    Ok(None) => break,
                    Err(e) => return Err(format!("Download error: {}", e)),
                }
            }

            // Make sure buffered bytes reach the OS before reporting success.
            file.flush()
                .await
                .map_err(|e| format!("Flush error: {}", e))?;

            let _ = window.emit("download-progress", ProgressEvent {
                file: file_name.clone(),
                downloaded,
                total: total_size,
                status: "Finished".into(),
            });

            Ok(())
        }
    });

    // Run up to 10 download futures concurrently and keep each task's result
    // instead of discarding them (previously every error was swallowed).
    let results: Vec<Result<(), String>> = tasks_stream.buffer_unordered(10).collect().await;

    let _ = window.emit("download-complete", ());

    // Report all failures to the caller, joined into one message.
    let errors: Vec<String> = results.into_iter().filter_map(Result::err).collect();
    if errors.is_empty() {
        Ok(())
    } else {
        Err(errors.join("; "))
    }
}