ChatGPT-Next-Web/src-tauri/src/stream.rs
feat(tauri): Migrate from Tauri v1 to v2 (Mihail Klimin, commit 8fa7c14f18, 2025-03-16 02:14:47 +03:00)
# Summary
This commit completes the migration from Tauri v1 to v2, resolves configuration issues, upgrades Next.js, and adds test coverage for critical components to ensure stability during the transition.

# Details
## Tauri v2 Migration
- Updated Tauri dependencies to v2.3.0 series in package.json
- Restructured build configuration in `/app/config/build.ts` to align with Tauri v2 requirements
- Fixed imports and API usage patterns across the codebase
- Added a compatibility layer so existing `window.__TAURI__` references keep working (see the sketch below)
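
The commit does not include the shim itself; the following is a minimal sketch, assuming the goal is simply to resolve `invoke` from wherever the injected global exposes it (Tauri v2 nests it under `__TAURI__.core`, while v1 exposed it at the top level). The helper name is hypothetical:

```ts
// Hypothetical helper: pick up invoke() from whichever global layout is present,
// so code written against the v1 window.__TAURI__ shape keeps working under v2.
type InvokeFn = (cmd: string, args?: Record<string, unknown>) => Promise<unknown>;

export function resolveInvoke(): InvokeFn | undefined {
  const tauri = (window as any).__TAURI__;
  // v2 (withGlobalTauri): window.__TAURI__.core.invoke; v1: window.__TAURI__.invoke
  return tauri?.core?.invoke ?? tauri?.invoke;
}
```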

## Next.js Issues
- Upgraded Next.js from 14.1.1 to 14.2.24
- Resolved caching problems with Server Actions
- Updated eslint-config-next to match the new version
- Cleared Next.js cache and temporary files to address build issues

## Testing & Stability
- Added comprehensive tests for `stream.ts` to verify streaming functionality
- Created mocks for the Tauri API to support the test environment (see the sketch after this list)
- Verified that critical functionality continues to work correctly
- Translated all comments to English for consistency
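
The mock itself is not shown in this excerpt; a minimal sketch, assuming a Jest-style runner and that `stream.ts` reaches Tauri through the injected `window.__TAURI__` global (helper name and payload shapes are illustrative):

```ts
// test-setup.ts (hypothetical): install a fake window.__TAURI__ so stream.ts can be
// exercised without a running Tauri shell. Assumes Jest globals (jest.fn) are available.
type StreamFetchResult = {
  request_id: number;
  status: number;
  status_text: string;
  headers: Record<string, string>;
};

export function installTauriMock(): void {
  (window as any).__TAURI__ = {
    // Resolve stream_fetch with a canned response envelope.
    invoke: jest.fn(async (cmd: string, _args?: unknown): Promise<StreamFetchResult> => {
      if (cmd !== "stream_fetch") {
        throw new Error(`unexpected command: ${cmd}`);
      }
      return { request_id: 1, status: 200, status_text: "OK", headers: {} };
    }),
    event: {
      // listen() resolves to an unlisten function, mirroring the real event API.
      listen: jest.fn(async (_name: string, _handler: (event: unknown) => void) => () => {}),
    },
  };
}
```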

## Infrastructure
- Fixed peer dependency warnings during installation
- Ensured proper integration with the Tauri v2 plugins (clipboard-manager, dialog, fs, http, notification, shell, updater, window-state); the new per-plugin import pattern is sketched below
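
For reference, the v2 plugins ship their webview bindings as separate scoped packages; the import pattern looks roughly like this (whether this codebase calls the bindings directly, rather than through `window.__TAURI__`, is an assumption):

```ts
// Tauri v2 splits plugin bindings into per-plugin npm packages.
import { writeText } from "@tauri-apps/plugin-clipboard-manager";
import { open } from "@tauri-apps/plugin-dialog";

// Illustrative helper: copy some text, then let the user pick a single file.
export async function copyAndPickFile(text: string): Promise<string | null> {
  await writeText(text); // clipboard-manager plugin
  const selection = await open({ multiple: false, directory: false }); // dialog plugin
  return typeof selection === "string" ? selection : null;
}
```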

# Approach
Prioritized stability by:
1. Making minimal necessary changes to configuration files
2. Preserving most `window.__TAURI__` calls as they still function in v2
3. Planning gradual migration to new APIs with test coverage for critical components
4. Documenting areas that will require future attention

# Testing
- Created unit tests for critical streaming functionality (a sketch follows this list)
- Performed manual testing of key application features
- Verified successful build and launch with Tauri v2
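
Building on the mock sketched in the Testing & Stability section, a unit test for the streaming bridge might look roughly like this (test names and assertions are illustrative, not taken from the commit):

```ts
// stream.test.ts (hypothetical): exercises the invoke path through the mocked global.
import { installTauriMock } from "./test-setup"; // hypothetical helper from the sketch above

describe("stream_fetch bridge", () => {
  it("returns the response envelope produced by the Rust command", async () => {
    installTauriMock();

    const res = await (window as any).__TAURI__.invoke("stream_fetch", {
      method: "GET",
      url: "https://example.com",
      headers: {},
      body: [],
    });

    expect(res.status).toBe(200);
    expect(res.request_id).toBe(1);
  });
});
```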

# Future Work
- Gradually replace deprecated Tauri v1 API calls with their v2 equivalents in follow-up PRs
- Add test coverage for other critical components

src-tauri/src/stream.rs (177 lines, 5.4 KiB, Rust):

use futures_util::StreamExt;
use std::collections::HashMap;
use std::error::Error;
use std::sync::atomic::{AtomicU32, Ordering};
use std::time::Duration;
use tauri::Emitter;
use tauri_plugin_http::reqwest;
use tauri_plugin_http::reqwest::header::{HeaderMap, HeaderName};
use tauri_plugin_http::reqwest::Client;

/// Counter used to assign a unique id to every streaming request.
static REQUEST_COUNTER: AtomicU32 = AtomicU32::new(0);

/// Response metadata returned to the webview as soon as the headers arrive.
#[derive(Debug, Clone, serde::Serialize)]
pub struct StreamResponse {
    request_id: u32,
    status: u16,
    status_text: String,
    headers: HashMap<String, String>,
}

/// Emitted once the stream for a given request has finished (or failed).
#[derive(Clone, serde::Serialize)]
pub struct EndPayload {
    request_id: u32,
    status: u16,
}

/// A single chunk of the response body, emitted as it is received.
#[derive(Clone, serde::Serialize)]
pub struct ChunkPayload {
    request_id: u32,
    chunk: Vec<u8>,
}

#[tauri::command]
pub async fn stream_fetch(
    window: tauri::WebviewWindow,
    method: String,
    url: String,
    headers: HashMap<String, String>,
    body: Vec<u8>,
) -> Result<StreamResponse, String> {
    let event_name = "stream-response";
    let request_id = REQUEST_COUNTER.fetch_add(1, Ordering::SeqCst);

    // Build the outgoing header map, rejecting names or values that fail to parse.
    let mut _headers = HeaderMap::new();
    for (key, value) in &headers {
        let name = key
            .parse::<HeaderName>()
            .map_err(|err| format!("failed to parse header name {}: {}", key, err))?;
        let value = value
            .parse()
            .map_err(|err| format!("failed to parse header value for {}: {}", key, err))?;
        _headers.insert(name, value);
    }
// println!("method: {:?}", method);
// println!("url: {:?}", url);
// println!("headers: {:?}", headers);
// println!("headers: {:?}", _headers);
let method = method
.parse::<reqwest::Method>()
.map_err(|err| format!("failed to parse method: {}", err))?;
let client = Client::builder()
.default_headers(_headers)
.redirect(reqwest::redirect::Policy::limited(3))
.connect_timeout(Duration::new(3, 0))
.build()
.map_err(|err| format!("failed to generate client: {}", err))?;

    let mut request = client.request(
        method.clone(),
        url.parse::<reqwest::Url>()
            .map_err(|err| format!("failed to parse url: {}", err))?,
    );

    // Only attach a body for methods that are expected to carry one.
    if method == reqwest::Method::POST
        || method == reqwest::Method::PUT
        || method == reqwest::Method::PATCH
    {
        let body = bytes::Bytes::from(body);
        // println!("body: {:?}", body);
        request = request.body(body);
    }

    // println!("client: {:?}", client);
    // println!("request: {:?}", request);

    let response_future = request.send();
    let res = response_future.await;
    let response = match res {
        Ok(res) => {
            // Collect the response headers and status so they can be returned right away.
            let mut headers = HashMap::new();
            for (name, value) in res.headers() {
                headers.insert(
                    name.as_str().to_string(),
                    String::from_utf8_lossy(value.as_bytes()).to_string(),
                );
            }
            let status = res.status().as_u16();

            // Stream the body in a background task, emitting every chunk to the webview.
            tauri::async_runtime::spawn(async move {
                let mut stream = res.bytes_stream();
                while let Some(chunk) = stream.next().await {
                    match chunk {
                        Ok(bytes) => {
                            // println!("chunk: {:?}", bytes);
                            if let Err(e) = window.emit(
                                event_name,
                                ChunkPayload {
                                    request_id,
                                    chunk: bytes.to_vec(),
                                },
                            ) {
                                println!("Failed to emit chunk payload: {:?}", e);
                            }
                        }
                        Err(err) => {
                            println!("Error chunk: {:?}", err);
                        }
                    }
                }
                // Signal the end of the stream to the webview.
                if let Err(e) = window.emit(
                    event_name,
                    EndPayload {
                        request_id,
                        status: 0,
                    },
                ) {
                    println!("Failed to emit end payload: {:?}", e);
                }
            });

            StreamResponse {
                request_id,
                status,
                status_text: "OK".to_string(),
                headers,
            }
        }
        Err(err) => {
            let error: String = err
                .source()
                .map(|e| e.to_string())
                .unwrap_or_else(|| "Unknown error occurred".to_string());
            println!("Error response: {:?}", error);

            // Report the failure to the webview as an error chunk followed by an end event.
            tauri::async_runtime::spawn(async move {
                if let Err(e) = window.emit(
                    event_name,
                    ChunkPayload {
                        request_id,
                        chunk: error.into_bytes(),
                    },
                ) {
                    println!("Failed to emit chunk payload: {:?}", e);
                }
                if let Err(e) = window.emit(
                    event_name,
                    EndPayload {
                        request_id,
                        status: 0,
                    },
                ) {
                    println!("Failed to emit end payload: {:?}", e);
                }
            });

            StreamResponse {
                request_id,
                status: 599,
                status_text: "Error".to_string(),
                headers: HashMap::new(),
            }
        }
    };
    // println!("Response: {:?}", response);
    Ok(response)
}
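
To show how this command is meant to be consumed from the webview, here is a hedged TypeScript sketch of the `stream.ts` side (not the real implementation). The `invoke`/`listen` functions are taken as parameters so the sketch does not assume which binding (the injected `window.__TAURI__` global or the npm packages) the app actually uses; the payload shapes mirror `StreamResponse`, `ChunkPayload`, and `EndPayload` above.

```ts
// Illustrative consumer of the stream_fetch command defined above.
type StreamResponse = {
  request_id: number;
  status: number;
  status_text: string;
  headers: Record<string, string>;
};

type StreamEvent = {
  request_id: number;
  chunk?: number[]; // ChunkPayload: raw body bytes
  status?: number;  // EndPayload: present once the stream has finished
};

export async function streamFetch(
  invoke: (cmd: string, args: unknown) => Promise<StreamResponse>,
  listen: (
    name: string,
    handler: (event: { payload: StreamEvent }) => void,
  ) => Promise<() => void>,
  url: string,
): Promise<{ response: StreamResponse; body: Uint8Array[] }> {
  const body: Uint8Array[] = [];
  let requestId: number | undefined;
  let unlisten: (() => void) | undefined;

  // Subscribe before invoking so early chunks are not missed.
  unlisten = await listen("stream-response", ({ payload }) => {
    if (requestId !== undefined && payload.request_id !== requestId) {
      return; // event belongs to another in-flight request
    }
    if (payload.chunk) {
      body.push(new Uint8Array(payload.chunk)); // ChunkPayload from stream.rs
    } else {
      unlisten?.(); // EndPayload: the Rust side has finished this stream
    }
  });

  // Argument names match the stream_fetch command signature above.
  const response = await invoke("stream_fetch", {
    method: "GET",
    url,
    headers: {},
    body: [],
  });
  requestId = response.request_id;
  return { response, body };
}
```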