Skip to content

Commit ca11d43

Browse files
milisp and claude committed
refactor: Unify LLM clients and enhance file tree UX
- Replace separate GeminiClient and OpenAIClient with unified LlmClient
- Add refresh button to FileTree header for better UX
- Remove auto-close behavior from FileViewer when adding to notepad
- Update README download link to point to releases page

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <[email protected]>
1 parent 539bb47 commit ca11d43

File tree

8 files changed

+392
-296
lines changed

8 files changed

+392
-296
lines changed

README.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@
1515

1616
*Built for developers, researchers, writers, and anyone tired of copy-pasting files into AI chats.*
1717

18-
[🚀 **Download Now**](#getting-started)[📖 **Documentation**](./docs)[💬 **Community**](https://github.com/milisp/plux/discussions)[**Star Us**](https://github.com/milisp/plux/stargazers)
18+
[🚀 **Download Now**](https://github.com/milisp/plux/releases)[📖 **Documentation**](./docs)[💬 **Community**](https://github.com/milisp/plux/discussions)[**Star Us**](https://github.com/milisp/plux/stargazers)
1919

2020
</div>
2121

src-tauri/src/cmd/chat.rs

Lines changed: 9 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
use crate::{
22
mcp_client::{
33
chat::ChatSession,
4-
client::{common::{ChatClient, StreamingChatClient}, gemini::GeminiClient, openai::OpenAIClient},
4+
client::{common::{ChatClient, StreamingChatClient}, llm::LlmClient},
55
},
66
GlobalToolSet,
77
};
@@ -47,9 +47,9 @@ pub async fn send_message(
4747
println!("Creating new session with provider: {}, api_key length: {}",
4848
request.provider, request.api_key.len());
4949
let client: Arc<dyn ChatClient> = if request.provider == "google" {
50-
Arc::new(GeminiClient::new(request.api_key.clone(), request.base_url.clone(), None))
50+
Arc::new(LlmClient::new_gemini(request.api_key.clone(), request.base_url.clone(), None))
5151
} else {
52-
Arc::new(OpenAIClient::new(request.api_key.clone(), request.base_url.clone(), None))
52+
Arc::new(LlmClient::new_openai(request.api_key.clone(), request.base_url.clone(), None))
5353
};
5454
let mut new_session = ChatSession::new(client, (*tool_set.0).clone(), request.model);
5555
new_session
@@ -125,9 +125,9 @@ pub async fn send_message_stream(
125125
println!("Creating new session with provider: {}, api_key length: {}",
126126
request.provider, request.api_key.len());
127127
let client: Arc<dyn ChatClient> = if request.provider == "google" {
128-
Arc::new(GeminiClient::new(request.api_key.clone(), request.base_url.clone(), None))
128+
Arc::new(LlmClient::new_gemini(request.api_key.clone(), request.base_url.clone(), None))
129129
} else {
130-
Arc::new(OpenAIClient::new(request.api_key.clone(), request.base_url.clone(), None))
130+
Arc::new(LlmClient::new_openai(request.api_key.clone(), request.base_url.clone(), None))
131131
};
132132
let mut new_session = ChatSession::new(client, (*tool_set.0).clone(), request.model);
133133
new_session
@@ -184,13 +184,13 @@ pub async fn send_message_stream(
184184
println!("🔥 Calling streaming method for provider: {}", request.provider);
185185
let result = if request.provider == "google" {
186186
println!("📡 Using Gemini streaming");
187-
let gemini_client = GeminiClient::new(request.api_key.clone(), request.base_url.clone(), None);
188-
gemini_client.complete_stream(stream_request, callback).await
187+
let llm_client = LlmClient::new_gemini(request.api_key.clone(), request.base_url.clone(), None);
188+
llm_client.complete_stream(stream_request, callback).await
189189
} else {
190190
println!("📡 Using OpenAI-compatible streaming for provider: {}", request.provider);
191191
// OpenAI-compatible providers (OpenAI, Ollama, OpenRouter, Anthropic, etc.)
192-
let openai_client = OpenAIClient::new(request.api_key.clone(), request.base_url.clone(), None);
193-
openai_client.complete_stream(stream_request, callback).await
192+
let llm_client = LlmClient::new_openai(request.api_key.clone(), request.base_url.clone(), None);
193+
llm_client.complete_stream(stream_request, callback).await
194194
};
195195
println!("🎯 Streaming method result: {:?}", result.is_ok());
196196

src-tauri/src/mcp_client/client/gemini.rs

Lines changed: 0 additions & 276 deletions
This file was deleted.

0 commit comments

Comments
 (0)