Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

50 changes: 27 additions & 23 deletions crates/language_models/src/provider/ollama.rs
Original file line number Diff line number Diff line change
Expand Up @@ -381,10 +381,14 @@ impl OllamaLanguageModel {
thinking = Some(text)
}
MessageContent::ToolUse(tool_use) => {
tool_calls.push(OllamaToolCall::Function(OllamaFunctionCall {
name: tool_use.name.to_string(),
arguments: tool_use.input,
}));
tool_calls.push(OllamaToolCall {
id: None,
function: OllamaFunctionCall {
index: None,
name: tool_use.name.to_string(),
arguments: tool_use.input,
},
});
}
_ => (),
}
Expand Down Expand Up @@ -575,25 +579,25 @@ fn map_to_language_model_completion_events(
}

if let Some(tool_call) = tool_calls.and_then(|v| v.into_iter().next()) {
match tool_call {
OllamaToolCall::Function(function) => {
let tool_id = format!(
"{}-{}",
&function.name,
TOOL_CALL_COUNTER.fetch_add(1, Ordering::Relaxed)
);
let event =
LanguageModelCompletionEvent::ToolUse(LanguageModelToolUse {
id: LanguageModelToolUseId::from(tool_id),
name: Arc::from(function.name),
raw_input: function.arguments.to_string(),
input: function.arguments,
is_input_complete: true,
});
events.push(Ok(event));
state.used_tools = true;
}
}
// Directly access the struct fields
let function = tool_call.function; // Accessing the function field
let tool_id = tool_call.id.unwrap_or_else(|| {
format!(
"{}-{}",
&function.name, // Assuming `function` has a `name` field
TOOL_CALL_COUNTER.fetch_add(1, Ordering::Relaxed)
)
}); // Access id, handle Option

let event = LanguageModelCompletionEvent::ToolUse(LanguageModelToolUse {
id: LanguageModelToolUseId::from(tool_id),
name: Arc::from(function.name),
raw_input: function.arguments.to_string(),
input: function.arguments,
is_input_complete: true,
});
events.push(Ok(event));
state.used_tools = true;
} else if !content.is_empty() {
events.push(Ok(LanguageModelCompletionEvent::Text(content)));
}
Expand Down
1 change: 1 addition & 0 deletions crates/ollama/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@ schemars = ["dep:schemars"]
anyhow.workspace = true
futures.workspace = true
http_client.workspace = true
log.workspace = true
schemars = { workspace = true, optional = true }
serde.workspace = true
serde_json.workspace = true
Expand Down
85 changes: 77 additions & 8 deletions crates/ollama/src/ollama.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ use anyhow::{Context as _, Result};
use futures::{AsyncBufReadExt, AsyncReadExt, StreamExt, io::BufReader, stream::BoxStream};
use http_client::{AsyncBody, HttpClient, HttpRequestExt, Method, Request as HttpRequest};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use serde_json::{Number, Value};
pub use settings::KeepAlive;

pub const OLLAMA_API_URL: &str = "http://localhost:11434";
Expand Down Expand Up @@ -103,12 +103,14 @@ pub enum ChatMessage {

#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "lowercase")]
pub enum OllamaToolCall {
Function(OllamaFunctionCall),
pub struct OllamaToolCall {
pub function: OllamaFunctionCall,
pub id: Option<String>,
}

#[derive(Serialize, Deserialize, Debug)]
pub struct OllamaFunctionCall {
pub index: Option<Number>,
pub name: String,
pub arguments: Value,
}
Expand Down Expand Up @@ -281,24 +283,43 @@ pub async fn stream_chat_completion(
request: ChatRequest,
) -> Result<BoxStream<'static, Result<ChatResponseDelta>>> {
let uri = format!("{api_url}/api/chat");
log::info!(
"Chat request: {:?}",
serde_json::to_string(&request).unwrap()
);
let request = HttpRequest::builder()
.method(Method::POST)
.uri(uri)
.uri(&uri)
.header("Content-Type", "application/json")
.when_some(api_key, |builder, api_key| {
builder.header("Authorization", format!("Bearer {api_key}"))
})
.body(AsyncBody::from(serde_json::to_string(&request)?))?;

log::info!("Sending request to Ollama: {}", uri);
let mut response = client.send(request).await?;
log::info!(
"Received response from Ollama: status = {}",
response.status()
);
if response.status().is_success() {
let reader = BufReader::new(response.into_body());

Ok(reader
.lines()
.map(|line| match line {
Ok(line) => serde_json::from_str(&line).context("Unable to parse chat response"),
Err(e) => Err(e.into()),
.map(|line| {
match line {
Ok(line) => {
log::info!("Received line from Ollama: {}", line);
match serde_json::from_str::<ChatResponseDelta>(&line) {
Ok(parsed) => Ok(parsed), // Successfully parsed
Err(parse_error) => {
log::error!("Unable to parse chat response: {:?}", parse_error);
Err(parse_error.into()) // Handle parsing error
}
}
}
Err(e) => Err(e.into()),
}
})
.boxed())
} else {
Expand Down Expand Up @@ -479,6 +500,54 @@ mod tests {
}
}

#[test]
fn parse_tool_call_ollama_0_10_12() {
    // Tool call response as of 2025-11: https://github.com/ollama/ollama/pull/12956
    // Exercises the post-0.10.12 wire format where each tool call carries a
    // top-level "id" and the function object carries an "index".
    let response = serde_json::json!({
        "model": "llama3.2:3b",
        "created_at": "2025-04-28T20:02:02.140489Z",
        "message": {
            "role": "assistant",
            "content": "",
            "tool_calls": [
                {
                    "id": "call_f5kqwpjg",
                    "function": {
                        "index": 0,
                        "name": "weather",
                        "arguments": {
                            "city": "london",
                        }
                    }
                }
            ]
        },
        "done_reason": "stop",
        "done": true,
        "total_duration": 2758629166u64,
        "load_duration": 1770059875,
        "prompt_eval_count": 147,
        "prompt_eval_duration": 684637583,
        "eval_count": 16,
        "eval_duration": 302561917,
    });

    let delta: ChatResponseDelta = serde_json::from_value(response).unwrap();

    // The payload's "role" is "assistant", so any other variant is a
    // deserialization bug.
    let ChatMessage::Assistant {
        content,
        tool_calls,
        images: _,
        thinking,
    } = delta.message
    else {
        panic!("Deserialized wrong role");
    };

    assert!(content.is_empty());
    assert!(tool_calls.is_some_and(|calls| !calls.is_empty()));
    assert!(thinking.is_none());
}

#[test]
fn parse_show_model() {
let response = serde_json::json!({
Expand Down
Loading