Skip to content

Commit f9ff5c8

Browse files
committed
add missed file
1 parent 52436e8 commit f9ff5c8

File tree

2 files changed

+59
-4
lines changed

2 files changed

+59
-4
lines changed

klaudbiusz/cli/codegen_multi.py

Lines changed: 29 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
import asyncio
2+
import json
23
import logging
34
import os
4-
import json
55
from dataclasses import dataclass
66
from datetime import datetime
77
from pathlib import Path
@@ -13,7 +13,6 @@
1313
from dotenv import load_dotenv
1414
from mcp import ClientSession, StdioServerParameters
1515
from mcp.client.stdio import stdio_client
16-
1716
from shared import ScaffoldTracker, Tracker, build_mcp_command, setup_logging, validate_mcp_manifest
1817

1918
logger = logging.getLogger(__name__)
@@ -95,13 +94,39 @@ async def initialize(self):
9594
if not self.suppress_logs:
9695
logger.info(f"Loaded {len(self.tools)} MCP tools")
9796

97+
def _clean_schema_for_databricks(self, schema: dict[str, Any]) -> dict[str, Any]:
98+
"""Remove JSON schema fields that Databricks serving doesn't support."""
99+
if not isinstance(schema, dict):
100+
return schema
101+
102+
cleaned = {}
103+
for key, value in schema.items():
104+
if key in ("minimum", "maximum", "exclusiveMinimum", "exclusiveMaximum"):
105+
if schema.get("type") == "integer":
106+
continue
107+
108+
if key == "input_examples":
109+
continue
110+
111+
if isinstance(value, dict):
112+
cleaned[key] = self._clean_schema_for_databricks(value)
113+
elif isinstance(value, list):
114+
cleaned[key] = [
115+
self._clean_schema_for_databricks(item) if isinstance(item, dict) else item for item in value
116+
]
117+
else:
118+
cleaned[key] = value
119+
120+
return cleaned
121+
98122
def _convert_mcp_tool(self, mcp_tool) -> dict[str, Any]:
123+
parameters = self._clean_schema_for_databricks(mcp_tool.inputSchema)
99124
return {
100125
"type": "function",
101126
"function": {
102127
"name": mcp_tool.name,
103128
"description": mcp_tool.description or "",
104-
"parameters": mcp_tool.inputSchema,
129+
"parameters": parameters,
105130
},
106131
}
107132

@@ -346,7 +371,7 @@ def cli(
346371
prompt: str,
347372
app_name: str | None = None,
348373
model: str = "openrouter/minimax/minimax-m2", # other good options: "openrouter/moonshotai/kimi-k2-thinking", "gemini/gemini-2.5-pro",
349-
# some open-weights platform provide openai/anthropic-like API that can be used like
374+
# some open-weights platforms provide an openai/anthropic-like API that can be used like
350375
# OPENAI_API_KEY=$DATABRICKS_TOKEN OPENAI_API_BASE=https://$DATABRICKS_HOST/serving-endpoints uv run cli/single_run.py "..." --backend=litellm --model="openai/databricks-gpt-oss-120b"
351376
# OPENAI_API_BASE="https://api.minimax.io/v1" OPENAI_API_KEY="$MINIMAX_API_KEY" uv run cli/single_run.py "..." --backend=litellm --model="openai/MiniMax-M2"
352377
# ANTHROPIC_BASE_URL="https://api.minimax.io/anthropic" ANTHROPIC_API_KEY="$MINIMAX_API_KEY" uv run cli/single_run.py "..." --backend=litellm --model="anthropic/MiniMax-M2"
Lines changed: 30 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,30 @@
1+
"""Ugly workaround for litellm's incompatibility with multiprocessing.
2+
3+
litellm creates async logging workers with event loop-bound queues at import time,
4+
which breaks when using joblib/multiprocessing. This is a known bug tracked at:
5+
https://github.com/BerriAI/litellm/issues/14521
6+
7+
Workaround: detach the queue from the logging worker to prevent event loop binding errors.
8+
"""
9+
10+
11+
def patch_litellm_for_multiprocessing():
12+
"""Disable litellm's async logging worker to prevent event loop issues."""
13+
import litellm
14+
15+
# disable all callback infrastructure
16+
litellm.turn_off_message_logging = True
17+
litellm.drop_params = True
18+
litellm.success_callback = []
19+
litellm.failure_callback = []
20+
litellm._async_success_callback = []
21+
litellm._async_failure_callback = []
22+
23+
# detach queue from logging worker to prevent event loop binding
24+
# see: https://github.com/BerriAI/litellm/issues/14521
25+
try:
26+
from litellm.litellm_core_utils.logging_worker import GLOBAL_LOGGING_WORKER
27+
28+
GLOBAL_LOGGING_WORKER._queue = None
29+
except Exception:
30+
pass # ignore if litellm internals change

0 commit comments

Comments
 (0)