We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent d7ff0a4 commit 86e6f6c — Copy full SHA for 86e6f6c
any_llm_client/clients/openai.py
@@ -245,7 +245,6 @@ async def request_llm_message(
245
_handle_status_error(status_code=exception.response.status_code, content=exception.response.content)
246
247
try:
248
- print(response.content)
249
validated_message_model: typing.Final = (
250
ChatCompletionsNotStreamingResponse.model_validate_json(response.content).choices[0].message
251
)
0 commit comments