mirror of
https://github.com/BerriAI/litellm.git
synced 2025-04-25 18:54:30 +00:00
Support openrouter reasoning_content
on streaming (#9094)
* feat(convert_dict_to_response.py): support openrouter format of reasoning content
* fix(transformation.py): fix openrouter streaming with reasoning content — Fixes https://github.com/BerriAI/litellm/issues/8193#issuecomment-270892962
* fix: fix type error
This commit is contained in:
parent
42b7921ca1
commit
f899b828cf
6 changed files with 99 additions and 18 deletions
|
@ -2274,23 +2274,22 @@ def completion( # type: ignore # noqa: PLR0915
|
|||
data = {"model": model, "messages": messages, **optional_params}
|
||||
|
||||
## COMPLETION CALL
|
||||
response = openai_like_chat_completion.completion(
|
||||
response = base_llm_http_handler.completion(
|
||||
model=model,
|
||||
stream=stream,
|
||||
messages=messages,
|
||||
headers=headers,
|
||||
api_key=api_key,
|
||||
acompletion=acompletion,
|
||||
api_base=api_base,
|
||||
model_response=model_response,
|
||||
print_verbose=print_verbose,
|
||||
optional_params=optional_params,
|
||||
litellm_params=litellm_params,
|
||||
logger_fn=logger_fn,
|
||||
logging_obj=logging,
|
||||
acompletion=acompletion,
|
||||
timeout=timeout, # type: ignore
|
||||
custom_llm_provider="openrouter",
|
||||
custom_prompt_dict=custom_prompt_dict,
|
||||
timeout=timeout,
|
||||
headers=headers,
|
||||
encoding=encoding,
|
||||
api_key=api_key,
|
||||
logging_obj=logging, # model call logging done inside the class as we may need to modify I/O to fit aleph alpha's requirements
|
||||
client=client,
|
||||
)
|
||||
## LOGGING
|
||||
logging.post_call(
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue