forked from phoenix/litellm-mirror
Merge pull request #3554 from paneru-rajan/Issue-3544-fix-message
Fixes #3544 based on the data type of the message
This commit is contained in: commit e92f433566

1 changed file with 2 additions and 2 deletions
@@ -300,7 +300,7 @@ def get_ollama_response(
         model_response["choices"][0]["message"] = message
         model_response["choices"][0]["finish_reason"] = "tool_calls"
     else:
-        model_response["choices"][0]["message"] = response_json["message"]
+        model_response["choices"][0]["message"]["content"] = response_json["message"]["content"]
     model_response["created"] = int(time.time())
     model_response["model"] = "ollama/" + model
     prompt_tokens = response_json.get("prompt_eval_count", litellm.token_counter(messages=messages))  # type: ignore
@@ -484,7 +484,7 @@ async def ollama_acompletion(
         model_response["choices"][0]["message"] = message
         model_response["choices"][0]["finish_reason"] = "tool_calls"
     else:
-        model_response["choices"][0]["message"] = response_json["message"]
+        model_response["choices"][0]["message"]["content"] = response_json["message"]["content"]
 
     model_response["created"] = int(time.time())
     model_response["model"] = "ollama_chat/" + data["model"]
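Both hunks make the same fix, once in the sync path and once in the async path: instead of replacing the choice's message object with the raw dict taken from Ollama's response JSON, only the content string is copied into the existing message. The sketch below is a minimal illustration of that difference under stated assumptions; MessageLike and the sample response_json are hypothetical stand-ins, not litellm's actual Message type or payload.

# Minimal sketch (hypothetical MessageLike class, not litellm's Message;
# response_json is a made-up Ollama-style payload).

class MessageLike:
    """Stands in for the message object kept in choices[0]."""

    def __init__(self):
        self.role = "assistant"
        self.content = None

    def __setitem__(self, key, value):
        # Allows dict-style assignment like message["content"] = "..."
        setattr(self, key, value)


response_json = {"message": {"role": "assistant", "content": "Hello!"}}

# Old behavior: the whole dict replaces the message object, so later code
# that expects attribute access (message.content) on it breaks.
old_message = response_json["message"]   # now a plain dict
# old_message.content  -> AttributeError

# New behavior: only the content string is written into the existing
# object, so its type and attribute access are preserved.
new_message = MessageLike()
new_message["content"] = response_json["message"]["content"]
print(new_message.content)  # "Hello!"

In short, the object stored in choices[0] keeps its original type; only its content field is updated from the Ollama response.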