mirror of
https://github.com/BerriAI/litellm.git
synced 2025-04-25 18:54:30 +00:00
changing ollama response parsing to expected behaviour
This commit is contained in:
parent
650036071f
commit
ed07de2729
2 changed files with 8 additions and 13 deletions
|
@@ -199,12 +199,13 @@ def get_ollama_response(
|
|||
## RESPONSE OBJECT
|
||||
model_response["choices"][0]["finish_reason"] = "stop"
|
||||
if optional_params.get("format", "") == "json":
|
||||
function_call = json.loads(response_json["response"])
|
||||
message = litellm.Message(
|
||||
content=None,
|
||||
tool_calls=[
|
||||
{
|
||||
"id": f"call_{str(uuid.uuid4())}",
|
||||
"function": {"arguments": response_json["response"], "name": ""},
|
||||
"function": {"name": function_call["name"], "arguments": json.dumps(function_call["arguments"])},
|
||||
"type": "function",
|
||||
}
|
||||
],
|
||||
|
@@ -295,15 +296,13 @@ async def ollama_acompletion(url, data, model_response, encoding, logging_obj):
|
|||
## RESPONSE OBJECT
|
||||
model_response["choices"][0]["finish_reason"] = "stop"
|
||||
if data.get("format", "") == "json":
|
||||
function_call = json.loads(response_json["response"])
|
||||
message = litellm.Message(
|
||||
content=None,
|
||||
tool_calls=[
|
||||
{
|
||||
"id": f"call_{str(uuid.uuid4())}",
|
||||
"function": {
|
||||
"arguments": response_json["response"],
|
||||
"name": "",
|
||||
},
|
||||
"function": {"name": function_call["name"], "arguments": json.dumps(function_call["arguments"])},
|
||||
"type": "function",
|
||||
}
|
||||
],
|
||||
|
|
|
@@ -202,15 +202,13 @@ def get_ollama_response(
|
|||
## RESPONSE OBJECT
|
||||
model_response["choices"][0]["finish_reason"] = "stop"
|
||||
if data.get("format", "") == "json":
|
||||
function_call = json.loads(response_json["message"]["content"])
|
||||
message = litellm.Message(
|
||||
content=None,
|
||||
tool_calls=[
|
||||
{
|
||||
"id": f"call_{str(uuid.uuid4())}",
|
||||
"function": {
|
||||
"arguments": response_json["message"]["content"],
|
||||
"name": "",
|
||||
},
|
||||
"function": {"name": function_call["name"], "arguments": json.dumps(function_call["arguments"])},
|
||||
"type": "function",
|
||||
}
|
||||
],
|
||||
|
@@ -302,15 +300,13 @@ async def ollama_acompletion(url, data, model_response, encoding, logging_obj):
|
|||
## RESPONSE OBJECT
|
||||
model_response["choices"][0]["finish_reason"] = "stop"
|
||||
if data.get("format", "") == "json":
|
||||
function_call = json.loads(response_json["message"]["content"])
|
||||
message = litellm.Message(
|
||||
content=None,
|
||||
tool_calls=[
|
||||
{
|
||||
"id": f"call_{str(uuid.uuid4())}",
|
||||
"function": {
|
||||
"arguments": response_json["message"]["content"],
|
||||
"name": "",
|
||||
},
|
||||
"function": {"name": function_call["name"], "arguments": json.dumps(function_call["arguments"])},
|
||||
"type": "function",
|
||||
}
|
||||
],
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue