Merge pull request #1526 from TheDiscoMole/litellm_ollama_tool_call_reponse

changing ollama response parsing to expected behaviour
This commit is contained in:
Krish Dholakia 2024-05-01 10:24:11 -07:00 committed by GitHub
commit a24f32fb5a
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
2 changed files with 8 additions and 13 deletions

View file

@@ -213,12 +213,13 @@ def get_ollama_response(
## RESPONSE OBJECT ## RESPONSE OBJECT
model_response["choices"][0]["finish_reason"] = "stop" model_response["choices"][0]["finish_reason"] = "stop"
if optional_params.get("format", "") == "json": if optional_params.get("format", "") == "json":
function_call = json.loads(response_json["response"])
message = litellm.Message( message = litellm.Message(
content=None, content=None,
tool_calls=[ tool_calls=[
{ {
"id": f"call_{str(uuid.uuid4())}", "id": f"call_{str(uuid.uuid4())}",
"function": {"arguments": response_json["response"], "name": ""}, "function": {"name": function_call["name"], "arguments": json.dumps(function_call["arguments"])},
"type": "function", "type": "function",
} }
], ],
@@ -310,15 +311,13 @@ async def ollama_acompletion(url, data, model_response, encoding, logging_obj):
## RESPONSE OBJECT ## RESPONSE OBJECT
model_response["choices"][0]["finish_reason"] = "stop" model_response["choices"][0]["finish_reason"] = "stop"
if data.get("format", "") == "json": if data.get("format", "") == "json":
function_call = json.loads(response_json["response"])
message = litellm.Message( message = litellm.Message(
content=None, content=None,
tool_calls=[ tool_calls=[
{ {
"id": f"call_{str(uuid.uuid4())}", "id": f"call_{str(uuid.uuid4())}",
"function": { "function": {"name": function_call["name"], "arguments": json.dumps(function_call["arguments"])},
"arguments": response_json["response"],
"name": "",
},
"type": "function", "type": "function",
} }
], ],

View file

@@ -285,15 +285,13 @@ def get_ollama_response(
## RESPONSE OBJECT ## RESPONSE OBJECT
model_response["choices"][0]["finish_reason"] = "stop" model_response["choices"][0]["finish_reason"] = "stop"
if data.get("format", "") == "json": if data.get("format", "") == "json":
function_call = json.loads(response_json["message"]["content"])
message = litellm.Message( message = litellm.Message(
content=None, content=None,
tool_calls=[ tool_calls=[
{ {
"id": f"call_{str(uuid.uuid4())}", "id": f"call_{str(uuid.uuid4())}",
"function": { "function": {"name": function_call["name"], "arguments": json.dumps(function_call["arguments"])},
"arguments": response_json["message"]["content"],
"name": "",
},
"type": "function", "type": "function",
} }
], ],
@@ -415,15 +413,13 @@ async def ollama_acompletion(
## RESPONSE OBJECT ## RESPONSE OBJECT
model_response["choices"][0]["finish_reason"] = "stop" model_response["choices"][0]["finish_reason"] = "stop"
if data.get("format", "") == "json": if data.get("format", "") == "json":
function_call = json.loads(response_json["message"]["content"])
message = litellm.Message( message = litellm.Message(
content=None, content=None,
tool_calls=[ tool_calls=[
{ {
"id": f"call_{str(uuid.uuid4())}", "id": f"call_{str(uuid.uuid4())}",
"function": { "function": {"name": function_call["name"], "arguments": json.dumps(function_call["arguments"])},
"arguments": response_json["message"]["content"],
"name": function_name or "",
},
"type": "function", "type": "function",
} }
], ],