Merge pull request #4373 from CorrM/main

[Fix-Improve] Improve Ollama prompt input and fix Ollama function calling key error
This commit is contained in:
Krish Dholakia 2024-06-25 06:27:43 -07:00 committed by GitHub
commit 75c8f77d9a
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
3 changed files with 35 additions and 13 deletions

View file

@ -126,7 +126,7 @@ class OllamaConfig:
)
and v is not None
}
def get_required_params(self) -> List[ProviderField]:
        """For a given provider, return its required fields with a description"""
return [
@ -451,7 +451,7 @@ async def ollama_acompletion(url, data, model_response, encoding, logging_obj):
{
"id": f"call_{str(uuid.uuid4())}",
"function": {
"name": function_call["name"],
"name": function_call.get("name", function_call.get("function", None)),
"arguments": json.dumps(function_call["arguments"]),
},
"type": "function",

View file

@ -434,7 +434,7 @@ async def ollama_async_streaming(
{
"id": f"call_{str(uuid.uuid4())}",
"function": {
"name": function_call["name"],
"name": function_call.get("name", function_call.get("function", None)),
"arguments": json.dumps(function_call["arguments"]),
},
"type": "function",

View file

@ -135,7 +135,7 @@ def convert_to_ollama_image(openai_image_url: str):
def ollama_pt(
model, messages
model, messages
): # https://github.com/ollama/ollama/blob/af4cf55884ac54b9e637cd71dadfe9b7a5685877/docs/modelfile.md#template
if "instruct" in model:
prompt = custom_prompt(
@ -172,14 +172,36 @@ def ollama_pt(
images.append(base64_image)
return {"prompt": prompt, "images": images}
else:
prompt = "".join(
(
m["content"]
if isinstance(m["content"], str) is str
else "".join(m["content"])
)
for m in messages
)
prompt = ""
for message in messages:
role = message["role"]
content = message.get("content", "")
if "tool_calls" in message:
tool_calls = []
for call in message["tool_calls"]:
call_id: str = call["id"]
function_name: str = call["function"]["name"]
arguments = json.loads(call["function"]["arguments"])
tool_calls.append({
"id": call_id,
"type": "function",
"function": {
"name": function_name,
"arguments": arguments
}
})
prompt += f"### Assistant:\nTool Calls: {json.dumps(tool_calls, indent=2)}\n\n"
elif "tool_call_id" in message:
            prompt += f"### User:\n{message['content']}\n\n"
elif content:
prompt += f"### {role.capitalize()}:\n{content}\n\n"
return prompt
@ -710,7 +732,7 @@ def convert_to_anthropic_tool_result_xml(message: dict) -> str:
"""
Anthropic tool_results look like:
[Successful results]
<function_results>
<result>