(feat) ollama/chat

This commit is contained in:
ishaan-jaff 2023-12-25 23:01:05 +05:30
parent e0ea2aa147
commit 03de92eec0

View file

@@ -125,7 +125,7 @@ class OllamaConfig:
 def get_ollama_response(
     api_base="http://localhost:11434",
     model="llama2",
-    prompt="Why is the sky blue?",
+    messages=None,
     optional_params=None,
     logging_obj=None,
     acompletion: bool = False,
@@ -146,7 +146,7 @@ def get_ollama_response(
             optional_params[k] = v
     optional_params["stream"] = optional_params.get("stream", False)
-    data = {"model": model, "prompt": prompt, **optional_params}
+    data = {"model": model, "messages": messages, **optional_params}
     ## LOGGING
     logging_obj.pre_call(
         input=None,
@@ -188,7 +188,7 @@ def get_ollama_response(
         ## LOGGING
         logging_obj.post_call(
-            input=prompt,
+            input=messages,
            api_key="",
            original_response=response.text,
            additional_args={