(docs) litellm ollama docker image

ishaan-jaff 2023-10-13 08:45:17 -07:00
parent f2eb1b4658
commit 5a39a3554d
4 changed files with 64 additions and 0 deletions


@@ -0,0 +1,36 @@
import openai

# point the OpenAI SDK (pre-1.0 interface) at the local LiteLLM proxy
api_base = "http://0.0.0.0:8000"
openai.api_base = api_base
openai.api_key = "temp-key"  # placeholder key; requests go to the local proxy
print(openai.api_base)

print("LiteLLM: response from proxy with streaming")
response = openai.ChatCompletion.create(
    model="ollama/llama2",
    messages=[
        {
            "role": "user",
            "content": "this is a test request, acknowledge that you got it"
        }
    ],
    stream=True
)
for chunk in response:
    print(f"LiteLLM: streaming response from proxy {chunk}")

# same request without streaming: the full response arrives at once
response = openai.ChatCompletion.create(
    model="ollama/llama2",
    messages=[
        {
            "role": "user",
            "content": "this is a test request, acknowledge that you got it"
        }
    ]
)
print(f"LiteLLM: response from proxy {response}")
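
For reference, a short sketch of consuming the stream as text rather than printing raw chunk objects. It assumes the proxy emits OpenAI-format stream deltas, where each chunk carries an optional "content" fragment under choices[0]["delta"] (an assumption about the proxy's output, not confirmed by this diff):

# a sketch: accumulate the streamed deltas into one reply string
# assumes OpenAI-format chunks: chunk["choices"][0]["delta"].get("content")
reply = ""
for chunk in openai.ChatCompletion.create(
    model="ollama/llama2",
    messages=[{"role": "user", "content": "this is a test request, acknowledge that you got it"}],
    stream=True,
):
    delta = chunk["choices"][0]["delta"]
    reply += delta.get("content", "")
print(f"assembled reply: {reply}")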