forked from phoenix/litellm-mirror
(docs) using proxy
commit 54b6a767f4
parent 79e93ee7aa
1 changed file with 11 additions and 3 deletions

 README.md | 14 +++++++++++---
@@ -91,9 +91,17 @@ $ litellm --model huggingface/bigcode/starcoder
 ### Step 2: Replace openai base
 ```python
-import openai
-client = openai.OpenAI(api_key="anything", base_url="http://0.0.0.0:8000")
-print(openai.chat.completions.create(model="test", messages=[{"role":"user", "content":"Hey!"}]))
+import openai  # openai v1.0.0+
+client = openai.OpenAI(api_key="anything", base_url="http://0.0.0.0:8000")  # set proxy to base_url
+# request sent to model set on litellm proxy, `litellm --model`
+response = client.chat.completions.create(model="gpt-3.5-turbo", messages=[
+    {
+        "role": "user",
+        "content": "this is a test request, write a short poem"
+    }
+])
+
+print(response)
 ```

 ## Logging Observability ([Docs](https://docs.litellm.ai/docs/observability/callbacks))

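For quick verification of the new snippet, here is a minimal sketch of a streaming request through the same proxy. It assumes the proxy from Step 1 is already listening on http://0.0.0.0:8000; the `stream=True` flag and the chunk-printing loop follow the standard openai v1 client API and are not part of this commit.

```python
import openai  # openai v1.0.0+

# Sketch only: assumes the proxy from Step 1 is running on http://0.0.0.0:8000.
client = openai.OpenAI(api_key="anything", base_url="http://0.0.0.0:8000")

# Same request as in the diff above, but streamed; the proxy forwards it to
# whatever model was configured via `litellm --model`.
stream = client.chat.completions.create(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "this is a test request, write a short poem"}],
    stream=True,
)

for chunk in stream:
    # Each chunk carries an incremental delta; content can be None on some chunks.
    if chunk.choices and chunk.choices[0].delta.content:
        print(chunk.choices[0].delta.content, end="", flush=True)
print()
```

Since the proxy decides which backend model actually serves the call, the `model` value here is effectively a placeholder, as the comment in the diff notes.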