forked from phoenix/litellm-mirror
docs: Fix import
parent c550357199
commit 82449ffcfd

1 changed file with 2 additions and 2 deletions
@@ -11,7 +11,7 @@ pip install litellm vllm
 ```python
 import litellm
 
-response = completion(
+response = litellm.completion(
     model="vllm/facebook/opt-125m", # add a vllm prefix so litellm knows the custom_llm_provider==vllm
     messages=messages,
     temperature=0.2,

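For context, a minimal sketch of how the corrected snippet reads after this change, calling a locally running vllm model through `litellm.completion` with the `vllm/` provider prefix. The `messages` list and the `print` call are illustrative assumptions, not part of the diff.

```python
import litellm

# Assumed example input; the docs reference `messages` without defining it in this hunk.
messages = [{"role": "user", "content": "Hey, how's it going?"}]

response = litellm.completion(
    model="vllm/facebook/opt-125m",  # "vllm/" prefix tells litellm to treat vllm as the provider
    messages=messages,
    temperature=0.2,
)

print(response)
```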
@@ -29,7 +29,7 @@ In order to use litellm to call a hosted vllm server add the following to your c
 ```python
 import litellm
 
-response = completion(
+response = litellm.completion(
     model="openai/facebook/opt-125m", # pass the vllm model name
     messages=messages,
     api_base="https://hosted-vllm-api.co",

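And a similar sketch for the hosted-server variant after the fix: the `openai/` prefix routes the request through litellm's OpenAI-compatible client, with `api_base` pointing at the hosted vllm server. The `messages` list is again an assumed example, and the `api_base` URL is the placeholder used in the doc.

```python
import litellm

# Assumed example input, as above.
messages = [{"role": "user", "content": "Hey, how's it going?"}]

response = litellm.completion(
    model="openai/facebook/opt-125m",      # pass the vllm model name after the "openai/" prefix
    messages=messages,
    api_base="https://hosted-vllm-api.co",  # base URL of the hosted vllm server (placeholder from the doc)
)

print(response)
```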