diff --git a/docs/my-website/docs/providers/vllm.md b/docs/my-website/docs/providers/vllm.md
index df9e07ef7..b8285da71 100644
--- a/docs/my-website/docs/providers/vllm.md
+++ b/docs/my-website/docs/providers/vllm.md
@@ -11,7 +11,7 @@ pip install litellm vllm
 ```python
 import litellm
 
-response = completion(
+response = litellm.completion(
     model="vllm/facebook/opt-125m", # add a vllm prefix so litellm knows the custom_llm_provider==vllm
     messages=messages,
     temperature=0.2,
@@ -29,7 +29,7 @@ In order to use litellm to call a hosted vllm server add the following to your c
 ```python
 import litellm
 
-response = completion(
+response = litellm.completion(
     model="openai/facebook/opt-125m", # pass the vllm model name
     messages=messages,
     api_base="https://hosted-vllm-api.co",
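
For context, both hunks fix the same bug: the docs imported `litellm` but then called a bare `completion(`, which is undefined without a `from litellm import completion`. Below is a minimal self-contained sketch of the corrected hosted-vLLM call from the second hunk; the `messages` prompt and the final `print` line are illustrative additions, and `https://hosted-vllm-api.co` is the placeholder endpoint already used in the docs, not a real server.

```python
# Minimal sketch of the corrected call, assuming a hosted vLLM server
# reachable at the placeholder api_base from the docs.
import litellm

messages = [{"role": "user", "content": "Hello, how are you?"}]  # hypothetical prompt

response = litellm.completion(
    model="openai/facebook/opt-125m",       # the vllm model name, with the openai/ prefix
    messages=messages,
    api_base="https://hosted-vllm-api.co",  # placeholder endpoint from the docs
)

# litellm returns an OpenAI-compatible response object
print(response.choices[0].message.content)
```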