forked from phoenix/litellm-mirror
update docs
This commit is contained in:
parent 04b71d26bb
commit 70a36740bc
2 changed files with 4 additions and 4 deletions
@@ -52,15 +52,15 @@ More details 👉
 LiteLLM maps exceptions across all supported providers to the OpenAI exceptions. All our exceptions inherit from OpenAI's exception types, so any error-handling you have for that, should work out of the box with LiteLLM.

 ```python
-from openai.errors import AuthenticationError
+from openai.errors import OpenAIError
 from litellm import completion

 os.environ["ANTHROPIC_API_KEY"] = "bad-key"
 try:
     # some code
     completion(model="claude-instant-1", messages=[{"role": "user", "content": "Hey, how's it going?"}])
-except AuthenticationError as e:
-    print(e.llm_provider)
+except OpenAIError as e:
+    print(e)
 ```

 More details 👉
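For readers following along, here is a minimal, self-contained version of the updated snippet. It is a sketch, not part of the commit: it assumes the openai 0.x SDK layout, where the exception classes live in `openai.error` (the diffed docs import from `openai.errors`, so treat the exact module path as SDK-version dependent), and it fills in the `import os` that the excerpt elides.

```python
# Sketch only (not part of the commit): assumes openai 0.x, where the
# exception classes live in openai.error; adjust the import for your SDK.
import os

from openai.error import OpenAIError  # assumption: openai 0.x module layout
from litellm import completion

os.environ["ANTHROPIC_API_KEY"] = "bad-key"

try:
    completion(
        model="claude-instant-1",
        messages=[{"role": "user", "content": "Hey, how's it going?"}],
    )
except OpenAIError as e:
    # LiteLLM maps provider-specific errors to OpenAI exception types,
    # so the bad Anthropic key surfaces here as an OpenAI-style error.
    print(e)
```

Because the updated docs catch the base `OpenAIError` rather than `AuthenticationError`, the handler also covers rate limits, timeouts, and other provider failures that LiteLLM maps onto OpenAI's exception hierarchy.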
@@ -1,4 +1,4 @@
-# Use Completion() for OpenAI, Azure
+# Replacing OpenAI ChatCompletion with Completion()

 * [Supported OpenAI LLMs](https://docs.litellm.ai/docs/providers/openai)
 * [Supported Azure OpenAI LLMs](https://docs.litellm.ai/docs/providers/azure)
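The retitled page is about swapping `openai.ChatCompletion.create()` for LiteLLM's `completion()`. A rough sketch of what that swap looks like, assuming the OpenAI-compatible response shape the LiteLLM docs describe (the model name and response indexing below are illustrative, not taken from the commit):

```python
# Sketch of the "replace ChatCompletion with completion()" idea.
# Before (openai 0.x style):
#   import openai
#   response = openai.ChatCompletion.create(model="gpt-3.5-turbo", messages=messages)
from litellm import completion

messages = [{"role": "user", "content": "Hey, how's it going?"}]

# Same call shape for OpenAI and Azure-hosted models; only the model
# identifier (and the relevant API key env var) changes per provider.
response = completion(model="gpt-3.5-turbo", messages=messages)
print(response["choices"][0]["message"]["content"])
```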