litellm-mirror/cookbook/litellm_model_fallback.ipynb

{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "j6yJsCGeaq8G"
},
"outputs": [],
"source": [
"!pip install litellm"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "u129iWNPaf72"
},
"outputs": [],
"source": [
"from litellm import completion\n",
"\n",
"model_fallback_list = [\"claude-instant-1\", \"gpt-3.5-turbo\", \"chatgpt-test\"]\n",
"\n",
"user_message = \"Hello, how are you?\"\n",
"messages = [{ \"content\": user_message,\"role\": \"user\"}]\n",
"\n",
"for model in model_fallback_list:\n",
" try:\n",
" response = completion(model=model, messages=messages)\n",
" except Exception:\n",
" print(f\"error occurred: {traceback.format_exc()}\")"
]
}
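,
{
"cell_type": "markdown",
"metadata": {},
"source": [
"The loop above logs a traceback for every model that fails but keeps calling the remaining models even after one succeeds. Below is a minimal sketch of a variant that returns the first successful response, which is usually the intent of a fallback list. The `fallback_completion` helper name is hypothetical and not part of litellm; it only wraps the same `completion` call used above."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import traceback\n",
"\n",
"from litellm import completion\n",
"\n",
"\n",
"def fallback_completion(models, messages):\n",
"    # Hypothetical helper (a sketch, not a litellm API): try each model in\n",
"    # order and return the first successful response.\n",
"    last_exc = None\n",
"    for model in models:\n",
"        try:\n",
"            return completion(model=model, messages=messages)\n",
"        except Exception as exc:\n",
"            # Remember the failure, log it, and fall through to the next model.\n",
"            last_exc = exc\n",
"            print(f\"{model} failed: {traceback.format_exc()}\")\n",
"    raise RuntimeError(\"all models in the fallback list failed\") from last_exc\n",
"\n",
"\n",
"response = fallback_completion(model_fallback_list, messages)\n",
"print(response)"
]
}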
],
"metadata": {
"colab": {
"provenance": []
},
"kernelspec": {
"display_name": "Python 3",
"name": "python3"
},
"language_info": {
"name": "python"
}
},
"nbformat": 4,
"nbformat_minor": 0
}