litellm/cookbook/litellm_model_fallback.ipynb

{
  "nbformat": 4,
  "nbformat_minor": 0,
  "metadata": {
    "colab": {
      "provenance": []
    },
    "kernelspec": {
      "name": "python3",
      "display_name": "Python 3"
    },
    "language_info": {
      "name": "python"
    }
  },
  "cells": [
    {
      "cell_type": "code",
      "source": [
        "!pip install litellm"
      ],
      "metadata": {
        "id": "j6yJsCGeaq8G"
      },
      "execution_count": null,
      "outputs": []
    },
    {
      "cell_type": "code",
      "execution_count": null,
      "metadata": {
        "id": "u129iWNPaf72"
      },
      "outputs": [],
      "source": [
"import litellm\n",
"from litellm import embedding, completion\n",
"\n",
"model_fallback_list = [\"claude-instant-1\", \"gpt-3.5-turbo\", \"chatgpt-test\"]\n",
"\n",
"user_message = \"Hello, how are you?\"\n",
"messages = [{ \"content\": user_message,\"role\": \"user\"}]\n",
"\n",
"for model in model_fallback_list:\n",
" try:\n",
" response = completion(model=model, messages=messages)\n",
" except Exception as e:\n",
" print(f\"error occurred: {traceback.format_exc()}\")"
      ]
    }
  ]
}