diff --git a/docs/my-website/docs/completion/supported.md b/docs/my-website/docs/completion/supported.md
index 0420d7421..57670a86e 100644
--- a/docs/my-website/docs/completion/supported.md
+++ b/docs/my-website/docs/completion/supported.md
@@ -13,9 +13,16 @@ liteLLM reads key naming, all keys should be named in the following format:
 | Model Name | Function Call | Required OS Variables |
 |------------------|----------------------------------------|--------------------------------------|
 | gpt-3.5-turbo | `completion('gpt-3.5-turbo', messages)` | `os.environ['OPENAI_API_KEY']` |
+| gpt-3.5-turbo-0301 | `completion('gpt-3.5-turbo-0301', messages)` | `os.environ['OPENAI_API_KEY']` |
+| gpt-3.5-turbo-0613 | `completion('gpt-3.5-turbo-0613', messages)` | `os.environ['OPENAI_API_KEY']` |
 | gpt-3.5-turbo-16k | `completion('gpt-3.5-turbo-16k', messages)` | `os.environ['OPENAI_API_KEY']` |
 | gpt-3.5-turbo-16k-0613 | `completion('gpt-3.5-turbo-16k-0613', messages)` | `os.environ['OPENAI_API_KEY']` |
 | gpt-4 | `completion('gpt-4', messages)` | `os.environ['OPENAI_API_KEY']` |
+| gpt-4-0314 | `completion('gpt-4-0314', messages)` | `os.environ['OPENAI_API_KEY']` |
+| gpt-4-0613 | `completion('gpt-4-0613', messages)` | `os.environ['OPENAI_API_KEY']` |
+| gpt-4-32k | `completion('gpt-4-32k', messages)` | `os.environ['OPENAI_API_KEY']` |
+| gpt-4-32k-0314 | `completion('gpt-4-32k-0314', messages)` | `os.environ['OPENAI_API_KEY']` |
+| gpt-4-32k-0613 | `completion('gpt-4-32k-0613', messages)` | `os.environ['OPENAI_API_KEY']` |
 
 These also support the `OPENAI_API_BASE` environment variable, which can be used to specify a custom API endpoint.
 
@@ -31,6 +38,11 @@ These also support the `OPENAI_API_BASE` environment variable, which can be used
 | Model Name | Function Call | Required OS Variables |
 |------------------|--------------------------------------------|--------------------------------------|
 | text-davinci-003 | `completion('text-davinci-003', messages)` | `os.environ['OPENAI_API_KEY']` |
+| ada-001 | `completion('ada-001', messages)` | `os.environ['OPENAI_API_KEY']` |
+| curie-001 | `completion('curie-001', messages)` | `os.environ['OPENAI_API_KEY']` |
+| babbage-001 | `completion('babbage-001', messages)` | `os.environ['OPENAI_API_KEY']` |
+| babbage-002 | `completion('babbage-002', messages)` | `os.environ['OPENAI_API_KEY']` |
+| davinci-002 | `completion('davinci-002', messages)` | `os.environ['OPENAI_API_KEY']` |
 
 ### Google VertexAI Models
 
diff --git a/litellm/__init__.py b/litellm/__init__.py
index 192f33844..1602f8ffd 100644
--- a/litellm/__init__.py
+++ b/litellm/__init__.py
@@ -157,11 +157,11 @@ open_ai_chat_completion_models = [
     #################
     "gpt-3.5-turbo",
     "gpt-3.5-turbo-0301",
-    "gpt-3.5-turbo-16k",
     "gpt-3.5-turbo-0613",
+    "gpt-3.5-turbo-16k",
     "gpt-3.5-turbo-16k-0613",
 ]
-open_ai_text_completion_models = ["text-davinci-003", "babbage-002", "davinci-002"]
+open_ai_text_completion_models = ["text-davinci-003", "curie-001", "babbage-001", "ada-001", "babbage-002", "davinci-002"]
 
 cohere_models = [
     "command-nightly",
diff --git a/pyproject.toml b/pyproject.toml
index 1a4cd97ca..3501c4bed 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "litellm"
-version = "0.1.505"
+version = "0.1.506"
 description = "Library to easily interface with LLM API providers"
 authors = ["BerriAI"]
 license = "MIT License"
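
For reviewers, a minimal sanity-check sketch of how the newly listed models are expected to be called. It relies only on the `completion(model, messages)` call shown in the doc tables above; the placeholder API key, the example prompt, and the choice of `gpt-3.5-turbo-0613` / `davinci-002` are illustrative assumptions, not part of this change.

```python
# Sketch (not part of the diff): exercise one newly documented chat model and
# one newly documented text-completion model through litellm's completion().
import os
from litellm import completion

os.environ["OPENAI_API_KEY"] = "sk-..."  # placeholder; OPENAI_API_BASE may also be set for a custom endpoint

messages = [{"role": "user", "content": "Say hello in one sentence."}]

# Chat-completion model added to the docs table and open_ai_chat_completion_models
chat_response = completion(model="gpt-3.5-turbo-0613", messages=messages)
print(chat_response["choices"][0]["message"]["content"])

# Text-completion model added to open_ai_text_completion_models
text_response = completion(model="davinci-002", messages=messages)
print(text_response)
```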