allow custom base to openai

Krrish Dholakia 2023-08-10 18:01:52 -07:00
parent 85753c0d18
commit 24e51ec75c
11 changed files with 3 additions and 3 deletions


@@ -44,7 +44,7 @@ def completion(
presence_penalty=0, frequency_penalty=0, logit_bias={}, user="", deployment_id=None,
# Optional liteLLM function params
*, return_async=False, api_key=None, force_timeout=60, azure=False, logger_fn=None, verbose=False,
-hugging_face = False, replicate=False,together_ai = False
+hugging_face = False, replicate=False,together_ai = False, llm_provider=None
):
try:
global new_response
@@ -85,7 +85,7 @@ def completion(
messages = messages,
**optional_params
)
-elif model in litellm.open_ai_chat_completion_models:
+elif model in litellm.open_ai_chat_completion_models or llm_provider == "custom": # allow user to make an openai call with a custom base
openai.api_type = "openai"
# note: if a user sets a custom base - we should ensure this works
openai.api_base = litellm.api_base if litellm.api_base is not None else "https://api.openai.com/v1"
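With this change, a caller can point OpenAI-compatible completions at their own endpoint by setting litellm.api_base and passing llm_provider="custom". A minimal sketch of how that might look; the base URL, model name, and API key below are illustrative placeholders, not part of this commit:

import litellm
from litellm import completion

# Assumption: the custom endpoint speaks the OpenAI chat-completions protocol.
litellm.api_base = "https://my-openai-proxy.example.com/v1"  # placeholder URL

response = completion(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "Hello from a custom base"}],
    llm_provider="custom",  # routes through the openai chat-completion path using litellm.api_base
    api_key="sk-placeholder",  # whatever key the custom endpoint expects
)
print(response)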


@@ -1,6 +1,6 @@
[tool.poetry]
name = "litellm"
version = "0.1.375"
version = "0.1.376"
description = "Library to easily interface with LLM API providers"
authors = ["BerriAI"]
license = "MIT License"