forked from phoenix/litellm-mirror
custom base with openai completion
This commit is contained in:
parent 8acf959430
commit c05606b587
1 changed file with 17 additions and 14 deletions
@@ -285,13 +285,18 @@ def completion(
         model in litellm.open_ai_text_completion_models
         or "ft:babbage-002" in model
         or "ft:davinci-002" in model # support for finetuned completion models
+        or custom_llm_provider == "openai"
     ):
+        # print("calling custom openai provider")
         openai.api_type = "openai"
-        openai.api_base = (
-            litellm.api_base
-            if litellm.api_base is not None
-            else "https://api.openai.com/v1"
-        )
+        api_base = (
+            api_base
+            or litellm.api_base
+            or get_secret("OPENAI_API_BASE")
+            or "https://api.openai.com/v1"
+        )
         openai.api_version = None
         # set API KEY
         if not api_key and litellm.openai_key:
@@ -311,20 +316,18 @@ def completion(
             additional_args={
                 "openai_organization": litellm.organization,
                 "headers": litellm.headers,
-                "api_base": openai.api_base,
+                "api_base": api_base,
                 "api_type": openai.api_type,
             },
         )
         ## COMPLETION CALL
-        if litellm.headers:
-            response = openai.Completion.create(
-                model=model,
-                prompt=prompt,
-                headers=litellm.headers,
-            )
-        else:
-            response = openai.Completion.create(model=model, prompt=prompt, **optional_params)
+        response = openai.Completion.create(
+            model=model,
+            prompt=prompt,
+            headers=litellm.headers,
+            api_base=api_base,
+            **optional_params
+        )

         if "stream" in optional_params and optional_params["stream"] == True:
             response = CustomStreamWrapper(response, model, logging_obj=logging)
         return response
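In short, the commit lets custom_llm_provider == "openai" take the OpenAI text-completion path and replaces the old two-way check on litellm.api_base with a fallback chain: an explicitly passed api_base wins, then litellm.api_base, then the OPENAI_API_BASE secret, then the public endpoint. Below is a minimal standalone sketch of that resolution order, using os.getenv as a stand-in for litellm's get_secret; the resolve_api_base helper is hypothetical, not litellm code.

    import os

    DEFAULT_OPENAI_API_BASE = "https://api.openai.com/v1"

    def resolve_api_base(api_base=None):
        # Hypothetical helper mirroring the fallback chain in this commit:
        # explicit argument > module-level setting > env var > default.
        module_api_base = None  # stand-in for litellm.api_base
        return (
            api_base
            or module_api_base
            or os.getenv("OPENAI_API_BASE")
            or DEFAULT_OPENAI_API_BASE
        )

    # An explicit value always wins:
    assert resolve_api_base("http://localhost:8000/v1") == "http://localhost:8000/v1"
    # With nothing configured (and OPENAI_API_BASE unset), the public
    # endpoint is used:
    # assert resolve_api_base() == "https://api.openai.com/v1"

After this change, a call along the following lines would presumably be routed to a custom OpenAI-compatible endpoint; the URL and model name here are placeholders, not values from the commit.

    import litellm

    response = litellm.completion(
        model="text-davinci-003",
        messages=[{"role": "user", "content": "Say hi"}],
        api_base="http://localhost:8000/v1",
        custom_llm_provider="openai",
    )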