Merge pull request #618 from Undertone0809/fix-add-finetune-provider-compaibility
fix: llm_provider add openai finetune compatibility
commit 09b51fcdb4

2 changed files with 2 additions and 2 deletions

@@ -375,7 +375,7 @@ def completion(
         model in litellm.open_ai_chat_completion_models
         or custom_llm_provider == "custom_openai"
         or custom_llm_provider == "openai"
-        or "ft:gpt-3.5-turbo" in model # finetuned gpt-3.5-turbo
+        or "ft:gpt-3.5-turbo" in model # finetune gpt-3.5-turbo
     ): # allow user to make an openai call with a custom base
         # note: if a user sets a custom base - we should ensure this works
         # allow for the setting of dynamic and stateful api-bases
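
For context, the branch above is the path in completion() that sends a request to the OpenAI chat-completion handler ("allow user to make an openai call with a custom base"); the "ft:gpt-3.5-turbo" substring check is what lets fine-tuned gpt-3.5-turbo models qualify. A minimal sketch of a call that would take this path, assuming a valid OPENAI_API_KEY in the environment and a hypothetical fine-tuned model id:

import litellm

# Hypothetical fine-tuned model id; the "ft:gpt-3.5-turbo" prefix is what the
# condition above matches on.
response = litellm.completion(
    model="ft:gpt-3.5-turbo:my-org::example-id",
    messages=[{"role": "user", "content": "Hello"}],
)
print(response)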

@@ -1461,7 +1461,7 @@ def get_llm_provider(model: str, custom_llm_provider: Optional[str] = None, api_
 
     # check if model in known model provider list -> for huggingface models, raise exception as they don't have a fixed provider (can be togetherai, anyscale, baseten, runpod, et.)
     ## openai - chatcompletion + text completion
-    if model in litellm.open_ai_chat_completion_models:
+    if model in litellm.open_ai_chat_completion_models or "ft:gpt-3.5-turbo" in model:
         custom_llm_provider = "openai"
     elif model in litellm.open_ai_text_completion_models:
         custom_llm_provider = "text-completion-openai"
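
The second hunk carries the provider fix named in the commit message: get_llm_provider previously mapped a model to "openai" only if its name appeared in litellm.open_ai_chat_completion_models, so fine-tuned ids such as "ft:gpt-3.5-turbo:..." were not recognized. A minimal, self-contained sketch of the patched check, using an abbreviated stand-in model list and a hypothetical fine-tuned id (not the real get_llm_provider implementation):

open_ai_chat_completion_models = ["gpt-3.5-turbo", "gpt-4"]  # abbreviated stand-in for litellm's list

def resolve_provider(model: str) -> str:
    # Mirrors the patched branch: fine-tuned gpt-3.5-turbo ids now resolve to "openai".
    if model in open_ai_chat_completion_models or "ft:gpt-3.5-turbo" in model:
        return "openai"
    raise ValueError(f"no known provider for model: {model}")

assert resolve_provider("ft:gpt-3.5-turbo:my-org::example-id") == "openai"
assert resolve_provider("gpt-3.5-turbo") == "openai"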