LiteLLM Minor Fixes & Improvements (09/20/2024) (#5807)

* fix(vertex_llm_base.py): Handle api_base = ""

Fixes https://github.com/BerriAI/litellm/issues/5798

* fix(o1_transformation.py): handle stream_options not being supported

https://github.com/BerriAI/litellm/issues/5803

* docs(routing.md): fix docs

Closes https://github.com/BerriAI/litellm/issues/5808

* perf(internal_user_endpoints.py): reduce db calls for getting team_alias for a key

Use the list retrieved earlier in the `/user/info` endpoint

Reduces the UI keys tab load time to 800ms (previously 28s+)

* feat(proxy_server.py): support CONFIG_FILE_PATH as env var

Closes https://github.com/BerriAI/litellm/issues/5744

* feat(get_llm_provider_logic.py): add `litellm_proxy/` as a known openai-compatible route

Simplifies calling the LiteLLM proxy

Reduces confusion when calling models on litellm proxy from litellm sdk

* docs(litellm_proxy.md): cleanup docs

* fix(internal_user_endpoints.py): fix pydantic obj

* test(test_key_generate_prisma.py): fix test
This commit is contained in:
Krish Dholakia 2024-09-20 20:21:32 -07:00 committed by GitHub
parent 0c488cf4ca
commit d6ca7fed18
14 changed files with 204 additions and 84 deletions

View file

@ -248,7 +248,7 @@ from litellm.secret_managers.aws_secret_manager import (
load_aws_secret_manager,
)
from litellm.secret_managers.google_kms import load_google_kms
from litellm.secret_managers.main import get_secret, str_to_bool
from litellm.secret_managers.main import get_secret, get_secret_str, str_to_bool
from litellm.types.llms.anthropic import (
AnthropicMessagesRequest,
AnthropicResponse,
@ -2728,9 +2728,21 @@ async def startup_event():
### LOAD CONFIG ###
worker_config: Optional[Union[str, dict]] = get_secret("WORKER_CONFIG") # type: ignore
env_config_yaml: Optional[str] = get_secret_str("CONFIG_FILE_PATH")
verbose_proxy_logger.debug("worker_config: %s", worker_config)
# check if it's a valid file path
if worker_config is not None:
if env_config_yaml is not None:
if os.path.isfile(env_config_yaml) and proxy_config.is_yaml(
config_file_path=env_config_yaml
):
(
llm_router,
llm_model_list,
general_settings,
) = await proxy_config.load_config(
router=llm_router, config_file_path=env_config_yaml
)
elif worker_config is not None:
if (
isinstance(worker_config, str)
and os.path.isfile(worker_config)