mirror of
https://github.com/BerriAI/litellm.git
synced 2025-04-25 18:54:30 +00:00
fix(proxy_server.py): load default team config straight from config file
This commit is contained in:
parent
142e7cf1ce
commit
1f0598a277
2 changed files with 37 additions and 32 deletions
|
@ -1031,12 +1031,17 @@ class ProxyConfig:
|
||||||
- for a given team id
|
- for a given team id
|
||||||
- return the relevant completion() call params
|
- return the relevant completion() call params
|
||||||
"""
|
"""
|
||||||
all_teams_config = litellm.default_team_settings
|
# load existing config
|
||||||
|
config = await self.get_config()
|
||||||
|
## LITELLM MODULE SETTINGS (e.g. litellm.drop_params=True,..)
|
||||||
|
litellm_settings = config.get("litellm_settings", None)
|
||||||
|
all_teams_config = litellm_settings.get("default_team_settings", None)
|
||||||
team_config: dict = {}
|
team_config: dict = {}
|
||||||
if all_teams_config is None:
|
if all_teams_config is None:
|
||||||
return team_config
|
return team_config
|
||||||
for team in all_teams_config:
|
for team in all_teams_config:
|
||||||
assert "team_id" in team
|
if "team_id" not in team:
|
||||||
|
raise Exception(f"team_id missing from team: {team}")
|
||||||
if team_id == team["team_id"]:
|
if team_id == team["team_id"]:
|
||||||
team_config = team
|
team_config = team
|
||||||
break
|
break
|
||||||
|
|
|
@ -1,36 +1,36 @@
|
||||||
#### What this tests ####
|
# #### What this tests ####
|
||||||
# This tests if setting team_config actually works
|
# # This tests if setting team_config actually works
|
||||||
import sys, os
|
# import sys, os
|
||||||
import traceback
|
# import traceback
|
||||||
import pytest
|
# import pytest
|
||||||
|
|
||||||
sys.path.insert(
|
# sys.path.insert(
|
||||||
0, os.path.abspath("../..")
|
# 0, os.path.abspath("../..")
|
||||||
) # Adds the parent directory to the system path
|
# ) # Adds the parent directory to the system path
|
||||||
import litellm
|
# import litellm
|
||||||
from litellm.proxy.proxy_server import ProxyConfig
|
# from litellm.proxy.proxy_server import ProxyConfig
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
# @pytest.mark.asyncio
|
||||||
async def test_team_config():
|
# async def test_team_config():
|
||||||
litellm.default_team_settings = [
|
# litellm.default_team_settings = [
|
||||||
{
|
# {
|
||||||
"team_id": "my-special-team",
|
# "team_id": "my-special-team",
|
||||||
"success_callback": ["langfuse"],
|
# "success_callback": ["langfuse"],
|
||||||
"langfuse_public_key": "os.environ/LANGFUSE_PUB_KEY_2",
|
# "langfuse_public_key": "os.environ/LANGFUSE_PUB_KEY_2",
|
||||||
"langfuse_secret": "os.environ/LANGFUSE_PRIVATE_KEY_2",
|
# "langfuse_secret": "os.environ/LANGFUSE_PRIVATE_KEY_2",
|
||||||
}
|
# }
|
||||||
]
|
# ]
|
||||||
proxyconfig = ProxyConfig()
|
# proxyconfig = ProxyConfig()
|
||||||
|
|
||||||
team_config = await proxyconfig.load_team_config(team_id="my-special-team")
|
# team_config = await proxyconfig.load_team_config(team_id="my-special-team")
|
||||||
assert len(team_config) > 0
|
# assert len(team_config) > 0
|
||||||
|
|
||||||
data = {
|
# data = {
|
||||||
"model": "gpt-3.5-turbo",
|
# "model": "gpt-3.5-turbo",
|
||||||
"messages": [{"role": "user", "content": "Hey, how's it going?"}],
|
# "messages": [{"role": "user", "content": "Hey, how's it going?"}],
|
||||||
}
|
# }
|
||||||
team_config.pop("team_id")
|
# team_config.pop("team_id")
|
||||||
response = litellm.completion(**{**data, **team_config})
|
# response = litellm.completion(**{**data, **team_config})
|
||||||
|
|
||||||
print(f"response: {response}")
|
# print(f"response: {response}")
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue