fix(proxy_server.py): fix model check for /v1/models endpoint when team has restricted access

This commit is contained in:
Krrish Dholakia 2024-05-25 13:02:03 -07:00
parent 3c961136ea
commit 25a2f00db6
5 changed files with 134 additions and 36 deletions

View file

@@ -11441,11 +11441,8 @@ class CustomStreamWrapper:
self.response_id = original_chunk.id
if len(original_chunk.choices) > 0:
delta = original_chunk.choices[0].delta
if (
delta is not None and (
delta.function_call is not None
or delta.tool_calls is not None
)
if delta is not None and (
delta.function_call is not None or delta.tool_calls is not None
):
try:
model_response.system_fingerprint = (
@@ -11506,7 +11503,11 @@
model_response.choices[0].delta = Delta()
else:
try:
delta = dict() if original_chunk.choices[0].delta is None else dict(original_chunk.choices[0].delta)
delta = (
dict()
if original_chunk.choices[0].delta is None
else dict(original_chunk.choices[0].delta)
)
print_verbose(f"original delta: {delta}")
model_response.choices[0].delta = Delta(**delta)
print_verbose(
@@ -12256,7 +12257,7 @@ def trim_messages(
return messages
def get_valid_models():
def get_valid_models() -> List[str]:
"""
Returns a list of valid LLMs based on the set environment variables