Merge pull request #3587 from BerriAI/litellm_proxy_use_batch_completions_model_csv

[Feat] Use csv values for proxy batch completions (OpenAI Python compatible)
This commit is contained in:
Ishaan Jaff 2024-05-13 07:55:12 -07:00 committed by GitHub
commit 514c5737f8
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
3 changed files with 104 additions and 8 deletions

View file

@@ -3698,8 +3698,9 @@ async def chat_completion(
# skip router if user passed their key
if "api_key" in data:
tasks.append(litellm.acompletion(**data))
elif isinstance(data["model"], list) and llm_router is not None:
_models = data.pop("model")
elif "," in data["model"] and llm_router is not None:
_models_csv_string = data.pop("model")
_models = _models_csv_string.split(",")
tasks.append(llm_router.abatch_completion(models=_models, **data))
elif "user_config" in data:
# initialize a new router instance. make request using this Router