Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-27 11:43:54 +00:00)
Commit 30453bd751 (parent d57f8dbf43): fix optional params
2 changed files with 1170 additions and 8 deletions
@@ -560,8 +560,7 @@ def completion(
             )
             response = llm_model.predict(
                 endpoint=endpoint_path,
-                instances=instances,
-                **optional_params,
+                instances=instances
             ).predictions

             completion_response = response[0]
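For context, a minimal sketch of the corrected synchronous call, assuming `llm_model` is the low-level Vertex AI prediction client (`aiplatform.gapic.PredictionServiceClient`); the project and endpoint IDs are placeholders, not taken from the commit:

```python
# Sketch only; the client type and IDs are assumptions, not part of the commit.
from google.cloud import aiplatform
from google.protobuf import json_format
from google.protobuf.struct_pb2 import Value

llm_model = aiplatform.gapic.PredictionServiceClient(
    client_options={"api_endpoint": "us-central1-aiplatform.googleapis.com"}
)
endpoint_path = llm_model.endpoint_path(
    project="my-project", location="us-central1", endpoint="1234567890"
)
instances = [json_format.ParseDict({"prompt": "Hello"}, Value())]

# Pass only the arguments predict() actually accepts; no splatted generation kwargs.
response = llm_model.predict(
    endpoint=endpoint_path,
    instances=instances,
).predictions
completion_response = response[0]
```

`PredictionServiceClient.predict()` only defines `endpoint`, `instances`, `parameters`, and retry/timeout options, which is presumably why splatting arbitrary `**optional_params` into the call was removed.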
@@ -590,8 +589,7 @@ def completion(
                 f"llm_model.predict(instances={instances}, **{optional_params})\n"
             )
             response = llm_model.predict(
-                instances=instances,
-                **optional_params,
+                instances=instances
             ).predictions

             completion_response = response[0]
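The second hunk applies the same fix on the path that calls `predict` without an explicit endpoint path. A hedged sketch, assuming `llm_model` here is a high-level `google.cloud.aiplatform.Endpoint` object (project and endpoint ID are placeholders):

```python
# Sketch only; the Endpoint type and IDs are assumptions, not part of the commit.
from google.cloud import aiplatform

aiplatform.init(project="my-project", location="us-central1")
llm_model = aiplatform.Endpoint("1234567890")

instances = [{"prompt": "Hello"}]
request_str = f"llm_model.predict(instances={instances})\n"  # debug log string

# Endpoint.predict() accepts instances/parameters, not arbitrary keyword args.
response = llm_model.predict(instances=instances).predictions
completion_response = response[0]
```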
@@ -791,7 +789,6 @@ async def async_completion(
         response_obj = await llm_model.predict(
             endpoint=endpoint_path,
             instances=instances,
-            **optional_params,
         )
         response = response_obj.predictions
         completion_response = response[0]
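The remaining hunks repeat the same change in `async_completion` and `async_streaming`. A rough sketch of the awaited call, assuming an async prediction client such as `google.cloud.aiplatform_v1.PredictionServiceAsyncClient` (an assumption for illustration):

```python
# Sketch only; the async client type and endpoint path are assumptions.
from google.cloud import aiplatform_v1
from google.protobuf import json_format
from google.protobuf.struct_pb2 import Value


async def async_predict_sketch(endpoint_path: str, prompt: str):
    llm_model = aiplatform_v1.PredictionServiceAsyncClient(
        client_options={"api_endpoint": "us-central1-aiplatform.googleapis.com"}
    )
    instances = [json_format.ParseDict({"prompt": prompt}, Value())]
    # As in the diff, only endpoint and instances are forwarded to predict().
    response_obj = await llm_model.predict(
        endpoint=endpoint_path,
        instances=instances,
    )
    response = response_obj.predictions
    return response[0]
```

Running it would look like `asyncio.run(async_predict_sketch(endpoint_path, "Hello"))` against real credentials and a deployed endpoint.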
@@ -807,7 +804,6 @@ async def async_completion(
         )
         response_obj = await llm_model.predict_async(
             instances=instances,
-            **optional_params,
         )

         response = response_obj.predictions
@@ -980,7 +976,6 @@ async def async_streaming(
         response_obj = await llm_model.predict(
             endpoint=endpoint_path,
             instances=instances,
-            **optional_params,
         )

         response = response_obj.predictions
@@ -999,7 +994,6 @@ async def async_streaming(
         request_str += f"llm_model.predict_async(instances={instances}, **{optional_params})\n"
         response_obj = await llm_model.predict_async(
             instances=instances,
-            **optional_params,
         )
         response = response_obj.predictions
         completion_response = response[0]
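The commit only removes the splatted keyword arguments; it does not show where the generation settings end up. Purely for illustration, one common Vertex AI pattern is to pass such settings through the `parameters` argument of `predict()` instead of as extra keyword arguments; nothing below is taken from the commit:

```python
# Hypothetical illustration, not taken from the commit: generation settings as
# the structured `parameters` argument instead of splatted keyword arguments.
from google.cloud import aiplatform

aiplatform.init(project="my-project", location="us-central1")
endpoint = aiplatform.Endpoint("1234567890")

optional_params = {"temperature": 0.2, "maxOutputTokens": 256}
prediction = endpoint.predict(
    instances=[{"prompt": "Hello"}],
    parameters=optional_params,
)
print(prediction.predictions[0])
```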