# What does this PR do?

Adds a `**kwargs: Any` pass-through parameter to the `openai_completion` and `openai_chat_completion` methods of the `InferenceProvider` protocol, so that provider-specific options (e.g., `chat_template_kwargs` for vLLM) can be forwarded to the backend as `extra_body`. The new parameter is documented in both method docstrings.

## Test Plan
Author: Eric Huang, 2025-10-08 13:54:19 -07:00
Commit: 521009048a (parent: 96886afaca)
207 changed files with 71733 additions and 2042 deletions


@@ -1052,6 +1052,7 @@ class InferenceProvider(Protocol):
         prompt_logprobs: int | None = None,
         # for fill-in-the-middle type completion
         suffix: str | None = None,
+        **kwargs: Any,
     ) -> OpenAICompletion:
         """Create completion.
@@ -1075,6 +1076,7 @@ class InferenceProvider(Protocol):
         :param top_p: (Optional) The top p to use.
         :param user: (Optional) The user to use.
         :param suffix: (Optional) The suffix that should be appended to the completion.
+        :param kwargs: (Optional) Additional provider-specific parameters to pass through as extra_body.
         :returns: An OpenAICompletion.
         """
         ...
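
For context, here is a minimal sketch of how an OpenAI-compatible adapter could forward the new `**kwargs` as `extra_body`. This is an illustrative assumption, not the adapter code changed in this PR: the class and method names are hypothetical, while `AsyncOpenAI` and its `extra_body` parameter come from the `openai` SDK.

```python
from typing import Any

from openai import AsyncOpenAI


class ExampleOpenAICompatAdapter:
    """Illustrative sketch only; not the provider code touched by this PR."""

    def __init__(self, base_url: str, api_key: str) -> None:
        self._client = AsyncOpenAI(base_url=base_url, api_key=api_key)

    async def openai_completion(
        self,
        model: str,
        prompt: str,
        suffix: str | None = None,
        **kwargs: Any,
    ) -> Any:
        # Typed parameters map onto the standard OpenAI fields; anything else the
        # caller passed is forwarded verbatim via extra_body so provider-specific
        # options reach the backend unchanged.
        return await self._client.completions.create(
            model=model,
            prompt=prompt,
            suffix=suffix,
            extra_body=kwargs or None,
        )
```
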
@@ -1106,6 +1108,7 @@ class InferenceProvider(Protocol):
         top_logprobs: int | None = None,
         top_p: float | None = None,
         user: str | None = None,
+        **kwargs: Any,
     ) -> OpenAIChatCompletion | AsyncIterator[OpenAIChatCompletionChunk]:
         """Create chat completions.
@@ -1134,6 +1137,7 @@ class InferenceProvider(Protocol):
         :param top_logprobs: (Optional) The top log probabilities to use.
         :param top_p: (Optional) The top p to use.
         :param user: (Optional) The user to use.
+        :param kwargs: (Optional) Additional provider-specific parameters to pass through as extra_body (e.g., chat_template_kwargs for vLLM).
         :returns: An OpenAIChatCompletion.
         """
         ...
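
And a hedged caller-side sketch of the same pass-through for chat completions: the `provider` object, the message payload, and the `enable_thinking` value are assumptions made for illustration, while `chat_template_kwargs` is the vLLM-specific field named in the docstring above.

```python
from typing import Any


async def ask(provider: Any, model: str) -> None:
    # `provider` stands for any concrete InferenceProvider implementation
    # (for example a remote vLLM adapter); it is not defined in this diff.
    response = await provider.openai_chat_completion(
        model=model,
        messages=[{"role": "user", "content": "Summarize the release notes."}],
        # Keyword arguments beyond the typed parameters travel through **kwargs
        # and reach the backend as extra_body; chat_template_kwargs is the
        # vLLM example from the docstring, with an assumed payload.
        chat_template_kwargs={"enable_thinking": False},
    )
    print(response.choices[0].message.content)
```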