Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-07-29 07:14:20 +00:00)
commit f93f7f0199 (parent 7bbce6394a)

    Fix pre-commit

    Signed-off-by: Yuan Tang <terrytangyuan@gmail.com>

2 changed files with 8 additions and 10 deletions
@@ -9,9 +9,7 @@ from .vllm import VLLMInferenceAdapter


 async def get_adapter_impl(config: VLLMImplConfig, _deps):
-    assert isinstance(
-        config, VLLMImplConfig
-    ), f"Unexpected config type: {type(config)}"
+    assert isinstance(config, VLLMImplConfig), f"Unexpected config type: {type(config)}"
     impl = VLLMInferenceAdapter(config)
     await impl.initialize()
     return impl
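The first hunk is the pre-commit formatter collapsing a multi-line assert onto one line; the factory's behavior is unchanged. For readers unfamiliar with the pattern, here is a self-contained, runnable sketch of the same async-factory flow (validate the config type, construct the adapter, await initialize(), return it). The classes are local stand-ins, and the config's url field is an illustrative assumption, not something taken from this commit:

    # Stand-in sketch of the async factory pattern used by get_adapter_impl().
    import asyncio
    from dataclasses import dataclass


    @dataclass
    class VLLMImplConfig:  # stand-in for the adapter's real config
        url: str = "http://localhost:8000"  # assumed field, for illustration


    class VLLMInferenceAdapter:  # stand-in for the real adapter
        def __init__(self, config: VLLMImplConfig):
            self.config = config

        async def initialize(self) -> None:
            # The real adapter would set up its client/connection here.
            pass


    async def get_adapter_impl(config: VLLMImplConfig, _deps) -> VLLMInferenceAdapter:
        # Same shape as the diff: type-check, construct, initialize, return.
        assert isinstance(config, VLLMImplConfig), f"Unexpected config type: {type(config)}"
        impl = VLLMInferenceAdapter(config)
        await impl.initialize()
        return impl


    asyncio.run(get_adapter_impl(VLLMImplConfig(), None))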
@@ -50,12 +50,12 @@ class VLLMInferenceAdapter(ModelRegistryHelper, Inference):
         pass

     def completion(
-        self,
-        model: str,
-        content: InterleavedTextMedia,
-        sampling_params: Optional[SamplingParams] = SamplingParams(),
-        stream: Optional[bool] = False,
-        logprobs: Optional[LogProbConfig] = None,
+        self,
+        model: str,
+        content: InterleavedTextMedia,
+        sampling_params: Optional[SamplingParams] = SamplingParams(),
+        stream: Optional[bool] = False,
+        logprobs: Optional[LogProbConfig] = None,
     ) -> Union[CompletionResponse, CompletionResponseStreamChunk]:
         raise NotImplementedError()
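The second hunk's removed and added lines render identically, consistent with a whitespace-only re-indentation from the pre-commit hooks. The method itself still raises NotImplementedError, but its signature commits to a dual return type: either a full CompletionResponse or a stream of CompletionResponseStreamChunks, selected by the stream flag. A self-contained, runnable sketch of that pattern, using local stand-in classes rather than llama-stack's actual types:

    # Sketch of completion()'s dual return type: one call either returns a
    # single response or an async stream of chunks. All classes here are
    # local stand-ins, not llama-stack's real types.
    import asyncio
    from dataclasses import dataclass
    from typing import AsyncIterator, Optional, Union


    @dataclass
    class CompletionResponse:  # stand-in
        text: str


    @dataclass
    class CompletionResponseStreamChunk:  # stand-in
        delta: str


    async def _stream_chunks(text: str) -> AsyncIterator[CompletionResponseStreamChunk]:
        for token in text.split():
            yield CompletionResponseStreamChunk(delta=token)


    def completion(
        content: str,
        stream: Optional[bool] = False,
    ) -> Union[CompletionResponse, AsyncIterator[CompletionResponseStreamChunk]]:
        if stream:
            return _stream_chunks(content)
        return CompletionResponse(text=content)


    async def main() -> None:
        print(completion("hello world"))  # single response object
        async for chunk in completion("hello world", stream=True):
            print(chunk)  # one chunk per token

    asyncio.run(main())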