From 71c90636572ced72473b278c6799bd4b0b32613c Mon Sep 17 00:00:00 2001
From: Xi Yan
Date: Mon, 10 Feb 2025 21:55:36 -0800
Subject: [PATCH] clean up

---
 .../providers/remote/inference/databricks/databricks.py | 2 +-
 llama_stack/providers/utils/inference/openai_compat.py  | 4 ----
 2 files changed, 1 insertion(+), 5 deletions(-)

diff --git a/llama_stack/providers/remote/inference/databricks/databricks.py b/llama_stack/providers/remote/inference/databricks/databricks.py
index fe4865080..ee3c6e99b 100644
--- a/llama_stack/providers/remote/inference/databricks/databricks.py
+++ b/llama_stack/providers/remote/inference/databricks/databricks.py
@@ -123,7 +123,7 @@ class DatabricksInferenceAdapter(ModelRegistryHelper, Inference):
             yield chunk

         stream = _to_async_generator()
-        async for chunk in process_chat_completion_stream_response(stream, self.formatter):
+        async for chunk in process_chat_completion_stream_response(stream, self.formatter, request):
             yield chunk

     def _get_params(self, request: ChatCompletionRequest) -> dict:
diff --git a/llama_stack/providers/utils/inference/openai_compat.py b/llama_stack/providers/utils/inference/openai_compat.py
index 01e0cbb6d..2fd157cc0 100644
--- a/llama_stack/providers/utils/inference/openai_compat.py
+++ b/llama_stack/providers/utils/inference/openai_compat.py
@@ -265,12 +265,8 @@ async def process_chat_completion_stream_response(
     buffer = ""
     ipython = False
     stop_reason = None
-    from rich.pretty import pprint

     async for chunk in stream:
-        print("!! CHUNK !!")
-        pprint(chunk)
-
         choice = chunk.choices[0]
         finish_reason = choice.finish_reason