forked from phoenix/litellm-mirror
async anthropic streaming
This commit is contained in:
parent 8e5e99533b
commit 7849c29f70
1 changed file with 4 additions and 1 deletion
@@ -8710,7 +8710,9 @@ class CustomStreamWrapper:
         return hold, curr_chunk

     def handle_anthropic_chunk(self, chunk):
-        str_line = chunk.decode("utf-8")  # Convert bytes to string
+        str_line = chunk
+        if isinstance(chunk, bytes):  # Handle binary data
+            str_line = chunk.decode("utf-8")  # Convert bytes to string
         text = ""
         is_finished = False
         finish_reason = None
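The hunk above makes handle_anthropic_chunk tolerant of both payload types: a synchronous HTTP stream hands back raw bytes, while the async streaming path can already deliver decoded str chunks. A minimal, standalone sketch of that normalization is below; the helper name normalize_anthropic_chunk is illustrative and not part of litellm, where the same logic lives inside CustomStreamWrapper.handle_anthropic_chunk.

    # Sketch only: normalize a streamed chunk to text, decoding only when
    # the chunk arrives as bytes (sync path) and passing str through (async path).
    from typing import Union

    def normalize_anthropic_chunk(chunk: Union[bytes, str]) -> str:
        str_line = chunk
        if isinstance(chunk, bytes):  # Handle binary data
            str_line = chunk.decode("utf-8")  # Convert bytes to string
        return str_line

    # Both streaming paths now yield the same text.
    assert normalize_anthropic_chunk(b'data: {"type": "ping"}') == 'data: {"type": "ping"}'
    assert normalize_anthropic_chunk('data: {"type": "ping"}') == 'data: {"type": "ping"}'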
@@ -9970,6 +9972,7 @@ class CustomStreamWrapper:
                 or self.custom_llm_provider == "custom_openai"
                 or self.custom_llm_provider == "text-completion-openai"
                 or self.custom_llm_provider == "azure_text"
+                or self.custom_llm_provider == "anthropic"
                 or self.custom_llm_provider == "huggingface"
                 or self.custom_llm_provider == "ollama"
                 or self.custom_llm_provider == "ollama_chat"
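For context, the condition extended in this second hunk gates which providers are routed through the same streaming branch, and the commit adds "anthropic" to that chain of comparisons. A simplified, hypothetical rendering as a set membership test is sketched below; the set and function names are illustrative, not litellm API, and the set covers only the providers visible in this hunk (the full condition in litellm may list more).

    # Sketch only: the chained `or` comparisons expressed as one membership check.
    PROVIDERS_ON_THIS_STREAMING_PATH = {
        "custom_openai",
        "text-completion-openai",
        "azure_text",
        "anthropic",  # added by this commit
        "huggingface",
        "ollama",
        "ollama_chat",
    }

    def routes_through_streaming_branch(custom_llm_provider: str) -> bool:
        return custom_llm_provider in PROVIDERS_ON_THIS_STREAMING_PATH

    assert routes_through_streaming_branch("anthropic")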