Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-26 11:14:04 +00:00
build(pyproject.toml): add new dev dependencies - for type checking (#9631)
* build(pyproject.toml): add new dev dependencies - for type checking
* build: reformat files to fit black
* ci: reformat to fit black
* ci(test-litellm.yml): make tests run clear
* build(pyproject.toml): add ruff
* fix: fix ruff checks
* build(mypy/): fix mypy linting errors
* fix(hashicorp_secret_manager.py): fix passing cert for tls auth
* build(mypy/): resolve all mypy errors
* test: update test
* fix: fix black formatting
* build(pre-commit-config.yaml): use poetry run black
* fix(proxy_server.py): fix linting error
* fix: fix ruff safe representation error
This commit is contained in: parent 72198737f8, commit d7b294dd0a
214 changed files with 1553 additions and 1433 deletions
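The mypy items in the message above typically force changes of the following shape. This is a generic, invented sketch, not code from this PR's diff: annotate values that may be absent as Optional and narrow them before use.

from typing import Dict, Optional

def get_region(config: Dict[str, str]) -> Optional[str]:
    # dict.get() returns Optional[str]; declare that in the signature
    # rather than letting callers assume a str always comes back.
    return config.get("aws_region_name")

def require_region(config: Dict[str, str]) -> str:
    region = get_region(config)
    if region is None:
        # Narrowing the Optional satisfies mypy and fails loudly at runtime.
        raise ValueError("aws_region_name is not set")
    return region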
@@ -496,9 +496,9 @@ class BedrockLLM(BaseAWSLLM):
                         content=None,
                     )
                     model_response.choices[0].message = _message  # type: ignore
-                    model_response._hidden_params["original_response"] = (
-                        outputText  # allow user to access raw anthropic tool calling response
-                    )
+                    model_response._hidden_params[
+                        "original_response"
+                    ] = outputText  # allow user to access raw anthropic tool calling response
                 if (
                     _is_function_call is True
                     and stream is not None
@@ -806,9 +806,9 @@ class BedrockLLM(BaseAWSLLM):
                     ):  # completion(top_k=3) > anthropic_config(top_k=3) <- allows for dynamic variables to be passed in
                         inference_params[k] = v
                 if stream is True:
-                    inference_params["stream"] = (
-                        True  # cohere requires stream = True in inference params
-                    )
+                    inference_params[
+                        "stream"
+                    ] = True  # cohere requires stream = True in inference params
                 data = json.dumps({"prompt": prompt, **inference_params})
             elif provider == "anthropic":
                 if model.startswith("anthropic.claude-3"):
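Both reformatting hunks above are the same mechanical change: an assignment whose parenthesized right-hand side carried a trailing comment is reflowed so that the subscript is split instead. Which shape a given black release prefers is version-dependent, so take this standalone before/after, built on an invented dict, as an illustration of the two forms rather than a rule:

settings = {}

# Shape removed by the hunks: right-hand side wrapped in parentheses.
settings["stream"] = (
    True  # provider requires stream = True in inference params
)

# Shape added by the hunks: the subscript is split across lines instead.
settings[
    "stream"
] = True  # provider requires stream = True in inference params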
@@ -1205,7 +1205,6 @@ class BedrockLLM(BaseAWSLLM):
 def get_response_stream_shape():
     global _response_stream_shape_cache
     if _response_stream_shape_cache is None:
-
         from botocore.loaders import Loader
         from botocore.model import ServiceModel
 
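The hunk above trims a blank line inside litellm's lazy shape loader. For context, the full load-once-and-cache pattern looks roughly like this sketch; the "bedrock-runtime" and "ResponseStream" strings are assumptions for illustration, not read from the diff:

from typing import Any, Optional

_response_stream_shape_cache: Optional[Any] = None

def get_response_stream_shape() -> Any:
    global _response_stream_shape_cache
    if _response_stream_shape_cache is None:
        # Deferred imports: botocore is only touched on the first call.
        from botocore.loaders import Loader
        from botocore.model import ServiceModel

        loader = Loader()
        # Load the service definition JSON bundled with botocore.
        service_dict = loader.load_service_model("bedrock-runtime", "service-2")
        service_model = ServiceModel(service_dict)
        # Cache the shape so repeated calls skip the disk load and parse.
        _response_stream_shape_cache = service_model.shape_for("ResponseStream")
    return _response_stream_shape_cache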
@@ -1539,7 +1538,6 @@ class AmazonDeepSeekR1StreamDecoder(AWSEventStreamDecoder):
         model: str,
         sync_stream: bool,
     ) -> None:
-
         super().__init__(model=model)
         from litellm.llms.bedrock.chat.invoke_transformations.amazon_deepseek_transformation import (
             AmazonDeepseekR1ResponseIterator,
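The function-local import in this last hunk defers a dependency until a decoder is actually constructed, a common way to avoid paying the import cost at module load or to break an import cycle. A self-contained sketch of the pattern with invented stand-in names (these are not litellm's real classes):

class EventStreamDecoder:
    # Stand-in for AWSEventStreamDecoder; illustrative only.
    def __init__(self, model: str) -> None:
        self.model = model

class DeepSeekStreamDecoder(EventStreamDecoder):
    def __init__(self, model: str, sync_stream: bool) -> None:
        super().__init__(model=model)
        # The import runs at construction time, not at module import, so
        # modules that merely import this file never pull in (or cycle
        # into) the deferred dependency.
        from json import JSONDecoder  # stand-in for the deferred module

        self.sync_stream = sync_stream
        self._decoder = JSONDecoder()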