VertexAI non-jsonl file storage support (#9781)
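A minimal usage sketch of what this change enables, assuming litellm.create_file routes to the new Vertex AI file handler when custom_llm_provider="vertex_ai" is passed; the file name and purpose below are illustrative, not values taken from this PR:

import litellm

# Upload a non-JSONL file to the Vertex AI (GCS-backed) files endpoint.
# "user_data.txt" and purpose="batch" are illustrative placeholders.
file_obj = litellm.create_file(
    file=open("user_data.txt", "rb"),
    purpose="batch",
    custom_llm_provider="vertex_ai",
)
print(file_obj.id)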

* test: add initial e2e test

* fix(vertex_ai/files): initial commit adding sync file create support

* refactor: initial commit of vertex ai non-jsonl files reaching gcp endpoint

* fix(vertex_ai/files/transformation.py): initial working commit of non-jsonl file call reaching backend endpoint

* fix(vertex_ai/files/transformation.py): working e2e non-jsonl file upload

* test: working e2e jsonl call

* test: unit testing for jsonl file creation

* fix(vertex_ai/transformation.py): reset file pointer after read

allow multiple reads on the same file object (see the sketch after this list)

* fix: fix linting errors

* fix: fix ruff linting errors

* fix: fix import

* fix: fix linting error

* fix: fix linting error

* fix(vertex_ai/files/transformation.py): fix linting error

* test: update test

* test: update tests

* fix: fix linting errors

* fix: fix test

* fix: fix linting error
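A minimal sketch of the "reset file pointer after read" fix referenced above, assuming the transformation reads the uploaded file's bytes before handing the same object to later code; the helper name _read_file_content is hypothetical, not the exact function from this PR:

from io import IOBase
from typing import Union


def _read_file_content(file: Union[bytes, IOBase]) -> bytes:
    """Read a file object's bytes, then rewind it so it can be read again.

    Hypothetical helper illustrating the fix; not the PR's actual code.
    """
    if isinstance(file, bytes):
        return file
    content = file.read()
    # Without this seek(0), a second .read() on the same object returns b"",
    # which is the behavior the commit above fixes.
    file.seek(0)
    return content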
commit 6ba3c4a4f8 (parent 93532e00db)
Krish Dholakia, 2025-04-09 14:01:48 -07:00, committed by GitHub
64 changed files with 780 additions and 185 deletions

@@ -96,6 +96,7 @@ class SagemakerLLM(BaseAWSLLM):
         model: str,
         data: dict,
         messages: List[AllMessageValues],
+        litellm_params: dict,
         optional_params: dict,
         aws_region_name: str,
         extra_headers: Optional[dict] = None,
@@ -122,6 +123,7 @@ class SagemakerLLM(BaseAWSLLM):
             model=model,
             messages=messages,
             optional_params=optional_params,
+            litellm_params=litellm_params,
         )
         request = AWSRequest(
             method="POST", url=api_base, data=encoded_data, headers=headers
@@ -198,6 +200,7 @@ class SagemakerLLM(BaseAWSLLM):
             data=data,
             messages=messages,
             optional_params=optional_params,
+            litellm_params=litellm_params,
             credentials=credentials,
             aws_region_name=aws_region_name,
         )
@@ -274,6 +277,7 @@ class SagemakerLLM(BaseAWSLLM):
             "model": model,
             "data": _data,
             "optional_params": optional_params,
+            "litellm_params": litellm_params,
             "credentials": credentials,
             "aws_region_name": aws_region_name,
             "messages": messages,
@@ -426,6 +430,7 @@ class SagemakerLLM(BaseAWSLLM):
             "model": model,
             "data": data,
             "optional_params": optional_params,
+            "litellm_params": litellm_params,
             "credentials": credentials,
             "aws_region_name": aws_region_name,
             "messages": messages,
@@ -496,6 +501,7 @@ class SagemakerLLM(BaseAWSLLM):
             "model": model,
             "data": data,
             "optional_params": optional_params,
+            "litellm_params": litellm_params,
             "credentials": credentials,
             "aws_region_name": aws_region_name,
             "messages": messages,