Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-26 19:24:27 +00:00)

test - async ft jobs

commit c9bea3a879 (parent 106626f224)
3 changed files with 54 additions and 3 deletions
@@ -319,7 +319,7 @@ async def alist_fine_tuning_jobs(
         # Use a partial function to pass your keyword arguments
         func = partial(
-            cancel_fine_tuning_job,
+            list_fine_tuning_jobs,
             after,
             limit,
             custom_llm_provider,
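Note on the first hunk: the async wrapper was binding the wrong function (cancel_fine_tuning_job instead of list_fine_tuning_jobs) before dispatching it. Below is a minimal sketch of the partial-plus-executor pattern this code relies on; the executor plumbing and placeholder names are assumptions for illustration, not copied from the litellm source.

# Sketch of the wrapper pattern the hunk fixes, with placeholder names.
import asyncio
from functools import partial


def list_fine_tuning_jobs(after=None, limit=None, custom_llm_provider="openai", **kwargs):
    # stand-in for the real synchronous implementation
    return []


async def alist_fine_tuning_jobs(after=None, limit=None, custom_llm_provider="openai", **kwargs):
    loop = asyncio.get_event_loop()
    # The line the hunk fixes: bind the *list* call, not the cancel call.
    func = partial(list_fine_tuning_jobs, after, limit, custom_llm_provider, **kwargs)
    return await loop.run_in_executor(None, func)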
@@ -53,7 +53,9 @@ class OpenAIFineTuningAPI(BaseLLM):
         create_fine_tuning_job_data: FineTuningJobCreate,
         openai_client: AsyncOpenAI,
     ) -> FineTuningJob:
-        response = await openai_client.batches.create(**create_fine_tuning_job_data)
+        response = await openai_client.fine_tuning.jobs.create(
+            **create_fine_tuning_job_data
+        )
         return response

     def create_fine_tuning_job(
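Note on the second hunk: the async handler was hitting the Batches endpoint; fine-tuning jobs are created through client.fine_tuning.jobs.create in the OpenAI Python SDK (v1.x). A standalone sketch of the corrected call, assuming OPENAI_API_KEY is set and using a placeholder training file id:

import asyncio
from openai import AsyncOpenAI


async def main():
    client = AsyncOpenAI()  # reads OPENAI_API_KEY from the environment
    job = await client.fine_tuning.jobs.create(
        model="gpt-3.5-turbo-0125",
        training_file="file-abc123",  # placeholder file id
    )
    print(job.id, job.status)


asyncio.run(main())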
@@ -45,7 +45,8 @@ def test_create_fine_tune_job():
     print("listing ft jobs")
     ft_jobs = litellm.list_fine_tuning_jobs(limit=2)
     print("response from litellm.list_fine_tuning_jobs=", ft_jobs)
-    assert len(ft_jobs) > 0
+
+    assert len(list(ft_jobs)) > 0

     # delete file
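Note on the third hunk: the jobs response is a cursor-style page object that is iterable but may not support len() directly, so the test materializes it before asserting. The pattern, with a stand-in iterator in place of the real response:

# fake_page stands in for the paginated jobs response; a bare iterator has no __len__.
fake_page = iter([{"id": "ftjob-1"}, {"id": "ftjob-2"}])

jobs = list(fake_page)   # len(fake_page) would raise TypeError
assert len(jobs) > 0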
@@ -63,3 +64,51 @@ def test_create_fine_tune_job():
     assert response.status == "cancelled"
     assert response.id == create_fine_tuning_response.id
     pass
+
+
+@pytest.mark.asyncio
+async def test_create_fine_tune_jobs_async():
+    verbose_logger.setLevel(logging.DEBUG)
+    file_name = "openai_batch_completions.jsonl"
+    _current_dir = os.path.dirname(os.path.abspath(__file__))
+    file_path = os.path.join(_current_dir, file_name)
+
+    file_obj = await litellm.acreate_file(
+        file=open(file_path, "rb"),
+        purpose="fine-tune",
+        custom_llm_provider="openai",
+    )
+    print("Response from creating file=", file_obj)
+
+    create_fine_tuning_response = await litellm.acreate_fine_tuning_job(
+        model="gpt-3.5-turbo-0125",
+        training_file=file_obj.id,
+    )
+
+    print("response from litellm.create_fine_tuning_job=", create_fine_tuning_response)
+
+    assert create_fine_tuning_response.id is not None
+    assert create_fine_tuning_response.model == "gpt-3.5-turbo-0125"
+
+    # list fine tuning jobs
+    print("listing ft jobs")
+    ft_jobs = await litellm.alist_fine_tuning_jobs(limit=2)
+    print("response from litellm.list_fine_tuning_jobs=", ft_jobs)
+    assert len(list(ft_jobs)) > 0
+
+    # delete file
+
+    await litellm.afile_delete(
+        file_id=file_obj.id,
+    )
+
+    # cancel ft job
+    response = await litellm.acancel_fine_tuning_job(
+        fine_tuning_job_id=create_fine_tuning_response.id,
+    )
+
+    print("response from litellm.cancel_fine_tuning_job=", response)
+
+    assert response.status == "cancelled"
+    assert response.id == create_fine_tuning_response.id
+    pass
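The new async test runs create file -> create job -> list jobs -> delete file -> cancel job in sequence. The same flow as a standalone script is sketched below; it is an assumption-laden sketch that expects OPENAI_API_KEY to be set and openai_batch_completions.jsonl to sit next to the script, and it moves cleanup into a finally block so the job is cancelled even if an assert fails.

import asyncio
import litellm


async def run_ft_smoke_test():
    file_obj = await litellm.acreate_file(
        file=open("openai_batch_completions.jsonl", "rb"),
        purpose="fine-tune",
        custom_llm_provider="openai",
    )
    job = await litellm.acreate_fine_tuning_job(
        model="gpt-3.5-turbo-0125",
        training_file=file_obj.id,
    )
    try:
        jobs = await litellm.alist_fine_tuning_jobs(limit=2)
        assert len(list(jobs)) > 0
    finally:
        # clean up even if the assert above fails
        await litellm.acancel_fine_tuning_job(fine_tuning_job_id=job.id)
        await litellm.afile_delete(file_id=file_obj.id)


asyncio.run(run_ft_smoke_test())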