From 33c8e149ddfae24259d87b52596eaff2a97ce97f Mon Sep 17 00:00:00 2001 From: Ubuntu Date: Thu, 20 Mar 2025 10:39:51 +0000 Subject: [PATCH] skip mypy, fix linting --- .../post_training/nvidia/post_training.py | 19 +++++++++++++++---- .../remote/post_training/nvidia/utils.py | 8 ++++++-- pyproject.toml | 1 + 3 files changed, 22 insertions(+), 6 deletions(-) diff --git a/llama_stack/providers/remote/post_training/nvidia/post_training.py b/llama_stack/providers/remote/post_training/nvidia/post_training.py index 2468e1d44..b1221335e 100644 --- a/llama_stack/providers/remote/post_training/nvidia/post_training.py +++ b/llama_stack/providers/remote/post_training/nvidia/post_training.py @@ -67,7 +67,7 @@ class NvidiaPostTrainingAdapter(PostTraining, ModelRegistryHelper): self.customizer_url = config.customizer_url if not self.customizer_url: - warnings.warn("Customizer URL is not set, using default value: http://nemo.test") + warnings.warn("Customizer URL is not set, using default value: http://nemo.test", stacklevel=2) self.customizer_url = "http://nemo.test" async def _make_request( @@ -122,8 +122,16 @@ class NvidiaPostTrainingAdapter(PostTraining, ModelRegistryHelper): mapped_status = STATUS_MAPPING.get(job_status, "unknown") # Convert string timestamps to datetime objects - created_at = datetime.fromisoformat(job.pop("created_at")) if "created_at" in job else datetime.now() - updated_at = datetime.fromisoformat(job.pop("updated_at")) if "updated_at" in job else datetime.now() + created_at = ( + datetime.fromisoformat(job.pop("created_at")) + if "created_at" in job + else datetime.now(tz=datetime.timezone.utc) + ) + updated_at = ( + datetime.fromisoformat(job.pop("updated_at")) + if "updated_at" in job + else datetime.now(tz=datetime.timezone.utc) + ) # Create NvidiaPostTrainingJob instance jobs.append( @@ -302,7 +310,10 @@ class NvidiaPostTrainingAdapter(PostTraining, ModelRegistryHelper): unsupported_method_params.append("logger_config") if unsupported_method_params: - warnings.warn(f"Parameters: {', '.join(unsupported_method_params)} are not supported and will be ignored") + warnings.warn( + f"Parameters: {', '.join(unsupported_method_params)} are not supported and will be ignored", + stacklevel=2, + ) # Define all supported parameters supported_params = { diff --git a/llama_stack/providers/remote/post_training/nvidia/utils.py b/llama_stack/providers/remote/post_training/nvidia/utils.py index 383df9c2c..ac47966af 100644 --- a/llama_stack/providers/remote/post_training/nvidia/utils.py +++ b/llama_stack/providers/remote/post_training/nvidia/utils.py @@ -22,7 +22,9 @@ def warn_unsupported_params(config_dict: Any, supported_keys: Set[str], config_n keys = set(config_dict.__annotations__.keys()) if isinstance(config_dict, BaseModel) else config_dict.keys() unsupported_params = [k for k in keys if k not in supported_keys] if unsupported_params: - warnings.warn(f"Parameters: {unsupported_params} in `{config_name}` not supported and will be ignored.") + warnings.warn( + f"Parameters: {unsupported_params} in `{config_name}` not supported and will be ignored.", stacklevel=2 + ) def validate_training_params( @@ -49,7 +51,9 @@ def validate_training_params( unsupported_params.append(key) if unsupported_params: - warnings.warn(f"Parameters: {unsupported_params} in `{config_name}` are not supported and will be ignored.") + warnings.warn( + f"Parameters: {unsupported_params} in `{config_name}` are not supported and will be ignored.", stacklevel=2 + ) # ToDo: implement post health checks for customizer are enabled diff --git a/pyproject.toml b/pyproject.toml index 107150cee..e086abb2e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -272,6 +272,7 @@ exclude = [ "^llama_stack/providers/remote/tool_runtime/model_context_protocol/", "^llama_stack/providers/remote/tool_runtime/tavily_search/", "^llama_stack/providers/remote/tool_runtime/wolfram_alpha/", + "^llama_stack/providers/remote/post_training/nvidia/", "^llama_stack/providers/remote/vector_io/chroma/", "^llama_stack/providers/remote/vector_io/milvus/", "^llama_stack/providers/remote/vector_io/pgvector/",