Fix typos in some files

This commit is contained in:
khanhkhanhlele 2025-11-07 19:09:31 +07:00
parent e894e36eea
commit 0c13649b7b
3 changed files with 3 additions and 3 deletions

View file

@@ -146,7 +146,7 @@ class MetaReferenceInferenceImpl(
def check_model(self, request) -> None: def check_model(self, request) -> None:
if self.model_id is None or self.llama_model is None: if self.model_id is None or self.llama_model is None:
raise RuntimeError( raise RuntimeError(
"No avaible model yet, please register your requested model or add your model in the resouces first" "No available model yet, please register your requested model or add your model in the resources first"
) )
elif request.model != self.model_id: elif request.model != self.model_id:
raise RuntimeError(f"Model mismatch: request model: {request.model} != loaded model: {self.model_id}") raise RuntimeError(f"Model mismatch: request model: {request.model} != loaded model: {self.model_id}")

View file

@@ -91,7 +91,7 @@ class TorchtuneCheckpointer:
if checkpoint_format == "meta" or checkpoint_format is None: if checkpoint_format == "meta" or checkpoint_format is None:
self._save_meta_format_checkpoint(model_file_path, state_dict, adapter_only) self._save_meta_format_checkpoint(model_file_path, state_dict, adapter_only)
elif checkpoint_format == "huggingface": elif checkpoint_format == "huggingface":
# Note: for saving hugging face format checkpoints, we only suppport saving adapter weights now # Note: for saving hugging face format checkpoints, we only support saving adapter weights now
self._save_hf_format_checkpoint(model_file_path, state_dict) self._save_hf_format_checkpoint(model_file_path, state_dict)
else: else:
raise ValueError(f"Unsupported checkpoint format: {format}") raise ValueError(f"Unsupported checkpoint format: {format}")

View file

@@ -25,7 +25,7 @@ def llama_stack_instruct_to_torchtune_instruct(
) )
input_messages = json.loads(sample[ColumnName.chat_completion_input.value]) input_messages = json.loads(sample[ColumnName.chat_completion_input.value])
assert len(input_messages) == 1, "llama stack intruct dataset format only supports 1 user message" assert len(input_messages) == 1, "llama stack instruct dataset format only supports 1 user message"
input_message = input_messages[0] input_message = input_messages[0]
assert "content" in input_message, "content not found in input message" assert "content" in input_message, "content not found in input message"