use default_config file to configure inference

commit d95f5f863d
parent c64b8cba22
Author: Hardik Shah
Date: 2024-07-21 19:26:11 -07:00
3 changed files with 25 additions and 15 deletions


@@ -1,5 +1,6 @@
accelerate
black==24.4.2
blobfile
codeshield
fairscale
fastapi


@@ -46,17 +46,17 @@ class InferenceConfigure(Subcommand):
         model_parallel_size,
         yaml_output_path
     ):
-        yaml_content = textwrap.dedent(f"""
-            inference_config:
-                impl_type: "inline"
-                inline_config:
-                    checkpoint_type: "pytorch"
-                    checkpoint_dir: {checkpoint_dir}/
-                    tokenizer_path: {checkpoint_dir}/tokenizer.model
-                    model_parallel_size: {model_parallel_size}
-                    max_seq_len: 2048
-                    max_batch_size: 1
-        """)
+        current_dir = os.path.dirname(os.path.abspath(__file__))
+        default_conf_path = os.path.join(current_dir, "default_configuration.yaml")
+        with open(default_conf_path, "r") as f:
+            yaml_content = f.read()
+        yaml_content = yaml_content.format(
+            checkpoint_dir=checkpoint_dir,
+            model_parallel_size=model_parallel_size,
+        )
         with open(yaml_output_path, 'w') as yaml_file:
             yaml_file.write(yaml_content.strip())
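
The new flow substitutes values with plain str.format over the template file's contents, so the template may only contain the two named placeholders; any literal braces added to it later would have to be escaped by doubling ({{ and }}). A minimal self-contained sketch of the same rendering step, using an inline copy of the template and hypothetical values rather than the packaged file:

# Minimal sketch of the rendering step; the template string mirrors
# default_configuration.yaml, and the values below are hypothetical.
template = """\
inference_config:
    impl_type: "inline"
    inline_config:
        checkpoint_type: "pytorch"
        checkpoint_dir: {checkpoint_dir}/
        tokenizer_path: {checkpoint_dir}/tokenizer.model
        model_parallel_size: {model_parallel_size}
        max_seq_len: 2048
        max_batch_size: 1
"""

rendered = template.format(
    checkpoint_dir="/models/my-checkpoint",  # hypothetical checkpoint directory
    model_parallel_size=1,                   # hypothetical parallelism degree
)
print(rendered.strip())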


@@ -0,0 +1,9 @@
+inference_config:
+    impl_type: "inline"
+    inline_config:
+        checkpoint_type: "pytorch"
+        checkpoint_dir: {checkpoint_dir}/
+        tokenizer_path: {checkpoint_dir}/tokenizer.model
+        model_parallel_size: {model_parallel_size}
+        max_seq_len: 2048
+        max_batch_size: 1
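
For reference, with the hypothetical values from the sketch above (checkpoint_dir=/models/my-checkpoint, model_parallel_size=1), str.format renders this template into the configuration written to yaml_output_path:

inference_config:
    impl_type: "inline"
    inline_config:
        checkpoint_type: "pytorch"
        checkpoint_dir: /models/my-checkpoint/
        tokenizer_path: /models/my-checkpoint/tokenizer.model
        model_parallel_size: 1
        max_seq_len: 2048
        max_batch_size: 1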