Make each inference provider into its own subdirectory

Ashwin Bharambe 2024-08-05 15:13:52 -07:00
parent f64668319c
commit 0de5a807c7
42 changed files with 123 additions and 103 deletions

@@ -18,8 +18,8 @@ def available_inference_providers() -> List[ProviderSpec]:
                 "torch",
                 "zmq",
             ],
-            module="llama_toolchain.inference.inference",
-            config_class="llama_toolchain.inference.inference.MetaReferenceImplConfig",
+            module="llama_toolchain.inference.meta_reference",
+            config_class="llama_toolchain.inference.meta_reference.MetaReferenceImplConfig",
         ),
         InlineProviderSpec(
             api=Api.inference,
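
To make the renamed paths concrete, below is a minimal sketch of a provider entry after this change. The field values are copied from the hunk above; the import location of Api and InlineProviderSpec is an assumption for illustration, not something this diff confirms.

    # Sketch of an inline inference provider entry after the move to
    # per-provider subdirectories (field values taken from the hunk above).
    from llama_toolchain.core.datatypes import Api, InlineProviderSpec  # assumed import path

    meta_reference_spec = InlineProviderSpec(
        api=Api.inference,
        pip_packages=["torch", "zmq"],  # abbreviated; the full list is not shown in this excerpt
        # The dotted module path now names the provider's own subpackage,
        # i.e. the directory llama_toolchain/inference/meta_reference/.
        module="llama_toolchain.inference.meta_reference",
        # The config class is importable from that subpackage's top level.
        config_class="llama_toolchain.inference.meta_reference.MetaReferenceImplConfig",
    )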