diff --git a/docs/source/distributions/index.md b/docs/source/distributions/index.md
index bedc9706e..4b66a5fc8 100644
--- a/docs/source/distributions/index.md
+++ b/docs/source/distributions/index.md
@@ -1,5 +1,4 @@
-# Llama Stack Distributions
-
+# Building Llama Stacks
 ```{toctree}
 :maxdepth: 2
 
@@ -12,6 +11,7 @@ ondevice_distro/index
 
 ## Introduction
 Llama Stack Distributions are pre-built Docker containers/Conda environments that assemble APIs and Providers to provide a consistent whole to the end application developer.
+
 These distributions allow you to mix-and-match providers - some could be backed by local code and some could be remote.
 This flexibility enables you to choose the optimal setup for your use case, such as serving a small model locally while using a cloud provider for larger models, all while maintaining a consistent API interface for your application.
 
diff --git a/docs/source/getting_started/index.md b/docs/source/getting_started/index.md
index 5875f2776..72f651fd5 100644
--- a/docs/source/getting_started/index.md
+++ b/docs/source/getting_started/index.md
@@ -149,7 +149,6 @@ if __name__ == "__main__":
 
 ## Next Steps
 
-- You can mix and match different providers for inference, memory, agents, evals etc. See [Building custom distributions](../distributions/index.md)
-- [Developer Cookbook](developer_cookbook.md)
+You can mix and match different providers for inference, memory, agents, evals etc. See [Building Llama Stacks](../distributions/index.md)
 
 For example applications and more detailed tutorials, visit our [llama-stack-apps](https://github.com/meta-llama/llama-stack-apps/tree/main/examples) repository.
diff --git a/llama_stack/providers/inline/meta_reference/telemetry/config.py b/llama_stack/providers/inline/meta_reference/telemetry/config.py
index 34d5bc08e..a1db1d4d8 100644
--- a/llama_stack/providers/inline/meta_reference/telemetry/config.py
+++ b/llama_stack/providers/inline/meta_reference/telemetry/config.py
@@ -18,4 +18,4 @@ class LogFormat(Enum):
 
 @json_schema_type
 class ConsoleConfig(BaseModel):
-    log_format: LogFormat = LogFormat.JSON
+    log_format: LogFormat = LogFormat.TEXT