From b41abff4fb309d69186430591b38337b1d31feef Mon Sep 17 00:00:00 2001
From: Justin Lee
Date: Fri, 1 Nov 2024 13:50:24 -0700
Subject: [PATCH] minor enhancement md

---
 docs/source/chat_local_cloud_guide.md | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/docs/source/chat_local_cloud_guide.md b/docs/source/chat_local_cloud_guide.md
index bb7463897..ea2617ecc 100644
--- a/docs/source/chat_local_cloud_guide.md
+++ b/docs/source/chat_local_cloud_guide.md
@@ -131,3 +131,7 @@ async def get_llama_response(stream: bool = True):
 
 asyncio.run(get_llama_response())
 ```
+
+---
+
+With these fundamentals, you should be well on your way to leveraging Llama Stack’s text generation capabilities! For more advanced features, refer to the [Llama Stack Documentation](https://llama-stack.readthedocs.io/en/latest/).