From 1518a7b42dfad10b0de612f87250a5f8e6f7cac0 Mon Sep 17 00:00:00 2001
From: sepiatone
Date: Sun, 5 May 2024 16:41:15 +0530
Subject: [PATCH] update langchain documentation to reflect refactor

---
 docs/my-website/docs/langchain/langchain.md | 55 ++++++---------------
 1 file changed, 16 insertions(+), 39 deletions(-)

diff --git a/docs/my-website/docs/langchain/langchain.md b/docs/my-website/docs/langchain/langchain.md
index cc12767b8..efa6b2925 100644
--- a/docs/my-website/docs/langchain/langchain.md
+++ b/docs/my-website/docs/langchain/langchain.md
@@ -14,14 +14,14 @@ import TabItem from '@theme/TabItem';
 
 ```python
 import os
-from langchain.chat_models import ChatLiteLLM
-from langchain.prompts.chat import (
+from langchain_community.chat_models import ChatLiteLLM
+from langchain_core.prompts import (
     ChatPromptTemplate,
     SystemMessagePromptTemplate,
     AIMessagePromptTemplate,
     HumanMessagePromptTemplate,
 )
-from langchain.schema import AIMessage, HumanMessage, SystemMessage
+from langchain_core.messages import AIMessage, HumanMessage, SystemMessage
 
 os.environ['OPENAI_API_KEY'] = ""
 chat = ChatLiteLLM(model="gpt-3.5-turbo")
@@ -30,7 +30,7 @@ messages = [
         content="what model are you"
     )
 ]
-chat(messages)
+chat.invoke(messages)
 ```
 
 
@@ -39,14 +39,14 @@
 
 ```python
 import os
-from langchain.chat_models import ChatLiteLLM
-from langchain.prompts.chat import (
+from langchain_community.chat_models import ChatLiteLLM
+from langchain_core.prompts import (
     ChatPromptTemplate,
     SystemMessagePromptTemplate,
     AIMessagePromptTemplate,
     HumanMessagePromptTemplate,
 )
-from langchain.schema import AIMessage, HumanMessage, SystemMessage
+from langchain_core.messages import AIMessage, HumanMessage, SystemMessage
 
 os.environ['ANTHROPIC_API_KEY'] = ""
 chat = ChatLiteLLM(model="claude-2", temperature=0.3)
@@ -55,7 +55,7 @@ messages = [
         content="what model are you"
     )
 ]
-chat(messages)
+chat.invoke(messages)
 ```
 
 
@@ -64,14 +64,14 @@
 
 ```python
 import os
-from langchain.chat_models import ChatLiteLLM
-from langchain.prompts.chat import (
+from langchain_community.chat_models import ChatLiteLLM
+from langchain_core.prompts.chat import (
     ChatPromptTemplate,
     SystemMessagePromptTemplate,
     AIMessagePromptTemplate,
     HumanMessagePromptTemplate,
 )
-from langchain.schema import AIMessage, HumanMessage, SystemMessage
+from langchain_core.messages import AIMessage, HumanMessage, SystemMessage
 
 os.environ['REPLICATE_API_TOKEN'] = ""
 chat = ChatLiteLLM(model="replicate/llama-2-70b-chat:2c1608e18606fad2812020dc541930f2d0495ce32eee50074220b87300bc16e1")
@@ -80,7 +80,7 @@ messages = [
         content="what model are you?"
     )
 ]
-chat(messages)
+chat.invoke(messages)
 ```
 
 
@@ -89,14 +89,14 @@
 
 ```python
 import os
-from langchain.chat_models import ChatLiteLLM
-from langchain.prompts.chat import (
+from langchain_community.chat_models import ChatLiteLLM
+from langchain_core.prompts import (
     ChatPromptTemplate,
     SystemMessagePromptTemplate,
     AIMessagePromptTemplate,
     HumanMessagePromptTemplate,
 )
-from langchain.schema import AIMessage, HumanMessage, SystemMessage
+from langchain_core.messages import AIMessage, HumanMessage, SystemMessage
 
 os.environ['COHERE_API_KEY'] = ""
 chat = ChatLiteLLM(model="command-nightly")
@@ -105,32 +105,9 @@ messages = [
         content="what model are you?"
     )
 ]
-chat(messages)
+chat.invoke(messages)
 ```
 
-
-
-
-```python
-import os
-from langchain.chat_models import ChatLiteLLM
-from langchain.prompts.chat import (
-    ChatPromptTemplate,
-    SystemMessagePromptTemplate,
-    AIMessagePromptTemplate,
-    HumanMessagePromptTemplate,
-)
-from langchain.schema import AIMessage, HumanMessage, SystemMessage
-
-os.environ['PALM_API_KEY'] = ""
-chat = ChatLiteLLM(model="palm/chat-bison")
-messages = [
-    HumanMessage(
-        content="what model are you?"
-    )
-]
-chat(messages)
-```
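
For reference, a minimal runnable sketch of the post-refactor usage this patch documents: the `langchain_community` / `langchain_core` import paths and `.invoke()` in place of calling the chat model directly. The model name and the empty API key value are placeholders taken from the examples above; substitute your own.

```python
import os

# Import locations after the LangChain refactor (as documented in the patch above)
from langchain_community.chat_models import ChatLiteLLM
from langchain_core.messages import HumanMessage

os.environ["OPENAI_API_KEY"] = ""  # placeholder - set a real key before running

chat = ChatLiteLLM(model="gpt-3.5-turbo")
messages = [HumanMessage(content="what model are you")]

# `.invoke()` replaces the older `chat(messages)` call style
response = chat.invoke(messages)
print(response.content)
```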