Merge pull request #3464 from sepiatone/main

update langchain documentation to reflect refactor
Ishaan Jaff, 2024-05-07 08:42:23 -07:00 (committed by GitHub)
commit ee1b1fe4f8


@@ -14,14 +14,14 @@ import TabItem from '@theme/TabItem';
 ```python
 import os
-from langchain.chat_models import ChatLiteLLM
-from langchain.prompts.chat import (
+from langchain_community.chat_models import ChatLiteLLM
+from langchain_core.prompts import (
     ChatPromptTemplate,
     SystemMessagePromptTemplate,
     AIMessagePromptTemplate,
     HumanMessagePromptTemplate,
 )
-from langchain.schema import AIMessage, HumanMessage, SystemMessage
+from langchain_core.messages import AIMessage, HumanMessage, SystemMessage
 os.environ['OPENAI_API_KEY'] = ""
 chat = ChatLiteLLM(model="gpt-3.5-turbo")
@@ -30,7 +30,7 @@ messages = [
         content="what model are you"
     )
 ]
-chat(messages)
+chat.invoke(messages)
 ```
 </TabItem>
@@ -39,14 +39,14 @@ chat(messages)
 ```python
 import os
-from langchain.chat_models import ChatLiteLLM
-from langchain.prompts.chat import (
+from langchain_community.chat_models import ChatLiteLLM
+from langchain_core.prompts import (
     ChatPromptTemplate,
     SystemMessagePromptTemplate,
     AIMessagePromptTemplate,
     HumanMessagePromptTemplate,
 )
-from langchain.schema import AIMessage, HumanMessage, SystemMessage
+from langchain_core.messages import AIMessage, HumanMessage, SystemMessage
 os.environ['ANTHROPIC_API_KEY'] = ""
 chat = ChatLiteLLM(model="claude-2", temperature=0.3)
@@ -55,7 +55,7 @@ messages = [
         content="what model are you"
     )
 ]
-chat(messages)
+chat.invoke(messages)
 ```
 </TabItem>
@@ -64,14 +64,14 @@ chat(messages)
 ```python
 import os
-from langchain.chat_models import ChatLiteLLM
-from langchain.prompts.chat import (
+from langchain_community.chat_models import ChatLiteLLM
+from langchain_core.prompts.chat import (
     ChatPromptTemplate,
     SystemMessagePromptTemplate,
     AIMessagePromptTemplate,
     HumanMessagePromptTemplate,
 )
-from langchain.schema import AIMessage, HumanMessage, SystemMessage
+from langchain_core.messages import AIMessage, HumanMessage, SystemMessage
 os.environ['REPLICATE_API_TOKEN'] = ""
 chat = ChatLiteLLM(model="replicate/llama-2-70b-chat:2c1608e18606fad2812020dc541930f2d0495ce32eee50074220b87300bc16e1")
@@ -80,7 +80,7 @@ messages = [
         content="what model are you?"
     )
 ]
-chat(messages)
+chat.invoke(messages)
 ```
 </TabItem>
@@ -89,14 +89,14 @@ chat(messages)
 ```python
 import os
-from langchain.chat_models import ChatLiteLLM
-from langchain.prompts.chat import (
+from langchain_community.chat_models import ChatLiteLLM
+from langchain_core.prompts import (
     ChatPromptTemplate,
     SystemMessagePromptTemplate,
     AIMessagePromptTemplate,
     HumanMessagePromptTemplate,
 )
-from langchain.schema import AIMessage, HumanMessage, SystemMessage
+from langchain_core.messages import AIMessage, HumanMessage, SystemMessage
 os.environ['COHERE_API_KEY'] = ""
 chat = ChatLiteLLM(model="command-nightly")
@@ -105,32 +105,9 @@ messages = [
         content="what model are you?"
     )
 ]
-chat(messages)
+chat.invoke(messages)
 ```
 </TabItem>
-<TabItem value="palm" label="PaLM - Google">
-```python
-import os
-from langchain.chat_models import ChatLiteLLM
-from langchain.prompts.chat import (
-    ChatPromptTemplate,
-    SystemMessagePromptTemplate,
-    AIMessagePromptTemplate,
-    HumanMessagePromptTemplate,
-)
-from langchain.schema import AIMessage, HumanMessage, SystemMessage
-os.environ['PALM_API_KEY'] = ""
-chat = ChatLiteLLM(model="palm/chat-bison")
-messages = [
-    HumanMessage(
-        content="what model are you?"
-    )
-]
-chat(messages)
-```
-</TabItem>
 </Tabs>
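
For reference, the end state that this documentation change describes can be sketched as one self-contained example. This is a minimal sketch, assuming the `langchain-community` and `langchain-core` packages are installed and a real API key is supplied; the system prompt is an illustrative addition and is not part of the diff above.

```python
import os

from langchain_community.chat_models import ChatLiteLLM
from langchain_core.messages import HumanMessage, SystemMessage

# Placeholder key and model for illustration; any LiteLLM-supported provider works.
os.environ["OPENAI_API_KEY"] = ""

chat = ChatLiteLLM(model="gpt-3.5-turbo")

messages = [
    SystemMessage(content="You are a helpful assistant."),
    HumanMessage(content="what model are you"),
]

# The refactor replaces the deprecated `chat(messages)` call style with the
# Runnable-style `invoke`, which returns an AIMessage.
response = chat.invoke(messages)
print(response.content)
```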