diff --git a/litellm/proxy/tests/test_langchain_request.py b/litellm/proxy/tests/test_langchain_request.py
index 2306ffaf7c..1841b4968c 100644
--- a/litellm/proxy/tests/test_langchain_request.py
+++ b/litellm/proxy/tests/test_langchain_request.py
@@ -1,38 +1,39 @@
-from langchain.chat_models import ChatOpenAI
-from langchain.prompts.chat import (
-    ChatPromptTemplate,
-    HumanMessagePromptTemplate,
-    SystemMessagePromptTemplate,
-)
-from langchain.schema import HumanMessage, SystemMessage
+## LOCAL TEST
+# from langchain.chat_models import ChatOpenAI
+# from langchain.prompts.chat import (
+#     ChatPromptTemplate,
+#     HumanMessagePromptTemplate,
+#     SystemMessagePromptTemplate,
+# )
+# from langchain.schema import HumanMessage, SystemMessage
 
-chat = ChatOpenAI(
-    openai_api_base="http://0.0.0.0:8000",
-    model = "gpt-3.5-turbo",
-    temperature=0.1
-)
+# chat = ChatOpenAI(
+#     openai_api_base="http://0.0.0.0:8000",
+#     model = "gpt-3.5-turbo",
+#     temperature=0.1
+# )
 
-messages = [
-    SystemMessage(
-        content="You are a helpful assistant that im using to make a test request to."
-    ),
-    HumanMessage(
-        content="test from litellm. tell me why it's amazing in 1 sentence"
-    ),
-]
-response = chat(messages)
+# messages = [
+#     SystemMessage(
+#         content="You are a helpful assistant that im using to make a test request to."
+#     ),
+#     HumanMessage(
+#         content="test from litellm. tell me why it's amazing in 1 sentence"
+#     ),
+# ]
+# response = chat(messages)
 
-print(response)
+# print(response)
 
-claude_chat = ChatOpenAI(
-    openai_api_base="http://0.0.0.0:8000",
-    model = "claude-v1",
-    temperature=0.1
-)
+# claude_chat = ChatOpenAI(
+#     openai_api_base="http://0.0.0.0:8000",
+#     model = "claude-v1",
+#     temperature=0.1
+# )
 
-response = claude_chat(messages)
+# response = claude_chat(messages)
 
-print(response)
+# print(response)