diff --git a/litellm/proxy/tests/test_langchain_request.py b/litellm/proxy/tests/test_langchain_request.py
new file mode 100644
index 0000000000..af6691f3c0
--- /dev/null
+++ b/litellm/proxy/tests/test_langchain_request.py
@@ -0,0 +1,28 @@
+from langchain.chat_models import ChatOpenAI
+from langchain.prompts.chat import (
+    ChatPromptTemplate,
+    HumanMessagePromptTemplate,
+    SystemMessagePromptTemplate,
+)
+from langchain.schema import HumanMessage, SystemMessage
+
+# Point LangChain's ChatOpenAI client at the local LiteLLM proxy endpoint
+chat = ChatOpenAI(
+    openai_api_base="http://0.0.0.0:8000",
+    model="gpt-3.5-turbo",
+    temperature=0.1,
+)
+
+messages = [
+    SystemMessage(
+        content="You are a helpful assistant that I'm using to make a test request to."
+    ),
+    HumanMessage(
+        content="test from litellm. tell me why it's amazing in 1 sentence"
+    ),
+]
+
+# Send the chat request through the proxy and print the model's reply
+response = chat(messages)
+
+print(response)
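
For comparison, here is a minimal sketch of the same request made with the `openai` SDK directly instead of LangChain, assuming a LiteLLM proxy is already listening on port 8000 (matching the `openai_api_base` used in the test above) and that the pre-1.0 `openai` package is installed; the placeholder API key and the exact startup command for the proxy are assumptions, not part of this patch.

```python
# Rough equivalent of the LangChain test above, sent straight through the
# OpenAI-compatible endpoint exposed by the LiteLLM proxy (openai<1.0 style).
import openai

openai.api_base = "http://0.0.0.0:8000"  # route requests to the local LiteLLM proxy
openai.api_key = "anything"              # placeholder; the proxy holds the real provider credentials

response = openai.ChatCompletion.create(
    model="gpt-3.5-turbo",
    temperature=0.1,
    messages=[
        {"role": "system", "content": "You are a helpful assistant that I'm using to make a test request to."},
        {"role": "user", "content": "test from litellm. tell me why it's amazing in 1 sentence"},
    ],
)
print(response)
```

If both scripts return a normal chat completion, the proxy is translating OpenAI-format requests correctly for whichever backend model it was started with.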