From 6ce6a016f45b170f4a74a0eebedfea30b2ab8956 Mon Sep 17 00:00:00 2001
From: Krrish Dholakia
Date: Thu, 3 Aug 2023 07:18:10 -0700
Subject: [PATCH] updating docs

---
 docs/helicone_integration.md | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/docs/helicone_integration.md b/docs/helicone_integration.md
index 00c2bbc531..ad081ce9c1 100644
--- a/docs/helicone_integration.md
+++ b/docs/helicone_integration.md
@@ -18,11 +18,16 @@ from litellm import completion
 
 ## set env variables
 os.environ["HELICONE_API_KEY"] = "your-helicone-key"
+os.environ["OPENAI_API_KEY"], os.environ["COHERE_API_KEY"] = "", ""
 
 # set callbacks
 litellm.success_callback=["helicone"]
 
-response = completion(model="gpt-3.5-turbo", messages=messages)
+#openai call
+response = completion(model="gpt-3.5-turbo", messages=[{"role": "user", "content": "Hi 👋 - i'm openai"}])
+
+#cohere call
+response = completion(model="command-nightly", messages=[{"role": "user", "content": "Hi 👋 - i'm cohere"}])
 ```
 
 ### Approach 2: [OpenAI + Azure only] Use Helicone as a proxy