HumanLoop integration for Prompt Management (#7479)

* feat(humanloop.py): initial commit for humanloop prompt management integration

Closes https://github.com/BerriAI/litellm/issues/213

* feat(humanloop.py): working e2e humanloop prompt management integration

Closes https://github.com/BerriAI/litellm/issues/213

* fix(humanloop.py): fix linting errors

* fix: fix linting error

* fix: fix test

* test: handle filenotfound error
Krish Dholakia 2024-12-30 22:26:03 -08:00 committed by GitHub
parent 0178e75cd9
commit 77c13df55d
9 changed files with 310 additions and 39 deletions

@@ -4534,3 +4534,17 @@ def test_langfuse_completion(monkeypatch):
         prompt_variables={"user_message": "this is used"},
         messages=[{"role": "user", "content": "this is ignored"}],
     )
+
+
+def test_humanloop_completion(monkeypatch):
+    monkeypatch.setenv(
+        "HUMANLOOP_API_KEY", "hl_sk_59c1206e110c3f5b9985f0de4d23e7cbc79c4c4ae18c9f14"
+    )
+    litellm.set_verbose = True
+    resp = litellm.completion(
+        model="humanloop/gpt-3.5-turbo",
+        humanloop_api_key=os.getenv("HUMANLOOP_API_KEY"),
+        prompt_id="pr_nmSOVpEdyYPm2DrOwCoOm",
+        prompt_variables={"person": "John"},
+        messages=[{"role": "user", "content": "Tell me a joke."}],
+    )
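
For reference, a minimal usage sketch of the new Humanloop prompt management path, based on the test added above. It assumes HUMANLOOP_API_KEY is set in the environment; the prompt ID shown is a placeholder, not a real Humanloop prompt.

import os

import litellm

# Sketch only: the "humanloop/<model>" prefix and the prompt_id /
# prompt_variables parameters mirror the test added in this commit.
# Replace the placeholder prompt ID with a real Humanloop prompt ID.
response = litellm.completion(
    model="humanloop/gpt-3.5-turbo",
    humanloop_api_key=os.getenv("HUMANLOOP_API_KEY"),
    prompt_id="pr_<your_humanloop_prompt_id>",
    prompt_variables={"person": "John"},
    messages=[{"role": "user", "content": "Tell me a joke."}],
)
print(response.choices[0].message.content)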