Mirror of https://github.com/BerriAI/litellm.git
bug fixes to logging
commit 80fa61cc0e
parent f5257e6eff

3 changed files with 2 additions and 2 deletions
Binary file not shown.
@@ -22,7 +22,7 @@ response = completion(model="claude-instant-1", messages=[{"role": "user", "cont
 # print(f"response: {response}")
 
 # # Test 2: On embedding call
-# response = embedding(model="text-embedding-ada-002", input=["sample text"])
+response = embedding(model="text-embedding-ada-002", input=["sample text"])
 # print(f"response: {response}")
 
 # # Test 3: On streaming completion call
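The change in the test file above simply re-enables the embedding test by uncommenting the call. For reference, a minimal, self-contained sketch of what that line exercises (assuming the standard litellm import and an OpenAI API key already set in the environment; the printed response shape is whatever the provider returns) is:

    # minimal sketch of the re-enabled test call; assumes OPENAI_API_KEY is set
    from litellm import embedding

    response = embedding(model="text-embedding-ada-002", input=["sample text"])
    print(f"response: {response}")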
@@ -1627,7 +1627,7 @@ class CustomStreamWrapper:
 chunk = next(self.completion_stream)
 completion_obj['content'] = chunk['choices']['delta']
 # LOGGING
-self.logging_obj(completion_obj["content"])
+self.logging_obj.post_call(completion_obj["content"])
 # return this for all models
 return {"choices": [{"delta": completion_obj}]}
 
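The actual logging fix is the one-line change above: inside CustomStreamWrapper, the streamed chunk's content was previously passed to self.logging_obj called as a function; the commit routes it through the object's post_call method instead. A rough sketch of the pattern, using simplified stand-ins rather than litellm's real logging and CustomStreamWrapper classes (every name here except post_call is illustrative), is:

    # rough sketch of the pattern behind the fix, NOT litellm's actual classes:
    # the stream wrapper keeps a logging object and calls its post_call() hook
    # on each chunk instead of calling the object itself.

    class SimpleLogging:
        # stand-in for litellm's logging helper (hypothetical, simplified)
        def post_call(self, content):
            print(f"post_call logged: {content!r}")

    class SimpleStreamWrapper:
        # simplified analogue of CustomStreamWrapper
        def __init__(self, completion_stream, logging_obj):
            self.completion_stream = completion_stream
            self.logging_obj = logging_obj

        def __iter__(self):
            return self

        def __next__(self):
            chunk = next(self.completion_stream)       # next raw chunk from the stream
            completion_obj = {"content": chunk}        # normalize into a delta payload
            self.logging_obj.post_call(completion_obj["content"])  # the fixed call
            # return this for all models
            return {"choices": [{"delta": completion_obj}]}

    # usage: wrap any iterator of text chunks
    wrapped = SimpleStreamWrapper(iter(["Hello", " world"]), SimpleLogging())
    for part in wrapped:
        print(part["choices"][0]["delta"]["content"])

The design point mirrored from the diff is that logging happens after the chunk is read and before it is returned, so downstream consumers see the same payload that was logged.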