LITELLM: Remove requests library usage (#7235)

* fix(generic_api_callback.py): remove requests lib usage
* fix(budget_manager.py): remove requests lib usage
* fix(main.py): clean up requests lib usage
* fix(utils.py): remove requests lib usage
* fix(argilla.py): fix argilla test
* fix(athina.py): replace 'requests' lib usage with litellm module
* fix(greenscale.py): replace 'requests' lib usage with httpx
* fix: remove unused 'requests' lib import + replace usage in some places
* fix(prompt_layer.py): remove 'requests' lib usage from prompt layer (the recurring pattern is sketched below)
* fix(ollama_chat.py): remove 'requests' lib usage
* fix(baseten.py): replace 'requests' lib usage
* fix(codestral/): replace 'requests' lib usage
* fix(predibase/): replace 'requests' lib usage
* refactor: clean up unused 'requests' lib imports
* fix(oobabooga.py): clean up 'requests' lib usage
* fix(invoke_handler.py): remove unused 'requests' lib usage
* refactor: clean up unused 'requests' lib import
* fix: fix linting errors
* refactor(ollama/): move ollama onto the base LLM HTTP handler; this removes the 'requests' lib dep for the ollama integration
* fix(ollama_chat.py): fix linting errors
* fix(ollama/completion/transformation.py): convert non-JPEG/PNG images to JPEG/PNG before passing them to ollama (see the sketch after the diff below)
This commit is contained in:
parent 224ead1531
commit b82add11ba

46 changed files with 523 additions and 612 deletions
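Every bullet above is an instance of the same mechanical change, shown concretely in the prompt_layer.py diff below: drop the synchronous `requests` dependency and send the HTTP call through litellm's shared httpx-backed client. A minimal before/after sketch, assuming `litellm.module_level_client` exposes a `requests`-style `post(url, json=...)` method returning an httpx response (the client's full interface is not shown on this page):

    import litellm

    url = "https://api.promptlayer.com/rest/track-request"
    payload = {"function_name": "openai.ChatCompletion.create"}

    # Before: each integration carried its own blocking `requests` dependency.
    #   import requests
    #   resp = requests.post(url, json=payload)

    # After: post through litellm's module-level client, which wraps a shared
    # httpx connection pool, so no extra third-party import is needed.
    resp = litellm.module_level_client.post(url, json=payload)
    resp.raise_for_status()  # httpx responses support raise_for_status() as well
    response_json = resp.json()

Beyond removing a dependency, routing every call through one module-level client lets all integrations share a single connection pool instead of each opening its own sessions.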
Diff excerpt from prompt_layer.py:

@@ -3,10 +3,10 @@
 import os
 import traceback

 import dotenv
-import requests  # type: ignore
 from pydantic import BaseModel

 import litellm


 class PromptLayerLogger:
     # Class variables or attributes

@@ -47,7 +47,7 @@ class PromptLayerLogger:
         if isinstance(response_obj, BaseModel):
             response_obj = response_obj.model_dump()

-        request_response = requests.post(
+        request_response = litellm.module_level_client.post(
             "https://api.promptlayer.com/rest/track-request",
             json={
                 "function_name": "openai.ChatCompletion.create",

@@ -74,7 +74,7 @@ class PromptLayerLogger:
         if "request_id" in response_json:
             if metadata:
-                response = requests.post(
+                response = litellm.module_level_client.post(
                     "https://api.promptlayer.com/rest/track-metadata",
                     json={
                         "request_id": response_json["request_id"],
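The final bullet in the commit message describes converting images that are not JPEG or PNG before handing them to ollama. That transformation code is not part of this excerpt, so the following is only a hypothetical reconstruction of the idea using Pillow; the helper name `convert_to_supported_image` and the Pillow dependency are both assumptions:

    import base64
    import io

    from PIL import Image  # Pillow; an assumed dependency, not shown in this diff


    def convert_to_supported_image(b64_data: str) -> str:
        """Hypothetical helper: re-encode a base64 image as PNG unless it is
        already JPEG or PNG, the formats ollama accepts."""
        raw = base64.b64decode(b64_data)
        image = Image.open(io.BytesIO(raw))
        if image.format in ("JPEG", "PNG"):
            return b64_data  # already acceptable; pass through unchanged
        buffer = io.BytesIO()
        image.convert("RGBA").save(buffer, format="PNG")  # keeps transparency
        return base64.b64encode(buffer.getvalue()).decode("utf-8")

Re-encoding to PNG rather than JPEG keeps the conversion lossless and preserves transparency, at the cost of larger payloads for photographic images.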