mirror of
https://github.com/BerriAI/litellm.git
synced 2025-04-25 10:44:24 +00:00
fix(openai.py): enable custom proxy to pass in ca_bundle_path
This commit is contained in:
parent
7125016d24
commit
b50013386f
12 changed files with 2860 additions and 115 deletions
|
@ -1,7 +1,19 @@
|
|||
## This is a template base class to be used for adding new LLM providers via API calls
|
||||
|
||||
import litellm
|
||||
import requests, certifi, ssl
|
||||
|
||||
class BaseLLM:
    """Template base class to be used for adding new LLM providers via API calls."""

    def create_client_session(self):
        """Return a ``requests.Session`` configured from litellm's SSL settings.

        When ``litellm.verify_ssl`` is False, certificate verification is
        disabled on the session. Otherwise the session verifies against
        ``litellm.ca_bundle_path`` when one is configured, falling back to
        certifi's bundled CA store.
        """
        session = requests.Session()
        if litellm.verify_ssl is False:
            # Explicitly disable TLS verification (e.g. custom local proxies).
            session.verify = False
        else:
            if litellm.ca_bundle_path is None:
                session.verify = certifi.where()
            else:
                session.verify = litellm.ca_bundle_path
        return session

    def validate_environment(self):
        """Set up the environment required to run the model (no-op by default)."""
        pass
|
|
Loading…
Add table
Add a link
Reference in a new issue