forked from phoenix/litellm-mirror

expose vertex ai and hf api base as env var

parent 4f7a35cbef
commit 9ef0ad9e66

5 changed files with 31 additions and 6 deletions
@@ -72,12 +72,13 @@ response = completion(
 print(response)
 ```
 
-### [OPTIONAL] API KEYS
+### [OPTIONAL] API KEYS + API BASE
 
-If the endpoint you're calling requires an api key to be passed, set it in your os environment. [Code for how it's sent](https://github.com/BerriAI/litellm/blob/0100ab2382a0e720c7978fbf662cc6e6920e7e03/litellm/llms/huggingface_restapi.py#L25)
+If required, you can set the api key and api base in your os environment. [Code for how it's sent](https://github.com/BerriAI/litellm/blob/0100ab2382a0e720c7978fbf662cc6e6920e7e03/litellm/llms/huggingface_restapi.py#L25)
 
 ```python
 import os
 
 os.environ["HUGGINGFACE_API_KEY"] = ""
+os.environ["HUGGINGFACE_API_BASE"] = ""
 ```
 
 ### Models with Prompt Formatting
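The docs change above adds `HUGGINGFACE_API_BASE` alongside `HUGGINGFACE_API_KEY`. A minimal sketch of using both together; the token, endpoint URL, and model name below are placeholders, not values from this commit:

```python
import os
from litellm import completion

# Placeholder values; use your own token and Hugging Face endpoint URL.
os.environ["HUGGINGFACE_API_KEY"] = "hf_xxxxxxxxxxxxxxxx"
os.environ["HUGGINGFACE_API_BASE"] = "https://my-endpoint.example.com"

# With HUGGINGFACE_API_BASE set, requests go to the custom endpoint instead of
# the default https://api-inference.huggingface.co/models/<model> URL.
response = completion(
    model="huggingface/bigcode/starcoder",
    messages=[{"role": "user", "content": "Hello, how are you?"}],
)
print(response)
```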
@@ -13,9 +13,29 @@
 ## Set Vertex Project & Vertex Location
 All calls using Vertex AI require the following parameters:
 * Your Project ID
-`litellm.vertex_project = "hardy-device-38811" Your Project ID`
+```python
+import os, litellm
+
+# set via env var
+os.environ["VERTEXAI_PROJECT"] = "hardy-device-38811" # Your Project ID
+
+### OR ###
+
+# set directly on module
+litellm.vertex_project = "hardy-device-38811" # Your Project ID
+```
 * Your Project Location
-`litellm.vertex_location = "us-central1" `
+```python
+import os, litellm
+
+# set via env var
+os.environ["VERTEXAI_LOCATION"] = "us-central1" # Your Location
+
+### OR ###
+
+# set directly on module
+litellm.vertex_location = "us-central1" # Your Location
+```
 
 ## Sample Usage
 ```python
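With this change the Vertex project and location can come from env vars or from the module attributes. A hedged end-to-end sketch under those settings; the project ID is the docs' placeholder and the `chat-bison` model name is an assumption about what is enabled in your project:

```python
import os
from litellm import completion

# Placeholder values; use your own GCP project ID and region.
os.environ["VERTEXAI_PROJECT"] = "hardy-device-38811"
os.environ["VERTEXAI_LOCATION"] = "us-central1"

# Per the comment in the diff below, vertexai does not use an API key;
# it relies on Google credentials available in the environment.
response = completion(
    model="chat-bison",
    messages=[{"role": "user", "content": "Write a short haiku about mountains."}],
)
print(response)
```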
@@ -49,6 +49,8 @@ def completion(
         completion_url = api_base
     elif "HF_API_BASE" in os.environ:
         completion_url = os.getenv("HF_API_BASE", "")
+    elif "HUGGINGFACE_API_BASE" in os.environ:
+        completion_url = os.getenv("HUGGINGFACE_API_BASE", "")
     else:
         completion_url = f"https://api-inference.huggingface.co/models/{model}"
 
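The added branch gives the endpoint URL a clear precedence: an explicit `api_base` argument, then `HF_API_BASE`, then `HUGGINGFACE_API_BASE`, then the public Inference API default. A standalone sketch of that fallback order (the helper name is illustrative, not part of litellm):

```python
import os
from typing import Optional

def resolve_hf_completion_url(model: str, api_base: Optional[str] = None) -> str:
    """Mirror of the fallback order in the hunk above (illustrative only)."""
    if api_base:
        return api_base
    if "HF_API_BASE" in os.environ:
        return os.getenv("HF_API_BASE", "")
    if "HUGGINGFACE_API_BASE" in os.environ:
        return os.getenv("HUGGINGFACE_API_BASE", "")
    return f"https://api-inference.huggingface.co/models/{model}"

# Example: with HUGGINGFACE_API_BASE set and no explicit api_base,
# the env var wins over the public Inference API default.
os.environ["HUGGINGFACE_API_BASE"] = "https://my-endpoint.example.com"
print(resolve_hf_completion_url("gpt2"))  # -> https://my-endpoint.example.com
```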
@@ -749,8 +749,10 @@ def completion(
             raise Exception("vertexai import failed please run `pip install google-cloud-aiplatform`")
         from vertexai.preview.language_models import ChatModel, CodeChatModel, InputOutputTextPair
 
+        vertex_project = (litellm.vertex_project or get_secret("VERTEXAI_PROJECT"))
+        vertex_location = (litellm.vertex_location or get_secret("VERTEXAI_LOCATION"))
         vertexai.init(
-            project=litellm.vertex_project, location=litellm.vertex_location
+            project=vertex_project, location=vertex_location
         )
         # vertexai does not use an API key, it looks for credentials.json in the environment
 
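The `or get_secret(...)` pattern prefers a value set directly on the litellm module and only then falls back to the environment. A small illustrative sketch of that precedence; `get_secret` here is a simplified stand-in for litellm's helper, which may also consult a configured secrets manager:

```python
import os
from typing import Optional

def get_secret(name: str) -> Optional[str]:
    # Simplified stand-in for litellm's helper: the real one can also read
    # from a configured secrets manager, not just os.environ.
    return os.environ.get(name)

# Simulate the two sources: nothing set on the module, env var present.
module_vertex_project: Optional[str] = None  # stands in for litellm.vertex_project
os.environ["VERTEXAI_PROJECT"] = "hardy-device-38811"

# The module attribute wins when set; otherwise fall back to the env var / secret.
vertex_project = module_vertex_project or get_secret("VERTEXAI_PROJECT")
print(vertex_project)  # -> hardy-device-38811
```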
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "litellm"
-version = "0.1.734"
+version = "0.1.735"
 description = "Library to easily interface with LLM API providers"
 authors = ["BerriAI"]
 license = "MIT License"