forked from phoenix/litellm-mirror
use get_litellm_virtual_key
parent 7674217e6c
commit 8aa18b3977
4 changed files with 20 additions and 14 deletions
@@ -50,7 +50,7 @@ const model = vertexAI.getGenerativeModel({
   model: 'gemini-1.0-pro'
 }, {
   customHeaders: {
-    "X-Litellm-Api-Key": "sk-1234" // Your litellm Virtual Key
+    "x-litellm-api-key": "sk-1234" // Your litellm Virtual Key
   }
 });
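For callers not using the Vertex JS SDK, the same virtual key can be passed as a plain HTTP header against the proxy's Vertex passthrough route. The sketch below is illustrative only: the local base URL and the /vertex-ai mount are taken from the tests in this commit, while the generateContent model path is an assumed Vertex-style route, not something introduced by this change.

# Hedged sketch: call the proxy's Vertex passthrough with a litellm virtual key.
# The base URL/mount follow the tests in this commit; the model path is an assumption.
import requests

PROXY_BASE = "http://localhost:4000/vertex-ai"  # assumed local proxy + passthrough mount
MODEL_PATH = "publishers/google/models/gemini-1.0-pro:generateContent"  # assumed Vertex-style path

response = requests.post(
    f"{PROXY_BASE}/{MODEL_PATH}",
    headers={"x-litellm-api-key": "sk-1234"},  # litellm virtual key, not a Google credential
    json={"contents": [{"role": "user", "parts": [{"text": "What is 2+2?"}]}]},
)
print(response.status_code, response.json())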
@@ -126,16 +126,7 @@ async def vertex_proxy_route(
     verbose_proxy_logger.debug("requested endpoint %s", endpoint)
     headers: dict = {}

-    # TODO - clean this up before merging
-    litellm_api_key = request.headers.get("X-Litellm-Api-Key")
-    api_key_to_use = ""
-    if litellm_api_key:
-        api_key_to_use = f"Bearer {litellm_api_key}"
-    else:
-        api_key_to_use = request.headers.get("Authorization")
-
-    api_key_to_use = api_key_to_use or ""
+    api_key_to_use = get_litellm_virtual_key(request=request)
     user_api_key_dict = await user_api_key_auth(
         request=request,
         api_key=api_key_to_use,
@@ -227,3 +218,18 @@ async def vertex_proxy_route(
     )

     return received_value
+
+
+def get_litellm_virtual_key(request: Request) -> str:
+    """
+    Extract and format the API key from request headers.
+    Prioritizes `x-litellm-api-key` over the `Authorization` header.
+
+    The Vertex JS SDK uses the `Authorization` header, so we use
+    `x-litellm-api-key` to pass the litellm virtual key.
+    """
+    litellm_api_key = request.headers.get("x-litellm-api-key")
+    if litellm_api_key:
+        return f"Bearer {litellm_api_key}"
+    return request.headers.get("Authorization", "")
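A quick way to see the precedence the new helper encodes is the standalone sketch below. FakeRequest is a hypothetical stub that only mimics the headers mapping the helper reads (real Starlette headers are case-insensitive; the plain dict here is a simplification), and the function body mirrors the diff above.

# Standalone sketch of the header precedence implemented by get_litellm_virtual_key.
class FakeRequest:
    """Hypothetical stub exposing only the .headers mapping the helper reads."""
    def __init__(self, headers: dict):
        self.headers = headers

def get_litellm_virtual_key(request) -> str:
    # Same logic as the helper added in this commit.
    litellm_api_key = request.headers.get("x-litellm-api-key")
    if litellm_api_key:
        return f"Bearer {litellm_api_key}"
    return request.headers.get("Authorization", "")

# x-litellm-api-key wins when both headers are present.
assert get_litellm_virtual_key(
    FakeRequest({"x-litellm-api-key": "sk-1234", "Authorization": "Bearer google-token"})
) == "Bearer sk-1234"
# Otherwise fall back to Authorization, then to an empty string.
assert get_litellm_virtual_key(FakeRequest({"Authorization": "Bearer google-token"})) == "Bearer google-token"
assert get_litellm_virtual_key(FakeRequest({})) == ""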
@@ -24,7 +24,7 @@ const vertexAI = new VertexAI({
 // Use customHeaders in RequestOptions
 const requestOptions = {
     customHeaders: new Headers({
-        "X-Litellm-Api-Key": "sk-1234"
+        "x-litellm-api-key": "sk-1234"
     })
 };
@@ -67,7 +67,7 @@ describe('Vertex AI Tests', () => {
     });

     const customHeaders = new Headers({
-        "X-Litellm-Api-Key": "sk-1234"
+        "x-litellm-api-key": "sk-1234"
     });

     const requestOptions = {
@@ -101,7 +101,7 @@ describe('Vertex AI Tests', () => {

     test('should successfully generate non-streaming content from Vertex AI', async () => {
         const vertexAI = new VertexAI({project: 'adroit-crow-413218', location: 'us-central1', apiEndpoint: "localhost:4000/vertex-ai"});
-        const customHeaders = new Headers({"X-Litellm-Api-Key": "sk-1234"});
+        const customHeaders = new Headers({"x-litellm-api-key": "sk-1234"});
         const requestOptions = {customHeaders: customHeaders};
         const generativeModel = vertexAI.getGenerativeModel({model: 'gemini-1.0-pro'}, requestOptions);
         const request = {contents: [{role: 'user', parts: [{text: 'What is 2+2?'}]}]};