Mirror of https://github.com/BerriAI/litellm.git
(Admin UI) correctly render provider name in /models with wildcard routing (#7349)
* ui fix - allow searching model list + fix bug on filtering
* qa fix - use correct provider name for azure_text
* ui wrap content onto next line
* ui fix - allow selecting current UI session when logging in
* ui session budgets
* ui show provider models on wildcard models
* test provider name appears in model list
* ui fix auto scroll on chat ui tab
parent a3e732de39
commit 56d9427fdb

3 changed files with 56 additions and 12 deletions
@@ -33,8 +33,11 @@ def get_provider_models(provider: str) -> Optional[List[str]]:
         return get_valid_models()
 
     if provider in litellm.models_by_provider:
-        return litellm.models_by_provider[provider]
-
+        provider_models = litellm.models_by_provider[provider]
+        for idx, _model in enumerate(provider_models):
+            if provider not in _model:
+                provider_models[idx] = f"{provider}/{_model}"
+        return provider_models
     return None
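For context, the new branch can be exercised on its own. Below is a minimal, self-contained sketch of the prefixing logic, with a made-up models_by_provider mapping standing in for litellm's real one:

from typing import List, Optional

# Illustrative stand-in for litellm.models_by_provider
models_by_provider = {
    "anthropic": [
        "claude-3-5-sonnet-20240620",
        "anthropic/claude-3-opus-20240229",
    ],
}

def get_provider_models(provider: str) -> Optional[List[str]]:
    if provider in models_by_provider:
        provider_models = models_by_provider[provider]
        for idx, _model in enumerate(provider_models):
            # Only prefix entries that don't already carry the provider name
            if provider not in _model:
                provider_models[idx] = f"{provider}/{_model}"
        return provider_models
    return None

print(get_provider_models("anthropic"))
# ['anthropic/claude-3-5-sonnet-20240620', 'anthropic/claude-3-opus-20240229']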
@@ -3,7 +3,7 @@ import os
 import sys
 from unittest.mock import Mock
 from litellm.proxy.utils import _get_redoc_url, _get_docs_url
 
-
+import json
 import pytest
 from fastapi import Request
@@ -976,3 +976,35 @@ def test_update_config_fields():
     )
     assert team_config["langfuse_public_key"] == "my-fake-key"
     assert team_config["langfuse_secret"] == "my-fake-secret"
+
+
+@pytest.mark.parametrize(
+    "proxy_model_list,provider",
+    [
+        (["openai/*"], "openai"),
+        (["bedrock/*"], "bedrock"),
+        (["anthropic/*"], "anthropic"),
+        (["cohere/*"], "cohere"),
+    ],
+)
+def test_get_complete_model_list(proxy_model_list, provider):
+    """
+    Test that get_complete_model_list correctly expands model groups like 'openai/*' into individual models with provider prefixes
+    """
+    from litellm.proxy.auth.model_checks import get_complete_model_list
+
+    complete_list = get_complete_model_list(
+        proxy_model_list=proxy_model_list,
+        key_models=[],
+        team_models=[],
+        user_model=None,
+        infer_model_from_keys=False,
+    )
+
+    # Check that we got a non-empty list back
+    assert len(complete_list) > 0
+
+    print("complete_list", json.dumps(complete_list, indent=4))
+
+    for _model in complete_list:
+        assert provider in _model
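The parametrized test drives get_complete_model_list with a single wildcard entry per provider and asserts that every returned model carries the provider name. A rough, self-contained sketch of the behavior being pinned down (expand_wildcards and its mapping here are illustrative stand-ins, not litellm's actual implementation):

from typing import Dict, List

# Illustrative provider -> models mapping
models_by_provider: Dict[str, List[str]] = {
    "openai": ["gpt-4o", "gpt-4o-mini"],
    "cohere": ["cohere/command-r-plus"],
}

def expand_wildcards(proxy_model_list: List[str]) -> List[str]:
    complete: List[str] = []
    for entry in proxy_model_list:
        if entry.endswith("/*"):
            provider = entry[: -len("/*")]
            # Prefix bare model names with the provider, as the fix above does
            complete.extend(
                m if provider in m else f"{provider}/{m}"
                for m in models_by_provider.get(provider, [])
            )
        else:
            complete.append(entry)
    return complete

for provider in ("openai", "cohere"):
    assert all(provider in m for m in expand_wildcards([f"{provider}/*"]))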
@@ -1,4 +1,4 @@
-import React, { useState, useEffect } from "react";
+import React, { useState, useEffect, useRef } from "react";
 import ReactMarkdown from "react-markdown";
 import {
   Card,
@@ -22,8 +22,6 @@ import {
   Button,
 } from "@tremor/react";
-
-
 
 import { message, Select } from "antd";
 import { modelAvailableCall } from "./networking";
 import openai from "openai";
@@ -81,7 +79,6 @@ async function generateModelResponse(
   }
 }
-
 
 const ChatUI: React.FC<ChatUIProps> = ({
   accessToken,
   token,
@@ -95,15 +92,15 @@ const ChatUI: React.FC<ChatUIProps> = ({
   const [selectedModel, setSelectedModel] = useState<string | undefined>(
     undefined
   );
-  const [modelInfo, setModelInfo] = useState<any[]>([]);// Declare modelInfo at the component level
+  const [modelInfo, setModelInfo] = useState<any[]>([]);
+
+  const chatEndRef = useRef<HTMLDivElement>(null);
 
   useEffect(() => {
     if (!accessToken || !token || !userRole || !userID) {
       return;
     }
-
-
 
     // Fetch model info and set the default selected model
     const fetchModelInfo = async () => {
       try {
@@ -144,6 +141,13 @@ const ChatUI: React.FC<ChatUIProps> = ({
   }, [accessToken, userID, userRole]);
 
 
+  useEffect(() => {
+    // Scroll to the bottom of the chat whenever chatHistory updates
+    if (chatEndRef.current) {
+      chatEndRef.current.scrollIntoView({ behavior: "smooth" });
+    }
+  }, [chatHistory]);
+
   const updateUI = (role: string, chunk: string) => {
     setChatHistory((prevHistory) => {
       const lastMessage = prevHistory[prevHistory.length - 1];
@@ -201,7 +205,7 @@ const ChatUI: React.FC<ChatUIProps> = ({
     setInputMessage("");
   };
 
-  if (userRole && userRole == "Admin Viewer") {
+  if (userRole && userRole === "Admin Viewer") {
     const { Title, Paragraph } = Typography;
     return (
       <div>
@@ -295,13 +299,18 @@ const ChatUI: React.FC<ChatUIProps> = ({
                 </TableCell>
               </TableRow>
             ))}
+            <TableRow>
+              <TableCell>
+                <div ref={chatEndRef} />
+              </TableCell>
+            </TableRow>
           </TableBody>
         </Table>
         <div
           className="mt-3"
           style={{ position: "absolute", bottom: 5, width: "95%" }}
         >
-          <div className="flex">
+          <div className="flex" style={{ marginTop: "16px" }}>
             <TextInput
               type="text"
               value={inputMessage}