mirror of
https://github.com/BerriAI/litellm.git
synced 2025-04-27 03:34:10 +00:00
docs(gemini.py): add refactor note to code
\
This commit is contained in:
parent
a6716177fa
commit
79c96030b8
1 changed file with 15 additions and 7 deletions
|
@ -1,14 +1,22 @@
|
|||
import types
|
||||
import traceback
|
||||
####################################
|
||||
######### DEPRECATED FILE ##########
|
||||
####################################
|
||||
# logic moved to `vertex_httpx.py` #
|
||||
|
||||
import copy
|
||||
import time
|
||||
import traceback
|
||||
import types
|
||||
from typing import Callable, Optional
|
||||
from litellm.utils import ModelResponse, Choices, Message, Usage
|
||||
import litellm
|
||||
|
||||
import httpx
|
||||
from .prompt_templates.factory import prompt_factory, custom_prompt, get_system_prompt
|
||||
from packaging.version import Version
|
||||
|
||||
import litellm
|
||||
from litellm import verbose_logger
|
||||
from litellm.utils import Choices, Message, ModelResponse, Usage
|
||||
|
||||
from .prompt_templates.factory import custom_prompt, get_system_prompt, prompt_factory
|
||||
|
||||
|
||||
class GeminiError(Exception):
|
||||
|
@ -186,8 +194,8 @@ def completion(
|
|||
if _system_instruction and len(system_prompt) > 0:
|
||||
_params["system_instruction"] = system_prompt
|
||||
_model = genai.GenerativeModel(**_params)
|
||||
if stream == True:
|
||||
if acompletion == True:
|
||||
if stream is True:
|
||||
if acompletion is True:
|
||||
|
||||
async def async_streaming():
|
||||
try:
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue