Mirror of https://github.com/meta-llama/llama-stack.git, synced 2025-08-12 13:00:39 +00:00
chore(rename): move llama_stack.distribution to llama_stack.core (#2975)
We would like to rename the term `template` to `distribution`. As a precursor, this commit moves the `llama_stack.distribution` package to `llama_stack.core`, so the `distribution` name becomes available for that rename. cc @leseb
parent f3d5459647
commit 2665f00102

211 changed files with 351 additions and 348 deletions
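The change itself is mechanical: every import of `llama_stack.distribution.*` becomes `llama_stack.core.*`, as the hunks below show. For downstream code that has to work on both sides of the rename, a minimal fallback sketch (the try/except pattern is illustrative and not part of this commit; the module and class names are taken from the hunks below):

# Illustrative only: prefer the new location, fall back to the old one.
try:
    from llama_stack.core.library_client import LlamaStackAsLibraryClient  # this commit and later
except ImportError:
    from llama_stack.distribution.library_client import LlamaStackAsLibraryClient  # earlier releases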
@@ -6,7 +6,7 @@
 import pytest
 from openai import BadRequestError, OpenAI

-from llama_stack.distribution.library_client import LlamaStackAsLibraryClient
+from llama_stack.core.library_client import LlamaStackAsLibraryClient


 @pytest.fixture
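The class whose import path changes in most of these hunks is the in-process client. A short usage sketch under the new path (a sketch only: it assumes an installed distribution template named "ollama" and that the client API itself is untouched by this commit):

from llama_stack.core.library_client import LlamaStackAsLibraryClient

# Run a Llama Stack distribution in-process instead of against a remote server.
client = LlamaStackAsLibraryClient("ollama")  # "ollama" is an assumed template name
client.initialize()
print([m.identifier for m in client.models.list()])  # list the models the stack serves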
@@ -10,8 +10,8 @@ from unittest.mock import patch
 import pytest
 from openai import OpenAI

-from llama_stack.distribution.datatypes import User
-from llama_stack.distribution.library_client import LlamaStackAsLibraryClient
+from llama_stack.core.datatypes import User
+from llama_stack.core.library_client import LlamaStackAsLibraryClient


 def test_openai_client_basic_operations(compat_client, client_with_models):
@@ -20,7 +20,7 @@ from llama_stack_client import LlamaStackClient
 from openai import OpenAI

 from llama_stack import LlamaStackAsLibraryClient
-from llama_stack.distribution.stack import run_config_from_adhoc_config_spec
+from llama_stack.core.stack import run_config_from_adhoc_config_spec
 from llama_stack.env import get_env_or_fail

 DEFAULT_PORT = 8321
@@ -14,7 +14,7 @@ from openai import OpenAI
 from reportlab.lib.pagesizes import letter
 from reportlab.pdfgen import canvas

-from llama_stack.distribution.library_client import LlamaStackAsLibraryClient
+from llama_stack.core.library_client import LlamaStackAsLibraryClient

 from ..test_cases.test_case import TestCase

@@ -10,7 +10,7 @@ import struct
 import pytest
 from openai import OpenAI

-from llama_stack.distribution.library_client import LlamaStackAsLibraryClient
+from llama_stack.core.library_client import LlamaStackAsLibraryClient


 def decode_base64_to_floats(base64_string: str) -> list[float]:
@@ -10,8 +10,8 @@ from unittest.mock import patch

 import pytest

-from llama_stack.distribution.access_control.access_control import default_policy
-from llama_stack.distribution.datatypes import User
+from llama_stack.core.access_control.access_control import default_policy
+from llama_stack.core.datatypes import User
 from llama_stack.providers.utils.sqlstore.api import ColumnType
 from llama_stack.providers.utils.sqlstore.authorized_sqlstore import AuthorizedSqlStore
 from llama_stack.providers.utils.sqlstore.sqlstore import PostgresSqlStoreConfig, SqliteSqlStoreConfig, sqlstore_impl
@@ -186,7 +186,7 @@ async def test_authorized_store_attributes(mock_get_authenticated_user, authoriz
 @patch("llama_stack.providers.utils.sqlstore.authorized_sqlstore.get_authenticated_user")
 async def test_user_ownership_policy(mock_get_authenticated_user, authorized_store, request):
     """Test that 'user is owner' policies work correctly with record ownership"""
-    from llama_stack.distribution.access_control.datatypes import AccessRule, Action, Scope
+    from llama_stack.core.access_control.datatypes import AccessRule, Action, Scope

     backend_name = request.node.callspec.id

@@ -10,7 +10,7 @@ import pytest
 from llama_stack_client import Agent

 from llama_stack import LlamaStackAsLibraryClient
-from llama_stack.distribution.datatypes import AuthenticationRequiredError
+from llama_stack.core.datatypes import AuthenticationRequiredError

 AUTH_TOKEN = "test-token"

@@ -14,7 +14,7 @@ from openai import BadRequestError as OpenAIBadRequestError
 from openai import OpenAI

 from llama_stack.apis.vector_io import Chunk
-from llama_stack.distribution.library_client import LlamaStackAsLibraryClient
+from llama_stack.core.library_client import LlamaStackAsLibraryClient

 logger = logging.getLogger(__name__)

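All of the hunks above are the same one-line substitution. A sketch of the kind of one-off rewrite that produces them (a hypothetical helper, not part of this commit; it only rewrites the dotted package prefix in Python sources):

from pathlib import Path

OLD_PREFIX = "llama_stack.distribution."
NEW_PREFIX = "llama_stack.core."

def rewrite_imports(root: str) -> None:
    # Replace the old package prefix with the new one in every .py file under root.
    for path in Path(root).rglob("*.py"):
        text = path.read_text()
        if OLD_PREFIX in text:
            path.write_text(text.replace(OLD_PREFIX, NEW_PREFIX))

# Example: rewrite_imports("tests")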