Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-08-05 18:22:41 +00:00)
remove cache
parent f5076a0a38
commit c03f7fe9be

1 changed file with 0 additions and 5 deletions
@@ -49,15 +49,10 @@ class TestConfig(BaseModel):
     memory: Optional[APITestConfig] = Field(default=None)
 
 
-CONFIG_CACHE = None
-
-
 def try_load_config_file_cached(config):
     config_file = config.getoption("--config")
     if config_file is None:
         return None
-    if CONFIG_CACHE is not None:
-        return CONFIG_CACHE
 
     config_file_path = Path(__file__).parent / config_file
     if not config_file_path.exists():
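For reference, a minimal sketch of how try_load_config_file_cached reads after this commit, reconstructed only from the context lines of the hunk above: the module-level CONFIG_CACHE and the early-return on a cached value are gone, so the function simply resolves and checks the file on every call. Everything past the exists() check is outside the visible hunk and is left as a placeholder rather than guessed.

from pathlib import Path


def try_load_config_file_cached(config):
    # Resolve the --config pytest option; None means no config file was given.
    config_file = config.getoption("--config")
    if config_file is None:
        return None

    # Look up the named file next to this module; no caching after this commit.
    config_file_path = Path(__file__).parent / config_file
    if not config_file_path.exists():
        ...  # remainder of the function is not shown in the diff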