From 6b1773d530e9f168f86beb83a6ec6af73555efe4 Mon Sep 17 00:00:00 2001
From: Yuan Tang
Date: Sat, 15 Feb 2025 22:05:23 -0500
Subject: [PATCH 01/14] docs: Fix incorrect link and command for generating API reference (#1124)

---
 docs/openapi_generator/README.md | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/docs/openapi_generator/README.md b/docs/openapi_generator/README.md
index 9d407905d..e98cfaf1b 100644
--- a/docs/openapi_generator/README.md
+++ b/docs/openapi_generator/README.md
@@ -1,4 +1,4 @@
-The RFC Specification (OpenAPI format) is generated from the set of API endpoints located in `llama_stack/[]/api/endpoints.py` using the `generate.py` utility.
+The RFC Specification (OpenAPI format) is generated from the set of API endpoints located in `llama_stack/distribution/server/endpoints.py` using the `generate.py` utility.

 Please install the following packages before running the script:

@@ -6,4 +6,4 @@ Please install the following packages before running the script:
 pip install python-openapi json-strong-typing fire PyYAML llama-models
 ```

-Then simply run `sh run_openapi_generator.sh `
+Then simply run `sh run_openapi_generator.sh`

From 89d37687dd375eeee96bfd04e960bd328cc00f73 Mon Sep 17 00:00:00 2001
From: Reid <61492567+reidliu41@users.noreply.github.com>
Date: Wed, 19 Feb 2025 02:13:46 +0800
Subject: [PATCH 02/14] chore: remove --no-list-templates option (#1121)

# What does this PR do?

Judging from the code and its usage, nothing actually needs `--no-list-templates`; the option only makes the help text confusing, so remove it.

```
$ llama stack build --no-list-templates
> Enter a name for your Llama Stack (e.g. my-local-stack):

$ llama stack build
> Enter a name for your Llama Stack (e.g. my-local-stack):

before:
$ llama stack build --help
  --list-templates, --no-list-templates
                        Show the available templates for building a Llama Stack distribution (default: False)

after:
  --list-templates      Show the available templates for building a Llama Stack distribution
```

[//]: # (If resolving an issue, uncomment and update the line below)
[//]: # (Closes #[issue-number])

## Test Plan

[//]: # (## Documentation)

Signed-off-by: reidliu
Co-authored-by: reidliu
---
 docs/source/distributions/building_distro.md | 7 ++++---
 llama_stack/cli/stack/build.py | 3 +--
 2 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/docs/source/distributions/building_distro.md b/docs/source/distributions/building_distro.md
index 90239cb4e..9cb1a402f 100644
--- a/docs/source/distributions/building_distro.md
+++ b/docs/source/distributions/building_distro.md
@@ -23,7 +23,8 @@ The main points to consider are:

 ```
 llama stack build -h
-usage: llama stack build [-h] [--config CONFIG] [--template TEMPLATE] [--list-templates | --no-list-templates] [--image-type {conda,container,venv}] [--image-name IMAGE_NAME]
+usage: llama stack build [-h] [--config CONFIG] [--template TEMPLATE] [--list-templates]
+                         [--image-type {conda,container,venv}] [--image-name IMAGE_NAME] [--print-deps-only]

 Build a Llama stack container

@@ -32,14 +33,14 @@ options:
   --config CONFIG       Path to a config file to use for the build. You can find example configs in llama_stack/distribution/**/build.yaml.
                         If this argument is not provided, you will be prompted to enter information interactively
   --template TEMPLATE   Name of the example template config to use for build. You may use `llama stack build --list-templates` to check out the available templates
-  --list-templates, --no-list-templates
-                        Show the available templates for building a Llama Stack distribution (default: False)
+  --list-templates      Show the available templates for building a Llama Stack distribution
   --image-type {conda,container,venv}
                         Image Type to use for the build. This can be either conda or container or venv. If not specified, will use the image type from the template config.
   --image-name IMAGE_NAME
                         [for image-type=conda] Name of the conda environment to use for the build. If not specified, currently active Conda environment will be used. If no Conda environment is active, you must specify a name.
+  --print-deps-only     Print the dependencies for the stack only, without building the stack
 ```

 After this step is complete, a file named `<name>-build.yaml` and template file `<name>-run.yaml` will be generated and saved at the output file path specified at the end of the command.

diff --git a/llama_stack/cli/stack/build.py b/llama_stack/cli/stack/build.py
index ca4c0d8ce..7b17a960a 100644
--- a/llama_stack/cli/stack/build.py
+++ b/llama_stack/cli/stack/build.py
@@ -38,9 +38,8 @@ class StackBuild(Subcommand):
         self.parser.add_argument(
             "--list-templates",
-            type=bool,
+            action="store_true",
             default=False,
-            action=argparse.BooleanOptionalAction,
             help="Show the available templates for building a Llama Stack distribution",
         )
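For context on the argparse mechanics behind this change, here is a minimal standalone sketch (an illustration, not part of the patch): `BooleanOptionalAction` is what auto-generated the confusing `--no-list-templates` variant, while `store_true` registers only the positive flag.

```
import argparse

# BooleanOptionalAction (Python 3.9+) registers a paired negative flag,
# so --help advertises both --list-templates and --no-list-templates.
old = argparse.ArgumentParser()
old.add_argument("--list-templates", default=False, action=argparse.BooleanOptionalAction)

# store_true registers only the positive flag; omitting it means False.
new = argparse.ArgumentParser()
new.add_argument("--list-templates", default=False, action="store_true")

assert new.parse_args([]).list_templates is False
assert new.parse_args(["--list-templates"]).list_templates is True
```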
From 92aefec191b12715d5467541ff12659a3f434b11 Mon Sep 17 00:00:00 2001
From: Reid <61492567+reidliu41@users.noreply.github.com>
Date: Wed, 19 Feb 2025 02:15:26 +0800
Subject: [PATCH 03/14] style: update verify-download help text (#1134)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

# What does this PR do?

Based on the code https://github.com/meta-llama/llama-stack/blob/6b1773d530e9f168f86beb83a6ec6af73555efe4/llama_stack/cli/download.py#L379 and testing, `verify-download` should only be used for models downloaded from Meta.

```
test: no checklist.chk file for hf download
$ llama model download --source meta --model-id Llama3.2-1B
Downloading checklist.chk ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 100.0% 156/156 bytes - 0:00:00
Downloading tokenizer.model ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 100.0% 2.2/2.2 MB - 0:00:00
Downloading params.json ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 100.0% 220/220 bytes - 0:00:00
Downloading consolidated.00.pth ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 100.0% 2.5/2.5 GB - 0:00:00

before:
$ llama model verify-download --help
usage: llama model verify-download [-h] --model-id MODEL_ID

Verify the downloaded checkpoints' checksums

options:
  -h, --help           show this help message and exit
  --model-id MODEL_ID  Model ID to verify

after:
$ llama model verify-download --help
usage: llama model verify-download [-h] --model-id MODEL_ID

Verify the downloaded checkpoints' checksums for models downloaded from Meta

options:
  -h, --help           show this help message and exit
  --model-id MODEL_ID  Model ID to verify (only for models downloaded from Meta)
```

[//]: # (If resolving an issue, uncomment and update the line below)
[//]: # (Closes #[issue-number])

## Test Plan

[//]: # (## Documentation)

Signed-off-by: reidliu
Co-authored-by: reidliu
---
 llama_stack/cli/model/verify_download.py | 2 +-
 llama_stack/cli/verify_download.py | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/llama_stack/cli/model/verify_download.py b/llama_stack/cli/model/verify_download.py
index b8e6bf173..e7159c0aa 100644
--- a/llama_stack/cli/model/verify_download.py
+++ b/llama_stack/cli/model/verify_download.py
@@ -15,7 +15,7 @@ class ModelVerifyDownload(Subcommand):
         self.parser = subparsers.add_parser(
             "verify-download",
             prog="llama model verify-download",
-            description="Verify the downloaded checkpoints' checksums",
+            description="Verify the downloaded checkpoints' checksums for models downloaded from Meta",
             formatter_class=argparse.RawTextHelpFormatter,
         )

diff --git a/llama_stack/cli/verify_download.py b/llama_stack/cli/verify_download.py
index 47993c361..1229e8601 100644
--- a/llama_stack/cli/verify_download.py
+++ b/llama_stack/cli/verify_download.py
@@ -44,7 +44,7 @@ def setup_verify_download_parser(parser: argparse.ArgumentParser) -> None:
     parser.add_argument(
         "--model-id",
         required=True,
-        help="Model ID to verify",
+        help="Model ID to verify (only for models downloaded from Meta)",
     )
     parser.set_defaults(func=partial(run_verify_cmd, parser=parser))
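To see why verification is Meta-only, here is a rough sketch of the assumed mechanics (not the actual CLI implementation): Meta downloads ship a `checklist.chk` file of MD5 sums, which Hugging Face downloads do not include.

```
import hashlib
from pathlib import Path

def verify_checkpoint(checkpoint_dir: Path) -> bool:
    # Meta downloads include checklist.chk with one "<md5>  <filename>" entry
    # per line; Hugging Face downloads have no such file, so this check
    # cannot apply to them.
    checklist = checkpoint_dir / "checklist.chk"
    if not checklist.exists():
        raise FileNotFoundError("checklist.chk not found; only Meta downloads ship one")
    for line in checklist.read_text().splitlines():
        expected_md5, filename = line.split()
        actual_md5 = hashlib.md5((checkpoint_dir / filename).read_bytes()).hexdigest()
        if actual_md5 != expected_md5:
            return False
    return True
```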
From d9f5beb15a2e05c27427c32f52a315947c54c4c9 Mon Sep 17 00:00:00 2001
From: Reid <61492567+reidliu41@users.noreply.github.com>
Date: Wed, 19 Feb 2025 02:24:31 +0800
Subject: [PATCH 04/14] style: update download help text (#1135)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

# What does this PR do?

Based on the code https://github.com/meta-llama/llama-stack/blob/6b1773d530e9f168f86beb83a6ec6af73555efe4/llama_stack/cli/download.py#L454 and testing, `--model-id` accepts multiple model IDs separated by commas, so update the help text to say so.
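As a rough sketch of the assumed comma handling (the real logic lives at the download.py line referenced above):

```
# Hypothetical illustration: one comma-separated argument fans out into one
# download per model ID.
raw = "Llama3.2-1B,Llama3.2-3B"
model_ids = [m.strip() for m in raw.split(",")]
assert model_ids == ["Llama3.2-1B", "Llama3.2-3B"]
```

The transcript below shows the resulting behavior for both the Meta and Hugging Face sources: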
```
$ llama model download --source meta --model-id Llama3.2-1B,Llama3.2-3B
Please provide the signed URL for model Llama3.2-1B you received via email after visiting https://www.llama.com/llama-downloads/ (e.g., https://llama3-1.llamameta.net/*?Policy...):
Downloading checklist.chk ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 100.0% 156/156 bytes - 0:00:00
Downloading tokenizer.model ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 100.0% 2.2/2.2 MB - 0:00:00
Downloading params.json ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 100.0% 220/220 bytes - 0:00:00
Downloading consolidated.00.pth ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 100.0% 2.5/2.5 GB - 0:00:00
Successfully downloaded model to /Users/xx/.llama/checkpoints/Llama3.2-1B

[Optionally] To run MD5 checksums, use the following command: llama model verify-download --model-id Llama3.2-1B

Please provide the signed URL for model Llama3.2-3B you received via email after visiting https://www.llama.com/llama-downloads/ (e.g., https://llama3-1.llamameta.net/*?Policy...):
Downloading checklist.chk ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 100.0% 156/156 bytes - 0:00:00
Downloading tokenizer.model ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 100.0% 2.2/2.2 MB - 0:00:00
Downloading params.json ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 100.0% 220/220 bytes - 0:00:00
Downloading consolidated.00.pth ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 100.0% 6.4/6.4 GB - 0:00:00
Successfully downloaded model to /Users/xx/.llama/checkpoints/Llama3.2-3B

$ llama model download --source huggingface --model-id Llama3.2-1B,Llama3.2-3B
original%2Fparams.json: 100%|██████████████████████████████████████████████████████████| 220/220 [00:00<00:00, 564kB/
Successfully downloaded model to /Users/xx/.llama/checkpoints/Llama3.2-1B
...
tokenizer.json: 100%|█████████████████████████████████████████████████████████████| 9.09M/9.09M [00:00<00:00, 9.18MB/s]
Successfully downloaded model to /Users/xxx/.llama/checkpoints/Llama3.2-3B

before:
$ llama model download --help
  --model-id MODEL_ID   See `llama model list` or `llama model list --show-all` for the list of available models

after:
$ llama model download --help
  --model-id MODEL_ID   See `llama model list` or `llama model list --show-all` for the list of available models. Specify multiple model IDs with commas, e.g. --model-id Llama3.2-1B,Llama3.2-3B
```

[//]: # (If resolving an issue, uncomment and update the line below)
[//]: # (Closes #[issue-number])

## Test Plan

[//]: # (## Documentation)

Signed-off-by: reidliu
Co-authored-by: reidliu
---
 llama_stack/cli/download.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/llama_stack/cli/download.py b/llama_stack/cli/download.py
index 8afc6d31d..af86f7243 100644
--- a/llama_stack/cli/download.py
+++ b/llama_stack/cli/download.py
@@ -56,7 +56,7 @@ def setup_download_parser(parser: argparse.ArgumentParser) -> None:
     parser.add_argument(
         "--model-id",
         required=False,
-        help="See `llama model list` or `llama model list --show-all` for the list of available models",
+        help="See `llama model list` or `llama model list --show-all` for the list of available models. Specify multiple model IDs with commas, e.g. --model-id Llama3.2-1B,Llama3.2-3B",
     )
     parser.add_argument(
         "--hf-token",

From 4e76d312fa4a8d436da349db8e941d2c7939a894 Mon Sep 17 00:00:00 2001
From: Reid <61492567+reidliu41@users.noreply.github.com>
Date: Wed, 19 Feb 2025 02:26:41 +0800
Subject: [PATCH 05/14] fix: modify the model id title for model list (#1095)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

# What does this PR do?

Re-checking against the docs, the model ID used for download is actually the model descriptor (also without the `meta-llama/` prefix):
https://llama-stack.readthedocs.io/en/latest/references/llama_cli_reference/index.html

```
$ llama download --source huggingface --model-id Llama-Guard-3-1B:int4 --hf-token xxx  # model descriptor
Fetching 8 files: 0%| | 0/8 [00:00
```

Co-authored-by: reidliu
---
 docs/source/references/llama_cli_reference/download_models.md | 2 +-
 docs/source/references/llama_cli_reference/index.md | 2 +-
 llama_stack/cli/model/list.py | 4 ++--
 3 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/docs/source/references/llama_cli_reference/download_models.md b/docs/source/references/llama_cli_reference/download_models.md
index 3c40f1392..6c791bcb7 100644
--- a/docs/source/references/llama_cli_reference/download_models.md
+++ b/docs/source/references/llama_cli_reference/download_models.md
@@ -39,7 +39,7 @@ You should see a table like this:

 ```
 +----------------------------------+------------------------------------------+----------------+
-| Model Descriptor                 | Hugging Face Repo                        | Context Length |
+| Model Descriptor(ID)             | Hugging Face Repo                        | Context Length |
 +----------------------------------+------------------------------------------+----------------+
 | Llama3.1-8B                      | meta-llama/Llama-3.1-8B                  | 128K           |
 +----------------------------------+------------------------------------------+----------------+

diff --git a/docs/source/references/llama_cli_reference/index.md b/docs/source/references/llama_cli_reference/index.md
index f7ac5fe36..76abce544 100644
--- a/docs/source/references/llama_cli_reference/index.md
+++ b/docs/source/references/llama_cli_reference/index.md
@@ -63,7 +63,7 @@ You should see a table like this:

 ```
 +----------------------------------+------------------------------------------+----------------+
-| Model Descriptor                 | Hugging Face Repo                        | Context Length |
+| Model Descriptor(ID)             | Hugging Face Repo                        | Context Length |
 +----------------------------------+------------------------------------------+----------------+
 | Llama3.1-8B                      | meta-llama/Llama-3.1-8B                  | 128K           |
 +----------------------------------+------------------------------------------+----------------+

diff --git a/llama_stack/cli/model/list.py b/llama_stack/cli/model/list.py
index 4fe28751e..e6bf2216a 100644
--- a/llama_stack/cli/model/list.py
+++ b/llama_stack/cli/model/list.py
@@ -36,8 +36,8 @@ class ModelList(Subcommand):
         from .safety_models import prompt_guard_model_sku

         headers = [
-            "Model Descriptor",
-            "Model ID",
+            "Model Descriptor(ID)",
+            "Hugging Face Repo",
             "Context Length",
         ]
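In other words (an illustrative snippet, not part of the diff), the renamed first column is the value `--model-id` expects, while the Hugging Face repo column is informational:

```
# Values taken from the table above.
descriptor = "Llama3.1-8B"            # what --model-id expects
hf_repo = "meta-llama/Llama-3.1-8B"   # shown for reference only
print(f"llama model download --source meta --model-id {descriptor}")
```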
From 8585b95a28b31d8bfe43fd13bd699ad0190fd1bc Mon Sep 17 00:00:00 2001
From: Xi Yan
Date: Tue, 18 Feb 2025 16:02:44 -0800
Subject: [PATCH 06/14] rename

---
 ...nb => Tool_Calling101_Using_Together_Llama_Stack_Server.ipynb} | 0
 1 file changed, 0 insertions(+), 0 deletions(-)
 rename docs/zero_to_hero_guide/{Tool_Calling101_Using_Together's_Llama_Stack_Server.ipynb => Tool_Calling101_Using_Together_Llama_Stack_Server.ipynb} (100%)

diff --git a/docs/zero_to_hero_guide/Tool_Calling101_Using_Together's_Llama_Stack_Server.ipynb b/docs/zero_to_hero_guide/Tool_Calling101_Using_Together_Llama_Stack_Server.ipynb
similarity index 100%
rename from docs/zero_to_hero_guide/Tool_Calling101_Using_Together's_Llama_Stack_Server.ipynb
rename to docs/zero_to_hero_guide/Tool_Calling101_Using_Together_Llama_Stack_Server.ipynb

From e8cb9e0adba6485c438bb7cb1e311ac80a90a06c Mon Sep 17 00:00:00 2001
From: Xi Yan
Date: Tue, 18 Feb 2025 16:07:54 -0800
Subject: [PATCH 07/14] fix: direct client pydantic type casting (#1145)

# What does this PR do?

- Closes #1142
- Root cause is having `Union[str, AgentToolGroupWithArgs]` in the API schema.

## Test Plan

- Test with the script described in the issue.
- Print out the final converted pydantic object.

[//]: # (## Documentation)
---
 llama_stack/distribution/library_client.py | 25 ++++++++++++++++------
 llama_stack/strong_typing/auxiliary.py | 2 +-
 llama_stack/strong_typing/classdef.py | 2 +-
 llama_stack/strong_typing/deserializer.py | 2 +-
 llama_stack/strong_typing/inspection.py | 6 ++++--
 llama_stack/strong_typing/serializer.py | 2 +-
 6 files changed, 26 insertions(+), 13 deletions(-)

diff --git a/llama_stack/distribution/library_client.py b/llama_stack/distribution/library_client.py
index a7ef753b9..a40651551 100644
--- a/llama_stack/distribution/library_client.py
+++ b/llama_stack/distribution/library_client.py
@@ -13,7 +13,7 @@ import re
 from concurrent.futures import ThreadPoolExecutor
 from enum import Enum
 from pathlib import Path
-from typing import Any, Optional, TypeVar, get_args, get_origin
+from typing import Any, Optional, TypeVar, Union, get_args, get_origin

 import httpx
 import yaml
@@ -81,12 +81,13 @@ def convert_to_pydantic(annotation: Any, value: Any) -> Any:
         return value

     origin = get_origin(annotation)
+
     if origin is list:
         item_type = get_args(annotation)[0]
         try:
             return [convert_to_pydantic(item_type, item) for item in value]
         except Exception:
-            print(f"Error converting list {value}")
+            print(f"Error converting list {value} into {item_type}")
             return value

     elif origin is dict:
@@ -94,17 +95,26 @@ def convert_to_pydantic(annotation: Any, value: Any) -> Any:
         try:
             return {k: convert_to_pydantic(val_type, v) for k, v in value.items()}
         except Exception:
-            print(f"Error converting dict {value}")
+            print(f"Error converting dict {value} into {val_type}")
             return value

     try:
         # Handle Pydantic models and discriminated unions
         return TypeAdapter(annotation).validate_python(value)
+
     except Exception as e:
-        cprint(
-            f"Warning: direct client failed to convert parameter {value} into {annotation}: {e}",
-            "yellow",
-        )
+        # TODO: this is a workaround for having Union[str, AgentToolGroup] in the API schema.
+        # We should get rid of any non-discriminated unions in the API schema.
+ if origin is Union: + for union_type in get_args(annotation): + try: + return convert_to_pydantic(union_type, value) + except Exception: + continue + cprint( + f"Warning: direct client failed to convert parameter {value} into {annotation}: {e}", + "yellow", + ) return value @@ -421,4 +431,5 @@ class AsyncLlamaStackAsLibraryClient(AsyncLlamaStackClient): if param_name in body: value = body.get(param_name) converted_body[param_name] = convert_to_pydantic(param.annotation, value) + return converted_body diff --git a/llama_stack/strong_typing/auxiliary.py b/llama_stack/strong_typing/auxiliary.py index fd183da18..cf19d6083 100644 --- a/llama_stack/strong_typing/auxiliary.py +++ b/llama_stack/strong_typing/auxiliary.py @@ -77,7 +77,7 @@ def typeannotation( """ def wrap(cls: Type[T]) -> Type[T]: - setattr(cls, "__repr__", _compact_dataclass_repr) + cls.__repr__ = _compact_dataclass_repr if not dataclasses.is_dataclass(cls): cls = dataclasses.dataclass( # type: ignore[call-overload] cls, diff --git a/llama_stack/strong_typing/classdef.py b/llama_stack/strong_typing/classdef.py index d2d8688e4..5ead886d4 100644 --- a/llama_stack/strong_typing/classdef.py +++ b/llama_stack/strong_typing/classdef.py @@ -203,7 +203,7 @@ def schema_to_type(schema: Schema, *, module: types.ModuleType, class_name: str) if type_def.default is not dataclasses.MISSING: raise TypeError("disallowed: `default` for top-level type definitions") - setattr(type_def.type, "__module__", module.__name__) + type_def.type.__module__ = module.__name__ setattr(module, type_name, type_def.type) return node_to_typedef(module, class_name, top_node).type diff --git a/llama_stack/strong_typing/deserializer.py b/llama_stack/strong_typing/deserializer.py index 4c4ee9d89..fc0f40f83 100644 --- a/llama_stack/strong_typing/deserializer.py +++ b/llama_stack/strong_typing/deserializer.py @@ -325,7 +325,7 @@ class TupleDeserializer(Deserializer[Tuple[Any, ...]]): f"type `{self.container_type}` expects a JSON `array` of length {count} but received length {len(data)}" ) - return tuple(item_parser.parse(item) for item_parser, item in zip(self.item_parsers, data)) + return tuple(item_parser.parse(item) for item_parser, item in zip(self.item_parsers, data, strict=False)) class UnionDeserializer(Deserializer): diff --git a/llama_stack/strong_typing/inspection.py b/llama_stack/strong_typing/inspection.py index 69bc15597..8bc313021 100644 --- a/llama_stack/strong_typing/inspection.py +++ b/llama_stack/strong_typing/inspection.py @@ -263,8 +263,8 @@ def extend_enum( enum_class: Type[enum.Enum] = enum.Enum(extend.__name__, values) # type: ignore # assign the newly created type to the same module where the extending class is defined - setattr(enum_class, "__module__", extend.__module__) - setattr(enum_class, "__doc__", extend.__doc__) + enum_class.__module__ = extend.__module__ + enum_class.__doc__ = extend.__doc__ setattr(sys.modules[extend.__module__], extend.__name__, enum_class) return enum.unique(enum_class) @@ -874,6 +874,7 @@ def is_generic_instance(obj: Any, typ: TypeLike) -> bool: for tuple_item_type, item in zip( (tuple_item_type for tuple_item_type in typing.get_args(typ)), (item for item in obj), + strict=False, ) ) elif origin_type is Union: @@ -954,6 +955,7 @@ class RecursiveChecker: for tuple_item_type, item in zip( (tuple_item_type for tuple_item_type in typing.get_args(typ)), (item for item in obj), + strict=False, ) ) elif origin_type is Union: diff --git a/llama_stack/strong_typing/serializer.py b/llama_stack/strong_typing/serializer.py 
index 5e93e4c4d..4ca4a4119 100644 --- a/llama_stack/strong_typing/serializer.py +++ b/llama_stack/strong_typing/serializer.py @@ -216,7 +216,7 @@ class TypedTupleSerializer(Serializer[tuple]): self.item_generators = tuple(_get_serializer(item_type, context) for item_type in item_types) def generate(self, obj: tuple) -> List[JsonType]: - return [item_generator.generate(item) for item_generator, item in zip(self.item_generators, obj)] + return [item_generator.generate(item) for item_generator, item in zip(self.item_generators, obj, strict=False)] class CustomSerializer(Serializer): From 37cf60b73292468775dbfc876e7838fb1b7ccf96 Mon Sep 17 00:00:00 2001 From: Xi Yan Date: Tue, 18 Feb 2025 19:41:37 -0800 Subject: [PATCH 08/14] style: remove prints in codebase (#1146) # What does this PR do? - replace prints in codebase with logger - update print_table to use rich Table ## Test Plan - library client script in https://github.com/meta-llama/llama-stack/pull/1145 ``` llama stack list-providers ``` image [//]: # (## Documentation) --- llama_stack/cli/table.py | 75 +++++-------------- llama_stack/distribution/library_client.py | 11 +-- .../remote/inference/nvidia/nvidia.py | 12 ++- .../remote/inference/nvidia/utils.py | 5 +- 4 files changed, 38 insertions(+), 65 deletions(-) diff --git a/llama_stack/cli/table.py b/llama_stack/cli/table.py index 599749231..bf59e6103 100644 --- a/llama_stack/cli/table.py +++ b/llama_stack/cli/table.py @@ -4,75 +4,36 @@ # This source code is licensed under the terms described in the LICENSE file in # the root directory of this source tree. -import re -import textwrap from typing import Iterable -from termcolor import cprint - - -def strip_ansi_colors(text): - ansi_escape = re.compile(r"\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])") - return ansi_escape.sub("", text) - - -def format_row(row, col_widths): - def wrap(text, width): - lines = [] - for line in text.split("\n"): - if line.strip() == "": - lines.append("") - else: - lines.extend(textwrap.wrap(line, width, break_long_words=False, replace_whitespace=False)) - return lines - - wrapped = [wrap(item, width) for item, width in zip(row, col_widths, strict=False)] - max_lines = max(len(subrow) for subrow in wrapped) - - lines = [] - for i in range(max_lines): - line = [] - for cell_lines, width in zip(wrapped, col_widths, strict=False): - value = cell_lines[i] if i < len(cell_lines) else "" - line.append(value + " " * (width - len(strip_ansi_colors(value)))) - lines.append("| " + (" | ".join(line)) + " |") - - return "\n".join(lines) +from rich.console import Console +from rich.table import Table def print_table(rows, headers=None, separate_rows: bool = False, sort_by: Iterable[int] = tuple()): - def itemlen(item): - return max([len(line) for line in strip_ansi_colors(item).split("\n")]) - + # Convert rows and handle None values rows = [[x or "" for x in row] for row in rows] + # Sort rows if sort_by is specified if sort_by: rows.sort(key=lambda x: tuple(x[i] for i in sort_by)) - if not headers: - col_widths = [max(itemlen(item) for item in col) for col in zip(*rows, strict=False)] - else: - col_widths = [ - max( - itemlen(header), - max(itemlen(item) for item in col), - ) - for header, col in zip(headers, zip(*rows, strict=False), strict=False) - ] - col_widths = [min(w, 80) for w in col_widths] - - header_line = "+".join("-" * (width + 2) for width in col_widths) - header_line = f"+{header_line}+" + # Create Rich table + table = Table(show_lines=separate_rows) + # Add headers if provided if headers: - print(header_line) - 
cprint(format_row(headers, col_widths), "white", attrs=["bold"]) + for header in headers: + table.add_column(header, style="bold white") + else: + # Add unnamed columns based on first row + for _ in range(len(rows[0]) if rows else 0): + table.add_column() - print(header_line) + # Add rows for row in rows: - print(format_row(row, col_widths)) - if separate_rows: - print(header_line) + table.add_row(*row) - if not separate_rows: - print(header_line) + # Print table + console = Console() + console.print(table) diff --git a/llama_stack/distribution/library_client.py b/llama_stack/distribution/library_client.py index a40651551..639e5ee73 100644 --- a/llama_stack/distribution/library_client.py +++ b/llama_stack/distribution/library_client.py @@ -47,6 +47,8 @@ from llama_stack.providers.utils.telemetry.tracing import ( start_trace, ) +logger = logging.getLogger(__name__) + T = TypeVar("T") @@ -87,7 +89,7 @@ def convert_to_pydantic(annotation: Any, value: Any) -> Any: try: return [convert_to_pydantic(item_type, item) for item in value] except Exception: - print(f"Error converting list {value} into {item_type}") + logger.error(f"Error converting list {value} into {item_type}") return value elif origin is dict: @@ -95,7 +97,7 @@ def convert_to_pydantic(annotation: Any, value: Any) -> Any: try: return {k: convert_to_pydantic(val_type, v) for k, v in value.items()} except Exception: - print(f"Error converting dict {value} into {val_type}") + logger.error(f"Error converting dict {value} into {val_type}") return value try: @@ -111,9 +113,8 @@ def convert_to_pydantic(annotation: Any, value: Any) -> Any: return convert_to_pydantic(union_type, value) except Exception: continue - cprint( + logger.warning( f"Warning: direct client failed to convert parameter {value} into {annotation}: {e}", - "yellow", ) return value @@ -152,7 +153,7 @@ class LlamaStackAsLibraryClient(LlamaStackClient): for handler in root_logger.handlers[:]: root_logger.removeHandler(handler) - print(f"Removed handler {handler.__class__.__name__} from root logger") + logger.info(f"Removed handler {handler.__class__.__name__} from root logger") def request(self, *args, **kwargs): if kwargs.get("stream"): diff --git a/llama_stack/providers/remote/inference/nvidia/nvidia.py b/llama_stack/providers/remote/inference/nvidia/nvidia.py index 0c5b7c454..8e67333af 100644 --- a/llama_stack/providers/remote/inference/nvidia/nvidia.py +++ b/llama_stack/providers/remote/inference/nvidia/nvidia.py @@ -4,6 +4,7 @@ # This source code is licensed under the terms described in the LICENSE file in # the root directory of this source tree. 
+import logging import warnings from typing import AsyncIterator, List, Optional, Union @@ -25,7 +26,12 @@ from llama_stack.apis.inference import ( ToolChoice, ToolConfig, ) -from llama_stack.models.llama.datatypes import CoreModelId, SamplingParams, ToolDefinition, ToolPromptFormat +from llama_stack.models.llama.datatypes import ( + CoreModelId, + SamplingParams, + ToolDefinition, + ToolPromptFormat, +) from llama_stack.providers.utils.inference.model_registry import ( ModelRegistryHelper, build_model_alias, @@ -43,6 +49,8 @@ from .openai_utils import ( ) from .utils import _is_nvidia_hosted, check_health +logger = logging.getLogger(__name__) + _MODEL_ALIASES = [ build_model_alias( "meta/llama3-8b-instruct", @@ -90,7 +98,7 @@ class NVIDIAInferenceAdapter(Inference, ModelRegistryHelper): # TODO(mf): filter by available models ModelRegistryHelper.__init__(self, model_aliases=_MODEL_ALIASES) - print(f"Initializing NVIDIAInferenceAdapter({config.url})...") + logger.info(f"Initializing NVIDIAInferenceAdapter({config.url})...") if _is_nvidia_hosted(config): if not config.api_key: diff --git a/llama_stack/providers/remote/inference/nvidia/utils.py b/llama_stack/providers/remote/inference/nvidia/utils.py index 0ec80e9dd..7d3f3f27e 100644 --- a/llama_stack/providers/remote/inference/nvidia/utils.py +++ b/llama_stack/providers/remote/inference/nvidia/utils.py @@ -4,12 +4,15 @@ # This source code is licensed under the terms described in the LICENSE file in # the root directory of this source tree. +import logging from typing import Tuple import httpx from . import NVIDIAConfig +logger = logging.getLogger(__name__) + def _is_nvidia_hosted(config: NVIDIAConfig) -> bool: return "integrate.api.nvidia.com" in config.url @@ -42,7 +45,7 @@ async def check_health(config: NVIDIAConfig) -> None: RuntimeError: If the server is not running or ready """ if not _is_nvidia_hosted(config): - print("Checking NVIDIA NIM health...") + logger.info("Checking NVIDIA NIM health...") try: is_live, is_ready = await _get_health(config.url) if not is_live: From 8de7cf103b823596f268b19ee2142c6f399556e8 Mon Sep 17 00:00:00 2001 From: ehhuang Date: Tue, 18 Feb 2025 20:25:15 -0800 Subject: [PATCH 09/14] feat: support tool_choice = {required, none, } (#1059) Summary: titled Test Plan: added tests and LLAMA_STACK_CONFIG=fireworks pytest -s -v tests/client-sdk/ --safety-shield meta-llama/Llama-Guard-3-8B --- docs/_static/llama-stack-spec.html | 33 ++++++++++------- docs/_static/llama-stack-spec.yaml | 25 ++++++++----- llama_stack/apis/inference/inference.py | 15 ++++++-- llama_stack/distribution/routers/routers.py | 32 ++++++++++++----- .../utils/inference/prompt_adapter.py | 31 +++++++++++----- tests/client-sdk/agents/test_agents.py | 33 ++++++++++++++++- .../inference/test_text_inference.py | 36 +++++++++++++++++++ 7 files changed, 164 insertions(+), 41 deletions(-) diff --git a/docs/_static/llama-stack-spec.html b/docs/_static/llama-stack-spec.html index 17cf92341..65a1bdd6b 100644 --- a/docs/_static/llama-stack-spec.html +++ b/docs/_static/llama-stack-spec.html @@ -2697,7 +2697,8 @@ "type": "string", "enum": [ "auto", - "required" + "required", + "none" ], "description": "Whether tool use is required or automatic. This is a hint to the model which may not be followed. It depends on the Instruction Following capabilities of the model." 
}, @@ -3231,13 +3232,22 @@ "type": "object", "properties": { "tool_choice": { - "type": "string", - "enum": [ - "auto", - "required" + "oneOf": [ + { + "type": "string", + "enum": [ + "auto", + "required", + "none" + ], + "description": "Whether tool use is required or automatic. This is a hint to the model which may not be followed. It depends on the Instruction Following capabilities of the model." + }, + { + "type": "string" + } ], - "description": "(Optional) Whether tool use is required or automatic. Defaults to ToolChoice.auto.", - "default": "auto" + "default": "auto", + "description": "(Optional) Whether tool use is automatic, required, or none. Can also specify a tool name to use a specific tool. Defaults to ToolChoice.auto." }, "tool_prompt_format": { "type": "string", @@ -3259,9 +3269,6 @@ } }, "additionalProperties": false, - "required": [ - "system_message_behavior" - ], "description": "Configuration for tool use." }, "ToolDef": { @@ -4100,7 +4107,8 @@ "type": "string", "enum": [ "auto", - "required" + "required", + "none" ], "description": "Whether tool use is required or automatic. This is a hint to the model which may not be followed. It depends on the Instruction Following capabilities of the model." }, @@ -4384,7 +4392,8 @@ "type": "string", "enum": [ "auto", - "required" + "required", + "none" ], "description": "(Optional) Whether tool use is required or automatic. Defaults to ToolChoice.auto. .. deprecated:: Use tool_config instead." }, diff --git a/docs/_static/llama-stack-spec.yaml b/docs/_static/llama-stack-spec.yaml index f63374406..60b777e91 100644 --- a/docs/_static/llama-stack-spec.yaml +++ b/docs/_static/llama-stack-spec.yaml @@ -1637,6 +1637,7 @@ components: enum: - auto - required + - none description: >- Whether tool use is required or automatic. This is a hint to the model which may not be followed. It depends on the Instruction Following capabilities @@ -1994,13 +1995,21 @@ components: type: object properties: tool_choice: - type: string - enum: - - auto - - required - description: >- - (Optional) Whether tool use is required or automatic. Defaults to ToolChoice.auto. + oneOf: + - type: string + enum: + - auto + - required + - none + description: >- + Whether tool use is required or automatic. This is a hint to the model + which may not be followed. It depends on the Instruction Following + capabilities of the model. + - type: string default: auto + description: >- + (Optional) Whether tool use is automatic, required, or none. Can also + specify a tool name to use a specific tool. Defaults to ToolChoice.auto. tool_prompt_format: type: string enum: @@ -2027,8 +2036,6 @@ components: where the function definitions should be inserted. default: append additionalProperties: false - required: - - system_message_behavior description: Configuration for tool use. ToolDef: type: object @@ -2533,6 +2540,7 @@ components: enum: - auto - required + - none description: >- Whether tool use is required or automatic. This is a hint to the model which may not be followed. It depends on the Instruction Following capabilities @@ -2739,6 +2747,7 @@ components: enum: - auto - required + - none description: >- (Optional) Whether tool use is required or automatic. Defaults to ToolChoice.auto. .. deprecated:: Use tool_config instead. 
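A hedged usage sketch of what the expanded `tool_choice` field allows (it mirrors the client-SDK tests added at the end of this patch; the base URL and model ID are placeholders):

```
from llama_stack_client import LlamaStackClient

client = LlamaStackClient(base_url="http://localhost:5000")  # placeholder URL

# Tool definition shaped like the get_weather fixture used in the tests below.
get_weather = {
    "tool_name": "get_weather",
    "description": "Get the current weather",
    "parameters": {
        "location": {"param_type": "string", "description": "City and state"},
    },
}

response = client.inference.chat_completion(
    model_id="meta-llama/Llama-3.1-8B-Instruct",  # any served text model
    messages=[{"role": "user", "content": "What's the weather like in San Francisco?"}],
    tools=[get_weather],
    # "auto" | "required" | "none" | a specific tool name such as "get_weather"
    tool_config={"tool_choice": "required"},
)
```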
diff --git a/llama_stack/apis/inference/inference.py b/llama_stack/apis/inference/inference.py index 433ba3274..a3fb69477 100644 --- a/llama_stack/apis/inference/inference.py +++ b/llama_stack/apis/inference/inference.py @@ -182,10 +182,12 @@ class ToolChoice(Enum): :cvar auto: The model may use tools if it determines that is appropriate. :cvar required: The model must use tools. + :cvar none: The model must not use tools. """ auto = "auto" required = "required" + none = "none" @json_schema_type @@ -326,7 +328,7 @@ class SystemMessageBehavior(Enum): class ToolConfig(BaseModel): """Configuration for tool use. - :param tool_choice: (Optional) Whether tool use is required or automatic. Defaults to ToolChoice.auto. + :param tool_choice: (Optional) Whether tool use is automatic, required, or none. Can also specify a tool name to use a specific tool. Defaults to ToolChoice.auto. :param tool_prompt_format: (Optional) Instructs the model how to format tool calls. By default, Llama Stack will attempt to use a format that is best adapted to the model. - `ToolPromptFormat.json`: The tool calls are formatted as a JSON object. - `ToolPromptFormat.function_tag`: The tool calls are enclosed in a tag. @@ -337,9 +339,16 @@ class ToolConfig(BaseModel): '{{function_definitions}}' to indicate where the function definitions should be inserted. """ - tool_choice: Optional[ToolChoice] = Field(default=ToolChoice.auto) + tool_choice: Optional[ToolChoice | str] = Field(default=ToolChoice.auto) tool_prompt_format: Optional[ToolPromptFormat] = Field(default=None) - system_message_behavior: SystemMessageBehavior = Field(default=SystemMessageBehavior.append) + system_message_behavior: Optional[SystemMessageBehavior] = Field(default=SystemMessageBehavior.append) + + def model_post_init(self, __context: Any) -> None: + if isinstance(self.tool_choice, str): + try: + self.tool_choice = ToolChoice[self.tool_choice] + except KeyError: + pass # This is an internally used class diff --git a/llama_stack/distribution/routers/routers.py b/llama_stack/distribution/routers/routers.py index f45975189..9d12c8a40 100644 --- a/llama_stack/distribution/routers/routers.py +++ b/llama_stack/distribution/routers/routers.py @@ -128,7 +128,7 @@ class InferenceRouter(Inference): sampling_params: Optional[SamplingParams] = SamplingParams(), response_format: Optional[ResponseFormat] = None, tools: Optional[List[ToolDefinition]] = None, - tool_choice: Optional[ToolChoice] = ToolChoice.auto, + tool_choice: Optional[ToolChoice] = None, tool_prompt_format: Optional[ToolPromptFormat] = None, stream: Optional[bool] = False, logprobs: Optional[LogProbConfig] = None, @@ -140,20 +140,36 @@ class InferenceRouter(Inference): if model.model_type == ModelType.embedding: raise ValueError(f"Model '{model_id}' is an embedding model and does not support chat completions") if tool_config: - if tool_choice != tool_config.tool_choice: + if tool_choice and tool_choice != tool_config.tool_choice: raise ValueError("tool_choice and tool_config.tool_choice must match") - if tool_prompt_format != tool_config.tool_prompt_format: + if tool_prompt_format and tool_prompt_format != tool_config.tool_prompt_format: raise ValueError("tool_prompt_format and tool_config.tool_prompt_format must match") else: - tool_config = ToolConfig( - tool_choice=tool_choice, - tool_prompt_format=tool_prompt_format, - ) + params = {} + if tool_choice: + params["tool_choice"] = tool_choice + if tool_prompt_format: + params["tool_prompt_format"] = tool_prompt_format + tool_config = 
ToolConfig(**params) + + tools = tools or [] + if tool_config.tool_choice == ToolChoice.none: + tools = [] + elif tool_config.tool_choice == ToolChoice.auto: + pass + elif tool_config.tool_choice == ToolChoice.required: + pass + else: + # verify tool_choice is one of the tools + tool_names = [t.tool_name if isinstance(t.tool_name, str) else t.tool_name.value for t in tools] + if tool_config.tool_choice not in tool_names: + raise ValueError(f"Tool choice {tool_config.tool_choice} is not one of the tools: {tool_names}") + params = dict( model_id=model_id, messages=messages, sampling_params=sampling_params, - tools=tools or [], + tools=tools, tool_choice=tool_choice, tool_prompt_format=tool_prompt_format, response_format=response_format, diff --git a/llama_stack/providers/utils/inference/prompt_adapter.py b/llama_stack/providers/utils/inference/prompt_adapter.py index b7945dee7..2782c661f 100644 --- a/llama_stack/providers/utils/inference/prompt_adapter.py +++ b/llama_stack/providers/utils/inference/prompt_adapter.py @@ -31,6 +31,7 @@ from llama_stack.apis.inference import ( SystemMessage, SystemMessageBehavior, ToolChoice, + ToolDefinition, UserMessage, ) from llama_stack.models.llama.datatypes import ( @@ -311,8 +312,6 @@ def response_format_prompt(fmt: Optional[ResponseFormat]): def augment_messages_for_tools_llama_3_1( request: ChatCompletionRequest, ) -> List[Message]: - assert request.tool_config.tool_choice == ToolChoice.auto, "Only `ToolChoice.auto` supported" - existing_messages = request.messages existing_system_message = None if existing_messages[0].role == Role.system.value: @@ -352,6 +351,10 @@ def augment_messages_for_tools_llama_3_1( elif isinstance(existing_system_message.content, list): sys_content += "\n".join([_process(c) for c in existing_system_message.content]) + tool_choice_prompt = _get_tool_choice_prompt(request.tool_config.tool_choice, request.tools) + if tool_choice_prompt: + sys_content += "\n" + tool_choice_prompt + messages.append(SystemMessage(content=sys_content)) has_custom_tools = any(isinstance(dfn.tool_name, str) for dfn in request.tools) @@ -377,8 +380,6 @@ def augment_messages_for_tools_llama_3_1( def augment_messages_for_tools_llama_3_2( request: ChatCompletionRequest, ) -> List[Message]: - assert request.tool_config.tool_choice == ToolChoice.auto, "Only `ToolChoice.auto` supported" - existing_messages = request.messages existing_system_message = None if existing_messages[0].role == Role.system.value: @@ -386,7 +387,6 @@ def augment_messages_for_tools_llama_3_2( assert existing_messages[0].role != Role.system.value, "Should only have 1 system message" - messages = [] sys_content = "" custom_tools, builtin_tools = [], [] for t in request.tools: @@ -395,7 +395,6 @@ def augment_messages_for_tools_llama_3_2( else: builtin_tools.append(t) - tool_template = None if builtin_tools: tool_gen = BuiltinToolGenerator() tool_template = tool_gen.gen(builtin_tools) @@ -423,8 +422,22 @@ def augment_messages_for_tools_llama_3_2( ): sys_content += interleaved_content_as_str(existing_system_message.content, sep="\n") - messages.append(SystemMessage(content=sys_content.strip("\n"))) + tool_choice_prompt = _get_tool_choice_prompt(request.tool_config.tool_choice, request.tools) + if tool_choice_prompt: + sys_content += "\n" + tool_choice_prompt - # Add back existing messages from the request - messages += existing_messages + messages = [SystemMessage(content=sys_content.strip("\n")), *existing_messages] return messages + + +def _get_tool_choice_prompt(tool_choice: ToolChoice 
| str, tools: List[ToolDefinition]) -> str: + if tool_choice == ToolChoice.auto: + return "" + elif tool_choice == ToolChoice.required: + return "You MUST use one of the provided functions/tools to answer the user query." + elif tool_choice == ToolChoice.none: + # tools are already not passed in + return "" + else: + # specific tool + return f"You MUST use the tool `{tool_choice}` to answer the user query." diff --git a/tests/client-sdk/agents/test_agents.py b/tests/client-sdk/agents/test_agents.py index 0369f325b..e5380d357 100644 --- a/tests/client-sdk/agents/test_agents.py +++ b/tests/client-sdk/agents/test_agents.py @@ -98,7 +98,6 @@ def agent_config(llama_stack_client, text_model_id): }, }, toolgroups=[], - tool_choice="auto", input_shields=available_shields, output_shields=available_shields, enable_session_persistence=False, @@ -322,6 +321,38 @@ def test_custom_tool(llama_stack_client, agent_config): assert "get_boiling_point" in logs_str +def test_tool_choice(llama_stack_client, agent_config): + data = [ + ("required", '{"type": "function"'), + ("none", None), + ("get_boiling_point", '{"type": "function", "name": "get_boiling_point"'), + ] + client_tool = TestClientTool() + for tool_choice, expected_tool in data: + agent_config["tool_config"] = {"tool_choice": tool_choice} + agent_config["client_tools"] = [client_tool.get_tool_definition()] + + agent = Agent(llama_stack_client, agent_config, client_tools=(client_tool,)) + session_id = agent.create_session(f"test-session-{uuid4()}") + + response = agent.create_turn( + messages=[ + { + "role": "user", + "content": "What is the boiling point of polyjuice?", + }, + ], + session_id=session_id, + ) + + logs = [str(log) for log in EventLogger().log(response) if log is not None] + logs_str = "".join(logs) + if expected_tool: + assert expected_tool in logs_str + else: + assert '{"type": "function"' not in logs_str + + # TODO: fix this flaky test def xtest_override_system_message_behavior(llama_stack_client, agent_config): client_tool = TestClientTool() diff --git a/tests/client-sdk/inference/test_text_inference.py b/tests/client-sdk/inference/test_text_inference.py index c931ca255..52d5a24f2 100644 --- a/tests/client-sdk/inference/test_text_inference.py +++ b/tests/client-sdk/inference/test_text_inference.py @@ -247,6 +247,42 @@ def test_text_chat_completion_with_tool_calling_and_streaming( assert tool_invocation_content == "[get_weather, {'location': 'San Francisco, CA'}]" +def test_text_chat_completion_with_tool_choice_required( + llama_stack_client, text_model_id, get_weather_tool_definition, provider_tool_format, inference_provider_type +): + if inference_provider_type == "remote::vllm": + pytest.xfail("vllm-project/vllm#13002") + response = llama_stack_client.inference.chat_completion( + model_id=text_model_id, + messages=[ + {"role": "system", "content": "You are a helpful assistant."}, + {"role": "user", "content": "What's the weather like in San Francisco?"}, + ], + tools=[get_weather_tool_definition], + tool_config={"tool_choice": "required", "tool_prompt_format": provider_tool_format}, + stream=True, + ) + tool_invocation_content = extract_tool_invocation_content(response) + assert tool_invocation_content == "[get_weather, {'location': 'San Francisco, CA'}]" + + +def test_text_chat_completion_with_tool_choice_none( + llama_stack_client, text_model_id, get_weather_tool_definition, provider_tool_format +): + response = llama_stack_client.inference.chat_completion( + model_id=text_model_id, + messages=[ + {"role": "system", "content": 
"You are a helpful assistant."}, + {"role": "user", "content": "What's the weather like in San Francisco?"}, + ], + tools=[get_weather_tool_definition], + tool_config={"tool_choice": "none", "tool_prompt_format": provider_tool_format}, + stream=True, + ) + tool_invocation_content = extract_tool_invocation_content(response) + assert tool_invocation_content == "" + + def test_text_chat_completion_structured_output(llama_stack_client, text_model_id, inference_provider_type): class AnswerFormat(BaseModel): first_name: str From a66b4c4c81eb2ea899cec0cd8cf3e1401b5c1b51 Mon Sep 17 00:00:00 2001 From: Yuan Tang Date: Tue, 18 Feb 2025 23:52:15 -0500 Subject: [PATCH 10/14] test: Enable test_text_chat_completion_with_tool_choice_required for remote::vllm (#1148) --- tests/client-sdk/inference/test_text_inference.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/tests/client-sdk/inference/test_text_inference.py b/tests/client-sdk/inference/test_text_inference.py index 52d5a24f2..6a113c463 100644 --- a/tests/client-sdk/inference/test_text_inference.py +++ b/tests/client-sdk/inference/test_text_inference.py @@ -250,8 +250,6 @@ def test_text_chat_completion_with_tool_calling_and_streaming( def test_text_chat_completion_with_tool_choice_required( llama_stack_client, text_model_id, get_weather_tool_definition, provider_tool_format, inference_provider_type ): - if inference_provider_type == "remote::vllm": - pytest.xfail("vllm-project/vllm#13002") response = llama_stack_client.inference.chat_completion( model_id=text_model_id, messages=[ From 5e7904ef6c5483bb3aaf82ec0d687ced4867ef86 Mon Sep 17 00:00:00 2001 From: Ashwin Bharambe Date: Wed, 19 Feb 2025 12:24:21 -0800 Subject: [PATCH 11/14] Kill the older strong_typing code --- .../strong_typing/__init__.py | 19 - .../strong_typing/auxiliary.py | 230 ---- .../strong_typing/classdef.py | 460 ------- docs/openapi_generator/strong_typing/core.py | 46 - .../strong_typing/deserializer.py | 959 --------------- .../strong_typing/docstring.py | 437 ------- .../strong_typing/exception.py | 23 - .../strong_typing/inspection.py | 1053 ----------------- .../strong_typing/mapping.py | 42 - docs/openapi_generator/strong_typing/name.py | 188 --- docs/openapi_generator/strong_typing/py.typed | 0 .../openapi_generator/strong_typing/schema.py | 792 ------------- .../strong_typing/serialization.py | 101 -- .../strong_typing/serializer.py | 522 -------- docs/openapi_generator/strong_typing/slots.py | 29 - .../strong_typing/topological.py | 89 -- 16 files changed, 4990 deletions(-) delete mode 100644 docs/openapi_generator/strong_typing/__init__.py delete mode 100644 docs/openapi_generator/strong_typing/auxiliary.py delete mode 100644 docs/openapi_generator/strong_typing/classdef.py delete mode 100644 docs/openapi_generator/strong_typing/core.py delete mode 100644 docs/openapi_generator/strong_typing/deserializer.py delete mode 100644 docs/openapi_generator/strong_typing/docstring.py delete mode 100644 docs/openapi_generator/strong_typing/exception.py delete mode 100644 docs/openapi_generator/strong_typing/inspection.py delete mode 100644 docs/openapi_generator/strong_typing/mapping.py delete mode 100644 docs/openapi_generator/strong_typing/name.py delete mode 100644 docs/openapi_generator/strong_typing/py.typed delete mode 100644 docs/openapi_generator/strong_typing/schema.py delete mode 100644 docs/openapi_generator/strong_typing/serialization.py delete mode 100644 docs/openapi_generator/strong_typing/serializer.py delete mode 100644 
docs/openapi_generator/strong_typing/slots.py delete mode 100644 docs/openapi_generator/strong_typing/topological.py diff --git a/docs/openapi_generator/strong_typing/__init__.py b/docs/openapi_generator/strong_typing/__init__.py deleted file mode 100644 index d832dcf6f..000000000 --- a/docs/openapi_generator/strong_typing/__init__.py +++ /dev/null @@ -1,19 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# All rights reserved. -# -# This source code is licensed under the terms described in the LICENSE file in -# the root directory of this source tree. - -""" -Type-safe data interchange for Python data classes. - -Provides auxiliary services for working with Python type annotations, converting typed data to and from JSON, -and generating a JSON schema for a complex type. -""" - -__version__ = "0.3.4" -__author__ = "Levente Hunyadi" -__copyright__ = "Copyright 2021-2024, Levente Hunyadi" -__license__ = "MIT" -__maintainer__ = "Levente Hunyadi" -__status__ = "Production" diff --git a/docs/openapi_generator/strong_typing/auxiliary.py b/docs/openapi_generator/strong_typing/auxiliary.py deleted file mode 100644 index bfaec0d29..000000000 --- a/docs/openapi_generator/strong_typing/auxiliary.py +++ /dev/null @@ -1,230 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# All rights reserved. -# -# This source code is licensed under the terms described in the LICENSE file in -# the root directory of this source tree. - -""" -Type-safe data interchange for Python data classes. - -:see: https://github.com/hunyadi/strong_typing -""" - -import dataclasses -import sys -from dataclasses import is_dataclass -from typing import Callable, Dict, Optional, overload, Type, TypeVar, Union - -if sys.version_info >= (3, 9): - from typing import Annotated as Annotated -else: - from typing_extensions import Annotated as Annotated - -if sys.version_info >= (3, 10): - from typing import TypeAlias as TypeAlias -else: - from typing_extensions import TypeAlias as TypeAlias - -if sys.version_info >= (3, 11): - from typing import dataclass_transform as dataclass_transform -else: - from typing_extensions import dataclass_transform as dataclass_transform - -T = TypeVar("T") - - -def _compact_dataclass_repr(obj: object) -> str: - """ - Compact data-class representation where positional arguments are used instead of keyword arguments. - - :param obj: A data-class object. - :returns: A string that matches the pattern `Class(arg1, arg2, ...)`. - """ - - if is_dataclass(obj): - arglist = ", ".join( - repr(getattr(obj, field.name)) for field in dataclasses.fields(obj) - ) - return f"{obj.__class__.__name__}({arglist})" - else: - return obj.__class__.__name__ - - -class CompactDataClass: - "A data class whose repr() uses positional rather than keyword arguments." - - def __repr__(self) -> str: - return _compact_dataclass_repr(self) - - -@overload -def typeannotation(cls: Type[T], /) -> Type[T]: ... - - -@overload -def typeannotation( - cls: None, *, eq: bool = True, order: bool = False -) -> Callable[[Type[T]], Type[T]]: ... - - -@dataclass_transform(eq_default=True, order_default=False) -def typeannotation( - cls: Optional[Type[T]] = None, *, eq: bool = True, order: bool = False -) -> Union[Type[T], Callable[[Type[T]], Type[T]]]: - """ - Returns the same class as was passed in, with dunder methods added based on the fields defined in the class. - - :param cls: The data-class type to transform into a type annotation. - :param eq: Whether to generate functions to support equality comparison. 
- :param order: Whether to generate functions to support ordering. - :returns: A data-class type, or a wrapper for data-class types. - """ - - def wrap(cls: Type[T]) -> Type[T]: - setattr(cls, "__repr__", _compact_dataclass_repr) - if not dataclasses.is_dataclass(cls): - cls = dataclasses.dataclass( # type: ignore[call-overload] - cls, - init=True, - repr=False, - eq=eq, - order=order, - unsafe_hash=False, - frozen=True, - ) - return cls - - # see if decorator is used as @typeannotation or @typeannotation() - if cls is None: - # called with parentheses - return wrap - else: - # called without parentheses - return wrap(cls) - - -@typeannotation -class Alias: - "Alternative name of a property, typically used in JSON serialization." - - name: str - - -@typeannotation -class Signed: - "Signedness of an integer type." - - is_signed: bool - - -@typeannotation -class Storage: - "Number of bytes the binary representation of an integer type takes, e.g. 4 bytes for an int32." - - bytes: int - - -@typeannotation -class IntegerRange: - "Minimum and maximum value of an integer. The range is inclusive." - - minimum: int - maximum: int - - -@typeannotation -class Precision: - "Precision of a floating-point value." - - significant_digits: int - decimal_digits: int = 0 - - @property - def integer_digits(self) -> int: - return self.significant_digits - self.decimal_digits - - -@typeannotation -class TimePrecision: - """ - Precision of a timestamp or time interval. - - :param decimal_digits: Number of fractional digits retained in the sub-seconds field for a timestamp. - """ - - decimal_digits: int = 0 - - -@typeannotation -class Length: - "Exact length of a string." - - value: int - - -@typeannotation -class MinLength: - "Minimum length of a string." - - value: int - - -@typeannotation -class MaxLength: - "Maximum length of a string." - - value: int - - -@typeannotation -class SpecialConversion: - "Indicates that the annotated type is subject to custom conversion rules." - - -int8: TypeAlias = Annotated[int, Signed(True), Storage(1), IntegerRange(-128, 127)] -int16: TypeAlias = Annotated[int, Signed(True), Storage(2), IntegerRange(-32768, 32767)] -int32: TypeAlias = Annotated[ - int, - Signed(True), - Storage(4), - IntegerRange(-2147483648, 2147483647), -] -int64: TypeAlias = Annotated[ - int, - Signed(True), - Storage(8), - IntegerRange(-9223372036854775808, 9223372036854775807), -] - -uint8: TypeAlias = Annotated[int, Signed(False), Storage(1), IntegerRange(0, 255)] -uint16: TypeAlias = Annotated[int, Signed(False), Storage(2), IntegerRange(0, 65535)] -uint32: TypeAlias = Annotated[ - int, - Signed(False), - Storage(4), - IntegerRange(0, 4294967295), -] -uint64: TypeAlias = Annotated[ - int, - Signed(False), - Storage(8), - IntegerRange(0, 18446744073709551615), -] - -float32: TypeAlias = Annotated[float, Storage(4)] -float64: TypeAlias = Annotated[float, Storage(8)] - -# maps globals of type Annotated[T, ...] defined in this module to their string names -_auxiliary_types: Dict[object, str] = {} -module = sys.modules[__name__] -for var in dir(module): - typ = getattr(module, var) - if getattr(typ, "__metadata__", None) is not None: - # type is Annotated[T, ...] - _auxiliary_types[typ] = var - - -def get_auxiliary_format(data_type: object) -> Optional[str]: - "Returns the JSON format string corresponding to an auxiliary type." 
- - return _auxiliary_types.get(data_type) diff --git a/docs/openapi_generator/strong_typing/classdef.py b/docs/openapi_generator/strong_typing/classdef.py deleted file mode 100644 index b86940420..000000000 --- a/docs/openapi_generator/strong_typing/classdef.py +++ /dev/null @@ -1,460 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# All rights reserved. -# -# This source code is licensed under the terms described in the LICENSE file in -# the root directory of this source tree. - -import copy -import dataclasses -import datetime -import decimal -import enum -import ipaddress -import math -import re -import sys -import types -import typing -import uuid -from dataclasses import dataclass -from typing import Any, Dict, List, Literal, Optional, Tuple, Type, TypeVar, Union - -from .auxiliary import ( - Alias, - Annotated, - float32, - float64, - int16, - int32, - int64, - MaxLength, - Precision, -) -from .core import JsonType, Schema -from .docstring import Docstring, DocstringParam -from .inspection import TypeLike -from .serialization import json_to_object, object_to_json - -T = TypeVar("T") - - -@dataclass -class JsonSchemaNode: - title: Optional[str] - description: Optional[str] - - -@dataclass -class JsonSchemaType(JsonSchemaNode): - type: str - format: Optional[str] - - -@dataclass -class JsonSchemaBoolean(JsonSchemaType): - type: Literal["boolean"] - const: Optional[bool] - default: Optional[bool] - examples: Optional[List[bool]] - - -@dataclass -class JsonSchemaInteger(JsonSchemaType): - type: Literal["integer"] - const: Optional[int] - default: Optional[int] - examples: Optional[List[int]] - enum: Optional[List[int]] - minimum: Optional[int] - maximum: Optional[int] - - -@dataclass -class JsonSchemaNumber(JsonSchemaType): - type: Literal["number"] - const: Optional[float] - default: Optional[float] - examples: Optional[List[float]] - minimum: Optional[float] - maximum: Optional[float] - exclusiveMinimum: Optional[float] - exclusiveMaximum: Optional[float] - multipleOf: Optional[float] - - -@dataclass -class JsonSchemaString(JsonSchemaType): - type: Literal["string"] - const: Optional[str] - default: Optional[str] - examples: Optional[List[str]] - enum: Optional[List[str]] - minLength: Optional[int] - maxLength: Optional[int] - - -@dataclass -class JsonSchemaArray(JsonSchemaType): - type: Literal["array"] - items: "JsonSchemaAny" - - -@dataclass -class JsonSchemaObject(JsonSchemaType): - type: Literal["object"] - properties: Optional[Dict[str, "JsonSchemaAny"]] - additionalProperties: Optional[bool] - required: Optional[List[str]] - - -@dataclass -class JsonSchemaRef(JsonSchemaNode): - ref: Annotated[str, Alias("$ref")] - - -@dataclass -class JsonSchemaAllOf(JsonSchemaNode): - allOf: List["JsonSchemaAny"] - - -@dataclass -class JsonSchemaAnyOf(JsonSchemaNode): - anyOf: List["JsonSchemaAny"] - - -@dataclass -class Discriminator: - propertyName: str - mapping: Dict[str, str] - - -@dataclass -class JsonSchemaOneOf(JsonSchemaNode): - oneOf: List["JsonSchemaAny"] - discriminator: Optional[Discriminator] - - -JsonSchemaAny = Union[ - JsonSchemaRef, - JsonSchemaBoolean, - JsonSchemaInteger, - JsonSchemaNumber, - JsonSchemaString, - JsonSchemaArray, - JsonSchemaObject, - JsonSchemaOneOf, -] - - -@dataclass -class JsonSchemaTopLevelObject(JsonSchemaObject): - schema: Annotated[str, Alias("$schema")] - definitions: Optional[Dict[str, JsonSchemaAny]] - - -def integer_range_to_type(min_value: float, max_value: float) -> type: - if min_value >= -(2**15) and max_value < 2**15: - 
return int16 - elif min_value >= -(2**31) and max_value < 2**31: - return int32 - else: - return int64 - - -def enum_safe_name(name: str) -> str: - name = re.sub(r"\W", "_", name) - is_dunder = name.startswith("__") - is_sunder = name.startswith("_") and name.endswith("_") - if is_dunder or is_sunder: # provide an alternative for dunder and sunder names - name = f"v{name}" - return name - - -def enum_values_to_type( - module: types.ModuleType, - name: str, - values: Dict[str, Any], - title: Optional[str] = None, - description: Optional[str] = None, -) -> Type[enum.Enum]: - enum_class: Type[enum.Enum] = enum.Enum(name, values) # type: ignore - - # assign the newly created type to the same module where the defining class is - enum_class.__module__ = module.__name__ - enum_class.__doc__ = str( - Docstring(short_description=title, long_description=description) - ) - setattr(module, name, enum_class) - - return enum.unique(enum_class) - - -def schema_to_type( - schema: Schema, *, module: types.ModuleType, class_name: str -) -> TypeLike: - """ - Creates a Python type from a JSON schema. - - :param schema: The JSON schema that the types would correspond to. - :param module: The module in which to create the new types. - :param class_name: The name assigned to the top-level class. - """ - - top_node = typing.cast( - JsonSchemaTopLevelObject, json_to_object(JsonSchemaTopLevelObject, schema) - ) - if top_node.definitions is not None: - for type_name, type_node in top_node.definitions.items(): - type_def = node_to_typedef(module, type_name, type_node) - if type_def.default is not dataclasses.MISSING: - raise TypeError("disallowed: `default` for top-level type definitions") - - setattr(type_def.type, "__module__", module.__name__) - setattr(module, type_name, type_def.type) - - return node_to_typedef(module, class_name, top_node).type - - -@dataclass -class TypeDef: - type: TypeLike - default: Any = dataclasses.MISSING - - -def json_to_value(target_type: TypeLike, data: JsonType) -> Any: - if data is not None: - return json_to_object(target_type, data) - else: - return dataclasses.MISSING - - -def node_to_typedef( - module: types.ModuleType, context: str, node: JsonSchemaNode -) -> TypeDef: - if isinstance(node, JsonSchemaRef): - match_obj = re.match(r"^#/definitions/(\w+)$", node.ref) - if not match_obj: - raise ValueError(f"invalid reference: {node.ref}") - - type_name = match_obj.group(1) - return TypeDef(getattr(module, type_name), dataclasses.MISSING) - - elif isinstance(node, JsonSchemaBoolean): - if node.const is not None: - return TypeDef(Literal[node.const], dataclasses.MISSING) - - default = json_to_value(bool, node.default) - return TypeDef(bool, default) - - elif isinstance(node, JsonSchemaInteger): - if node.const is not None: - return TypeDef(Literal[node.const], dataclasses.MISSING) - - integer_type: TypeLike - if node.format == "int16": - integer_type = int16 - elif node.format == "int32": - integer_type = int32 - elif node.format == "int64": - integer_type = int64 - else: - if node.enum is not None: - integer_type = integer_range_to_type(min(node.enum), max(node.enum)) - elif node.minimum is not None and node.maximum is not None: - integer_type = integer_range_to_type(node.minimum, node.maximum) - else: - integer_type = int - - default = json_to_value(integer_type, node.default) - return TypeDef(integer_type, default) - - elif isinstance(node, JsonSchemaNumber): - if node.const is not None: - return TypeDef(Literal[node.const], dataclasses.MISSING) - - number_type: TypeLike - if 
node.format == "float32": - number_type = float32 - elif node.format == "float64": - number_type = float64 - else: - if ( - node.exclusiveMinimum is not None - and node.exclusiveMaximum is not None - and node.exclusiveMinimum == -node.exclusiveMaximum - ): - integer_digits = round(math.log10(node.exclusiveMaximum)) - else: - integer_digits = None - - if node.multipleOf is not None: - decimal_digits = -round(math.log10(node.multipleOf)) - else: - decimal_digits = None - - if integer_digits is not None and decimal_digits is not None: - number_type = Annotated[ - decimal.Decimal, - Precision(integer_digits + decimal_digits, decimal_digits), - ] - else: - number_type = float - - default = json_to_value(number_type, node.default) - return TypeDef(number_type, default) - - elif isinstance(node, JsonSchemaString): - if node.const is not None: - return TypeDef(Literal[node.const], dataclasses.MISSING) - - string_type: TypeLike - if node.format == "date-time": - string_type = datetime.datetime - elif node.format == "uuid": - string_type = uuid.UUID - elif node.format == "ipv4": - string_type = ipaddress.IPv4Address - elif node.format == "ipv6": - string_type = ipaddress.IPv6Address - - elif node.enum is not None: - string_type = enum_values_to_type( - module, - context, - {enum_safe_name(e): e for e in node.enum}, - title=node.title, - description=node.description, - ) - - elif node.maxLength is not None: - string_type = Annotated[str, MaxLength(node.maxLength)] - else: - string_type = str - - default = json_to_value(string_type, node.default) - return TypeDef(string_type, default) - - elif isinstance(node, JsonSchemaArray): - type_def = node_to_typedef(module, context, node.items) - if type_def.default is not dataclasses.MISSING: - raise TypeError("disallowed: `default` for array element type") - list_type = List[(type_def.type,)] # type: ignore - return TypeDef(list_type, dataclasses.MISSING) - - elif isinstance(node, JsonSchemaObject): - if node.properties is None: - return TypeDef(JsonType, dataclasses.MISSING) - - if node.additionalProperties is None or node.additionalProperties is not False: - raise TypeError("expected: `additionalProperties` equals `false`") - - required = node.required if node.required is not None else [] - - class_name = context - - fields: List[Tuple[str, Any, dataclasses.Field]] = [] - params: Dict[str, DocstringParam] = {} - for prop_name, prop_node in node.properties.items(): - type_def = node_to_typedef(module, f"{class_name}__{prop_name}", prop_node) - if prop_name in required: - prop_type = type_def.type - else: - prop_type = Union[(None, type_def.type)] - fields.append( - (prop_name, prop_type, dataclasses.field(default=type_def.default)) - ) - prop_desc = prop_node.title or prop_node.description - if prop_desc is not None: - params[prop_name] = DocstringParam(prop_name, prop_desc) - - fields.sort(key=lambda t: t[2].default is not dataclasses.MISSING) - if sys.version_info >= (3, 12): - class_type = dataclasses.make_dataclass( - class_name, fields, module=module.__name__ - ) - else: - class_type = dataclasses.make_dataclass( - class_name, fields, namespace={"__module__": module.__name__} - ) - class_type.__doc__ = str( - Docstring( - short_description=node.title, - long_description=node.description, - params=params, - ) - ) - setattr(module, class_name, class_type) - return TypeDef(class_type, dataclasses.MISSING) - - elif isinstance(node, JsonSchemaOneOf): - union_defs = tuple(node_to_typedef(module, context, n) for n in node.oneOf) - if any(d.default is not 
dataclasses.MISSING for d in union_defs): - raise TypeError("disallowed: `default` for union member type") - union_types = tuple(d.type for d in union_defs) - return TypeDef(Union[union_types], dataclasses.MISSING) - - raise NotImplementedError() - - -@dataclass -class SchemaFlatteningOptions: - qualified_names: bool = False - recursive: bool = False - - -def flatten_schema( - schema: Schema, *, options: Optional[SchemaFlatteningOptions] = None -) -> Schema: - top_node = typing.cast( - JsonSchemaTopLevelObject, json_to_object(JsonSchemaTopLevelObject, schema) - ) - flattener = SchemaFlattener(options) - obj = flattener.flatten(top_node) - return typing.cast(Schema, object_to_json(obj)) - - -class SchemaFlattener: - options: SchemaFlatteningOptions - - def __init__(self, options: Optional[SchemaFlatteningOptions] = None) -> None: - self.options = options or SchemaFlatteningOptions() - - def flatten(self, source_node: JsonSchemaObject) -> JsonSchemaObject: - if source_node.type != "object": - return source_node - - source_props = source_node.properties or {} - target_props: Dict[str, JsonSchemaAny] = {} - - source_reqs = source_node.required or [] - target_reqs: List[str] = [] - - for name, prop in source_props.items(): - if not isinstance(prop, JsonSchemaObject): - target_props[name] = prop - if name in source_reqs: - target_reqs.append(name) - continue - - if self.options.recursive: - obj = self.flatten(prop) - else: - obj = prop - if obj.properties is not None: - if self.options.qualified_names: - target_props.update( - (f"{name}.{n}", p) for n, p in obj.properties.items() - ) - else: - target_props.update(obj.properties.items()) - if obj.required is not None: - if self.options.qualified_names: - target_reqs.extend(f"{name}.{n}" for n in obj.required) - else: - target_reqs.extend(obj.required) - - target_node = copy.copy(source_node) - target_node.properties = target_props or None - target_node.additionalProperties = False - target_node.required = target_reqs or None - return target_node diff --git a/docs/openapi_generator/strong_typing/core.py b/docs/openapi_generator/strong_typing/core.py deleted file mode 100644 index 501b6a5db..000000000 --- a/docs/openapi_generator/strong_typing/core.py +++ /dev/null @@ -1,46 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# All rights reserved. -# -# This source code is licensed under the terms described in the LICENSE file in -# the root directory of this source tree. - -""" -Type-safe data interchange for Python data classes. - -:see: https://github.com/hunyadi/strong_typing -""" - -from typing import Dict, List, Union - - -class JsonObject: - "Placeholder type for an unrestricted JSON object." - - -class JsonArray: - "Placeholder type for an unrestricted JSON array." - - -# a JSON type with possible `null` values -JsonType = Union[ - None, - bool, - int, - float, - str, - Dict[str, "JsonType"], - List["JsonType"], -] - -# a JSON type that cannot contain `null` values -StrictJsonType = Union[ - bool, - int, - float, - str, - Dict[str, "StrictJsonType"], - List["StrictJsonType"], -] - -# a meta-type that captures the object type in a JSON schema -Schema = Dict[str, JsonType] diff --git a/docs/openapi_generator/strong_typing/deserializer.py b/docs/openapi_generator/strong_typing/deserializer.py deleted file mode 100644 index 5859d3bbe..000000000 --- a/docs/openapi_generator/strong_typing/deserializer.py +++ /dev/null @@ -1,959 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# All rights reserved. 
-#
-# This source code is licensed under the terms described in the LICENSE file in
-# the root directory of this source tree.
-
-"""
-Type-safe data interchange for Python data classes.
-
-:see: https://github.com/hunyadi/strong_typing
-"""
-
-import abc
-import base64
-import dataclasses
-import datetime
-import enum
-import inspect
-import ipaddress
-import sys
-import typing
-import uuid
-from types import ModuleType
-from typing import (
-    Any,
-    Callable,
-    Dict,
-    Generic,
-    List,
-    Literal,
-    NamedTuple,
-    Optional,
-    Set,
-    Tuple,
-    Type,
-    TypeVar,
-    Union,
-)
-
-from .core import JsonType
-from .exception import JsonKeyError, JsonTypeError, JsonValueError
-from .inspection import (
-    create_object,
-    enum_value_types,
-    evaluate_type,
-    get_class_properties,
-    get_class_property,
-    get_resolved_hints,
-    is_dataclass_instance,
-    is_dataclass_type,
-    is_named_tuple_type,
-    is_type_annotated,
-    is_type_literal,
-    is_type_optional,
-    TypeLike,
-    unwrap_annotated_type,
-    unwrap_literal_values,
-    unwrap_optional_type,
-)
-from .mapping import python_field_to_json_property
-from .name import python_type_to_str
-
-E = TypeVar("E", bound=enum.Enum)
-T = TypeVar("T")
-R = TypeVar("R")
-K = TypeVar("K")
-V = TypeVar("V")
-
-
-class Deserializer(abc.ABC, Generic[T]):
-    "Parses a JSON value into a Python type."
-
-    def build(self, context: Optional[ModuleType]) -> None:
-        """
-        Creates auxiliary parsers that this parser is depending on.
-
-        :param context: A module context for evaluating types specified as a string.
-        """
-
-    @abc.abstractmethod
-    def parse(self, data: JsonType) -> T:
-        """
-        Parses a JSON value into a Python type.
-
-        :param data: The JSON value to de-serialize.
-        :returns: The Python object that the JSON value de-serializes to.
-        """
-
-
-class NoneDeserializer(Deserializer[None]):
-    "Parses JSON `null` values into Python `None`."
-
-    def parse(self, data: JsonType) -> None:
-        if data is not None:
-            raise JsonTypeError(
-                f"`None` type expects JSON `null` but instead received: {data}"
-            )
-        return None
-
-
-class BoolDeserializer(Deserializer[bool]):
-    "Parses JSON `boolean` values into Python `bool` type."
-
-    def parse(self, data: JsonType) -> bool:
-        if not isinstance(data, bool):
-            raise JsonTypeError(
-                f"`bool` type expects JSON `boolean` data but instead received: {data}"
-            )
-        return bool(data)
-
-
-class IntDeserializer(Deserializer[int]):
-    "Parses JSON `number` values into Python `int` type."
-
-    def parse(self, data: JsonType) -> int:
-        if not isinstance(data, int):
-            raise JsonTypeError(
-                f"`int` type expects integer data as JSON `number` but instead received: {data}"
-            )
-        return int(data)
-
-
-class FloatDeserializer(Deserializer[float]):
-    "Parses JSON `number` values into Python `float` type."
-
-    def parse(self, data: JsonType) -> float:
-        if not isinstance(data, float) and not isinstance(data, int):
-            raise JsonTypeError(
-                f"`float` type expects data as JSON `number` but instead received: {data}"
-            )
-        return float(data)
-
-
-class StringDeserializer(Deserializer[str]):
-    "Parses JSON `string` values into Python `str` type."
-
-    def parse(self, data: JsonType) -> str:
-        if not isinstance(data, str):
-            raise JsonTypeError(
-                f"`str` type expects JSON `string` data but instead received: {data}"
-            )
-        return str(data)
-
-
-class BytesDeserializer(Deserializer[bytes]):
-    "Parses JSON `string` values of Base64-encoded strings into Python `bytes` type."
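For context on the deserializers removed here: each primitive deserializer validates the JSON shape first, then converts. A minimal stdlib-only sketch of the `bytes` case, mirroring `BytesDeserializer` above without importing the removed module (the `parse_bytes` name is illustrative, not part of the library):

```python
import base64

def parse_bytes(data: object) -> bytes:
    # mirrors BytesDeserializer: JSON has no binary type, so bytes travel
    # as a Base64-encoded string
    if not isinstance(data, str):
        raise TypeError(f"`bytes` type expects JSON `string` data but instead received: {data!r}")
    # validate=True rejects characters outside the Base64 alphabet
    return base64.b64decode(data, validate=True)

assert parse_bytes(base64.b64encode(b"hello").decode()) == b"hello"
```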
- - def parse(self, data: JsonType) -> bytes: - if not isinstance(data, str): - raise JsonTypeError( - f"`bytes` type expects JSON `string` data but instead received: {data}" - ) - return base64.b64decode(data, validate=True) - - -class DateTimeDeserializer(Deserializer[datetime.datetime]): - "Parses JSON `string` values representing timestamps in ISO 8601 format to Python `datetime` with time zone." - - def parse(self, data: JsonType) -> datetime.datetime: - if not isinstance(data, str): - raise JsonTypeError( - f"`datetime` type expects JSON `string` data but instead received: {data}" - ) - - if data.endswith("Z"): - data = f"{data[:-1]}+00:00" # Python's isoformat() does not support military time zones like "Zulu" for UTC - timestamp = datetime.datetime.fromisoformat(data) - if timestamp.tzinfo is None: - raise JsonValueError( - f"timestamp lacks explicit time zone designator: {data}" - ) - return timestamp - - -class DateDeserializer(Deserializer[datetime.date]): - "Parses JSON `string` values representing dates in ISO 8601 format to Python `date` type." - - def parse(self, data: JsonType) -> datetime.date: - if not isinstance(data, str): - raise JsonTypeError( - f"`date` type expects JSON `string` data but instead received: {data}" - ) - - return datetime.date.fromisoformat(data) - - -class TimeDeserializer(Deserializer[datetime.time]): - "Parses JSON `string` values representing time instances in ISO 8601 format to Python `time` type with time zone." - - def parse(self, data: JsonType) -> datetime.time: - if not isinstance(data, str): - raise JsonTypeError( - f"`time` type expects JSON `string` data but instead received: {data}" - ) - - return datetime.time.fromisoformat(data) - - -class UUIDDeserializer(Deserializer[uuid.UUID]): - "Parses JSON `string` values of UUID strings into Python `uuid.UUID` type." - - def parse(self, data: JsonType) -> uuid.UUID: - if not isinstance(data, str): - raise JsonTypeError( - f"`UUID` type expects JSON `string` data but instead received: {data}" - ) - return uuid.UUID(data) - - -class IPv4Deserializer(Deserializer[ipaddress.IPv4Address]): - "Parses JSON `string` values of IPv4 address strings into Python `ipaddress.IPv4Address` type." - - def parse(self, data: JsonType) -> ipaddress.IPv4Address: - if not isinstance(data, str): - raise JsonTypeError( - f"`IPv4Address` type expects JSON `string` data but instead received: {data}" - ) - return ipaddress.IPv4Address(data) - - -class IPv6Deserializer(Deserializer[ipaddress.IPv6Address]): - "Parses JSON `string` values of IPv6 address strings into Python `ipaddress.IPv6Address` type." - - def parse(self, data: JsonType) -> ipaddress.IPv6Address: - if not isinstance(data, str): - raise JsonTypeError( - f"`IPv6Address` type expects JSON `string` data but instead received: {data}" - ) - return ipaddress.IPv6Address(data) - - -class ListDeserializer(Deserializer[List[T]]): - "Recursively de-serializes a JSON array into a Python `list`." 
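The container deserializers beginning here compose recursively: the parser for `List[Dict[str, int]]` is a `ListDeserializer` wrapping a `DictDeserializer` wrapping an `IntDeserializer`. Illustrative usage of the module's entry point, assuming the package being deleted in this patch were still importable under the name `strong_typing`:

```python
from typing import Dict, List

from strong_typing.deserializer import create_deserializer  # removed by this patch

parser = create_deserializer(List[Dict[str, int]])
assert parser.parse([{"a": 1}, {"b": 2}]) == [{"a": 1}, {"b": 2}]
```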
-
-    item_type: Type[T]
-    item_parser: Deserializer
-
-    def __init__(self, item_type: Type[T]) -> None:
-        self.item_type = item_type
-
-    def build(self, context: Optional[ModuleType]) -> None:
-        self.item_parser = _get_deserializer(self.item_type, context)
-
-    def parse(self, data: JsonType) -> List[T]:
-        if not isinstance(data, list):
-            type_name = python_type_to_str(self.item_type)
-            raise JsonTypeError(
-                f"type `List[{type_name}]` expects JSON `array` data but instead received: {data}"
-            )
-
-        return [self.item_parser.parse(item) for item in data]
-
-
-class DictDeserializer(Deserializer[Dict[K, V]]):
-    "Recursively de-serializes a JSON object into a Python `dict`."
-
-    key_type: Type[K]
-    value_type: Type[V]
-    value_parser: Deserializer[V]
-
-    def __init__(self, key_type: Type[K], value_type: Type[V]) -> None:
-        self.key_type = key_type
-        self.value_type = value_type
-        self._check_key_type()
-
-    def build(self, context: Optional[ModuleType]) -> None:
-        self.value_parser = _get_deserializer(self.value_type, context)
-
-    def _check_key_type(self) -> None:
-        if self.key_type is str:
-            return
-
-        if issubclass(self.key_type, enum.Enum):
-            value_types = enum_value_types(self.key_type)
-            if len(value_types) != 1:
-                raise JsonTypeError(
-                    f"type `{self.container_type}` has invalid key type, "
-                    f"enumerations must have a consistent member value type but several types found: {value_types}"
-                )
-            value_type = value_types.pop()
-            if value_type is not str:
-                raise JsonTypeError(
-                    f"type `{self.container_type}` has invalid enumeration key type, "
-                    f"expected `enum.Enum` with string values"
-                )
-            return
-
-        raise JsonTypeError(
-            f"type `{self.container_type}` has invalid key type, expected `str` or `enum.Enum` with string values"
-        )
-
-    @property
-    def container_type(self) -> str:
-        key_type_name = python_type_to_str(self.key_type)
-        value_type_name = python_type_to_str(self.value_type)
-        return f"Dict[{key_type_name}, {value_type_name}]"
-
-    def parse(self, data: JsonType) -> Dict[K, V]:
-        if not isinstance(data, dict):
-            raise JsonTypeError(
-                f"type `{self.container_type}` expects JSON `object` data but instead received: {data}"
-            )
-
-        return dict(
-            (self.key_type(key), self.value_parser.parse(value))  # type: ignore[call-arg]
-            for key, value in data.items()
-        )
-
-
-class SetDeserializer(Deserializer[Set[T]]):
-    "Recursively de-serializes a JSON list into a Python `set`."
-
-    member_type: Type[T]
-    member_parser: Deserializer
-
-    def __init__(self, member_type: Type[T]) -> None:
-        self.member_type = member_type
-
-    def build(self, context: Optional[ModuleType]) -> None:
-        self.member_parser = _get_deserializer(self.member_type, context)
-
-    def parse(self, data: JsonType) -> Set[T]:
-        if not isinstance(data, list):
-            type_name = python_type_to_str(self.member_type)
-            raise JsonTypeError(
-                f"type `Set[{type_name}]` expects JSON `array` data but instead received: {data}"
-            )
-
-        return set(self.member_parser.parse(item) for item in data)
-
-
-class TupleDeserializer(Deserializer[Tuple[Any, ...]]):
-    "Recursively de-serializes a JSON list into a Python `tuple`."
-
-    item_types: Tuple[Type[Any], ...]
-    item_parsers: Tuple[Deserializer[Any], ...]
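Note the key-type restriction enforced by `_check_key_type` above: JSON object keys are always strings, so `Dict[K, V]` keys must be `str` or an `enum.Enum` whose member values are all strings. A hedged sketch, again assuming the removed package were importable as `strong_typing`:

```python
import enum
from typing import Dict

from strong_typing.deserializer import create_deserializer  # removed by this patch

class Color(enum.Enum):
    RED = "red"
    BLUE = "blue"

parser = create_deserializer(Dict[Color, int])  # string-valued enum keys: accepted
assert parser.parse({"red": 1}) == {Color.RED: 1}
# create_deserializer(Dict[int, str]) would raise JsonTypeError at construction
```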
- - def __init__(self, item_types: Tuple[Type[Any], ...]) -> None: - self.item_types = item_types - - def build(self, context: Optional[ModuleType]) -> None: - self.item_parsers = tuple( - _get_deserializer(item_type, context) for item_type in self.item_types - ) - - @property - def container_type(self) -> str: - type_names = ", ".join( - python_type_to_str(item_type) for item_type in self.item_types - ) - return f"Tuple[{type_names}]" - - def parse(self, data: JsonType) -> Tuple[Any, ...]: - if not isinstance(data, list) or len(data) != len(self.item_parsers): - if not isinstance(data, list): - raise JsonTypeError( - f"type `{self.container_type}` expects JSON `array` data but instead received: {data}" - ) - else: - count = len(self.item_parsers) - raise JsonValueError( - f"type `{self.container_type}` expects a JSON `array` of length {count} but received length {len(data)}" - ) - - return tuple( - item_parser.parse(item) - for item_parser, item in zip(self.item_parsers, data) - ) - - -class UnionDeserializer(Deserializer): - "De-serializes a JSON value (of any type) into a Python union type." - - member_types: Tuple[type, ...] - member_parsers: Tuple[Deserializer, ...] - - def __init__(self, member_types: Tuple[type, ...]) -> None: - self.member_types = member_types - - def build(self, context: Optional[ModuleType]) -> None: - self.member_parsers = tuple( - _get_deserializer(member_type, context) for member_type in self.member_types - ) - - def parse(self, data: JsonType) -> Any: - for member_parser in self.member_parsers: - # iterate over potential types of discriminated union - try: - return member_parser.parse(data) - except (JsonKeyError, JsonTypeError): - # indicates a required field is missing from JSON dict -OR- the data cannot be cast to the expected type, - # i.e. we don't have the type that we are looking for - continue - - type_names = ", ".join( - python_type_to_str(member_type) for member_type in self.member_types - ) - raise JsonKeyError( - f"type `Union[{type_names}]` could not be instantiated from: {data}" - ) - - -def get_literal_properties(typ: type) -> Set[str]: - "Returns the names of all properties in a class that are of a literal type." - - return set( - property_name - for property_name, property_type in get_class_properties(typ) - if is_type_literal(property_type) - ) - - -def get_discriminating_properties(types: Tuple[type, ...]) -> Set[str]: - "Returns a set of properties with literal type that are common across all specified classes." - - if not types or not all(isinstance(typ, type) for typ in types): - return set() - - props = get_literal_properties(types[0]) - for typ in types[1:]: - props = props & get_literal_properties(typ) - - return props - - -class TaggedUnionDeserializer(Deserializer): - "De-serializes a JSON value with one or more disambiguating properties into a Python union type." - - member_types: Tuple[type, ...] 
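`TaggedUnionDeserializer` relies on the union members sharing at least one property with a `Literal` type, as computed by `get_discriminating_properties` above. A self-contained sketch of the dispatch idea using only the standard library (the `mapping` dict mimics, but is not, the library's internal `member_parsers` table):

```python
import typing
from dataclasses import dataclass
from typing import Literal

@dataclass
class Cat:
    kind: Literal["cat"]
    lives: int

@dataclass
class Dog:
    kind: Literal["dog"]
    breed: str

# map each member's literal `kind` value to the member type, then dispatch on data["kind"]
mapping = {typing.get_args(typing.get_type_hints(t)["kind"])[0]: t for t in (Cat, Dog)}
assert mapping["dog"] is Dog
```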
- disambiguating_properties: Set[str] - member_parsers: Dict[Tuple[str, Any], Deserializer] - - def __init__(self, member_types: Tuple[type, ...]) -> None: - self.member_types = member_types - self.disambiguating_properties = get_discriminating_properties(member_types) - - def build(self, context: Optional[ModuleType]) -> None: - self.member_parsers = {} - for member_type in self.member_types: - for property_name in self.disambiguating_properties: - literal_type = get_class_property(member_type, property_name) - if not literal_type: - continue - - for literal_value in unwrap_literal_values(literal_type): - tpl = (property_name, literal_value) - if tpl in self.member_parsers: - raise JsonTypeError( - f"disambiguating property `{property_name}` in type `{self.union_type}` has a duplicate value: {literal_value}" - ) - - self.member_parsers[tpl] = _get_deserializer(member_type, context) - - @property - def union_type(self) -> str: - type_names = ", ".join( - python_type_to_str(member_type) for member_type in self.member_types - ) - return f"Union[{type_names}]" - - def parse(self, data: JsonType) -> Any: - if not isinstance(data, dict): - raise JsonTypeError( - f"tagged union type `{self.union_type}` expects JSON `object` data but instead received: {data}" - ) - - for property_name in self.disambiguating_properties: - disambiguating_value = data.get(property_name) - if disambiguating_value is None: - continue - - member_parser = self.member_parsers.get( - (property_name, disambiguating_value) - ) - if member_parser is None: - raise JsonTypeError( - f"disambiguating property value is invalid for tagged union type `{self.union_type}`: {data}" - ) - - return member_parser.parse(data) - - raise JsonTypeError( - f"disambiguating property value is missing for tagged union type `{self.union_type}`: {data}" - ) - - -class LiteralDeserializer(Deserializer): - "De-serializes a JSON value into a Python literal type." - - values: Tuple[Any, ...] - parser: Deserializer - - def __init__(self, values: Tuple[Any, ...]) -> None: - self.values = values - - def build(self, context: Optional[ModuleType]) -> None: - literal_type_tuple = tuple(type(value) for value in self.values) - literal_type_set = set(literal_type_tuple) - if len(literal_type_set) != 1: - value_names = ", ".join(repr(value) for value in self.values) - raise TypeError( - f"type `Literal[{value_names}]` expects consistent literal value types but got: {literal_type_tuple}" - ) - - literal_type = literal_type_set.pop() - self.parser = _get_deserializer(literal_type, context) - - def parse(self, data: JsonType) -> Any: - value = self.parser.parse(data) - if value not in self.values: - value_names = ", ".join(repr(value) for value in self.values) - raise JsonTypeError( - f"type `Literal[{value_names}]` could not be instantiated from: {data}" - ) - return value - - -class EnumDeserializer(Deserializer[E]): - "Returns an enumeration instance based on the enumeration value read from a JSON value." - - enum_type: Type[E] - - def __init__(self, enum_type: Type[E]) -> None: - self.enum_type = enum_type - - def parse(self, data: JsonType) -> E: - return self.enum_type(data) - - -class CustomDeserializer(Deserializer[T]): - "Uses the `from_json` class method in class to de-serialize the object from JSON." 
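`CustomDeserializer` is the opt-out from reflection: if a class defines a callable `from_json`, `_create_deserializer` (further down in this file) wraps that hook instead of enumerating fields. An illustrative class (the `Fraction` type is hypothetical):

```python
from typing import Any

class Fraction:
    "Parses from a custom JSON representation via a `from_json` hook."

    def __init__(self, num: int, den: int) -> None:
        self.num, self.den = num, den

    @classmethod
    def from_json(cls, data: Any) -> "Fraction":
        # custom wire format "3/4" instead of a reflected {"num": ..., "den": ...}
        num, den = str(data).split("/")
        return cls(int(num), int(den))

f = Fraction.from_json("3/4")
assert (f.num, f.den) == (3, 4)
```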
- - converter: Callable[[JsonType], T] - - def __init__(self, converter: Callable[[JsonType], T]) -> None: - self.converter = converter - - def parse(self, data: JsonType) -> T: - return self.converter(data) - - -class FieldDeserializer(abc.ABC, Generic[T, R]): - """ - Deserializes a JSON property into a Python object field. - - :param property_name: The name of the JSON property to read from a JSON `object`. - :param field_name: The name of the field in a Python class to write data to. - :param parser: A compatible deserializer that can handle the field's type. - """ - - property_name: str - field_name: str - parser: Deserializer[T] - - def __init__( - self, property_name: str, field_name: str, parser: Deserializer[T] - ) -> None: - self.property_name = property_name - self.field_name = field_name - self.parser = parser - - @abc.abstractmethod - def parse_field(self, data: Dict[str, JsonType]) -> R: ... - - -class RequiredFieldDeserializer(FieldDeserializer[T, T]): - "Deserializes a JSON property into a mandatory Python object field." - - def parse_field(self, data: Dict[str, JsonType]) -> T: - if self.property_name not in data: - raise JsonKeyError( - f"missing required property `{self.property_name}` from JSON object: {data}" - ) - - return self.parser.parse(data[self.property_name]) - - -class OptionalFieldDeserializer(FieldDeserializer[T, Optional[T]]): - "Deserializes a JSON property into an optional Python object field with a default value of `None`." - - def parse_field(self, data: Dict[str, JsonType]) -> Optional[T]: - value = data.get(self.property_name) - if value is not None: - return self.parser.parse(value) - else: - return None - - -class DefaultFieldDeserializer(FieldDeserializer[T, T]): - "Deserializes a JSON property into a Python object field with an explicit default value." - - default_value: T - - def __init__( - self, - property_name: str, - field_name: str, - parser: Deserializer, - default_value: T, - ) -> None: - super().__init__(property_name, field_name, parser) - self.default_value = default_value - - def parse_field(self, data: Dict[str, JsonType]) -> T: - value = data.get(self.property_name) - if value is not None: - return self.parser.parse(value) - else: - return self.default_value - - -class DefaultFactoryFieldDeserializer(FieldDeserializer[T, T]): - "Deserializes a JSON property into an optional Python object field with an explicit default value factory." - - default_factory: Callable[[], T] - - def __init__( - self, - property_name: str, - field_name: str, - parser: Deserializer[T], - default_factory: Callable[[], T], - ) -> None: - super().__init__(property_name, field_name, parser) - self.default_factory = default_factory - - def parse_field(self, data: Dict[str, JsonType]) -> T: - value = data.get(self.property_name) - if value is not None: - return self.parser.parse(value) - else: - return self.default_factory() - - -class ClassDeserializer(Deserializer[T]): - "Base class for de-serializing class-like types such as data classes, named tuples and regular classes." 
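The four `FieldDeserializer` variants above map one-to-one onto how a dataclass field is declared; the builder picks the parser whose fallback behavior matches the declaration. A sketch (the `Job` class is hypothetical; comments give the outcome when the JSON key is missing):

```python
from dataclasses import dataclass, field
from typing import List, Optional

@dataclass
class Job:
    name: str                                      # required: missing key raises JsonKeyError
    owner: Optional[str] = None                    # optional/default: missing key yields None
    retries: int = 3                               # default: missing key yields 3
    tags: List[str] = field(default_factory=list)  # factory: missing key yields a fresh []
```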
- - class_type: type - property_parsers: List[FieldDeserializer] - property_fields: Set[str] - - def __init__(self, class_type: Type[T]) -> None: - self.class_type = class_type - - def assign(self, property_parsers: List[FieldDeserializer]) -> None: - self.property_parsers = property_parsers - self.property_fields = set( - property_parser.property_name for property_parser in property_parsers - ) - - def parse(self, data: JsonType) -> T: - if not isinstance(data, dict): - type_name = python_type_to_str(self.class_type) - raise JsonTypeError( - f"`type `{type_name}` expects JSON `object` data but instead received: {data}" - ) - - object_data: Dict[str, JsonType] = typing.cast(Dict[str, JsonType], data) - - field_values = {} - for property_parser in self.property_parsers: - field_values[property_parser.field_name] = property_parser.parse_field( - object_data - ) - - if not self.property_fields.issuperset(object_data): - unassigned_names = [ - name for name in object_data if name not in self.property_fields - ] - raise JsonKeyError( - f"unrecognized fields in JSON object: {unassigned_names}" - ) - - return self.create(**field_values) - - def create(self, **field_values: Any) -> T: - "Instantiates an object with a collection of property values." - - obj: T = create_object(self.class_type) - - # use `setattr` on newly created object instance - for field_name, field_value in field_values.items(): - setattr(obj, field_name, field_value) - return obj - - -class NamedTupleDeserializer(ClassDeserializer[NamedTuple]): - "De-serializes a named tuple from a JSON `object`." - - def build(self, context: Optional[ModuleType]) -> None: - property_parsers: List[FieldDeserializer] = [ - RequiredFieldDeserializer( - field_name, field_name, _get_deserializer(field_type, context) - ) - for field_name, field_type in get_resolved_hints(self.class_type).items() - ] - super().assign(property_parsers) - - def create(self, **field_values: Any) -> NamedTuple: - return self.class_type(**field_values) - - -class DataclassDeserializer(ClassDeserializer[T]): - "De-serializes a data class from a JSON `object`." 
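One behavior worth noting in `ClassDeserializer.parse` above: parsing is strict, so JSON keys that do not correspond to any known field raise `JsonKeyError` instead of being silently dropped. A stdlib-only sketch of that check:

```python
# mirror the unrecognized-field check in ClassDeserializer.parse
property_fields = {"name", "owner"}
object_data = {"name": "job-1", "onwer": "alice"}  # note the typo in "owner"

unassigned = [name for name in object_data if name not in property_fields]
assert unassigned == ["onwer"]  # the real parser raises JsonKeyError here
```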
- - def __init__(self, class_type: Type[T]) -> None: - if not dataclasses.is_dataclass(class_type): - raise TypeError("expected: data-class type") - super().__init__(class_type) # type: ignore[arg-type] - - def build(self, context: Optional[ModuleType]) -> None: - property_parsers: List[FieldDeserializer] = [] - resolved_hints = get_resolved_hints(self.class_type) - for field in dataclasses.fields(self.class_type): - field_type = resolved_hints[field.name] - property_name = python_field_to_json_property(field.name, field_type) - - is_optional = is_type_optional(field_type) - has_default = field.default is not dataclasses.MISSING - has_default_factory = field.default_factory is not dataclasses.MISSING - - if is_optional: - required_type: Type[T] = unwrap_optional_type(field_type) - else: - required_type = field_type - - parser = _get_deserializer(required_type, context) - - if has_default: - field_parser: FieldDeserializer = DefaultFieldDeserializer( - property_name, field.name, parser, field.default - ) - elif has_default_factory: - default_factory = typing.cast(Callable[[], Any], field.default_factory) - field_parser = DefaultFactoryFieldDeserializer( - property_name, field.name, parser, default_factory - ) - elif is_optional: - field_parser = OptionalFieldDeserializer( - property_name, field.name, parser - ) - else: - field_parser = RequiredFieldDeserializer( - property_name, field.name, parser - ) - - property_parsers.append(field_parser) - - super().assign(property_parsers) - - -class FrozenDataclassDeserializer(DataclassDeserializer[T]): - "De-serializes a frozen data class from a JSON `object`." - - def create(self, **field_values: Any) -> T: - "Instantiates an object with a collection of property values." - - # create object instance without calling `__init__` - obj: T = create_object(self.class_type) - - # can't use `setattr` on frozen dataclasses, pass member variable values to `__init__` - obj.__init__(**field_values) # type: ignore - return obj - - -class TypedClassDeserializer(ClassDeserializer[T]): - "De-serializes a class with type annotations from a JSON `object` by iterating over class properties." - - def build(self, context: Optional[ModuleType]) -> None: - property_parsers: List[FieldDeserializer] = [] - for field_name, field_type in get_resolved_hints(self.class_type).items(): - property_name = python_field_to_json_property(field_name, field_type) - - is_optional = is_type_optional(field_type) - - if is_optional: - required_type: Type[T] = unwrap_optional_type(field_type) - else: - required_type = field_type - - parser = _get_deserializer(required_type, context) - - if is_optional: - field_parser: FieldDeserializer = OptionalFieldDeserializer( - property_name, field_name, parser - ) - else: - field_parser = RequiredFieldDeserializer( - property_name, field_name, parser - ) - - property_parsers.append(field_parser) - - super().assign(property_parsers) - - -def create_deserializer( - typ: TypeLike, context: Optional[ModuleType] = None -) -> Deserializer: - """ - Creates a de-serializer engine to produce a Python object from an object obtained from a JSON string. - - When de-serializing a JSON object into a Python object, the following transformations are applied: - - * Fundamental types are parsed as `bool`, `int`, `float` or `str`. - * Date and time types are parsed from the ISO 8601 format with time zone into the corresponding Python type - `datetime`, `date` or `time`. - * Byte arrays are read from a string with Base64 encoding into a `bytes` instance. 
- * UUIDs are extracted from a UUID string compliant with RFC 4122 into a `uuid.UUID` instance. - * Enumerations are instantiated with a lookup on enumeration value. - * Containers (e.g. `list`, `dict`, `set`, `tuple`) are parsed recursively. - * Complex objects with properties (including data class types) are populated from dictionaries of key-value pairs - using reflection (enumerating type annotations). - - :raises TypeError: A de-serializer engine cannot be constructed for the input type. - """ - - if context is None: - if isinstance(typ, type): - context = sys.modules[typ.__module__] - - return _get_deserializer(typ, context) - - -_CACHE: Dict[Tuple[str, str], Deserializer] = {} - - -def _get_deserializer(typ: TypeLike, context: Optional[ModuleType]) -> Deserializer: - "Creates or re-uses a de-serializer engine to parse an object obtained from a JSON string." - - cache_key = None - - if isinstance(typ, (str, typing.ForwardRef)): - if context is None: - raise TypeError(f"missing context for evaluating type: {typ}") - - if isinstance(typ, str): - if hasattr(context, typ): - cache_key = (context.__name__, typ) - elif isinstance(typ, typing.ForwardRef): - if hasattr(context, typ.__forward_arg__): - cache_key = (context.__name__, typ.__forward_arg__) - - typ = evaluate_type(typ, context) - - typ = unwrap_annotated_type(typ) if is_type_annotated(typ) else typ - - if isinstance(typ, type) and typing.get_origin(typ) is None: - cache_key = (typ.__module__, typ.__name__) - - if cache_key is not None: - deserializer = _CACHE.get(cache_key) - if deserializer is None: - deserializer = _create_deserializer(typ) - - # store de-serializer immediately in cache to avoid stack overflow for recursive types - _CACHE[cache_key] = deserializer - - if isinstance(typ, type): - # use type's own module as context for evaluating member types - context = sys.modules[typ.__module__] - - # create any de-serializers this de-serializer is depending on - deserializer.build(context) - else: - # special forms are not always hashable, create a new de-serializer every time - deserializer = _create_deserializer(typ) - deserializer.build(context) - - return deserializer - - -def _create_deserializer(typ: TypeLike) -> Deserializer: - "Creates a de-serializer engine to parse an object obtained from a JSON string." - - # check for well-known types - if typ is type(None): - return NoneDeserializer() - elif typ is bool: - return BoolDeserializer() - elif typ is int: - return IntDeserializer() - elif typ is float: - return FloatDeserializer() - elif typ is str: - return StringDeserializer() - elif typ is bytes: - return BytesDeserializer() - elif typ is datetime.datetime: - return DateTimeDeserializer() - elif typ is datetime.date: - return DateDeserializer() - elif typ is datetime.time: - return TimeDeserializer() - elif typ is uuid.UUID: - return UUIDDeserializer() - elif typ is ipaddress.IPv4Address: - return IPv4Deserializer() - elif typ is ipaddress.IPv6Address: - return IPv6Deserializer() - - # dynamically-typed collection types - if typ is list: - raise TypeError("explicit item type required: use `List[T]` instead of `list`") - if typ is dict: - raise TypeError( - "explicit key and value types required: use `Dict[K, V]` instead of `dict`" - ) - if typ is set: - raise TypeError("explicit member type required: use `Set[T]` instead of `set`") - if typ is tuple: - raise TypeError( - "explicit item type list required: use `Tuple[T, ...]` instead of `tuple`" - ) - - # generic types (e.g. list, dict, set, etc.) 
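The generic-type dispatch that follows is driven entirely by `typing` introspection; for reference, the invariants it relies on are:

```python
import typing
from typing import Dict, List, Optional, Union

assert typing.get_origin(List[int]) is list
assert typing.get_args(List[int]) == (int,)
assert typing.get_origin(Dict[str, bool]) is dict
assert typing.get_args(Dict[str, bool]) == (str, bool)
# Optional[T] is spelled Union[T, None] at runtime, so it reaches the Union branch
assert typing.get_origin(Optional[int]) is Union
assert typing.get_args(Optional[int]) == (int, type(None))
```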
- origin_type = typing.get_origin(typ) - if origin_type is list: - (list_item_type,) = typing.get_args(typ) # unpack single tuple element - return ListDeserializer(list_item_type) - elif origin_type is dict: - key_type, value_type = typing.get_args(typ) - return DictDeserializer(key_type, value_type) - elif origin_type is set: - (set_member_type,) = typing.get_args(typ) # unpack single tuple element - return SetDeserializer(set_member_type) - elif origin_type is tuple: - return TupleDeserializer(typing.get_args(typ)) - elif origin_type is Union: - union_args = typing.get_args(typ) - if get_discriminating_properties(union_args): - return TaggedUnionDeserializer(union_args) - else: - return UnionDeserializer(union_args) - elif origin_type is Literal: - return LiteralDeserializer(typing.get_args(typ)) - - if not inspect.isclass(typ): - if is_dataclass_instance(typ): - raise TypeError(f"dataclass type expected but got instance: {typ}") - else: - raise TypeError(f"unable to de-serialize unrecognized type: {typ}") - - if issubclass(typ, enum.Enum): - return EnumDeserializer(typ) - - if is_named_tuple_type(typ): - return NamedTupleDeserializer(typ) - - # check if object has custom serialization method - convert_func = getattr(typ, "from_json", None) - if callable(convert_func): - return CustomDeserializer(convert_func) - - if is_dataclass_type(typ): - dataclass_params = getattr(typ, "__dataclass_params__", None) - if dataclass_params is not None and dataclass_params.frozen: - return FrozenDataclassDeserializer(typ) - else: - return DataclassDeserializer(typ) - - return TypedClassDeserializer(typ) diff --git a/docs/openapi_generator/strong_typing/docstring.py b/docs/openapi_generator/strong_typing/docstring.py deleted file mode 100644 index 3ef1e5e7a..000000000 --- a/docs/openapi_generator/strong_typing/docstring.py +++ /dev/null @@ -1,437 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# All rights reserved. -# -# This source code is licensed under the terms described in the LICENSE file in -# the root directory of this source tree. - -""" -Type-safe data interchange for Python data classes. - -:see: https://github.com/hunyadi/strong_typing -""" - -import builtins -import dataclasses -import inspect -import re -import sys -import types -import typing -from dataclasses import dataclass -from io import StringIO -from typing import Any, Callable, Dict, Optional, Protocol, Type, TypeVar - -if sys.version_info >= (3, 10): - from typing import TypeGuard -else: - from typing_extensions import TypeGuard - -from .inspection import ( - DataclassInstance, - get_class_properties, - get_signature, - is_dataclass_type, - is_type_enum, -) - -T = TypeVar("T") - - -@dataclass -class DocstringParam: - """ - A parameter declaration in a parameter block. - - :param name: The name of the parameter. - :param description: The description text for the parameter. - """ - - name: str - description: str - param_type: type = inspect.Signature.empty - - def __str__(self) -> str: - return f":param {self.name}: {self.description}" - - -@dataclass -class DocstringReturns: - """ - A `returns` declaration extracted from a docstring. - - :param description: The description text for the return value. - """ - - description: str - return_type: type = inspect.Signature.empty - - def __str__(self) -> str: - return f":returns: {self.description}" - - -@dataclass -class DocstringRaises: - """ - A `raises` declaration extracted from a docstring. - - :param typename: The type name of the exception raised. 
- :param description: The description associated with the exception raised. - """ - - typename: str - description: str - raise_type: type = inspect.Signature.empty - - def __str__(self) -> str: - return f":raises {self.typename}: {self.description}" - - -@dataclass -class Docstring: - """ - Represents the documentation string (a.k.a. docstring) for a type such as a (data) class or function. - - A docstring is broken down into the following components: - * A short description, which is the first block of text in the documentation string, and ends with a double - newline or a parameter block. - * A long description, which is the optional block of text following the short description, and ends with - a parameter block. - * A parameter block of named parameter and description string pairs in ReST-style. - * A `returns` declaration, which adds explanation to the return value. - * A `raises` declaration, which adds explanation to the exception type raised by the function on error. - - When the docstring is attached to a data class, it is understood as the documentation string of the class - `__init__` method. - - :param short_description: The short description text parsed from a docstring. - :param long_description: The long description text parsed from a docstring. - :param params: The parameter block extracted from a docstring. - :param returns: The returns declaration extracted from a docstring. - """ - - short_description: Optional[str] = None - long_description: Optional[str] = None - params: Dict[str, DocstringParam] = dataclasses.field(default_factory=dict) - returns: Optional[DocstringReturns] = None - raises: Dict[str, DocstringRaises] = dataclasses.field(default_factory=dict) - - @property - def full_description(self) -> Optional[str]: - if self.short_description and self.long_description: - return f"{self.short_description}\n\n{self.long_description}" - elif self.short_description: - return self.short_description - else: - return None - - def __str__(self) -> str: - output = StringIO() - - has_description = self.short_description or self.long_description - has_blocks = self.params or self.returns or self.raises - - if has_description: - if self.short_description and self.long_description: - output.write(self.short_description) - output.write("\n\n") - output.write(self.long_description) - elif self.short_description: - output.write(self.short_description) - - if has_blocks: - if has_description: - output.write("\n") - - for param in self.params.values(): - output.write("\n") - output.write(str(param)) - if self.returns: - output.write("\n") - output.write(str(self.returns)) - for raises in self.raises.values(): - output.write("\n") - output.write(str(raises)) - - s = output.getvalue() - output.close() - return s - - -def is_exception(member: object) -> TypeGuard[Type[BaseException]]: - return isinstance(member, type) and issubclass(member, BaseException) - - -def get_exceptions(module: types.ModuleType) -> Dict[str, Type[BaseException]]: - "Returns all exception classes declared in a module." - - return { - name: class_type - for name, class_type in inspect.getmembers(module, is_exception) - } - - -class SupportsDoc(Protocol): - __doc__: Optional[str] - - -def parse_type(typ: SupportsDoc) -> Docstring: - """ - Parse the docstring of a type into its components. - - :param typ: The type whose documentation string to parse. - :returns: Components of the documentation string. 
- """ - - doc = get_docstring(typ) - if doc is None: - return Docstring() - - docstring = parse_text(doc) - check_docstring(typ, docstring) - - # assign parameter and return types - if is_dataclass_type(typ): - properties = dict(get_class_properties(typing.cast(type, typ))) - - for name, param in docstring.params.items(): - param.param_type = properties[name] - - elif inspect.isfunction(typ): - signature = get_signature(typ) - for name, param in docstring.params.items(): - param.param_type = signature.parameters[name].annotation - if docstring.returns: - docstring.returns.return_type = signature.return_annotation - - # assign exception types - defining_module = inspect.getmodule(typ) - if defining_module: - context: Dict[str, type] = {} - context.update(get_exceptions(builtins)) - context.update(get_exceptions(defining_module)) - for exc_name, exc in docstring.raises.items(): - raise_type = context.get(exc_name) - if raise_type is None: - type_name = ( - getattr(typ, "__qualname__", None) - or getattr(typ, "__name__", None) - or None - ) - raise TypeError( - f"doc-string exception type `{exc_name}` is not an exception defined in the context of `{type_name}`" - ) - - exc.raise_type = raise_type - - return docstring - - -def parse_text(text: str) -> Docstring: - """ - Parse a ReST-style docstring into its components. - - :param text: The documentation string to parse, typically acquired as `type.__doc__`. - :returns: Components of the documentation string. - """ - - if not text: - return Docstring() - - # find block that starts object metadata block (e.g. `:param p:` or `:returns:`) - text = inspect.cleandoc(text) - match = re.search("^:", text, flags=re.MULTILINE) - if match: - desc_chunk = text[: match.start()] - meta_chunk = text[match.start() :] # noqa: E203 - else: - desc_chunk = text - meta_chunk = "" - - # split description text into short and long description - parts = desc_chunk.split("\n\n", 1) - - # ensure short description has no newlines - short_description = parts[0].strip().replace("\n", " ") or None - - # ensure long description preserves its structure (e.g. preformatted text) - if len(parts) > 1: - long_description = parts[1].strip() or None - else: - long_description = None - - params: Dict[str, DocstringParam] = {} - raises: Dict[str, DocstringRaises] = {} - returns = None - for match in re.finditer( - r"(^:.*?)(?=^:|\Z)", meta_chunk, flags=re.DOTALL | re.MULTILINE - ): - chunk = match.group(0) - if not chunk: - continue - - args_chunk, desc_chunk = chunk.lstrip(":").split(":", 1) - args = args_chunk.split() - desc = re.sub(r"\s+", " ", desc_chunk.strip()) - - if len(args) > 0: - kw = args[0] - if len(args) == 2: - if kw == "param": - params[args[1]] = DocstringParam( - name=args[1], - description=desc, - ) - elif kw == "raise" or kw == "raises": - raises[args[1]] = DocstringRaises( - typename=args[1], - description=desc, - ) - - elif len(args) == 1: - if kw == "return" or kw == "returns": - returns = DocstringReturns(description=desc) - - return Docstring( - long_description=long_description, - short_description=short_description, - params=params, - returns=returns, - raises=raises, - ) - - -def has_default_docstring(typ: SupportsDoc) -> bool: - "Check if class has the auto-generated string assigned by @dataclass." 
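`has_default_docstring` matches the signature-like docstring that `@dataclass` synthesizes when a class has no hand-written one; in CPython that auto-generated string looks like this:

```python
import dataclasses

@dataclasses.dataclass
class Point:
    x: int
    y: int

# the synthesized docstring mimics the constructor signature, which is why
# it is filtered out rather than treated as real documentation
assert Point.__doc__ == "Point(x: int, y: int)"
```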
- - if not isinstance(typ, type): - return False - - if is_dataclass_type(typ): - return ( - typ.__doc__ is not None - and re.match(f"^{re.escape(typ.__name__)}[(].*[)]$", typ.__doc__) - is not None - ) - - if is_type_enum(typ): - return typ.__doc__ is not None and typ.__doc__ == "An enumeration." - - return False - - -def has_docstring(typ: SupportsDoc) -> bool: - "Check if class has a documentation string other than the auto-generated string assigned by @dataclass." - - if has_default_docstring(typ): - return False - - return bool(typ.__doc__) - - -def get_docstring(typ: SupportsDoc) -> Optional[str]: - if typ.__doc__ is None: - return None - - if has_default_docstring(typ): - return None - - return typ.__doc__ - - -def check_docstring( - typ: SupportsDoc, docstring: Docstring, strict: bool = False -) -> None: - """ - Verifies the doc-string of a type. - - :raises TypeError: Raised on a mismatch between doc-string parameters, and function or type signature. - """ - - if is_dataclass_type(typ): - check_dataclass_docstring(typ, docstring, strict) - elif inspect.isfunction(typ): - check_function_docstring(typ, docstring, strict) - - -def check_dataclass_docstring( - typ: Type[DataclassInstance], docstring: Docstring, strict: bool = False -) -> None: - """ - Verifies the doc-string of a data-class type. - - :param strict: Whether to check if all data-class members have doc-strings. - :raises TypeError: Raised on a mismatch between doc-string parameters and data-class members. - """ - - if not is_dataclass_type(typ): - raise TypeError("not a data-class type") - - properties = dict(get_class_properties(typ)) - class_name = typ.__name__ - - for name in docstring.params: - if name not in properties: - raise TypeError( - f"doc-string parameter `{name}` is not a member of the data-class `{class_name}`" - ) - - if not strict: - return - - for name in properties: - if name not in docstring.params: - raise TypeError( - f"member `{name}` in data-class `{class_name}` is missing its doc-string" - ) - - -def check_function_docstring( - fn: Callable[..., Any], docstring: Docstring, strict: bool = False -) -> None: - """ - Verifies the doc-string of a function or member function. - - :param strict: Whether to check if all function parameters and the return type have doc-strings. - :raises TypeError: Raised on a mismatch between doc-string parameters and function signature. 
- """ - - signature = get_signature(fn) - func_name = fn.__qualname__ - - for name in docstring.params: - if name not in signature.parameters: - raise TypeError( - f"doc-string parameter `{name}` is absent from signature of function `{func_name}`" - ) - - if ( - docstring.returns is not None - and signature.return_annotation is inspect.Signature.empty - ): - raise TypeError( - f"doc-string has returns description in function `{func_name}` with no return type annotation" - ) - - if not strict: - return - - for name, param in signature.parameters.items(): - # ignore `self` in member function signatures - if name == "self" and ( - param.kind is inspect.Parameter.POSITIONAL_ONLY - or param.kind is inspect.Parameter.POSITIONAL_OR_KEYWORD - ): - continue - - if name not in docstring.params: - raise TypeError( - f"function parameter `{name}` in `{func_name}` is missing its doc-string" - ) - - if ( - signature.return_annotation is not inspect.Signature.empty - and docstring.returns is None - ): - raise TypeError( - f"function `{func_name}` has no returns description in its doc-string" - ) diff --git a/docs/openapi_generator/strong_typing/exception.py b/docs/openapi_generator/strong_typing/exception.py deleted file mode 100644 index af037cc3c..000000000 --- a/docs/openapi_generator/strong_typing/exception.py +++ /dev/null @@ -1,23 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# All rights reserved. -# -# This source code is licensed under the terms described in the LICENSE file in -# the root directory of this source tree. - -""" -Type-safe data interchange for Python data classes. - -:see: https://github.com/hunyadi/strong_typing -""" - - -class JsonKeyError(Exception): - "Raised when deserialization for a class or union type has failed because a matching member was not found." - - -class JsonValueError(Exception): - "Raised when (de)serialization of data has failed due to invalid value." - - -class JsonTypeError(Exception): - "Raised when deserialization of data has failed due to a type mismatch." diff --git a/docs/openapi_generator/strong_typing/inspection.py b/docs/openapi_generator/strong_typing/inspection.py deleted file mode 100644 index 41804f12c..000000000 --- a/docs/openapi_generator/strong_typing/inspection.py +++ /dev/null @@ -1,1053 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# All rights reserved. -# -# This source code is licensed under the terms described in the LICENSE file in -# the root directory of this source tree. - -""" -Type-safe data interchange for Python data classes. - -:see: https://github.com/hunyadi/strong_typing -""" - -import dataclasses -import datetime -import enum -import importlib -import importlib.machinery -import importlib.util -import inspect -import re -import sys -import types -import typing -import uuid -from typing import ( - Any, - Callable, - Dict, - Iterable, - List, - Literal, - NamedTuple, - Optional, - Protocol, - runtime_checkable, - Set, - Tuple, - Type, - TypeVar, - Union, -) - -if sys.version_info >= (3, 9): - from typing import Annotated -else: - from typing_extensions import Annotated - -if sys.version_info >= (3, 10): - from typing import TypeGuard -else: - from typing_extensions import TypeGuard - -S = TypeVar("S") -T = TypeVar("T") -K = TypeVar("K") -V = TypeVar("V") - - -def _is_type_like(data_type: object) -> bool: - """ - Checks if the object is a type or type-like object (e.g. generic type). - - :param data_type: The object to validate. - :returns: True if the object is a type or type-like object. 
- """ - - if isinstance(data_type, type): - # a standard type - return True - elif typing.get_origin(data_type) is not None: - # a generic type such as `list`, `dict` or `set` - return True - elif hasattr(data_type, "__forward_arg__"): - # an instance of `ForwardRef` - return True - elif data_type is Any: - # the special form `Any` - return True - else: - return False - - -if sys.version_info >= (3, 9): - TypeLike = Union[type, types.GenericAlias, typing.ForwardRef, Any] - - def is_type_like( - data_type: object, - ) -> TypeGuard[TypeLike]: - """ - Checks if the object is a type or type-like object (e.g. generic type). - - :param data_type: The object to validate. - :returns: True if the object is a type or type-like object. - """ - - return _is_type_like(data_type) - -else: - TypeLike = object - - def is_type_like( - data_type: object, - ) -> bool: - return _is_type_like(data_type) - - -def evaluate_member_type(typ: Any, cls: type) -> Any: - """ - Evaluates a forward reference type in a dataclass member. - - :param typ: The dataclass member type to convert. - :param cls: The dataclass in which the member is defined. - :returns: The evaluated type. - """ - - return evaluate_type(typ, sys.modules[cls.__module__]) - - -def evaluate_type(typ: Any, module: types.ModuleType) -> Any: - """ - Evaluates a forward reference type. - - :param typ: The type to convert, typically a dataclass member type. - :param module: The context for the type, i.e. the module in which the member is defined. - :returns: The evaluated type. - """ - - if isinstance(typ, str): - # evaluate data-class field whose type annotation is a string - return eval(typ, module.__dict__, locals()) - if isinstance(typ, typing.ForwardRef): - if sys.version_info >= (3, 9): - return typ._evaluate(module.__dict__, locals(), recursive_guard=frozenset()) - else: - return typ._evaluate(module.__dict__, locals()) - else: - return typ - - -@runtime_checkable -class DataclassInstance(Protocol): - __dataclass_fields__: typing.ClassVar[Dict[str, dataclasses.Field]] - - -def is_dataclass_type(typ: Any) -> TypeGuard[Type[DataclassInstance]]: - "True if the argument corresponds to a data class type (but not an instance)." - - typ = unwrap_annotated_type(typ) - return isinstance(typ, type) and dataclasses.is_dataclass(typ) - - -def is_dataclass_instance(obj: Any) -> TypeGuard[DataclassInstance]: - "True if the argument corresponds to a data class instance (but not a type)." - - return not isinstance(obj, type) and dataclasses.is_dataclass(obj) - - -@dataclasses.dataclass -class DataclassField: - name: str - type: Any - default: Any - - def __init__( - self, name: str, type: Any, default: Any = dataclasses.MISSING - ) -> None: - self.name = name - self.type = type - self.default = default - - -def dataclass_fields(cls: Type[DataclassInstance]) -> Iterable[DataclassField]: - "Generates the fields of a data-class resolving forward references." - - for field in dataclasses.fields(cls): - yield DataclassField( - field.name, evaluate_member_type(field.type, cls), field.default - ) - - -def dataclass_field_by_name(cls: Type[DataclassInstance], name: str) -> DataclassField: - "Looks up a field in a data-class by its field name." 
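`dataclass_fields` and `dataclass_field_by_name` above go through `evaluate_member_type` because dataclasses can carry unresolved forward references. The standard-library analogue of that resolution:

```python
import dataclasses
import typing
from typing import Optional

@dataclasses.dataclass
class Node:
    value: int
    next: Optional["Node"] = None  # self-reference must be a forward reference

# get_type_hints plays the role of evaluate_member_type here: it evaluates
# ForwardRef("Node") in the defining module's namespace
hints = typing.get_type_hints(Node)
assert hints["next"] == Optional[Node]
```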
- - for field in dataclasses.fields(cls): - if field.name == name: - return DataclassField(field.name, evaluate_member_type(field.type, cls)) - - raise LookupError(f"field `{name}` missing from class `{cls.__name__}`") - - -def is_named_tuple_instance(obj: Any) -> TypeGuard[NamedTuple]: - "True if the argument corresponds to a named tuple instance." - - return is_named_tuple_type(type(obj)) - - -def is_named_tuple_type(typ: Any) -> TypeGuard[Type[NamedTuple]]: - """ - True if the argument corresponds to a named tuple type. - - Calling the function `collections.namedtuple` gives a new type that is a subclass of `tuple` (and no other classes) - with a member named `_fields` that is a tuple whose items are all strings. - """ - - if not isinstance(typ, type): - return False - - typ = unwrap_annotated_type(typ) - - b = getattr(typ, "__bases__", None) - if b is None: - return False - - if len(b) != 1 or b[0] != tuple: - return False - - f = getattr(typ, "_fields", None) - if not isinstance(f, tuple): - return False - - return all(isinstance(n, str) for n in f) - - -if sys.version_info >= (3, 11): - - def is_type_enum(typ: object) -> TypeGuard[Type[enum.Enum]]: - "True if the specified type is an enumeration type." - - typ = unwrap_annotated_type(typ) - return isinstance(typ, enum.EnumType) - -else: - - def is_type_enum(typ: object) -> TypeGuard[Type[enum.Enum]]: - "True if the specified type is an enumeration type." - - typ = unwrap_annotated_type(typ) - - # use an explicit isinstance(..., type) check to filter out special forms like generics - return isinstance(typ, type) and issubclass(typ, enum.Enum) - - -def enum_value_types(enum_type: Type[enum.Enum]) -> List[type]: - """ - Returns all unique value types of the `enum.Enum` type in definition order. - """ - - # filter unique enumeration value types by keeping definition order - return list(dict.fromkeys(type(e.value) for e in enum_type)) - - -def extend_enum( - source: Type[enum.Enum], -) -> Callable[[Type[enum.Enum]], Type[enum.Enum]]: - """ - Creates a new enumeration type extending the set of values in an existing type. - - :param source: The existing enumeration type to be extended with new values. - :returns: A new enumeration type with the extended set of values. - """ - - def wrap(extend: Type[enum.Enum]) -> Type[enum.Enum]: - # create new enumeration type combining the values from both types - values: Dict[str, Any] = {} - values.update((e.name, e.value) for e in source) - values.update((e.name, e.value) for e in extend) - enum_class: Type[enum.Enum] = enum.Enum(extend.__name__, values) # type: ignore - - # assign the newly created type to the same module where the extending class is defined - setattr(enum_class, "__module__", extend.__module__) - setattr(enum_class, "__doc__", extend.__doc__) - setattr(sys.modules[extend.__module__], extend.__name__, enum_class) - - return enum.unique(enum_class) - - return wrap - - -if sys.version_info >= (3, 10): - - def _is_union_like(typ: object) -> bool: - "True if type is a union such as `Union[T1, T2, ...]` or a union type `T1 | T2`." - - return typing.get_origin(typ) is Union or isinstance(typ, types.UnionType) - -else: - - def _is_union_like(typ: object) -> bool: - "True if type is a union such as `Union[T1, T2, ...]` or a union type `T1 | T2`." - - return typing.get_origin(typ) is Union - - -def is_type_optional( - typ: object, strict: bool = False -) -> TypeGuard[Type[Optional[Any]]]: - """ - True if the type annotation corresponds to an optional type (e.g. 
`Optional[T]` or `Union[T1,T2,None]`).
-
-    `Optional[T]` is represented as `Union[T, None]` in classic style, and is equivalent to `T | None` in new style.
-
-    :param strict: True if only `Optional[T]` qualifies as an optional type but `Union[T1, T2, None]` does not.
-    """
-
-    typ = unwrap_annotated_type(typ)
-
-    if _is_union_like(typ):
-        args = typing.get_args(typ)
-        if strict and len(args) != 2:
-            return False
-
-        return type(None) in args
-
-    return False
-
-
-def unwrap_optional_type(typ: Type[Optional[T]]) -> Type[T]:
-    """
-    Extracts the inner type of an optional type.
-
-    :param typ: The optional type `Optional[T]`.
-    :returns: The inner type `T`.
-    """
-
-    return rewrap_annotated_type(_unwrap_optional_type, typ)
-
-
-def _unwrap_optional_type(typ: Type[Optional[T]]) -> Type[T]:
-    "Extracts the type qualified as optional (e.g. returns `T` for `Optional[T]`)."
-
-    # Optional[T] is represented internally as Union[T, None]
-    if not _is_union_like(typ):
-        raise TypeError("optional type must have un-subscripted type of Union")
-
-    # will automatically unwrap Union[T] into T
-    return Union[
-        tuple(filter(lambda item: item is not type(None), typing.get_args(typ)))  # type: ignore
-    ]
-
-
-def is_type_union(typ: object) -> bool:
-    "True if the type annotation corresponds to a union type (e.g. `Union[T1,T2,T3]`)."
-
-    typ = unwrap_annotated_type(typ)
-    if _is_union_like(typ):
-        args = typing.get_args(typ)
-        return len(args) > 2 or type(None) not in args
-
-    return False
-
-
-def unwrap_union_types(typ: object) -> Tuple[object, ...]:
-    """
-    Extracts the inner types of a union type.
-
-    :param typ: The union type `Union[T1, T2, ...]`.
-    :returns: The inner types `T1`, `T2`, etc.
-    """
-
-    typ = unwrap_annotated_type(typ)
-    return _unwrap_union_types(typ)
-
-
-def _unwrap_union_types(typ: object) -> Tuple[object, ...]:
-    "Extracts the types in a union (e.g. returns a tuple of types `T1` and `T2` for `Union[T1, T2]`)."
-
-    if not _is_union_like(typ):
-        raise TypeError("union type must have un-subscripted type of Union")
-
-    return typing.get_args(typ)
-
-
-def is_type_literal(typ: object) -> bool:
-    "True if the specified type is a literal of one or more constant values, e.g. `Literal['string']` or `Literal[42]`."
-
-    typ = unwrap_annotated_type(typ)
-    return typing.get_origin(typ) is Literal
-
-
-def unwrap_literal_value(typ: object) -> Any:
-    """
-    Extracts the single constant value captured by a literal type.
-
-    :param typ: The literal type `Literal[value]`.
-    :returns: The value captured by the literal type.
-    """
-
-    args = unwrap_literal_values(typ)
-    if len(args) != 1:
-        raise TypeError("too many values in literal type")
-
-    return args[0]
-
-
-def unwrap_literal_values(typ: object) -> Tuple[Any, ...]:
-    """
-    Extracts the constant values captured by a literal type.
-
-    :param typ: The literal type `Literal[value, ...]`.
-    :returns: A tuple of values captured by the literal type.
-    """
-
-    typ = unwrap_annotated_type(typ)
-    return typing.get_args(typ)
-
-
-def unwrap_literal_types(typ: object) -> Tuple[type, ...]:
-    """
-    Extracts the types of the constant values captured by a literal type.
-
-    :param typ: The literal type `Literal[value, ...]`.
-    :returns: A tuple of item types `T` such that `type(value) == T`.
-    """
-
-    return tuple(type(t) for t in unwrap_literal_values(typ))
-
-
-def is_generic_list(typ: object) -> TypeGuard[Type[list]]:
-    "True if the specified type is a generic list, i.e. `List[T]`."
- - typ = unwrap_annotated_type(typ) - return typing.get_origin(typ) is list - - -def unwrap_generic_list(typ: Type[List[T]]) -> Type[T]: - """ - Extracts the item type of a list type. - - :param typ: The list type `List[T]`. - :returns: The item type `T`. - """ - - return rewrap_annotated_type(_unwrap_generic_list, typ) - - -def _unwrap_generic_list(typ: Type[List[T]]) -> Type[T]: - "Extracts the item type of a list type (e.g. returns `T` for `List[T]`)." - - (list_type,) = typing.get_args(typ) # unpack single tuple element - return list_type - - -def is_generic_set(typ: object) -> TypeGuard[Type[set]]: - "True if the specified type is a generic set, i.e. `Set[T]`." - - typ = unwrap_annotated_type(typ) - return typing.get_origin(typ) is set - - -def unwrap_generic_set(typ: Type[Set[T]]) -> Type[T]: - """ - Extracts the item type of a set type. - - :param typ: The set type `Set[T]`. - :returns: The item type `T`. - """ - - return rewrap_annotated_type(_unwrap_generic_set, typ) - - -def _unwrap_generic_set(typ: Type[Set[T]]) -> Type[T]: - "Extracts the item type of a set type (e.g. returns `T` for `Set[T]`)." - - (set_type,) = typing.get_args(typ) # unpack single tuple element - return set_type - - -def is_generic_dict(typ: object) -> TypeGuard[Type[dict]]: - "True if the specified type is a generic dictionary, i.e. `Dict[KeyType, ValueType]`." - - typ = unwrap_annotated_type(typ) - return typing.get_origin(typ) is dict - - -def unwrap_generic_dict(typ: Type[Dict[K, V]]) -> Tuple[Type[K], Type[V]]: - """ - Extracts the key and value types of a dictionary type as a tuple. - - :param typ: The dictionary type `Dict[K, V]`. - :returns: The key and value types `K` and `V`. - """ - - return _unwrap_generic_dict(unwrap_annotated_type(typ)) - - -def _unwrap_generic_dict(typ: Type[Dict[K, V]]) -> Tuple[Type[K], Type[V]]: - "Extracts the key and value types of a dict type (e.g. returns (`K`, `V`) for `Dict[K, V]`)." - - key_type, value_type = typing.get_args(typ) - return key_type, value_type - - -def is_type_annotated(typ: TypeLike) -> bool: - "True if the type annotation corresponds to an annotated type (i.e. `Annotated[T, ...]`)." - - return getattr(typ, "__metadata__", None) is not None - - -def get_annotation(data_type: TypeLike, annotation_type: Type[T]) -> Optional[T]: - """ - Returns the first annotation on a data type that matches the expected annotation type. - - :param data_type: The annotated type from which to extract the annotation. - :param annotation_type: The annotation class to look for. - :returns: The annotation class instance found (if any). - """ - - metadata = getattr(data_type, "__metadata__", None) - if metadata is not None: - for annotation in metadata: - if isinstance(annotation, annotation_type): - return annotation - - return None - - -def unwrap_annotated_type(typ: T) -> T: - "Extracts the wrapped type from an annotated type (e.g. returns `T` for `Annotated[T, ...]`)." - - if is_type_annotated(typ): - # type is Annotated[T, ...] - return typing.get_args(typ)[0] - else: - # type is a regular type - return typ - - -def rewrap_annotated_type( - transform: Callable[[Type[S]], Type[T]], typ: Type[S] -) -> Type[T]: - """ - Un-boxes, transforms and re-boxes an optionally annotated type. - - :param transform: A function that maps an un-annotated type to another type. - :param typ: A type to un-box (if necessary), transform, and re-box (if necessary). - """ - - metadata = getattr(typ, "__metadata__", None) - if metadata is not None: - # type is Annotated[T, ...] 
- inner_type = typing.get_args(typ)[0] - else: - # type is a regular type - inner_type = typ - - transformed_type = transform(inner_type) - - if metadata is not None: - return Annotated[(transformed_type, *metadata)] # type: ignore - else: - return transformed_type - - -def get_module_classes(module: types.ModuleType) -> List[type]: - "Returns all classes declared directly in a module." - - def is_class_member(member: object) -> TypeGuard[type]: - return inspect.isclass(member) and member.__module__ == module.__name__ - - return [class_type for _, class_type in inspect.getmembers(module, is_class_member)] - - -if sys.version_info >= (3, 9): - - def get_resolved_hints(typ: type) -> Dict[str, type]: - return typing.get_type_hints(typ, include_extras=True) - -else: - - def get_resolved_hints(typ: type) -> Dict[str, type]: - return typing.get_type_hints(typ) - - -def get_class_properties(typ: type) -> Iterable[Tuple[str, type]]: - "Returns all properties of a class." - - if is_dataclass_type(typ): - return ((field.name, field.type) for field in dataclasses.fields(typ)) - else: - resolved_hints = get_resolved_hints(typ) - return resolved_hints.items() - - -def get_class_property(typ: type, name: str) -> Optional[type]: - "Looks up the annotated type of a property in a class by its property name." - - for property_name, property_type in get_class_properties(typ): - if name == property_name: - return property_type - return None - - -@dataclasses.dataclass -class _ROOT: - pass - - -def get_referenced_types( - typ: TypeLike, module: Optional[types.ModuleType] = None -) -> Set[type]: - """ - Extracts types directly or indirectly referenced by this type. - - For example, extract `T` from `List[T]`, `Optional[T]` or `Annotated[T, ...]`, `K` and `V` from `Dict[K,V]`, - `A` and `B` from `Union[A,B]`. - - :param typ: A type or special form. - :param module: The context in which types are evaluated. - :returns: Types referenced by the given type or special form. - """ - - collector = TypeCollector() - collector.run(typ, _ROOT, module) - return collector.references - - -class TypeCollector: - """ - Collects types directly or indirectly referenced by a type. - - :param graph: The type dependency graph, linking types to types they depend on. - """ - - graph: Dict[type, Set[type]] - - @property - def references(self) -> Set[type]: - "Types collected by the type collector." - - dependencies = set() - for edges in self.graph.values(): - dependencies.update(edges) - return dependencies - - def __init__(self) -> None: - self.graph = {_ROOT: set()} - - def traverse(self, typ: type) -> None: - "Finds all dependent types of a type." - - self.run(typ, _ROOT, sys.modules[typ.__module__]) - - def traverse_all(self, types: Iterable[type]) -> None: - "Finds all dependent types of a list of types." - - for typ in types: - self.traverse(typ) - - def run( - self, - typ: TypeLike, - cls: Type[DataclassInstance], - module: Optional[types.ModuleType], - ) -> None: - """ - Extracts types indirectly referenced by this type. - - For example, extract `T` from `List[T]`, `Optional[T]` or `Annotated[T, ...]`, `K` and `V` from `Dict[K,V]`, - `A` and `B` from `Union[A,B]`. - - :param typ: A type or special form. - :param cls: A dataclass type being expanded for dependent types. - :param module: The context in which types are evaluated. - :returns: Types referenced by the given type or special form. 
- """ - - if typ is type(None) or typ is Any: - return - - if isinstance(typ, type): - self.graph[cls].add(typ) - - if typ in self.graph: - return - - self.graph[typ] = set() - - metadata = getattr(typ, "__metadata__", None) - if metadata is not None: - # type is Annotated[T, ...] - arg = typing.get_args(typ)[0] - return self.run(arg, cls, module) - - # type is a forward reference - if isinstance(typ, str) or isinstance(typ, typing.ForwardRef): - if module is None: - raise ValueError("missing context for evaluating types") - - evaluated_type = evaluate_type(typ, module) - return self.run(evaluated_type, cls, module) - - # type is a special form - origin = typing.get_origin(typ) - if origin in [list, dict, frozenset, set, tuple, Union]: - for arg in typing.get_args(typ): - self.run(arg, cls, module) - return - elif origin is Literal: - return - - # type is optional or a union type - if is_type_optional(typ): - return self.run(unwrap_optional_type(typ), cls, module) - if is_type_union(typ): - for union_type in unwrap_union_types(typ): - self.run(union_type, cls, module) - return - - # type is a regular type - elif is_dataclass_type(typ) or is_type_enum(typ) or isinstance(typ, type): - context = sys.modules[typ.__module__] - if is_dataclass_type(typ): - for field in dataclass_fields(typ): - self.run(field.type, typ, context) - else: - for field_name, field_type in get_resolved_hints(typ).items(): - self.run(field_type, typ, context) - return - - raise TypeError(f"expected: type-like; got: {typ}") - - -if sys.version_info >= (3, 10): - - def get_signature(fn: Callable[..., Any]) -> inspect.Signature: - "Extracts the signature of a function." - - return inspect.signature(fn, eval_str=True) - -else: - - def get_signature(fn: Callable[..., Any]) -> inspect.Signature: - "Extracts the signature of a function." - - return inspect.signature(fn) - - -def is_reserved_property(name: str) -> bool: - "True if the name stands for an internal property." - - # filter built-in and special properties - if re.match(r"^__.+__$", name): - return True - - # filter built-in special names - if name in ["_abc_impl"]: - return True - - return False - - -def create_module(name: str) -> types.ModuleType: - """ - Creates a new module dynamically at run-time. - - :param name: Fully qualified name of the new module (with dot notation). - """ - - if name in sys.modules: - raise KeyError(f"{name!r} already in sys.modules") - - spec = importlib.machinery.ModuleSpec(name, None) - module = importlib.util.module_from_spec(spec) - sys.modules[name] = module - if spec.loader is not None: - spec.loader.exec_module(module) - return module - - -if sys.version_info >= (3, 10): - - def create_data_type(class_name: str, fields: List[Tuple[str, type]]) -> type: - """ - Creates a new data-class type dynamically. - - :param class_name: The name of new data-class type. - :param fields: A list of fields (and their type) that the new data-class type is expected to have. - :returns: The newly created data-class type. - """ - - # has the `slots` parameter - return dataclasses.make_dataclass(class_name, fields, slots=True) - -else: - - def create_data_type(class_name: str, fields: List[Tuple[str, type]]) -> type: - """ - Creates a new data-class type dynamically. - - :param class_name: The name of new data-class type. - :param fields: A list of fields (and their type) that the new data-class type is expected to have. - :returns: The newly created data-class type. 
- """ - - cls = dataclasses.make_dataclass(class_name, fields) - - cls_dict = dict(cls.__dict__) - field_names = tuple(field.name for field in dataclasses.fields(cls)) - - cls_dict["__slots__"] = field_names - - for field_name in field_names: - cls_dict.pop(field_name, None) - cls_dict.pop("__dict__", None) - - qualname = getattr(cls, "__qualname__", None) - cls = type(cls)(cls.__name__, (), cls_dict) - if qualname is not None: - cls.__qualname__ = qualname - - return cls - - -def create_object(typ: Type[T]) -> T: - "Creates an instance of a type." - - if issubclass(typ, Exception): - # exception types need special treatment - e = typ.__new__(typ) - return typing.cast(T, e) - else: - return object.__new__(typ) - - -if sys.version_info >= (3, 9): - TypeOrGeneric = Union[type, types.GenericAlias] - -else: - TypeOrGeneric = object - - -def is_generic_instance(obj: Any, typ: TypeLike) -> bool: - """ - Returns whether an object is an instance of a generic class, a standard class or of a subclass thereof. - - This function checks the following items recursively: - * items of a list - * keys and values of a dictionary - * members of a set - * items of a tuple - * members of a union type - - :param obj: The (possibly generic container) object to check recursively. - :param typ: The expected type of the object. - """ - - if isinstance(typ, typing.ForwardRef): - fwd: typing.ForwardRef = typ - identifier = fwd.__forward_arg__ - typ = eval(identifier) - if isinstance(typ, type): - return isinstance(obj, typ) - else: - return False - - # generic types (e.g. list, dict, set, etc.) - origin_type = typing.get_origin(typ) - if origin_type is list: - if not isinstance(obj, list): - return False - (list_item_type,) = typing.get_args(typ) # unpack single tuple element - list_obj: list = obj - return all(is_generic_instance(item, list_item_type) for item in list_obj) - elif origin_type is dict: - if not isinstance(obj, dict): - return False - key_type, value_type = typing.get_args(typ) - dict_obj: dict = obj - return all( - is_generic_instance(key, key_type) - and is_generic_instance(value, value_type) - for key, value in dict_obj.items() - ) - elif origin_type is set: - if not isinstance(obj, set): - return False - (set_member_type,) = typing.get_args(typ) # unpack single tuple element - set_obj: set = obj - return all(is_generic_instance(item, set_member_type) for item in set_obj) - elif origin_type is tuple: - if not isinstance(obj, tuple): - return False - return all( - is_generic_instance(item, tuple_item_type) - for tuple_item_type, item in zip( - (tuple_item_type for tuple_item_type in typing.get_args(typ)), - (item for item in obj), - ) - ) - elif origin_type is Union: - return any( - is_generic_instance(obj, member_type) - for member_type in typing.get_args(typ) - ) - elif isinstance(typ, type): - return isinstance(obj, typ) - else: - raise TypeError(f"expected `type` but got: {typ}") - - -class RecursiveChecker: - _pred: Optional[Callable[[type, Any], bool]] - - def __init__(self, pred: Callable[[type, Any], bool]) -> None: - """ - Creates a checker to verify if a predicate applies to all nested member properties of an object recursively. - - :param pred: The predicate to test on member properties. Takes a property type and a property value. - """ - - self._pred = pred - - def pred(self, typ: type, obj: Any) -> bool: - "Acts as a workaround for the type checker mypy." 
- - assert self._pred is not None - return self._pred(typ, obj) - - def check(self, typ: TypeLike, obj: Any) -> bool: - """ - Checks if a predicate applies to all nested member properties of an object recursively. - - :param typ: The type to recurse into. - :param obj: The object to inspect recursively. Must be an instance of the given type. - :returns: True if all member properties pass the filter predicate. - """ - - # check for well-known types - if ( - typ is type(None) - or typ is bool - or typ is int - or typ is float - or typ is str - or typ is bytes - or typ is datetime.datetime - or typ is datetime.date - or typ is datetime.time - or typ is uuid.UUID - ): - return self.pred(typing.cast(type, typ), obj) - - # generic types (e.g. list, dict, set, etc.) - origin_type = typing.get_origin(typ) - if origin_type is list: - if not isinstance(obj, list): - raise TypeError(f"expected `list` but got: {obj}") - (list_item_type,) = typing.get_args(typ) # unpack single tuple element - list_obj: list = obj - return all(self.check(list_item_type, item) for item in list_obj) - elif origin_type is dict: - if not isinstance(obj, dict): - raise TypeError(f"expected `dict` but got: {obj}") - key_type, value_type = typing.get_args(typ) - dict_obj: dict = obj - return all(self.check(value_type, item) for item in dict_obj.values()) - elif origin_type is set: - if not isinstance(obj, set): - raise TypeError(f"expected `set` but got: {obj}") - (set_member_type,) = typing.get_args(typ) # unpack single tuple element - set_obj: set = obj - return all(self.check(set_member_type, item) for item in set_obj) - elif origin_type is tuple: - if not isinstance(obj, tuple): - raise TypeError(f"expected `tuple` but got: {obj}") - return all( - self.check(tuple_item_type, item) - for tuple_item_type, item in zip( - (tuple_item_type for tuple_item_type in typing.get_args(typ)), - (item for item in obj), - ) - ) - elif origin_type is Union: - return self.pred(typ, obj) # type: ignore[arg-type] - - if not inspect.isclass(typ): - raise TypeError(f"expected `type` but got: {typ}") - - # enumeration type - if issubclass(typ, enum.Enum): - if not isinstance(obj, enum.Enum): - raise TypeError(f"expected `{typ}` but got: {obj}") - return self.pred(typ, obj) - - # class types with properties - if is_named_tuple_type(typ): - if not isinstance(obj, tuple): - raise TypeError(f"expected `NamedTuple` but got: {obj}") - return all( - self.check(field_type, getattr(obj, field_name)) - for field_name, field_type in typing.get_type_hints(typ).items() - ) - elif is_dataclass_type(typ): - if not isinstance(obj, typ): - raise TypeError(f"expected `{typ}` but got: {obj}") - resolved_hints = get_resolved_hints(typ) - return all( - self.check(resolved_hints[field.name], getattr(obj, field.name)) - for field in dataclasses.fields(typ) - ) - else: - if not isinstance(obj, typ): - raise TypeError(f"expected `{typ}` but got: {obj}") - return all( - self.check(property_type, getattr(obj, property_name)) - for property_name, property_type in get_class_properties(typ) - ) - - -def check_recursive( - obj: object, - /, - *, - pred: Optional[Callable[[type, Any], bool]] = None, - type_pred: Optional[Callable[[type], bool]] = None, - value_pred: Optional[Callable[[Any], bool]] = None, -) -> bool: - """ - Checks if a predicate applies to all nested member properties of an object recursively. - - :param obj: The object to inspect recursively. - :param pred: The predicate to test on member properties. Takes a property type and a property value. 
- :param type_pred: Constrains the check to properties of an expected type. Properties of other types pass automatically. - :param value_pred: Verifies a condition on member property values (of an expected type). - :returns: True if all member properties pass the filter predicate(s). - """ - - if type_pred is not None and value_pred is not None: - if pred is not None: - raise TypeError( - "filter predicate not permitted when type and value predicates are present" - ) - - type_p: Callable[[Type[T]], bool] = type_pred - value_p: Callable[[T], bool] = value_pred - pred = lambda typ, obj: not type_p(typ) or value_p(obj) # noqa: E731 - - elif value_pred is not None: - if pred is not None: - raise TypeError( - "filter predicate not permitted when value predicate is present" - ) - - value_only_p: Callable[[T], bool] = value_pred - pred = lambda typ, obj: value_only_p(obj) # noqa: E731 - - elif type_pred is not None: - raise TypeError("value predicate required when type predicate is present") - - elif pred is None: - pred = lambda typ, obj: True # noqa: E731 - - return RecursiveChecker(pred).check(type(obj), obj) diff --git a/docs/openapi_generator/strong_typing/mapping.py b/docs/openapi_generator/strong_typing/mapping.py deleted file mode 100644 index 2bc68bb63..000000000 --- a/docs/openapi_generator/strong_typing/mapping.py +++ /dev/null @@ -1,42 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# All rights reserved. -# -# This source code is licensed under the terms described in the LICENSE file in -# the root directory of this source tree. - -""" -Type-safe data interchange for Python data classes. - -:see: https://github.com/hunyadi/strong_typing -""" - -import keyword -from typing import Optional - -from .auxiliary import Alias -from .inspection import get_annotation - - -def python_field_to_json_property( - python_id: str, python_type: Optional[object] = None -) -> str: - """ - Map a Python field identifier to a JSON property name. - - Authors may use an underscore appended at the end of a Python identifier as per PEP 8 if it clashes with a Python - keyword: e.g. `in` would become `in_` and `from` would become `from_`. Remove these suffixes when exporting to JSON. - - Authors may supply an explicit alias with the type annotation `Alias`, e.g. `Annotated[MyType, Alias("alias")]`. - """ - - if python_type is not None: - alias = get_annotation(python_type, Alias) - if alias: - return alias.name - - if python_id.endswith("_"): - id = python_id[:-1] - if keyword.iskeyword(id): - return id - - return python_id diff --git a/docs/openapi_generator/strong_typing/name.py b/docs/openapi_generator/strong_typing/name.py deleted file mode 100644 index c883794c0..000000000 --- a/docs/openapi_generator/strong_typing/name.py +++ /dev/null @@ -1,188 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# All rights reserved. -# -# This source code is licensed under the terms described in the LICENSE file in -# the root directory of this source tree. - -""" -Type-safe data interchange for Python data classes. - -:see: https://github.com/hunyadi/strong_typing -""" - -import typing -from typing import Any, Literal, Optional, Tuple, Union - -from .auxiliary import _auxiliary_types -from .inspection import ( - is_generic_dict, - is_generic_list, - is_type_optional, - is_type_union, - TypeLike, - unwrap_generic_dict, - unwrap_generic_list, - unwrap_optional_type, - unwrap_union_types, -) - - -class TypeFormatter: - """ - Type formatter. 
-
-    :param use_union_operator: Whether to emit union types as `X | Y` as per PEP 604.
-    """
-
-    use_union_operator: bool
-
-    def __init__(self, use_union_operator: bool = False) -> None:
-        self.use_union_operator = use_union_operator
-
-    def union_to_str(self, data_type_args: Tuple[TypeLike, ...]) -> str:
-        if self.use_union_operator:
-            return " | ".join(self.python_type_to_str(t) for t in data_type_args)
-        else:
-            if len(data_type_args) == 2 and type(None) in data_type_args:
-                # Optional[T] is represented as Union[T, None]
-                origin_name = "Optional"
-                data_type_args = tuple(t for t in data_type_args if t is not type(None))
-            else:
-                origin_name = "Union"
-
-            args = ", ".join(self.python_type_to_str(t) for t in data_type_args)
-            return f"{origin_name}[{args}]"
-
-    def plain_type_to_str(self, data_type: TypeLike) -> str:
-        "Returns the string representation of a Python type without metadata."
-
-        # return forward references as the annotation string
-        if isinstance(data_type, typing.ForwardRef):
-            fwd: typing.ForwardRef = data_type
-            return fwd.__forward_arg__
-        elif isinstance(data_type, str):
-            return data_type
-
-        origin = typing.get_origin(data_type)
-        if origin is not None:
-            data_type_args = typing.get_args(data_type)
-
-            if origin is dict:  # Dict[K, V]
-                origin_name = "Dict"
-            elif origin is list:  # List[T]
-                origin_name = "List"
-            elif origin is set:  # Set[T]
-                origin_name = "Set"
-            elif origin is Union:
-                return self.union_to_str(data_type_args)
-            elif origin is Literal:
-                args = ", ".join(repr(arg) for arg in data_type_args)
-                return f"Literal[{args}]"
-            else:
-                origin_name = origin.__name__
-
-            args = ", ".join(self.python_type_to_str(t) for t in data_type_args)
-            return f"{origin_name}[{args}]"
-
-        return data_type.__name__
-
-    def python_type_to_str(self, data_type: TypeLike) -> str:
-        "Returns the string representation of a Python type."
-
-        if data_type is type(None):
-            return "None"
-
-        # use compact name for alias types
-        name = _auxiliary_types.get(data_type)
-        if name is not None:
-            return name
-
-        metadata = getattr(data_type, "__metadata__", None)
-        if metadata is not None:
-            # type is Annotated[T, ...]
-            metatuple: Tuple[Any, ...] = metadata
-            arg = typing.get_args(data_type)[0]
-
-            # check for auxiliary types with user-defined annotations
-            metaset = set(metatuple)
-            for auxiliary_type, auxiliary_name in _auxiliary_types.items():
-                auxiliary_arg = typing.get_args(auxiliary_type)[0]
-                if arg is not auxiliary_arg:
-                    continue
-
-                auxiliary_metatuple: Optional[Tuple[Any, ...]] = getattr(
-                    auxiliary_type, "__metadata__", None
-                )
-                if auxiliary_metatuple is None:
-                    continue
-
-                if metaset.issuperset(auxiliary_metatuple):
-                    # type is an auxiliary type with extra annotations
-                    auxiliary_args = ", ".join(
-                        repr(m) for m in metatuple if m not in auxiliary_metatuple
-                    )
-                    return f"Annotated[{auxiliary_name}, {auxiliary_args}]"
-
-            # type is an annotated type
-            args = ", ".join(repr(m) for m in metatuple)
-            return f"Annotated[{self.plain_type_to_str(arg)}, {args}]"
-        else:
-            # type is a regular type
-            return self.plain_type_to_str(data_type)
-
-
-def python_type_to_str(data_type: TypeLike, use_union_operator: bool = False) -> str:
-    """
-    Returns the string representation of a Python type.
-
-    :param use_union_operator: Whether to emit union types as `X | Y` as per PEP 604.
- """ - - fmt = TypeFormatter(use_union_operator) - return fmt.python_type_to_str(data_type) - - -def python_type_to_name(data_type: TypeLike, force: bool = False) -> str: - """ - Returns the short name of a Python type. - - :param force: Whether to produce a name for composite types such as generics. - """ - - # use compact name for alias types - name = _auxiliary_types.get(data_type) - if name is not None: - return name - - # unwrap annotated types - metadata = getattr(data_type, "__metadata__", None) - if metadata is not None: - # type is Annotated[T, ...] - arg = typing.get_args(data_type)[0] - return python_type_to_name(arg) - - if force: - # generic types - if is_type_optional(data_type, strict=True): - inner_name = python_type_to_name(unwrap_optional_type(data_type)) - return f"Optional__{inner_name}" - elif is_generic_list(data_type): - item_name = python_type_to_name(unwrap_generic_list(data_type)) - return f"List__{item_name}" - elif is_generic_dict(data_type): - key_type, value_type = unwrap_generic_dict(data_type) - key_name = python_type_to_name(key_type) - value_name = python_type_to_name(value_type) - return f"Dict__{key_name}__{value_name}" - elif is_type_union(data_type): - member_types = unwrap_union_types(data_type) - member_names = "__".join( - python_type_to_name(member_type) for member_type in member_types - ) - return f"Union__{member_names}" - - # named system or user-defined type - if hasattr(data_type, "__name__") and not typing.get_args(data_type): - return data_type.__name__ - - raise TypeError(f"cannot assign a simple name to type: {data_type}") diff --git a/docs/openapi_generator/strong_typing/py.typed b/docs/openapi_generator/strong_typing/py.typed deleted file mode 100644 index e69de29bb..000000000 diff --git a/docs/openapi_generator/strong_typing/schema.py b/docs/openapi_generator/strong_typing/schema.py deleted file mode 100644 index 7f44435b8..000000000 --- a/docs/openapi_generator/strong_typing/schema.py +++ /dev/null @@ -1,792 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# All rights reserved. -# -# This source code is licensed under the terms described in the LICENSE file in -# the root directory of this source tree. - -""" -Type-safe data interchange for Python data classes. - -:see: https://github.com/hunyadi/strong_typing -""" - -import dataclasses -import datetime -import decimal -import enum -import functools -import inspect -import json -import typing -import uuid -from copy import deepcopy -from typing import ( - Any, - Callable, - ClassVar, - Dict, - List, - Literal, - Optional, - overload, - Tuple, - Type, - TypeVar, - Union, -) - -import jsonschema -from typing_extensions import Annotated - -from . 
import docstring -from .auxiliary import ( - Alias, - get_auxiliary_format, - IntegerRange, - MaxLength, - MinLength, - Precision, -) -from .core import JsonArray, JsonObject, JsonType, Schema, StrictJsonType -from .inspection import ( - enum_value_types, - get_annotation, - get_class_properties, - is_type_enum, - is_type_like, - is_type_optional, - TypeLike, - unwrap_optional_type, -) -from .name import python_type_to_name -from .serialization import object_to_json - -# determines the maximum number of distinct enum members up to which a Dict[EnumType, Any] is converted into a JSON -# schema with explicitly listed properties (rather than employing a pattern constraint on property names) -OBJECT_ENUM_EXPANSION_LIMIT = 4 - - -T = TypeVar("T") - - -def get_class_docstrings(data_type: type) -> Tuple[Optional[str], Optional[str]]: - docstr = docstring.parse_type(data_type) - - # check if class has a doc-string other than the auto-generated string assigned by @dataclass - if docstring.has_default_docstring(data_type): - return None, None - - return docstr.short_description, docstr.long_description - - -def get_class_property_docstrings( - data_type: type, transform_fun: Optional[Callable[[type, str, str], str]] = None -) -> Dict[str, str]: - """ - Extracts the documentation strings associated with the properties of a composite type. - - :param data_type: The object whose properties to iterate over. - :param transform_fun: An optional function that maps a property documentation string to a custom tailored string. - :returns: A dictionary mapping property names to descriptions. - """ - - result = {} - for base in inspect.getmro(data_type): - docstr = docstring.parse_type(base) - for param in docstr.params.values(): - if param.name in result: - continue - - if transform_fun: - description = transform_fun(data_type, param.name, param.description) - else: - description = param.description - - result[param.name] = description - return result - - -def docstring_to_schema(data_type: type) -> Schema: - short_description, long_description = get_class_docstrings(data_type) - schema: Schema = {} - - description = "\n".join(filter(None, [short_description, long_description])) - if description: - schema["description"] = description - return schema - - -def id_from_ref(data_type: Union[typing.ForwardRef, str, type]) -> str: - "Extracts the name of a possibly forward-referenced type." - - if isinstance(data_type, typing.ForwardRef): - forward_type: typing.ForwardRef = data_type - return forward_type.__forward_arg__ - elif isinstance(data_type, str): - return data_type - else: - return data_type.__name__ - - -def type_from_ref(data_type: Union[typing.ForwardRef, str, type]) -> Tuple[str, type]: - "Creates a type from a forward reference." - - if isinstance(data_type, typing.ForwardRef): - forward_type: typing.ForwardRef = data_type - true_type = eval(forward_type.__forward_code__) - return forward_type.__forward_arg__, true_type - elif isinstance(data_type, str): - true_type = eval(data_type) - return data_type, true_type - else: - return data_type.__name__, data_type - - -@dataclasses.dataclass -class TypeCatalogEntry: - schema: Optional[Schema] - identifier: str - examples: Optional[JsonType] = None - - -class TypeCatalog: - "Maintains an association of well-known Python types to their JSON schema." 
- - _by_type: Dict[TypeLike, TypeCatalogEntry] - _by_name: Dict[str, TypeCatalogEntry] - - def __init__(self) -> None: - self._by_type = {} - self._by_name = {} - - def __contains__(self, data_type: TypeLike) -> bool: - if isinstance(data_type, typing.ForwardRef): - fwd: typing.ForwardRef = data_type - name = fwd.__forward_arg__ - return name in self._by_name - else: - return data_type in self._by_type - - def add( - self, - data_type: TypeLike, - schema: Optional[Schema], - identifier: str, - examples: Optional[List[JsonType]] = None, - ) -> None: - if isinstance(data_type, typing.ForwardRef): - raise TypeError("forward references cannot be used to register a type") - - if data_type in self._by_type: - raise ValueError(f"type {data_type} is already registered in the catalog") - - entry = TypeCatalogEntry(schema, identifier, examples) - self._by_type[data_type] = entry - self._by_name[identifier] = entry - - def get(self, data_type: TypeLike) -> TypeCatalogEntry: - if isinstance(data_type, typing.ForwardRef): - fwd: typing.ForwardRef = data_type - name = fwd.__forward_arg__ - return self._by_name[name] - else: - return self._by_type[data_type] - - -@dataclasses.dataclass -class SchemaOptions: - definitions_path: str = "#/definitions/" - use_descriptions: bool = True - use_examples: bool = True - property_description_fun: Optional[Callable[[type, str, str], str]] = None - - -class JsonSchemaGenerator: - "Creates a JSON schema with user-defined type definitions." - - type_catalog: ClassVar[TypeCatalog] = TypeCatalog() - types_used: Dict[str, TypeLike] - options: SchemaOptions - - def __init__(self, options: Optional[SchemaOptions] = None): - if options is None: - self.options = SchemaOptions() - else: - self.options = options - self.types_used = {} - - @functools.singledispatchmethod - def _metadata_to_schema(self, arg: object) -> Schema: - # unrecognized annotation - return {} - - @_metadata_to_schema.register - def _(self, arg: IntegerRange) -> Schema: - return {"minimum": arg.minimum, "maximum": arg.maximum} - - @_metadata_to_schema.register - def _(self, arg: Precision) -> Schema: - return { - "multipleOf": 10 ** (-arg.decimal_digits), - "exclusiveMinimum": -(10**arg.integer_digits), - "exclusiveMaximum": (10**arg.integer_digits), - } - - @_metadata_to_schema.register - def _(self, arg: MinLength) -> Schema: - return {"minLength": arg.value} - - @_metadata_to_schema.register - def _(self, arg: MaxLength) -> Schema: - return {"maxLength": arg.value} - - def _with_metadata( - self, type_schema: Schema, metadata: Optional[Tuple[Any, ...]] - ) -> Schema: - if metadata: - for m in metadata: - type_schema.update(self._metadata_to_schema(m)) - return type_schema - - def _simple_type_to_schema( - self, typ: TypeLike, json_schema_extra: Optional[dict] = None - ) -> Optional[Schema]: - """ - Returns the JSON schema associated with a simple, unrestricted type. - - :returns: The schema for a simple type, or `None`. 
- """ - - if typ is type(None): - return {"type": "null"} - elif typ is bool: - return {"type": "boolean"} - elif typ is int: - return {"type": "integer"} - elif typ is float: - return {"type": "number"} - elif typ is str: - if json_schema_extra and "contentEncoding" in json_schema_extra: - return { - "type": "string", - "contentEncoding": json_schema_extra["contentEncoding"], - } - return {"type": "string"} - elif typ is bytes: - return {"type": "string", "contentEncoding": "base64"} - elif typ is datetime.datetime: - # 2018-11-13T20:20:39+00:00 - return { - "type": "string", - "format": "date-time", - } - elif typ is datetime.date: - # 2018-11-13 - return {"type": "string", "format": "date"} - elif typ is datetime.time: - # 20:20:39+00:00 - return {"type": "string", "format": "time"} - elif typ is decimal.Decimal: - return {"type": "number"} - elif typ is uuid.UUID: - # f81d4fae-7dec-11d0-a765-00a0c91e6bf6 - return {"type": "string", "format": "uuid"} - elif typ is Any: - return { - "oneOf": [ - {"type": "null"}, - {"type": "boolean"}, - {"type": "number"}, - {"type": "string"}, - {"type": "array"}, - {"type": "object"}, - ] - } - elif typ is JsonObject: - return {"type": "object"} - elif typ is JsonArray: - return {"type": "array"} - else: - # not a simple type - return None - - def type_to_schema( - self, - data_type: TypeLike, - force_expand: bool = False, - json_schema_extra: Optional[dict] = None, - ) -> Schema: - """ - Returns the JSON schema associated with a type. - - :param data_type: The Python type whose JSON schema to return. - :param force_expand: Forces a JSON schema to be returned even if the type is registered in the catalog of known types. - :returns: The JSON schema associated with the type. - """ - - # short-circuit for common simple types - schema = self._simple_type_to_schema(data_type, json_schema_extra) - if schema is not None: - return schema - - # types registered in the type catalog of well-known types - type_catalog = JsonSchemaGenerator.type_catalog - if not force_expand and data_type in type_catalog: - # user-defined type - identifier = type_catalog.get(data_type).identifier - self.types_used.setdefault(identifier, data_type) - return {"$ref": f"{self.options.definitions_path}{identifier}"} - - # unwrap annotated types - metadata = getattr(data_type, "__metadata__", None) - if metadata is not None: - # type is Annotated[T, ...] 
- typ = typing.get_args(data_type)[0] - schema = self._simple_type_to_schema(typ) - if schema is not None: - # recognize well-known auxiliary types - fmt = get_auxiliary_format(data_type) - if fmt is not None: - schema.update({"format": fmt}) - return schema - else: - return self._with_metadata(schema, metadata) - - else: - # type is a regular type - typ = data_type - - if isinstance(typ, typing.ForwardRef) or isinstance(typ, str): - if force_expand: - identifier, true_type = type_from_ref(typ) - return self.type_to_schema(true_type, force_expand=True) - else: - try: - identifier, true_type = type_from_ref(typ) - self.types_used[identifier] = true_type - except NameError: - identifier = id_from_ref(typ) - - return {"$ref": f"{self.options.definitions_path}{identifier}"} - - if is_type_enum(typ): - enum_type: Type[enum.Enum] = typ - value_types = enum_value_types(enum_type) - if len(value_types) != 1: - raise ValueError( - f"enumerations must have a consistent member value type but several types found: {value_types}" - ) - enum_value_type = value_types.pop() - - enum_schema: Schema - if ( - enum_value_type is bool - or enum_value_type is int - or enum_value_type is float - or enum_value_type is str - ): - if enum_value_type is bool: - enum_schema_type = "boolean" - elif enum_value_type is int: - enum_schema_type = "integer" - elif enum_value_type is float: - enum_schema_type = "number" - elif enum_value_type is str: - enum_schema_type = "string" - - enum_schema = { - "type": enum_schema_type, - "enum": [object_to_json(e.value) for e in enum_type], - } - if self.options.use_descriptions: - enum_schema.update(docstring_to_schema(typ)) - return enum_schema - else: - enum_schema = self.type_to_schema(enum_value_type) - if self.options.use_descriptions: - enum_schema.update(docstring_to_schema(typ)) - return enum_schema - - origin_type = typing.get_origin(typ) - if origin_type is list: - (list_type,) = typing.get_args(typ) # unpack single tuple element - return {"type": "array", "items": self.type_to_schema(list_type)} - elif origin_type is dict: - key_type, value_type = typing.get_args(typ) - if not (key_type is str or key_type is int or is_type_enum(key_type)): - raise ValueError( - "`dict` with key type not coercible to `str` is not supported" - ) - - dict_schema: Schema - value_schema = self.type_to_schema(value_type) - if is_type_enum(key_type): - enum_values = [str(e.value) for e in key_type] - if len(enum_values) > OBJECT_ENUM_EXPANSION_LIMIT: - dict_schema = { - "propertyNames": { - "pattern": "^(" + "|".join(enum_values) + ")$" - }, - "additionalProperties": value_schema, - } - else: - dict_schema = { - "properties": {value: value_schema for value in enum_values}, - "additionalProperties": False, - } - else: - dict_schema = {"additionalProperties": value_schema} - - schema = {"type": "object"} - schema.update(dict_schema) - return schema - elif origin_type is set: - (set_type,) = typing.get_args(typ) # unpack single tuple element - return { - "type": "array", - "items": self.type_to_schema(set_type), - "uniqueItems": True, - } - elif origin_type is tuple: - args = typing.get_args(typ) - return { - "type": "array", - "minItems": len(args), - "maxItems": len(args), - "prefixItems": [ - self.type_to_schema(member_type) for member_type in args - ], - } - elif origin_type is Union: - discriminator = None - if typing.get_origin(data_type) is Annotated: - discriminator = typing.get_args(data_type)[1].discriminator - ret = { - "oneOf": [ - self.type_to_schema(union_type) - for union_type in 
typing.get_args(typ) - ] - } - if discriminator: - # for each union type, we need to read the value of the discriminator - mapping = {} - for union_type in typing.get_args(typ): - props = self.type_to_schema(union_type, force_expand=True)[ - "properties" - ] - mapping[props[discriminator]["default"]] = self.type_to_schema( - union_type - )["$ref"] - - ret["discriminator"] = { - "propertyName": discriminator, - "mapping": mapping, - } - return ret - elif origin_type is Literal: - (literal_value,) = typing.get_args(typ) # unpack value of literal type - schema = self.type_to_schema(type(literal_value)) - schema["const"] = literal_value - return schema - elif origin_type is type: - (concrete_type,) = typing.get_args(typ) # unpack single tuple element - return {"const": self.type_to_schema(concrete_type, force_expand=True)} - - # dictionary of class attributes - members = dict(inspect.getmembers(typ, lambda a: not inspect.isroutine(a))) - - property_docstrings = get_class_property_docstrings( - typ, self.options.property_description_fun - ) - properties: Dict[str, Schema] = {} - required: List[str] = [] - for property_name, property_type in get_class_properties(typ): - # rename property if an alias name is specified - alias = get_annotation(property_type, Alias) - if alias: - output_name = alias.name - else: - output_name = property_name - - defaults = {} - json_schema_extra = None - if "model_fields" in members: - f = members["model_fields"] - defaults = {k: finfo.default for k, finfo in f.items()} - json_schema_extra = f.get(output_name, None).json_schema_extra - - if is_type_optional(property_type): - optional_type: type = unwrap_optional_type(property_type) - property_def = self.type_to_schema( - optional_type, json_schema_extra=json_schema_extra - ) - else: - property_def = self.type_to_schema( - property_type, json_schema_extra=json_schema_extra - ) - required.append(output_name) - - # check if attribute has a default value initializer - if defaults.get(property_name) is not None: - def_value = defaults[property_name] - # check if value can be directly represented in JSON - if isinstance( - def_value, - ( - bool, - int, - float, - str, - enum.Enum, - datetime.datetime, - datetime.date, - datetime.time, - ), - ): - property_def["default"] = object_to_json(def_value) - - # add property docstring if available - property_doc = property_docstrings.get(property_name) - if property_doc: - # print(output_name, property_doc) - property_def.pop("title", None) - property_def["description"] = property_doc - - properties[output_name] = property_def - - schema = {"type": "object"} - if len(properties) > 0: - schema["properties"] = typing.cast(JsonType, properties) - schema["additionalProperties"] = False - if len(required) > 0: - schema["required"] = typing.cast(JsonType, required) - if self.options.use_descriptions: - schema.update(docstring_to_schema(typ)) - return schema - - def _type_to_schema_with_lookup(self, data_type: TypeLike) -> Schema: - """ - Returns the JSON schema associated with a type that may be registered in the catalog of known types. - - :param data_type: The type whose JSON schema we seek. - :returns: The JSON schema associated with the type. 
- """ - - entry = JsonSchemaGenerator.type_catalog.get(data_type) - if entry.schema is None: - type_schema = self.type_to_schema(data_type, force_expand=True) - else: - type_schema = deepcopy(entry.schema) - - # add descriptive text (if present) - if self.options.use_descriptions: - if isinstance(data_type, type) and not isinstance( - data_type, typing.ForwardRef - ): - type_schema.update(docstring_to_schema(data_type)) - - # add example (if present) - if self.options.use_examples and entry.examples: - type_schema["examples"] = entry.examples - - return type_schema - - def classdef_to_schema( - self, data_type: TypeLike, force_expand: bool = False - ) -> Tuple[Schema, Dict[str, Schema]]: - """ - Returns the JSON schema associated with a type and any nested types. - - :param data_type: The type whose JSON schema to return. - :param force_expand: True if a full JSON schema is to be returned even for well-known types; false if a schema - reference is to be used for well-known types. - :returns: A tuple of the JSON schema, and a mapping between nested type names and their corresponding schema. - """ - - if not is_type_like(data_type): - raise TypeError(f"expected a type-like object but got: {data_type}") - - self.types_used = {} - try: - type_schema = self.type_to_schema(data_type, force_expand=force_expand) - - types_defined: Dict[str, Schema] = {} - while len(self.types_used) > len(types_defined): - # make a snapshot copy; original collection is going to be modified - types_undefined = { - sub_name: sub_type - for sub_name, sub_type in self.types_used.items() - if sub_name not in types_defined - } - - # expand undefined types, which may lead to additional types to be defined - for sub_name, sub_type in types_undefined.items(): - types_defined[sub_name] = self._type_to_schema_with_lookup(sub_type) - - type_definitions = dict(sorted(types_defined.items())) - finally: - self.types_used = {} - - return type_schema, type_definitions - - -class Validator(enum.Enum): - "Defines constants for JSON schema standards." - - Draft7 = jsonschema.Draft7Validator - Draft201909 = jsonschema.Draft201909Validator - Draft202012 = jsonschema.Draft202012Validator - Latest = jsonschema.Draft202012Validator - - -def classdef_to_schema( - data_type: TypeLike, - options: Optional[SchemaOptions] = None, - validator: Validator = Validator.Latest, -) -> Schema: - """ - Returns the JSON schema corresponding to the given type. - - :param data_type: The Python type used to generate the JSON schema - :returns: A JSON object that you can serialize to a JSON string with json.dump or json.dumps - :raises TypeError: Indicates that the generated JSON schema does not validate against the desired meta-schema. 
-    """
-
-    # short-circuit with an error message when passing invalid data
-    if not is_type_like(data_type):
-        raise TypeError(f"expected a type-like object but got: {data_type}")
-
-    generator = JsonSchemaGenerator(options)
-    type_schema, type_definitions = generator.classdef_to_schema(data_type)
-
-    class_schema: Schema = {}
-    if type_definitions:
-        class_schema["definitions"] = typing.cast(JsonType, type_definitions)
-    class_schema.update(type_schema)
-
-    validator_id = validator.value.META_SCHEMA["$id"]
-    try:
-        validator.value.check_schema(class_schema)
-    except jsonschema.exceptions.SchemaError:
-        raise TypeError(
-            f"schema does not validate against meta-schema <{validator_id}>"
-        )
-
-    schema = {"$schema": validator_id}
-    schema.update(class_schema)
-    return schema
-
-
-def validate_object(data_type: TypeLike, json_dict: JsonType) -> None:
-    """
-    Validates if the JSON dictionary object conforms to the expected type.
-
-    :param data_type: The type to match against.
-    :param json_dict: A JSON object obtained with `json.load` or `json.loads`.
-    :raises jsonschema.exceptions.ValidationError: Indicates that the JSON object cannot represent the type.
-    """
-
-    schema_dict = classdef_to_schema(data_type)
-    jsonschema.validate(
-        json_dict, schema_dict, format_checker=jsonschema.FormatChecker()
-    )
-
-
-def print_schema(data_type: type) -> None:
-    """Pretty-prints the JSON schema corresponding to the type."""
-
-    s = classdef_to_schema(data_type)
-    print(json.dumps(s, indent=4))
-
-
-def get_schema_identifier(data_type: type) -> Optional[str]:
-    if data_type in JsonSchemaGenerator.type_catalog:
-        return JsonSchemaGenerator.type_catalog.get(data_type).identifier
-    else:
-        return None
-
-
-def register_schema(
-    data_type: T,
-    schema: Optional[Schema] = None,
-    name: Optional[str] = None,
-    examples: Optional[List[JsonType]] = None,
-) -> T:
-    """
-    Associates a type with a JSON schema definition.
-
-    :param data_type: The type to associate with a JSON schema.
-    :param schema: The schema to associate the type with. Derived automatically if omitted.
-    :param name: The name used for looking up the type. Determined automatically if omitted.
-    :returns: The input type.
-    """
-
-    JsonSchemaGenerator.type_catalog.add(
-        data_type,
-        schema,
-        name if name is not None else python_type_to_name(data_type),
-        examples,
-    )
-    return data_type
-
-
-@overload
-def json_schema_type(cls: Type[T], /) -> Type[T]: ...
-
-
-@overload
-def json_schema_type(
-    cls: None, *, schema: Optional[Schema] = None
-) -> Callable[[Type[T]], Type[T]]: ...
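For context on the API being removed here: the two overloads above allowed `json_schema_type` to be used either bare or with arguments. A minimal usage sketch (the `Point` and `Pair` classes are hypothetical examples, not code from this repository):

```
from dataclasses import dataclass

# Bare form: registers the class under an auto-derived name ("Point");
# the JSON schema is derived on demand from the annotations and doc-string.
@json_schema_type
@dataclass
class Point:
    """
    A point in two-dimensional space.

    :param x: Horizontal coordinate.
    :param y: Vertical coordinate.
    """

    x: float
    y: float


# Parenthesized form: associates an explicit, hand-written schema instead.
@json_schema_type(
    schema={
        "type": "array",
        "minItems": 2,
        "maxItems": 2,
        "items": {"type": "number"},
    }
)
@dataclass
class Pair:
    first: float
    second: float
```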
-
-
-def json_schema_type(
-    cls: Optional[Type[T]] = None,
-    *,
-    schema: Optional[Schema] = None,
-    examples: Optional[List[JsonType]] = None,
-) -> Union[Type[T], Callable[[Type[T]], Type[T]]]:
-    """Decorator to add user-defined schema definition to a class."""
-
-    def wrap(cls: Type[T]) -> Type[T]:
-        return register_schema(cls, schema, examples=examples)
-
-    # see if decorator is used as @json_schema_type or @json_schema_type()
-    if cls is None:
-        # called with parentheses
-        return wrap
-    else:
-        # called as @json_schema_type without parentheses
-        return wrap(cls)
-
-
-register_schema(JsonObject, name="JsonObject")
-register_schema(JsonArray, name="JsonArray")
-
-register_schema(
-    JsonType,
-    name="JsonType",
-    examples=[
-        {
-            "property1": None,
-            "property2": True,
-            "property3": 64,
-            "property4": "string",
-            "property5": ["item"],
-            "property6": {"key": "value"},
-        }
-    ],
-)
-register_schema(
-    StrictJsonType,
-    name="StrictJsonType",
-    examples=[
-        {
-            "property1": True,
-            "property2": 64,
-            "property3": "string",
-            "property4": ["item"],
-            "property5": {"key": "value"},
-        }
-    ],
-)
diff --git a/docs/openapi_generator/strong_typing/serialization.py b/docs/openapi_generator/strong_typing/serialization.py
deleted file mode 100644
index 88d8fccad..000000000
--- a/docs/openapi_generator/strong_typing/serialization.py
+++ /dev/null
@@ -1,101 +0,0 @@
-# Copyright (c) Meta Platforms, Inc. and affiliates.
-# All rights reserved.
-#
-# This source code is licensed under the terms described in the LICENSE file in
-# the root directory of this source tree.
-
-"""
-Type-safe data interchange for Python data classes.
-
-:see: https://github.com/hunyadi/strong_typing
-"""
-
-import inspect
-import json
-import sys
-from types import ModuleType
-from typing import Any, Optional, TextIO, TypeVar
-
-from .core import JsonType
-from .deserializer import create_deserializer
-from .inspection import TypeLike
-from .serializer import create_serializer
-
-T = TypeVar("T")
-
-
-def object_to_json(obj: Any) -> JsonType:
-    """
-    Converts a Python object to a representation that can be exported to JSON.
-
-    * Fundamental types (e.g. numeric types) are written as is.
-    * Date and time types are serialized in the ISO 8601 format with time zone.
-    * A byte array is written as a string with Base64 encoding.
-    * UUIDs are written as a UUID string.
-    * Enumerations are written as their value.
-    * Containers (e.g. `list`, `dict`, `set`, `tuple`) are exported recursively.
-    * Objects with properties (including data class types) are converted to dictionaries of key-value pairs.
-    """
-
-    typ: type = type(obj)
-    generator = create_serializer(typ)
-    return generator.generate(obj)
-
-
-def json_to_object(
-    typ: TypeLike, data: JsonType, *, context: Optional[ModuleType] = None
-) -> object:
-    """
-    Creates an object from a representation that has been de-serialized from JSON.
-
-    When de-serializing a JSON object into a Python object, the following transformations are applied:
-
-    * Fundamental types are parsed as `bool`, `int`, `float` or `str`.
-    * Date and time types are parsed from the ISO 8601 format with time zone into the corresponding Python type
-      `datetime`, `date` or `time`.
-    * A byte array is read from a string with Base64 encoding into a `bytes` instance.
-    * UUIDs are extracted from a UUID string into a `uuid.UUID` instance.
-    * Enumerations are instantiated with a lookup on enumeration value.
-    * Containers (e.g. `list`, `dict`, `set`, `tuple`) are parsed recursively.
- * Complex objects with properties (including data class types) are populated from dictionaries of key-value pairs - using reflection (enumerating type annotations). - - :raises TypeError: A de-serializing engine cannot be constructed for the input type. - :raises JsonKeyError: Deserialization for a class or union type has failed because a matching member was not found. - :raises JsonTypeError: Deserialization for data has failed due to a type mismatch. - """ - - # use caller context for evaluating types if no context is supplied - if context is None: - this_frame = inspect.currentframe() - if this_frame is not None: - caller_frame = this_frame.f_back - del this_frame - - if caller_frame is not None: - try: - context = sys.modules[caller_frame.f_globals["__name__"]] - finally: - del caller_frame - - parser = create_deserializer(typ, context) - return parser.parse(data) - - -def json_dump_string(json_object: JsonType) -> str: - "Dump an object as a JSON string with a compact representation." - - return json.dumps( - json_object, ensure_ascii=False, check_circular=False, separators=(",", ":") - ) - - -def json_dump(json_object: JsonType, file: TextIO) -> None: - json.dump( - json_object, - file, - ensure_ascii=False, - check_circular=False, - separators=(",", ":"), - ) - file.write("\n") diff --git a/docs/openapi_generator/strong_typing/serializer.py b/docs/openapi_generator/strong_typing/serializer.py deleted file mode 100644 index f1252e374..000000000 --- a/docs/openapi_generator/strong_typing/serializer.py +++ /dev/null @@ -1,522 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# All rights reserved. -# -# This source code is licensed under the terms described in the LICENSE file in -# the root directory of this source tree. - -""" -Type-safe data interchange for Python data classes. - -:see: https://github.com/hunyadi/strong_typing -""" - -import abc -import base64 -import datetime -import enum -import functools -import inspect -import ipaddress -import sys -import typing -import uuid -from types import FunctionType, MethodType, ModuleType -from typing import ( - Any, - Callable, - Dict, - Generic, - List, - Literal, - NamedTuple, - Optional, - Set, - Tuple, - Type, - TypeVar, - Union, -) - -from .core import JsonType -from .exception import JsonTypeError, JsonValueError -from .inspection import ( - enum_value_types, - evaluate_type, - get_class_properties, - get_resolved_hints, - is_dataclass_type, - is_named_tuple_type, - is_reserved_property, - is_type_annotated, - is_type_enum, - TypeLike, - unwrap_annotated_type, -) -from .mapping import python_field_to_json_property - -T = TypeVar("T") - - -class Serializer(abc.ABC, Generic[T]): - @abc.abstractmethod - def generate(self, data: T) -> JsonType: ... 
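The three public entry points removed above (`object_to_json`, `json_to_object`, and `json_dump_string`) are designed to round-trip, and the `DateTimeSerializer` defined below rejects naive datetimes and renders UTC with a `Z` suffix. A minimal sketch, assuming the post-move import path under `llama_stack/strong_typing` and that the deserializer accepts what the serializer emits:

```python
# Hedged sketch; the import path assumes the post-move location of serialization.py.
import datetime
from dataclasses import dataclass

from llama_stack.strong_typing.serialization import (
    json_dump_string,
    json_to_object,
    object_to_json,
)


@dataclass
class Event:
    name: str
    when: datetime.datetime  # must carry an explicit time zone


event = Event("deploy", datetime.datetime(2025, 2, 19, tzinfo=datetime.timezone.utc))

payload = object_to_json(event)
# -> {'name': 'deploy', 'when': '2025-02-19T00:00:00Z'}  (UTC rendered as 'Z')

print(json_dump_string(payload))
# -> {"name":"deploy","when":"2025-02-19T00:00:00Z"}  (compact separators)

restored = json_to_object(Event, payload)
assert restored == event  # dataclass equality; datetimes compare tz-aware
```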
- - -class NoneSerializer(Serializer[None]): - def generate(self, data: None) -> None: - # can be directly represented in JSON - return None - - -class BoolSerializer(Serializer[bool]): - def generate(self, data: bool) -> bool: - # can be directly represented in JSON - return data - - -class IntSerializer(Serializer[int]): - def generate(self, data: int) -> int: - # can be directly represented in JSON - return data - - -class FloatSerializer(Serializer[float]): - def generate(self, data: float) -> float: - # can be directly represented in JSON - return data - - -class StringSerializer(Serializer[str]): - def generate(self, data: str) -> str: - # can be directly represented in JSON - return data - - -class BytesSerializer(Serializer[bytes]): - def generate(self, data: bytes) -> str: - return base64.b64encode(data).decode("ascii") - - -class DateTimeSerializer(Serializer[datetime.datetime]): - def generate(self, obj: datetime.datetime) -> str: - if obj.tzinfo is None: - raise JsonValueError( - f"timestamp lacks explicit time zone designator: {obj}" - ) - fmt = obj.isoformat() - if fmt.endswith("+00:00"): - fmt = f"{fmt[:-6]}Z" # Python's isoformat() does not support military time zones like "Zulu" for UTC - return fmt - - -class DateSerializer(Serializer[datetime.date]): - def generate(self, obj: datetime.date) -> str: - return obj.isoformat() - - -class TimeSerializer(Serializer[datetime.time]): - def generate(self, obj: datetime.time) -> str: - return obj.isoformat() - - -class UUIDSerializer(Serializer[uuid.UUID]): - def generate(self, obj: uuid.UUID) -> str: - return str(obj) - - -class IPv4Serializer(Serializer[ipaddress.IPv4Address]): - def generate(self, obj: ipaddress.IPv4Address) -> str: - return str(obj) - - -class IPv6Serializer(Serializer[ipaddress.IPv6Address]): - def generate(self, obj: ipaddress.IPv6Address) -> str: - return str(obj) - - -class EnumSerializer(Serializer[enum.Enum]): - def generate(self, obj: enum.Enum) -> Union[int, str]: - return obj.value - - -class UntypedListSerializer(Serializer[list]): - def generate(self, obj: list) -> List[JsonType]: - return [object_to_json(item) for item in obj] - - -class UntypedDictSerializer(Serializer[dict]): - def generate(self, obj: dict) -> Dict[str, JsonType]: - if obj and isinstance(next(iter(obj.keys())), enum.Enum): - iterator = ( - (key.value, object_to_json(value)) for key, value in obj.items() - ) - else: - iterator = ((str(key), object_to_json(value)) for key, value in obj.items()) - return dict(iterator) - - -class UntypedSetSerializer(Serializer[set]): - def generate(self, obj: set) -> List[JsonType]: - return [object_to_json(item) for item in obj] - - -class UntypedTupleSerializer(Serializer[tuple]): - def generate(self, obj: tuple) -> List[JsonType]: - return [object_to_json(item) for item in obj] - - -class TypedCollectionSerializer(Serializer, Generic[T]): - generator: Serializer[T] - - def __init__(self, item_type: Type[T], context: Optional[ModuleType]) -> None: - self.generator = _get_serializer(item_type, context) - - -class TypedListSerializer(TypedCollectionSerializer[T]): - def generate(self, obj: List[T]) -> List[JsonType]: - return [self.generator.generate(item) for item in obj] - - -class TypedStringDictSerializer(TypedCollectionSerializer[T]): - def __init__(self, value_type: Type[T], context: Optional[ModuleType]) -> None: - super().__init__(value_type, context) - - def generate(self, obj: Dict[str, T]) -> Dict[str, JsonType]: - return {key: self.generator.generate(value) for key, value in 
obj.items()} - - -class TypedEnumDictSerializer(TypedCollectionSerializer[T]): - def __init__( - self, - key_type: Type[enum.Enum], - value_type: Type[T], - context: Optional[ModuleType], - ) -> None: - super().__init__(value_type, context) - - value_types = enum_value_types(key_type) - if len(value_types) != 1: - raise JsonTypeError( - f"invalid key type, enumerations must have a consistent member value type but several types found: {value_types}" - ) - - value_type = value_types.pop() - if value_type is not str: - raise JsonTypeError( - "invalid enumeration key type, expected `enum.Enum` with string values" - ) - - def generate(self, obj: Dict[enum.Enum, T]) -> Dict[str, JsonType]: - return {key.value: self.generator.generate(value) for key, value in obj.items()} - - -class TypedSetSerializer(TypedCollectionSerializer[T]): - def generate(self, obj: Set[T]) -> JsonType: - return [self.generator.generate(item) for item in obj] - - -class TypedTupleSerializer(Serializer[tuple]): - item_generators: Tuple[Serializer, ...] - - def __init__( - self, item_types: Tuple[type, ...], context: Optional[ModuleType] - ) -> None: - self.item_generators = tuple( - _get_serializer(item_type, context) for item_type in item_types - ) - - def generate(self, obj: tuple) -> List[JsonType]: - return [ - item_generator.generate(item) - for item_generator, item in zip(self.item_generators, obj) - ] - - -class CustomSerializer(Serializer): - converter: Callable[[object], JsonType] - - def __init__(self, converter: Callable[[object], JsonType]) -> None: - self.converter = converter - - def generate(self, obj: object) -> JsonType: - return self.converter(obj) - - -class FieldSerializer(Generic[T]): - """ - Serializes a Python object field into a JSON property. - - :param field_name: The name of the field in a Python class to read data from. - :param property_name: The name of the JSON property to write to a JSON `object`. - :param generator: A compatible serializer that can handle the field's type. 
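One behavioral detail worth calling out here: `generate_field`, defined right after this docstring, writes a property only when the field's value is not `None`, so unset optional fields are omitted from the output object rather than serialized as `null`. A hypothetical sketch (the `Profile` dataclass and the import path are assumptions):

```python
# Hedged sketch; the import path assumes the post-move location of the package.
from dataclasses import dataclass
from typing import Optional

from llama_stack.strong_typing.serialization import object_to_json


@dataclass
class Profile:
    name: str
    nickname: Optional[str] = None


# FieldSerializer.generate_field skips None values, so 'nickname' is absent.
print(object_to_json(Profile(name="Ada")))  # -> {'name': 'Ada'}
```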
- """ - - field_name: str - property_name: str - generator: Serializer - - def __init__( - self, field_name: str, property_name: str, generator: Serializer[T] - ) -> None: - self.field_name = field_name - self.property_name = property_name - self.generator = generator - - def generate_field(self, obj: object, object_dict: Dict[str, JsonType]) -> None: - value = getattr(obj, self.field_name) - if value is not None: - object_dict[self.property_name] = self.generator.generate(value) - - -class TypedClassSerializer(Serializer[T]): - property_generators: List[FieldSerializer] - - def __init__(self, class_type: Type[T], context: Optional[ModuleType]) -> None: - self.property_generators = [ - FieldSerializer( - field_name, - python_field_to_json_property(field_name, field_type), - _get_serializer(field_type, context), - ) - for field_name, field_type in get_class_properties(class_type) - ] - - def generate(self, obj: T) -> Dict[str, JsonType]: - object_dict: Dict[str, JsonType] = {} - for property_generator in self.property_generators: - property_generator.generate_field(obj, object_dict) - - return object_dict - - -class TypedNamedTupleSerializer(TypedClassSerializer[NamedTuple]): - def __init__( - self, class_type: Type[NamedTuple], context: Optional[ModuleType] - ) -> None: - super().__init__(class_type, context) - - -class DataclassSerializer(TypedClassSerializer[T]): - def __init__(self, class_type: Type[T], context: Optional[ModuleType]) -> None: - super().__init__(class_type, context) - - -class UnionSerializer(Serializer): - def generate(self, obj: Any) -> JsonType: - return object_to_json(obj) - - -class LiteralSerializer(Serializer): - generator: Serializer - - def __init__(self, values: Tuple[Any, ...], context: Optional[ModuleType]) -> None: - literal_type_tuple = tuple(type(value) for value in values) - literal_type_set = set(literal_type_tuple) - if len(literal_type_set) != 1: - value_names = ", ".join(repr(value) for value in values) - raise TypeError( - f"type `Literal[{value_names}]` expects consistent literal value types but got: {literal_type_tuple}" - ) - - literal_type = literal_type_set.pop() - self.generator = _get_serializer(literal_type, context) - - def generate(self, obj: Any) -> JsonType: - return self.generator.generate(obj) - - -class UntypedNamedTupleSerializer(Serializer): - fields: Dict[str, str] - - def __init__(self, class_type: Type[NamedTuple]) -> None: - # named tuples are also instances of tuple - self.fields = {} - field_names: Tuple[str, ...] 
= class_type._fields - for field_name in field_names: - self.fields[field_name] = python_field_to_json_property(field_name) - - def generate(self, obj: NamedTuple) -> JsonType: - object_dict = {} - for field_name, property_name in self.fields.items(): - value = getattr(obj, field_name) - object_dict[property_name] = object_to_json(value) - - return object_dict - - -class UntypedClassSerializer(Serializer): - def generate(self, obj: object) -> JsonType: - # iterate over object attributes to get a standard representation - object_dict = {} - for name in dir(obj): - if is_reserved_property(name): - continue - - value = getattr(obj, name) - if value is None: - continue - - # filter instance methods - if inspect.ismethod(value): - continue - - object_dict[python_field_to_json_property(name)] = object_to_json(value) - - return object_dict - - -def create_serializer( - typ: TypeLike, context: Optional[ModuleType] = None -) -> Serializer: - """ - Creates a serializer engine to produce an object that can be directly converted into a JSON string. - - When serializing a Python object into a JSON object, the following transformations are applied: - - * Fundamental types (`bool`, `int`, `float` or `str`) are returned as-is. - * Date and time types (`datetime`, `date` or `time`) produce an ISO 8601 format string with time zone - (ending with `Z` for UTC). - * Byte arrays (`bytes`) are written as a string with Base64 encoding. - * UUIDs (`uuid.UUID`) are written as a UUID string as per RFC 4122. - * Enumerations yield their enumeration value. - * Containers (e.g. `list`, `dict`, `set`, `tuple`) are processed recursively. - * Complex objects with properties (including data class types) generate dictionaries of key-value pairs. - - :raises TypeError: A serializer engine cannot be constructed for the input type. 
- """ - - if context is None: - if isinstance(typ, type): - context = sys.modules[typ.__module__] - - return _get_serializer(typ, context) - - -def _get_serializer(typ: TypeLike, context: Optional[ModuleType]) -> Serializer: - if isinstance(typ, (str, typing.ForwardRef)): - if context is None: - raise TypeError(f"missing context for evaluating type: {typ}") - - typ = evaluate_type(typ, context) - - if isinstance(typ, type): - return _fetch_serializer(typ) - else: - # special forms are not always hashable - return _create_serializer(typ, context) - - -@functools.lru_cache(maxsize=None) -def _fetch_serializer(typ: type) -> Serializer: - context = sys.modules[typ.__module__] - return _create_serializer(typ, context) - - -def _create_serializer(typ: TypeLike, context: Optional[ModuleType]) -> Serializer: - # check for well-known types - if typ is type(None): - return NoneSerializer() - elif typ is bool: - return BoolSerializer() - elif typ is int: - return IntSerializer() - elif typ is float: - return FloatSerializer() - elif typ is str: - return StringSerializer() - elif typ is bytes: - return BytesSerializer() - elif typ is datetime.datetime: - return DateTimeSerializer() - elif typ is datetime.date: - return DateSerializer() - elif typ is datetime.time: - return TimeSerializer() - elif typ is uuid.UUID: - return UUIDSerializer() - elif typ is ipaddress.IPv4Address: - return IPv4Serializer() - elif typ is ipaddress.IPv6Address: - return IPv6Serializer() - - # dynamically-typed collection types - if typ is list: - return UntypedListSerializer() - elif typ is dict: - return UntypedDictSerializer() - elif typ is set: - return UntypedSetSerializer() - elif typ is tuple: - return UntypedTupleSerializer() - - # generic types (e.g. list, dict, set, etc.) - origin_type = typing.get_origin(typ) - if origin_type is list: - (list_item_type,) = typing.get_args(typ) # unpack single tuple element - return TypedListSerializer(list_item_type, context) - elif origin_type is dict: - key_type, value_type = typing.get_args(typ) - if key_type is str: - return TypedStringDictSerializer(value_type, context) - elif issubclass(key_type, enum.Enum): - return TypedEnumDictSerializer(key_type, value_type, context) - elif origin_type is set: - (set_member_type,) = typing.get_args(typ) # unpack single tuple element - return TypedSetSerializer(set_member_type, context) - elif origin_type is tuple: - return TypedTupleSerializer(typing.get_args(typ), context) - elif origin_type is Union: - return UnionSerializer() - elif origin_type is Literal: - return LiteralSerializer(typing.get_args(typ), context) - - if is_type_annotated(typ): - return create_serializer(unwrap_annotated_type(typ)) - - # check if object has custom serialization method - convert_func = getattr(typ, "to_json", None) - if callable(convert_func): - return CustomSerializer(convert_func) - - if is_type_enum(typ): - return EnumSerializer() - if is_dataclass_type(typ): - return DataclassSerializer(typ, context) - if is_named_tuple_type(typ): - if getattr(typ, "__annotations__", None): - return TypedNamedTupleSerializer(typ, context) - else: - return UntypedNamedTupleSerializer(typ) - - # fail early if caller passes an object with an exotic type - if ( - not isinstance(typ, type) - or typ is FunctionType - or typ is MethodType - or typ is type - or typ is ModuleType - ): - raise TypeError(f"object of type {typ} cannot be represented in JSON") - - if get_resolved_hints(typ): - return TypedClassSerializer(typ, context) - else: - return UntypedClassSerializer() - - 
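One branch of `_create_serializer` deserves a callout: before reaching the enum, dataclass, and reflection-based fallbacks, it looks for a callable `to_json` attribute and wraps it in `CustomSerializer`, giving any class an escape hatch from the default field-by-field serialization. A hypothetical sketch (the `Temperature` class and the import path are assumptions):

```python
# Hedged sketch; the import path assumes the post-move location of the package.
from llama_stack.strong_typing.serialization import object_to_json


class Temperature:
    """Not a dataclass and has no type hints; serialized via its own to_json."""

    def __init__(self, celsius: float) -> None:
        self.celsius = celsius

    def to_json(self) -> float:
        # _create_serializer finds this callable and wraps it in CustomSerializer,
        # so this method's return value becomes the JSON representation.
        return self.celsius


print(object_to_json(Temperature(21.5)))  # -> 21.5
```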
-def object_to_json(obj: Any) -> JsonType:
-    """
-    Converts a Python object to a representation that can be exported to JSON.
-
-    * Fundamental types (e.g. numeric types) are written as is.
-    * Date and time types are serialized in the ISO 8601 format with time zone.
-    * A byte array is written as a string with Base64 encoding.
-    * UUIDs are written as a UUID string.
-    * Enumerations are written as their value.
-    * Containers (e.g. `list`, `dict`, `set`, `tuple`) are exported recursively.
-    * Objects with properties (including data class types) are converted to dictionaries of key-value pairs.
-    """
-
-    typ: type = type(obj)
-    generator = create_serializer(typ)
-    return generator.generate(obj)
diff --git a/docs/openapi_generator/strong_typing/slots.py b/docs/openapi_generator/strong_typing/slots.py
deleted file mode 100644
index 564ffa11f..000000000
--- a/docs/openapi_generator/strong_typing/slots.py
+++ /dev/null
@@ -1,29 +0,0 @@
-# Copyright (c) Meta Platforms, Inc. and affiliates.
-# All rights reserved.
-#
-# This source code is licensed under the terms described in the LICENSE file in
-# the root directory of this source tree.
-
-from typing import Any, Dict, Tuple, Type, TypeVar
-
-T = TypeVar("T")
-
-
-class SlotsMeta(type):
-    def __new__(
-        cls: Type[T], name: str, bases: Tuple[type, ...], ns: Dict[str, Any]
-    ) -> T:
-        # caller may have already provided slots, in which case just retain them and keep going
-        slots: Tuple[str, ...] = ns.get("__slots__", ())
-
-        # add fields with type annotations to slots
-        annotations: Dict[str, Any] = ns.get("__annotations__", {})
-        members = tuple(member for member in annotations.keys() if member not in slots)
-
-        # assign slots
-        ns["__slots__"] = slots + tuple(members)
-        return super().__new__(cls, name, bases, ns)  # type: ignore
-
-
-class Slots(metaclass=SlotsMeta):
-    pass
diff --git a/docs/openapi_generator/strong_typing/topological.py b/docs/openapi_generator/strong_typing/topological.py
deleted file mode 100644
index 28bf4bd0f..000000000
--- a/docs/openapi_generator/strong_typing/topological.py
+++ /dev/null
@@ -1,89 +0,0 @@
-# Copyright (c) Meta Platforms, Inc. and affiliates.
-# All rights reserved.
-#
-# This source code is licensed under the terms described in the LICENSE file in
-# the root directory of this source tree.
-
-"""
-Type-safe data interchange for Python data classes.
-
-:see: https://github.com/hunyadi/strong_typing
-"""
-
-from typing import Callable, Dict, Iterable, List, Optional, Set, TypeVar
-
-from .inspection import TypeCollector
-
-T = TypeVar("T")
-
-
-def topological_sort(graph: Dict[T, Set[T]]) -> List[T]:
-    """
-    Performs a topological sort of a graph.
-
-    Nodes with no outgoing edges are first. Nodes with no incoming edges are last.
-    The topological ordering is not unique.
-
-    :param graph: A dictionary of mappings from nodes to adjacent nodes. Keys and set members must be hashable.
-    :returns: The list of nodes in topological order.
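The contract stated in this docstring (nodes with no outgoing edges, i.e. no dependencies, come first; a cycle raises `RuntimeError`) is easiest to see on a small graph. A hypothetical sketch, assuming the post-move import path:

```python
# Hedged sketch; the import path assumes the post-move location of topological.py.
from llama_stack.strong_typing.topological import topological_sort

# Edges point from a node to the nodes it depends on; every node must be a key.
graph = {
    "app": {"lib", "config"},
    "lib": {"config"},
    "config": set(),
}

print(topological_sort(graph))  # -> ['config', 'lib', 'app']
```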
- """ - - # empty list that will contain the sorted nodes (in reverse order) - ordered: List[T] = [] - - seen: Dict[T, bool] = {} - - def _visit(n: T) -> None: - status = seen.get(n) - if status is not None: - if status: # node has a permanent mark - return - else: # node has a temporary mark - raise RuntimeError(f"cycle detected in graph for node {n}") - - seen[n] = False # apply temporary mark - for m in graph[n]: # visit all adjacent nodes - if m != n: # ignore self-referencing nodes - _visit(m) - - seen[n] = True # apply permanent mark - ordered.append(n) - - for n in graph.keys(): - _visit(n) - - return ordered - - -def type_topological_sort( - types: Iterable[type], - dependency_fn: Optional[Callable[[type], Iterable[type]]] = None, -) -> List[type]: - """ - Performs a topological sort of a list of types. - - Types that don't depend on other types (i.e. fundamental types) are first. Types on which no other types depend - are last. The topological ordering is not unique. - - :param types: A list of types (simple or composite). - :param dependency_fn: Returns a list of additional dependencies for a class (e.g. classes referenced by a foreign key). - :returns: The list of types in topological order. - """ - - if not all(isinstance(typ, type) for typ in types): - raise TypeError("expected a list of types") - - collector = TypeCollector() - collector.traverse_all(types) - graph = collector.graph - - if dependency_fn: - new_types: Set[type] = set() - for source_type, references in graph.items(): - dependent_types = dependency_fn(source_type) - references.update(dependent_types) - new_types.update(dependent_types) - for new_type in new_types: - graph[new_type] = set() - - return topological_sort(graph) From 31a5ba52683a8ca50ec22e4ce2c93242320078c2 Mon Sep 17 00:00:00 2001 From: Ashwin Bharambe Date: Wed, 19 Feb 2025 13:26:39 -0800 Subject: [PATCH 12/14] Add title to the json schemas --- docs/_static/llama-stack-spec.html | 524 +++++++++++++++++++--------- docs/_static/llama-stack-spec.yaml | 194 ++++++++++ llama_stack/strong_typing/schema.py | 4 +- 3 files changed, 556 insertions(+), 166 deletions(-) diff --git a/docs/_static/llama-stack-spec.html b/docs/_static/llama-stack-spec.html index 65a1bdd6b..82abc947b 100644 --- a/docs/_static/llama-stack-spec.html +++ b/docs/_static/llama-stack-spec.html @@ -2661,7 +2661,8 @@ "required": [ "type", "config" - ] + ], + "title": "AgentCandidate" }, "AgentConfig": { "type": "object", @@ -2700,6 +2701,7 @@ "required", "none" ], + "title": "ToolChoice", "description": "Whether tool use is required or automatic. This is a hint to the model which may not be followed. It depends on the Instruction Following capabilities of the model." }, "tool_prompt_format": { @@ -2709,6 +2711,7 @@ "function_tag", "python_list" ], + "title": "ToolPromptFormat", "description": "Prompt format for calling custom / zero shot tools." 
}, "tool_config": { @@ -2736,7 +2739,8 @@ "required": [ "model", "instructions" - ] + ], + "title": "AgentConfig" }, "AgentTool": { "oneOf": [ @@ -2779,7 +2783,8 @@ "required": [ "name", "args" - ] + ], + "title": "AgentToolGroupWithArgs" } ] }, @@ -2790,7 +2795,8 @@ "median", "categorical_count", "accuracy" - ] + ], + "title": "AggregationFunctionType" }, "BasicScoringFnParams": { "type": "object", @@ -2810,7 +2816,8 @@ "additionalProperties": false, "required": [ "type" - ] + ], + "title": "BasicScoringFnParams" }, "BenchmarkConfig": { "type": "object", @@ -2838,7 +2845,8 @@ "type", "eval_candidate", "scoring_params" - ] + ], + "title": "BenchmarkConfig" }, "EvalCandidate": { "oneOf": [ @@ -2898,6 +2906,7 @@ "type", "bnf" ], + "title": "GrammarResponseFormat", "description": "Configuration for grammar-guided response generation." }, "GreedySamplingStrategy": { @@ -2912,7 +2921,8 @@ "additionalProperties": false, "required": [ "type" - ] + ], + "title": "GreedySamplingStrategy" }, "ImageContentItem": { "type": "object", @@ -2945,6 +2955,7 @@ "type", "image" ], + "title": "ImageContentItem", "description": "A image content item" }, "InterleavedContent": { @@ -3021,6 +3032,7 @@ "type", "json_schema" ], + "title": "JsonSchemaResponseFormat", "description": "Configuration for JSON schema-guided response generation." }, "LLMAsJudgeScoringFnParams": { @@ -3054,7 +3066,8 @@ "required": [ "type", "judge_model" - ] + ], + "title": "LLMAsJudgeScoringFnParams" }, "ModelCandidate": { "type": "object", @@ -3079,7 +3092,8 @@ "type", "model", "sampling_params" - ] + ], + "title": "ModelCandidate" }, "RegexParserScoringFnParams": { "type": "object", @@ -3105,7 +3119,8 @@ "additionalProperties": false, "required": [ "type" - ] + ], + "title": "RegexParserScoringFnParams" }, "ResponseFormat": { "oneOf": [ @@ -3142,7 +3157,8 @@ "additionalProperties": false, "required": [ "strategy" - ] + ], + "title": "SamplingParams" }, "SamplingStrategy": { "oneOf": [ @@ -3205,6 +3221,7 @@ "role", "content" ], + "title": "SystemMessage", "description": "A system message providing instructions or context to the model." }, "TextContentItem": { @@ -3226,6 +3243,7 @@ "type", "text" ], + "title": "TextContentItem", "description": "A text content item" }, "ToolConfig": { @@ -3240,6 +3258,7 @@ "required", "none" ], + "title": "ToolChoice", "description": "Whether tool use is required or automatic. This is a hint to the model which may not be followed. It depends on the Instruction Following capabilities of the model." }, { @@ -3269,6 +3288,7 @@ } }, "additionalProperties": false, + "title": "ToolConfig", "description": "Configuration for tool use." 
}, "ToolDef": { @@ -3315,7 +3335,8 @@ "additionalProperties": false, "required": [ "name" - ] + ], + "title": "ToolDef" }, "ToolParameter": { "type": "object", @@ -3362,7 +3383,8 @@ "parameter_type", "description", "required" - ] + ], + "title": "ToolParameter" }, "TopKSamplingStrategy": { "type": "object", @@ -3380,7 +3402,8 @@ "required": [ "type", "top_k" - ] + ], + "title": "TopKSamplingStrategy" }, "TopPSamplingStrategy": { "type": "object", @@ -3401,7 +3424,8 @@ "additionalProperties": false, "required": [ "type" - ] + ], + "title": "TopPSamplingStrategy" }, "URL": { "type": "object", @@ -3413,7 +3437,8 @@ "additionalProperties": false, "required": [ "uri" - ] + ], + "title": "URL" }, "DeprecatedEvaluateRowsRequest": { "type": "object", @@ -3461,7 +3486,8 @@ "input_rows", "scoring_functions", "task_config" - ] + ], + "title": "DeprecatedEvaluateRowsRequest" }, "EvaluateResponse": { "type": "object", @@ -3505,7 +3531,8 @@ "required": [ "generations", "scores" - ] + ], + "title": "EvaluateResponse" }, "ScoringResult": { "type": "object", @@ -3568,7 +3595,8 @@ "required": [ "score_rows", "aggregated_results" - ] + ], + "title": "ScoringResult" }, "Benchmark": { "type": "object", @@ -3631,7 +3659,8 @@ "dataset_id", "scoring_functions", "metadata" - ] + ], + "title": "Benchmark" }, "JobStatus": { "type": "string", @@ -3640,7 +3669,8 @@ "in_progress", "failed", "scheduled" - ] + ], + "title": "JobStatus" }, "ListBenchmarksResponse": { "type": "object", @@ -3655,7 +3685,8 @@ "additionalProperties": false, "required": [ "data" - ] + ], + "title": "ListBenchmarksResponse" }, "DeprecatedRegisterEvalTaskRequest": { "type": "object", @@ -3709,7 +3740,8 @@ "eval_task_id", "dataset_id", "scoring_functions" - ] + ], + "title": "DeprecatedRegisterEvalTaskRequest" }, "DeprecatedRunEvalRequest": { "type": "object", @@ -3721,7 +3753,8 @@ "additionalProperties": false, "required": [ "task_config" - ] + ], + "title": "DeprecatedRunEvalRequest" }, "Job": { "type": "object", @@ -3733,7 +3766,8 @@ "additionalProperties": false, "required": [ "job_id" - ] + ], + "title": "Job" }, "AppendRowsRequest": { "type": "object", @@ -3774,7 +3808,8 @@ "required": [ "dataset_id", "rows" - ] + ], + "title": "AppendRowsRequest" }, "CompletionMessage": { "type": "object", @@ -3812,6 +3847,7 @@ "content", "stop_reason" ], + "title": "CompletionMessage", "description": "A message containing the model's (assistant) response in a chat conversation." }, "Message": { @@ -3854,7 +3890,8 @@ "wolfram_alpha", "photogen", "code_interpreter" - ] + ], + "title": "BuiltinTool" }, { "type": "string" @@ -3933,7 +3970,8 @@ "call_id", "tool_name", "arguments" - ] + ], + "title": "ToolCall" }, "ToolDefinition": { "type": "object", @@ -3947,7 +3985,8 @@ "wolfram_alpha", "photogen", "code_interpreter" - ] + ], + "title": "BuiltinTool" }, { "type": "string" @@ -3967,7 +4006,8 @@ "additionalProperties": false, "required": [ "tool_name" - ] + ], + "title": "ToolDefinition" }, "ToolParamDefinition": { "type": "object", @@ -4008,7 +4048,8 @@ "additionalProperties": false, "required": [ "param_type" - ] + ], + "title": "ToolParamDefinition" }, "ToolResponseMessage": { "type": "object", @@ -4032,7 +4073,8 @@ "wolfram_alpha", "photogen", "code_interpreter" - ] + ], + "title": "BuiltinTool" }, { "type": "string" @@ -4052,6 +4094,7 @@ "tool_name", "content" ], + "title": "ToolResponseMessage", "description": "A message representing the result of a tool invocation." 
}, "UserMessage": { @@ -4077,6 +4120,7 @@ "role", "content" ], + "title": "UserMessage", "description": "A message from the user in a chat conversation." }, "BatchChatCompletionRequest": { @@ -4110,6 +4154,7 @@ "required", "none" ], + "title": "ToolChoice", "description": "Whether tool use is required or automatic. This is a hint to the model which may not be followed. It depends on the Instruction Following capabilities of the model." }, "tool_prompt_format": { @@ -4119,6 +4164,7 @@ "function_tag", "python_list" ], + "title": "ToolPromptFormat", "description": "Prompt format for calling custom / zero shot tools." }, "response_format": { @@ -4133,14 +4179,16 @@ "description": "How many tokens (for each position) to return log probabilities for." } }, - "additionalProperties": false + "additionalProperties": false, + "title": "LogProbConfig" } }, "additionalProperties": false, "required": [ "model", "messages_batch" - ] + ], + "title": "BatchChatCompletionRequest" }, "BatchChatCompletionResponse": { "type": "object", @@ -4155,7 +4203,8 @@ "additionalProperties": false, "required": [ "batch" - ] + ], + "title": "BatchChatCompletionResponse" }, "ChatCompletionResponse": { "type": "object", @@ -4182,6 +4231,7 @@ "required": [ "completion_message" ], + "title": "ChatCompletionResponse", "description": "Response from a chat completion request." }, "MetricEvent": { @@ -4250,7 +4300,8 @@ "metric", "value", "unit" - ] + ], + "title": "MetricEvent" }, "TokenLogProbs": { "type": "object", @@ -4267,6 +4318,7 @@ "required": [ "logprobs_by_token" ], + "title": "TokenLogProbs", "description": "Log probabilities for generated tokens." }, "BatchCompletionRequest": { @@ -4296,14 +4348,16 @@ "description": "How many tokens (for each position) to return log probabilities for." } }, - "additionalProperties": false + "additionalProperties": false, + "title": "LogProbConfig" } }, "additionalProperties": false, "required": [ "model", "content_batch" - ] + ], + "title": "BatchCompletionRequest" }, "BatchCompletionResponse": { "type": "object", @@ -4318,7 +4372,8 @@ "additionalProperties": false, "required": [ "batch" - ] + ], + "title": "BatchCompletionResponse" }, "CompletionResponse": { "type": "object", @@ -4349,6 +4404,7 @@ "content", "stop_reason" ], + "title": "CompletionResponse", "description": "Response from a completion request." }, "CancelTrainingJobRequest": { @@ -4361,7 +4417,8 @@ "additionalProperties": false, "required": [ "job_uuid" - ] + ], + "title": "CancelTrainingJobRequest" }, "ChatCompletionRequest": { "type": "object", @@ -4435,7 +4492,8 @@ "required": [ "model_id", "messages" - ] + ], + "title": "ChatCompletionRequest" }, "ChatCompletionResponseEvent": { "type": "object", @@ -4475,6 +4533,7 @@ "event_type", "delta" ], + "title": "ChatCompletionResponseEvent", "description": "An event during chat completion generation." }, "ChatCompletionResponseStreamChunk": { @@ -4495,6 +4554,7 @@ "required": [ "event" ], + "title": "ChatCompletionResponseStreamChunk", "description": "A chunk of a streamed chat completion response." 
}, "ContentDelta": { @@ -4535,7 +4595,8 @@ "required": [ "type", "image" - ] + ], + "title": "ImageDelta" }, "TextDelta": { "type": "object", @@ -4553,7 +4614,8 @@ "required": [ "type", "text" - ] + ], + "title": "TextDelta" }, "ToolCallDelta": { "type": "object", @@ -4580,7 +4642,8 @@ "in_progress", "failed", "succeeded" - ] + ], + "title": "ToolCallParseStatus" } }, "additionalProperties": false, @@ -4588,7 +4651,8 @@ "type", "tool_call", "parse_status" - ] + ], + "title": "ToolCallDelta" }, "CompletionRequest": { "type": "object", @@ -4630,7 +4694,8 @@ "required": [ "model_id", "content" - ] + ], + "title": "CompletionRequest" }, "CompletionResponseStreamChunk": { "type": "object", @@ -4660,6 +4725,7 @@ "required": [ "delta" ], + "title": "CompletionResponseStreamChunk", "description": "A chunk of a streamed completion response." }, "CreateAgentRequest": { @@ -4672,7 +4738,8 @@ "additionalProperties": false, "required": [ "agent_config" - ] + ], + "title": "CreateAgentRequest" }, "AgentCreateResponse": { "type": "object", @@ -4684,7 +4751,8 @@ "additionalProperties": false, "required": [ "agent_id" - ] + ], + "title": "AgentCreateResponse" }, "CreateAgentSessionRequest": { "type": "object", @@ -4696,7 +4764,8 @@ "additionalProperties": false, "required": [ "session_name" - ] + ], + "title": "CreateAgentSessionRequest" }, "AgentSessionCreateResponse": { "type": "object", @@ -4708,7 +4777,8 @@ "additionalProperties": false, "required": [ "session_id" - ] + ], + "title": "AgentSessionCreateResponse" }, "CreateAgentTurnRequest": { "type": "object", @@ -4761,7 +4831,8 @@ "required": [ "content", "mime_type" - ] + ], + "title": "Document" } }, "toolgroups": { @@ -4777,7 +4848,8 @@ "additionalProperties": false, "required": [ "messages" - ] + ], + "title": "CreateAgentTurnRequest" }, "InferenceStep": { "type": "object", @@ -4811,7 +4883,8 @@ "step_id", "step_type", "model_response" - ] + ], + "title": "InferenceStep" }, "MemoryRetrievalStep": { "type": "object", @@ -4849,7 +4922,8 @@ "step_type", "vector_db_ids", "inserted_context" - ] + ], + "title": "MemoryRetrievalStep" }, "SafetyViolation": { "type": "object", @@ -4890,7 +4964,8 @@ "required": [ "violation_level", "metadata" - ] + ], + "title": "SafetyViolation" }, "ShieldCallStep": { "type": "object", @@ -4923,7 +4998,8 @@ "turn_id", "step_id", "step_type" - ] + ], + "title": "ShieldCallStep" }, "ToolExecutionStep": { "type": "object", @@ -4967,7 +5043,8 @@ "step_type", "tool_calls", "tool_responses" - ] + ], + "title": "ToolExecutionStep" }, "ToolResponse": { "type": "object", @@ -4984,7 +5061,8 @@ "wolfram_alpha", "photogen", "code_interpreter" - ] + ], + "title": "BuiltinTool" }, { "type": "string" @@ -5000,7 +5078,8 @@ "call_id", "tool_name", "content" - ] + ], + "title": "ToolResponse" }, "Turn": { "type": "object", @@ -5087,7 +5166,8 @@ "required": [ "content", "mime_type" - ] + ], + "title": "Attachment" } }, "started_at": { @@ -5108,6 +5188,7 @@ "output_message", "started_at" ], + "title": "Turn", "description": "A single turn in an interaction with an Agentic System." 
}, "ViolationLevel": { @@ -5116,7 +5197,8 @@ "info", "warn", "error" - ] + ], + "title": "ViolationLevel" }, "AgentTurnResponseEvent": { "type": "object", @@ -5128,7 +5210,8 @@ "additionalProperties": false, "required": [ "payload" - ] + ], + "title": "AgentTurnResponseEvent" }, "AgentTurnResponseEventPayload": { "oneOf": [ @@ -5174,7 +5257,8 @@ "tool_execution", "shield_call", "memory_retrieval" - ] + ], + "title": "StepType" }, "step_id": { "type": "string" @@ -5211,7 +5295,8 @@ "step_type", "step_id", "step_details" - ] + ], + "title": "AgentTurnResponseStepCompletePayload" }, "AgentTurnResponseStepProgressPayload": { "type": "object", @@ -5228,7 +5313,8 @@ "tool_execution", "shield_call", "memory_retrieval" - ] + ], + "title": "StepType" }, "step_id": { "type": "string" @@ -5243,7 +5329,8 @@ "step_type", "step_id", "delta" - ] + ], + "title": "AgentTurnResponseStepProgressPayload" }, "AgentTurnResponseStepStartPayload": { "type": "object", @@ -5260,7 +5347,8 @@ "tool_execution", "shield_call", "memory_retrieval" - ] + ], + "title": "StepType" }, "step_id": { "type": "string" @@ -5296,7 +5384,8 @@ "event_type", "step_type", "step_id" - ] + ], + "title": "AgentTurnResponseStepStartPayload" }, "AgentTurnResponseStreamChunk": { "type": "object", @@ -5309,6 +5398,7 @@ "required": [ "event" ], + "title": "AgentTurnResponseStreamChunk", "description": "streamed agent turn completion response." }, "AgentTurnResponseTurnCompletePayload": { @@ -5327,7 +5417,8 @@ "required": [ "event_type", "turn" - ] + ], + "title": "AgentTurnResponseTurnCompletePayload" }, "AgentTurnResponseTurnStartPayload": { "type": "object", @@ -5345,7 +5436,8 @@ "required": [ "event_type", "turn_id" - ] + ], + "title": "AgentTurnResponseTurnStartPayload" }, "EmbeddingsRequest": { "type": "object", @@ -5366,7 +5458,8 @@ "required": [ "model_id", "contents" - ] + ], + "title": "EmbeddingsRequest" }, "EmbeddingsResponse": { "type": "object", @@ -5386,6 +5479,7 @@ "required": [ "embeddings" ], + "title": "EmbeddingsResponse", "description": "Response containing generated embeddings." }, "EvaluateRowsRequest": { @@ -5434,7 +5528,8 @@ "input_rows", "scoring_functions", "task_config" - ] + ], + "title": "EvaluateRowsRequest" }, "Session": { "type": "object", @@ -5463,6 +5558,7 @@ "turns", "started_at" ], + "title": "Session", "description": "A single session of an interaction with an Agentic System." 
}, "AgentStepResponse": { @@ -5497,7 +5593,8 @@ "additionalProperties": false, "required": [ "step" - ] + ], + "title": "AgentStepResponse" }, "AgentTurnInputType": { "type": "object", @@ -5511,7 +5608,8 @@ "additionalProperties": false, "required": [ "type" - ] + ], + "title": "AgentTurnInputType" }, "ArrayType": { "type": "object", @@ -5525,7 +5623,8 @@ "additionalProperties": false, "required": [ "type" - ] + ], + "title": "ArrayType" }, "BooleanType": { "type": "object", @@ -5539,7 +5638,8 @@ "additionalProperties": false, "required": [ "type" - ] + ], + "title": "BooleanType" }, "ChatCompletionInputType": { "type": "object", @@ -5553,7 +5653,8 @@ "additionalProperties": false, "required": [ "type" - ] + ], + "title": "ChatCompletionInputType" }, "CompletionInputType": { "type": "object", @@ -5567,7 +5668,8 @@ "additionalProperties": false, "required": [ "type" - ] + ], + "title": "CompletionInputType" }, "Dataset": { "type": "object", @@ -5630,7 +5732,8 @@ "dataset_schema", "url", "metadata" - ] + ], + "title": "Dataset" }, "JsonType": { "type": "object", @@ -5644,7 +5747,8 @@ "additionalProperties": false, "required": [ "type" - ] + ], + "title": "JsonType" }, "NumberType": { "type": "object", @@ -5658,7 +5762,8 @@ "additionalProperties": false, "required": [ "type" - ] + ], + "title": "NumberType" }, "ObjectType": { "type": "object", @@ -5672,7 +5777,8 @@ "additionalProperties": false, "required": [ "type" - ] + ], + "title": "ObjectType" }, "ParamType": { "oneOf": [ @@ -5735,7 +5841,8 @@ "additionalProperties": false, "required": [ "type" - ] + ], + "title": "StringType" }, "UnionType": { "type": "object", @@ -5749,7 +5856,8 @@ "additionalProperties": false, "required": [ "type" - ] + ], + "title": "UnionType" }, "Model": { "type": "object", @@ -5806,14 +5914,16 @@ "type", "metadata", "model_type" - ] + ], + "title": "Model" }, "ModelType": { "type": "string", "enum": [ "llm", "embedding" - ] + ], + "title": "ModelType" }, "PaginatedRowsResult": { "type": "object", @@ -5857,7 +5967,8 @@ "required": [ "rows", "total_count" - ] + ], + "title": "PaginatedRowsResult" }, "ScoringFn": { "type": "object", @@ -5919,7 +6030,8 @@ "type", "metadata", "return_type" - ] + ], + "title": "ScoringFn" }, "Shield": { "type": "object", @@ -5971,6 +6083,7 @@ "provider_id", "type" ], + "title": "Shield", "description": "A safety shield resource that can be used to check content" }, "Span": { @@ -6028,14 +6141,16 @@ "trace_id", "name", "start_time" - ] + ], + "title": "Span" }, "SpanStatus": { "type": "string", "enum": [ "ok", "error" - ] + ], + "title": "SpanStatus" }, "SpanWithStatus": { "type": "object", @@ -6095,7 +6210,8 @@ "trace_id", "name", "start_time" - ] + ], + "title": "SpanWithStatus" }, "QuerySpanTreeResponse": { "type": "object", @@ -6110,7 +6226,8 @@ "additionalProperties": false, "required": [ "data" - ] + ], + "title": "QuerySpanTreeResponse" }, "Tool": { "type": "object", @@ -6180,7 +6297,8 @@ "tool_host", "description", "parameters" - ] + ], + "title": "Tool" }, "ToolHost": { "type": "string", @@ -6188,7 +6306,8 @@ "distribution", "client", "model_context_protocol" - ] + ], + "title": "ToolHost" }, "ToolGroup": { "type": "object", @@ -6242,7 +6361,8 @@ "provider_resource_id", "provider_id", "type" - ] + ], + "title": "ToolGroup" }, "Trace": { "type": "object", @@ -6267,10 +6387,12 @@ "trace_id", "root_span_id", "start_time" - ] + ], + "title": "Trace" }, "Checkpoint": { - "description": "Checkpoint created during training runs" + "description": "Checkpoint created during training 
runs", + "title": "Checkpoint" }, "PostTrainingJobArtifactsResponse": { "type": "object", @@ -6290,6 +6412,7 @@ "job_uuid", "checkpoints" ], + "title": "PostTrainingJobArtifactsResponse", "description": "Artifacts of a finetuning job." }, "PostTrainingJobStatusResponse": { @@ -6351,6 +6474,7 @@ "status", "checkpoints" ], + "title": "PostTrainingJobStatusResponse", "description": "Status of a finetuning job." }, "ListPostTrainingJobsResponse": { @@ -6368,14 +6492,16 @@ "additionalProperties": false, "required": [ "job_uuid" - ] + ], + "title": "PostTrainingJob" } } }, "additionalProperties": false, "required": [ "data" - ] + ], + "title": "ListPostTrainingJobsResponse" }, "VectorDB": { "type": "object", @@ -6409,7 +6535,8 @@ "type", "embedding_model", "embedding_dimension" - ] + ], + "title": "VectorDB" }, "HealthInfo": { "type": "object", @@ -6421,7 +6548,8 @@ "additionalProperties": false, "required": [ "status" - ] + ], + "title": "HealthInfo" }, "RAGDocument": { "type": "object", @@ -6482,7 +6610,8 @@ "document_id", "content", "metadata" - ] + ], + "title": "RAGDocument" }, "InsertRequest": { "type": "object", @@ -6505,7 +6634,8 @@ "documents", "vector_db_id", "chunk_size_in_tokens" - ] + ], + "title": "InsertRequest" }, "InsertChunksRequest": { "type": "object", @@ -6551,7 +6681,8 @@ "required": [ "content", "metadata" - ] + ], + "title": "Chunk" } }, "ttl_seconds": { @@ -6562,7 +6693,8 @@ "required": [ "vector_db_id", "chunks" - ] + ], + "title": "InsertChunksRequest" }, "InvokeToolRequest": { "type": "object", @@ -6600,7 +6732,8 @@ "required": [ "tool_name", "kwargs" - ] + ], + "title": "InvokeToolRequest" }, "ToolInvocationResult": { "type": "object", @@ -6618,7 +6751,8 @@ "additionalProperties": false, "required": [ "content" - ] + ], + "title": "ToolInvocationResult" }, "ListDatasetsResponse": { "type": "object", @@ -6633,7 +6767,8 @@ "additionalProperties": false, "required": [ "data" - ] + ], + "title": "ListDatasetsResponse" }, "ListModelsResponse": { "type": "object", @@ -6648,7 +6783,8 @@ "additionalProperties": false, "required": [ "data" - ] + ], + "title": "ListModelsResponse" }, "ProviderInfo": { "type": "object", @@ -6668,7 +6804,8 @@ "api", "provider_id", "provider_type" - ] + ], + "title": "ProviderInfo" }, "ListProvidersResponse": { "type": "object", @@ -6683,7 +6820,8 @@ "additionalProperties": false, "required": [ "data" - ] + ], + "title": "ListProvidersResponse" }, "RouteInfo": { "type": "object", @@ -6706,7 +6844,8 @@ "route", "method", "provider_types" - ] + ], + "title": "RouteInfo" }, "ListRoutesResponse": { "type": "object", @@ -6721,7 +6860,8 @@ "additionalProperties": false, "required": [ "data" - ] + ], + "title": "ListRoutesResponse" }, "ListScoringFunctionsResponse": { "type": "object", @@ -6736,7 +6876,8 @@ "additionalProperties": false, "required": [ "data" - ] + ], + "title": "ListScoringFunctionsResponse" }, "ListShieldsResponse": { "type": "object", @@ -6751,7 +6892,8 @@ "additionalProperties": false, "required": [ "data" - ] + ], + "title": "ListShieldsResponse" }, "ListToolGroupsResponse": { "type": "object", @@ -6766,7 +6908,8 @@ "additionalProperties": false, "required": [ "data" - ] + ], + "title": "ListToolGroupsResponse" }, "ListToolsResponse": { "type": "object", @@ -6781,7 +6924,8 @@ "additionalProperties": false, "required": [ "data" - ] + ], + "title": "ListToolsResponse" }, "ListVectorDBsResponse": { "type": "object", @@ -6796,7 +6940,8 @@ "additionalProperties": false, "required": [ "data" - ] + ], + "title": "ListVectorDBsResponse" }, 
"Event": { "oneOf": [ @@ -6828,7 +6973,8 @@ "warn", "error", "critical" - ] + ], + "title": "LogSeverity" }, "SpanEndPayload": { "type": "object", @@ -6846,7 +6992,8 @@ "required": [ "type", "status" - ] + ], + "title": "SpanEndPayload" }, "SpanStartPayload": { "type": "object", @@ -6867,7 +7014,8 @@ "required": [ "type", "name" - ] + ], + "title": "SpanStartPayload" }, "StructuredLogEvent": { "type": "object", @@ -6920,7 +7068,8 @@ "timestamp", "type", "payload" - ] + ], + "title": "StructuredLogEvent" }, "StructuredLogPayload": { "oneOf": [ @@ -6994,7 +7143,8 @@ "type", "message", "severity" - ] + ], + "title": "UnstructuredLogEvent" }, "LogEventRequest": { "type": "object", @@ -7010,7 +7160,8 @@ "required": [ "event", "ttl_seconds" - ] + ], + "title": "LogEventRequest" }, "DPOAlignmentConfig": { "type": "object", @@ -7034,7 +7185,8 @@ "reward_clip", "epsilon", "gamma" - ] + ], + "title": "DPOAlignmentConfig" }, "DataConfig": { "type": "object", @@ -7069,14 +7221,16 @@ "batch_size", "shuffle", "data_format" - ] + ], + "title": "DataConfig" }, "DatasetFormat": { "type": "string", "enum": [ "instruct", "dialog" - ] + ], + "title": "DatasetFormat" }, "EfficiencyConfig": { "type": "object", @@ -7098,7 +7252,8 @@ "default": false } }, - "additionalProperties": false + "additionalProperties": false, + "title": "EfficiencyConfig" }, "OptimizerConfig": { "type": "object", @@ -7122,7 +7277,8 @@ "lr", "weight_decay", "num_warmup_steps" - ] + ], + "title": "OptimizerConfig" }, "OptimizerType": { "type": "string", @@ -7130,7 +7286,8 @@ "adam", "adamw", "sgd" - ] + ], + "title": "OptimizerType" }, "TrainingConfig": { "type": "object", @@ -7169,7 +7326,8 @@ "max_validation_steps", "data_config", "optimizer_config" - ] + ], + "title": "TrainingConfig" }, "PreferenceOptimizeRequest": { "type": "object", @@ -7245,7 +7403,8 @@ "training_config", "hyperparam_search_config", "logger_config" - ] + ], + "title": "PreferenceOptimizeRequest" }, "PostTrainingJob": { "type": "object", @@ -7257,7 +7416,8 @@ "additionalProperties": false, "required": [ "job_uuid" - ] + ], + "title": "PostTrainingJob" }, "DefaultRAGQueryGeneratorConfig": { "type": "object", @@ -7276,7 +7436,8 @@ "required": [ "type", "separator" - ] + ], + "title": "DefaultRAGQueryGeneratorConfig" }, "LLMRAGQueryGeneratorConfig": { "type": "object", @@ -7298,7 +7459,8 @@ "type", "model", "template" - ] + ], + "title": "LLMRAGQueryGeneratorConfig" }, "RAGQueryConfig": { "type": "object", @@ -7320,7 +7482,8 @@ "query_generator_config", "max_tokens_in_context", "max_chunks" - ] + ], + "title": "RAGQueryConfig" }, "RAGQueryGeneratorConfig": { "oneOf": [ @@ -7359,7 +7522,8 @@ "required": [ "content", "vector_db_ids" - ] + ], + "title": "QueryRequest" }, "RAGQueryResult": { "type": "object", @@ -7368,7 +7532,8 @@ "$ref": "#/components/schemas/InterleavedContent" } }, - "additionalProperties": false + "additionalProperties": false, + "title": "RAGQueryResult" }, "QueryChunksRequest": { "type": "object", @@ -7409,7 +7574,8 @@ "required": [ "vector_db_id", "query" - ] + ], + "title": "QueryChunksRequest" }, "QueryChunksResponse": { "type": "object", @@ -7452,7 +7618,8 @@ "required": [ "content", "metadata" - ] + ], + "title": "Chunk" } }, "scores": { @@ -7466,7 +7633,8 @@ "required": [ "chunks", "scores" - ] + ], + "title": "QueryChunksResponse" }, "QueryCondition": { "type": "object", @@ -7505,7 +7673,8 @@ "key", "op", "value" - ] + ], + "title": "QueryCondition" }, "QueryConditionOp": { "type": "string", @@ -7514,7 +7683,8 @@ "ne", "gt", "lt" - ] + ], + 
"title": "QueryConditionOp" }, "QuerySpansResponse": { "type": "object", @@ -7529,7 +7699,8 @@ "additionalProperties": false, "required": [ "data" - ] + ], + "title": "QuerySpansResponse" }, "QueryTracesResponse": { "type": "object", @@ -7544,7 +7715,8 @@ "additionalProperties": false, "required": [ "data" - ] + ], + "title": "QueryTracesResponse" }, "RegisterBenchmarkRequest": { "type": "object", @@ -7598,7 +7770,8 @@ "benchmark_id", "dataset_id", "scoring_functions" - ] + ], + "title": "RegisterBenchmarkRequest" }, "RegisterDatasetRequest": { "type": "object", @@ -7652,7 +7825,8 @@ "dataset_id", "dataset_schema", "url" - ] + ], + "title": "RegisterDatasetRequest" }, "RegisterModelRequest": { "type": "object", @@ -7698,7 +7872,8 @@ "additionalProperties": false, "required": [ "model_id" - ] + ], + "title": "RegisterModelRequest" }, "RegisterScoringFunctionRequest": { "type": "object", @@ -7727,7 +7902,8 @@ "scoring_fn_id", "description", "return_type" - ] + ], + "title": "RegisterScoringFunctionRequest" }, "RegisterShieldRequest": { "type": "object", @@ -7770,7 +7946,8 @@ "additionalProperties": false, "required": [ "shield_id" - ] + ], + "title": "RegisterShieldRequest" }, "RegisterToolGroupRequest": { "type": "object", @@ -7814,7 +7991,8 @@ "required": [ "toolgroup_id", "provider_id" - ] + ], + "title": "RegisterToolGroupRequest" }, "RegisterVectorDbRequest": { "type": "object", @@ -7839,7 +8017,8 @@ "required": [ "vector_db_id", "embedding_model" - ] + ], + "title": "RegisterVectorDbRequest" }, "RunEvalRequest": { "type": "object", @@ -7851,7 +8030,8 @@ "additionalProperties": false, "required": [ "task_config" - ] + ], + "title": "RunEvalRequest" }, "RunShieldRequest": { "type": "object", @@ -7896,7 +8076,8 @@ "shield_id", "messages", "params" - ] + ], + "title": "RunShieldRequest" }, "RunShieldResponse": { "type": "object", @@ -7905,7 +8086,8 @@ "$ref": "#/components/schemas/SafetyViolation" } }, - "additionalProperties": false + "additionalProperties": false, + "title": "RunShieldResponse" }, "SaveSpansToDatasetRequest": { "type": "object", @@ -7934,7 +8116,8 @@ "attribute_filters", "attributes_to_save", "dataset_id" - ] + ], + "title": "SaveSpansToDatasetRequest" }, "ScoreRequest": { "type": "object", @@ -7985,7 +8168,8 @@ "required": [ "input_rows", "scoring_functions" - ] + ], + "title": "ScoreRequest" }, "ScoreResponse": { "type": "object", @@ -8000,7 +8184,8 @@ "additionalProperties": false, "required": [ "results" - ] + ], + "title": "ScoreResponse" }, "ScoreBatchRequest": { "type": "object", @@ -8030,7 +8215,8 @@ "dataset_id", "scoring_functions", "save_results_dataset" - ] + ], + "title": "ScoreBatchRequest" }, "ScoreBatchResponse": { "type": "object", @@ -8048,7 +8234,8 @@ "additionalProperties": false, "required": [ "results" - ] + ], + "title": "ScoreBatchResponse" }, "AlgorithmConfig": { "oneOf": [ @@ -8110,7 +8297,8 @@ "apply_lora_to_output", "rank", "alpha" - ] + ], + "title": "LoraFinetuningConfig" }, "QATFinetuningConfig": { "type": "object", @@ -8132,7 +8320,8 @@ "type", "quantizer_name", "group_size" - ] + ], + "title": "QATFinetuningConfig" }, "SupervisedFineTuneRequest": { "type": "object", @@ -8210,7 +8399,8 @@ "hyperparam_search_config", "logger_config", "model" - ] + ], + "title": "SupervisedFineTuneRequest" }, "SyntheticDataGenerateRequest": { "type": "object", @@ -8231,6 +8421,7 @@ "top_k_top_p", "sigmoid" ], + "title": "FilteringFunction", "description": "The type of filtering function." 
}, "model": { @@ -8241,7 +8432,8 @@ "required": [ "dialogs", "filtering_function" - ] + ], + "title": "SyntheticDataGenerateRequest" }, "SyntheticDataGenerationResponse": { "type": "object", @@ -8304,6 +8496,7 @@ "required": [ "synthetic_data" ], + "title": "SyntheticDataGenerationResponse", "description": "Response from the synthetic data generation. Batch of (prompt, response, score) tuples that pass the threshold." }, "VersionInfo": { @@ -8316,7 +8509,8 @@ "additionalProperties": false, "required": [ "version" - ] + ], + "title": "VersionInfo" } }, "responses": {} diff --git a/docs/_static/llama-stack-spec.yaml b/docs/_static/llama-stack-spec.yaml index 60b777e91..4d13ca565 100644 --- a/docs/_static/llama-stack-spec.yaml +++ b/docs/_static/llama-stack-spec.yaml @@ -1611,6 +1611,7 @@ components: required: - type - config + title: AgentCandidate AgentConfig: type: object properties: @@ -1638,6 +1639,7 @@ components: - auto - required - none + title: ToolChoice description: >- Whether tool use is required or automatic. This is a hint to the model which may not be followed. It depends on the Instruction Following capabilities @@ -1648,6 +1650,7 @@ components: - json - function_tag - python_list + title: ToolPromptFormat description: >- Prompt format for calling custom / zero shot tools. tool_config: @@ -1668,6 +1671,7 @@ components: required: - model - instructions + title: AgentConfig AgentTool: oneOf: - type: string @@ -1689,6 +1693,7 @@ components: required: - name - args + title: AgentToolGroupWithArgs AggregationFunctionType: type: string enum: @@ -1696,6 +1701,7 @@ components: - median - categorical_count - accuracy + title: AggregationFunctionType BasicScoringFnParams: type: object properties: @@ -1710,6 +1716,7 @@ components: additionalProperties: false required: - type + title: BasicScoringFnParams BenchmarkConfig: type: object properties: @@ -1730,6 +1737,7 @@ components: - type - eval_candidate - scoring_params + title: BenchmarkConfig EvalCandidate: oneOf: - $ref: '#/components/schemas/ModelCandidate' @@ -1764,6 +1772,7 @@ components: required: - type - bnf + title: GrammarResponseFormat description: >- Configuration for grammar-guided response generation. GreedySamplingStrategy: @@ -1776,6 +1785,7 @@ components: additionalProperties: false required: - type + title: GreedySamplingStrategy ImageContentItem: type: object properties: @@ -1804,6 +1814,7 @@ components: required: - type - image + title: ImageContentItem description: A image content item InterleavedContent: oneOf: @@ -1847,6 +1858,7 @@ components: required: - type - json_schema + title: JsonSchemaResponseFormat description: >- Configuration for JSON schema-guided response generation. 
LLMAsJudgeScoringFnParams: @@ -1872,6 +1884,7 @@ components: required: - type - judge_model + title: LLMAsJudgeScoringFnParams ModelCandidate: type: object properties: @@ -1890,6 +1903,7 @@ components: - type - model - sampling_params + title: ModelCandidate RegexParserScoringFnParams: type: object properties: @@ -1908,6 +1922,7 @@ components: additionalProperties: false required: - type + title: RegexParserScoringFnParams ResponseFormat: oneOf: - $ref: '#/components/schemas/JsonSchemaResponseFormat' @@ -1931,6 +1946,7 @@ components: additionalProperties: false required: - strategy + title: SamplingParams SamplingStrategy: oneOf: - $ref: '#/components/schemas/GreedySamplingStrategy' @@ -1972,6 +1988,7 @@ components: required: - role - content + title: SystemMessage description: >- A system message providing instructions or context to the model. TextContentItem: @@ -1990,6 +2007,7 @@ components: required: - type - text + title: TextContentItem description: A text content item ToolConfig: type: object @@ -2001,6 +2019,7 @@ components: - auto - required - none + title: ToolChoice description: >- Whether tool use is required or automatic. This is a hint to the model which may not be followed. It depends on the Instruction Following @@ -2036,6 +2055,7 @@ components: where the function definitions should be inserted. default: append additionalProperties: false + title: ToolConfig description: Configuration for tool use. ToolDef: type: object @@ -2061,6 +2081,7 @@ components: additionalProperties: false required: - name + title: ToolDef ToolParameter: type: object properties: @@ -2087,6 +2108,7 @@ components: - parameter_type - description - required + title: ToolParameter TopKSamplingStrategy: type: object properties: @@ -2100,6 +2122,7 @@ components: required: - type - top_k + title: TopKSamplingStrategy TopPSamplingStrategy: type: object properties: @@ -2115,6 +2138,7 @@ components: additionalProperties: false required: - type + title: TopPSamplingStrategy URL: type: object properties: @@ -2123,6 +2147,7 @@ components: additionalProperties: false required: - uri + title: URL DeprecatedEvaluateRowsRequest: type: object properties: @@ -2149,6 +2174,7 @@ components: - input_rows - scoring_functions - task_config + title: DeprecatedEvaluateRowsRequest EvaluateResponse: type: object properties: @@ -2172,6 +2198,7 @@ components: required: - generations - scores + title: EvaluateResponse ScoringResult: type: object properties: @@ -2201,6 +2228,7 @@ components: required: - score_rows - aggregated_results + title: ScoringResult Benchmark: type: object properties: @@ -2239,6 +2267,7 @@ components: - dataset_id - scoring_functions - metadata + title: Benchmark JobStatus: type: string enum: @@ -2246,6 +2275,7 @@ components: - in_progress - failed - scheduled + title: JobStatus ListBenchmarksResponse: type: object properties: @@ -2256,6 +2286,7 @@ components: additionalProperties: false required: - data + title: ListBenchmarksResponse DeprecatedRegisterEvalTaskRequest: type: object properties: @@ -2286,6 +2317,7 @@ components: - eval_task_id - dataset_id - scoring_functions + title: DeprecatedRegisterEvalTaskRequest DeprecatedRunEvalRequest: type: object properties: @@ -2294,6 +2326,7 @@ components: additionalProperties: false required: - task_config + title: DeprecatedRunEvalRequest Job: type: object properties: @@ -2302,6 +2335,7 @@ components: additionalProperties: false required: - job_id + title: Job AppendRowsRequest: type: object properties: @@ -2323,6 +2357,7 @@ components: required: - dataset_id - 
rows + title: AppendRowsRequest CompletionMessage: type: object properties: @@ -2359,6 +2394,7 @@ components: - role - content - stop_reason + title: CompletionMessage description: >- A message containing the model's (assistant) response in a chat conversation. Message: @@ -2387,6 +2423,7 @@ components: - wolfram_alpha - photogen - code_interpreter + title: BuiltinTool - type: string arguments: type: object @@ -2418,6 +2455,7 @@ components: - call_id - tool_name - arguments + title: ToolCall ToolDefinition: type: object properties: @@ -2429,6 +2467,7 @@ components: - wolfram_alpha - photogen - code_interpreter + title: BuiltinTool - type: string description: type: string @@ -2439,6 +2478,7 @@ components: additionalProperties: false required: - tool_name + title: ToolDefinition ToolParamDefinition: type: object properties: @@ -2460,6 +2500,7 @@ components: additionalProperties: false required: - param_type + title: ToolParamDefinition ToolResponseMessage: type: object properties: @@ -2481,6 +2522,7 @@ components: - wolfram_alpha - photogen - code_interpreter + title: BuiltinTool - type: string description: Name of the tool that was called content: @@ -2492,6 +2534,7 @@ components: - call_id - tool_name - content + title: ToolResponseMessage description: >- A message representing the result of a tool invocation. UserMessage: @@ -2516,6 +2559,7 @@ components: required: - role - content + title: UserMessage description: >- A message from the user in a chat conversation. BatchChatCompletionRequest: @@ -2541,6 +2585,7 @@ components: - auto - required - none + title: ToolChoice description: >- Whether tool use is required or automatic. This is a hint to the model which may not be followed. It depends on the Instruction Following capabilities @@ -2551,6 +2596,7 @@ components: - json - function_tag - python_list + title: ToolPromptFormat description: >- Prompt format for calling custom / zero shot tools. response_format: @@ -2564,10 +2610,12 @@ components: description: >- How many tokens (for each position) to return log probabilities for. additionalProperties: false + title: LogProbConfig additionalProperties: false required: - model - messages_batch + title: BatchChatCompletionRequest BatchChatCompletionResponse: type: object properties: @@ -2578,6 +2626,7 @@ components: additionalProperties: false required: - batch + title: BatchChatCompletionResponse ChatCompletionResponse: type: object properties: @@ -2597,6 +2646,7 @@ components: additionalProperties: false required: - completion_message + title: ChatCompletionResponse description: Response from a chat completion request. MetricEvent: type: object @@ -2638,6 +2688,7 @@ components: - metric - value - unit + title: MetricEvent TokenLogProbs: type: object properties: @@ -2650,6 +2701,7 @@ components: additionalProperties: false required: - logprobs_by_token + title: TokenLogProbs description: Log probabilities for generated tokens. BatchCompletionRequest: type: object @@ -2673,10 +2725,12 @@ components: description: >- How many tokens (for each position) to return log probabilities for. 
additionalProperties: false + title: LogProbConfig additionalProperties: false required: - model - content_batch + title: BatchCompletionRequest BatchCompletionResponse: type: object properties: @@ -2687,6 +2741,7 @@ components: additionalProperties: false required: - batch + title: BatchCompletionResponse CompletionResponse: type: object properties: @@ -2710,6 +2765,7 @@ components: required: - content - stop_reason + title: CompletionResponse description: Response from a completion request. CancelTrainingJobRequest: type: object @@ -2719,6 +2775,7 @@ components: additionalProperties: false required: - job_uuid + title: CancelTrainingJobRequest ChatCompletionRequest: type: object properties: @@ -2796,6 +2853,7 @@ components: required: - model_id - messages + title: ChatCompletionRequest ChatCompletionResponseEvent: type: object properties: @@ -2829,6 +2887,7 @@ components: required: - event_type - delta + title: ChatCompletionResponseEvent description: >- An event during chat completion generation. ChatCompletionResponseStreamChunk: @@ -2844,6 +2903,7 @@ components: additionalProperties: false required: - event + title: ChatCompletionResponseStreamChunk description: >- A chunk of a streamed chat completion response. ContentDelta: @@ -2871,6 +2931,7 @@ components: required: - type - image + title: ImageDelta TextDelta: type: object properties: @@ -2884,6 +2945,7 @@ components: required: - type - text + title: TextDelta ToolCallDelta: type: object properties: @@ -2902,11 +2964,13 @@ components: - in_progress - failed - succeeded + title: ToolCallParseStatus additionalProperties: false required: - type - tool_call - parse_status + title: ToolCallDelta CompletionRequest: type: object properties: @@ -2947,6 +3011,7 @@ components: required: - model_id - content + title: CompletionRequest CompletionResponseStreamChunk: type: object properties: @@ -2971,6 +3036,7 @@ components: additionalProperties: false required: - delta + title: CompletionResponseStreamChunk description: >- A chunk of a streamed completion response. 
CreateAgentRequest: @@ -2981,6 +3047,7 @@ components: additionalProperties: false required: - agent_config + title: CreateAgentRequest AgentCreateResponse: type: object properties: @@ -2989,6 +3056,7 @@ components: additionalProperties: false required: - agent_id + title: AgentCreateResponse CreateAgentSessionRequest: type: object properties: @@ -2997,6 +3065,7 @@ components: additionalProperties: false required: - session_name + title: CreateAgentSessionRequest AgentSessionCreateResponse: type: object properties: @@ -3005,6 +3074,7 @@ components: additionalProperties: false required: - session_id + title: AgentSessionCreateResponse CreateAgentTurnRequest: type: object properties: @@ -3035,6 +3105,7 @@ components: required: - content - mime_type + title: Document toolgroups: type: array items: @@ -3044,6 +3115,7 @@ components: additionalProperties: false required: - messages + title: CreateAgentTurnRequest InferenceStep: type: object properties: @@ -3069,6 +3141,7 @@ components: - step_id - step_type - model_response + title: InferenceStep MemoryRetrievalStep: type: object properties: @@ -3097,6 +3170,7 @@ components: - step_type - vector_db_ids - inserted_context + title: MemoryRetrievalStep SafetyViolation: type: object properties: @@ -3118,6 +3192,7 @@ components: required: - violation_level - metadata + title: SafetyViolation ShieldCallStep: type: object properties: @@ -3142,6 +3217,7 @@ components: - turn_id - step_id - step_type + title: ShieldCallStep ToolExecutionStep: type: object properties: @@ -3174,6 +3250,7 @@ components: - step_type - tool_calls - tool_responses + title: ToolExecutionStep ToolResponse: type: object properties: @@ -3187,6 +3264,7 @@ components: - wolfram_alpha - photogen - code_interpreter + title: BuiltinTool - type: string content: $ref: '#/components/schemas/InterleavedContent' @@ -3195,6 +3273,7 @@ components: - call_id - tool_name - content + title: ToolResponse Turn: type: object properties: @@ -3244,6 +3323,7 @@ components: required: - content - mime_type + title: Attachment started_at: type: string format: date-time @@ -3258,6 +3338,7 @@ components: - steps - output_message - started_at + title: Turn description: >- A single turn in an interaction with an Agentic System. 
ViolationLevel: @@ -3266,6 +3347,7 @@ components: - info - warn - error + title: ViolationLevel AgentTurnResponseEvent: type: object properties: @@ -3274,6 +3356,7 @@ components: additionalProperties: false required: - payload + title: AgentTurnResponseEvent AgentTurnResponseEventPayload: oneOf: - $ref: '#/components/schemas/AgentTurnResponseStepStartPayload' @@ -3303,6 +3386,7 @@ components: - tool_execution - shield_call - memory_retrieval + title: StepType step_id: type: string step_details: @@ -3324,6 +3408,7 @@ components: - step_type - step_id - step_details + title: AgentTurnResponseStepCompletePayload AgentTurnResponseStepProgressPayload: type: object properties: @@ -3338,6 +3423,7 @@ components: - tool_execution - shield_call - memory_retrieval + title: StepType step_id: type: string delta: @@ -3348,6 +3434,7 @@ components: - step_type - step_id - delta + title: AgentTurnResponseStepProgressPayload AgentTurnResponseStepStartPayload: type: object properties: @@ -3362,6 +3449,7 @@ components: - tool_execution - shield_call - memory_retrieval + title: StepType step_id: type: string metadata: @@ -3379,6 +3467,7 @@ components: - event_type - step_type - step_id + title: AgentTurnResponseStepStartPayload AgentTurnResponseStreamChunk: type: object properties: @@ -3387,6 +3476,7 @@ components: additionalProperties: false required: - event + title: AgentTurnResponseStreamChunk description: streamed agent turn completion response. AgentTurnResponseTurnCompletePayload: type: object @@ -3401,6 +3491,7 @@ components: required: - event_type - turn + title: AgentTurnResponseTurnCompletePayload AgentTurnResponseTurnStartPayload: type: object properties: @@ -3414,6 +3505,7 @@ components: required: - event_type - turn_id + title: AgentTurnResponseTurnStartPayload EmbeddingsRequest: type: object properties: @@ -3434,6 +3526,7 @@ components: required: - model_id - contents + title: EmbeddingsRequest EmbeddingsResponse: type: object properties: @@ -3450,6 +3543,7 @@ components: additionalProperties: false required: - embeddings + title: EmbeddingsResponse description: >- Response containing generated embeddings. EvaluateRowsRequest: @@ -3478,6 +3572,7 @@ components: - input_rows - scoring_functions - task_config + title: EvaluateRowsRequest Session: type: object properties: @@ -3498,6 +3593,7 @@ components: - session_name - turns - started_at + title: Session description: >- A single session of an interaction with an Agentic System. 
AgentStepResponse: @@ -3519,6 +3615,7 @@ components: additionalProperties: false required: - step + title: AgentStepResponse AgentTurnInputType: type: object properties: @@ -3529,6 +3626,7 @@ components: additionalProperties: false required: - type + title: AgentTurnInputType ArrayType: type: object properties: @@ -3539,6 +3637,7 @@ components: additionalProperties: false required: - type + title: ArrayType BooleanType: type: object properties: @@ -3549,6 +3648,7 @@ components: additionalProperties: false required: - type + title: BooleanType ChatCompletionInputType: type: object properties: @@ -3559,6 +3659,7 @@ components: additionalProperties: false required: - type + title: ChatCompletionInputType CompletionInputType: type: object properties: @@ -3569,6 +3670,7 @@ components: additionalProperties: false required: - type + title: CompletionInputType Dataset: type: object properties: @@ -3607,6 +3709,7 @@ components: - dataset_schema - url - metadata + title: Dataset JsonType: type: object properties: @@ -3617,6 +3720,7 @@ components: additionalProperties: false required: - type + title: JsonType NumberType: type: object properties: @@ -3627,6 +3731,7 @@ components: additionalProperties: false required: - type + title: NumberType ObjectType: type: object properties: @@ -3637,6 +3742,7 @@ components: additionalProperties: false required: - type + title: ObjectType ParamType: oneOf: - $ref: '#/components/schemas/StringType' @@ -3672,6 +3778,7 @@ components: additionalProperties: false required: - type + title: StringType UnionType: type: object properties: @@ -3682,6 +3789,7 @@ components: additionalProperties: false required: - type + title: UnionType Model: type: object properties: @@ -3716,11 +3824,13 @@ components: - type - metadata - model_type + title: Model ModelType: type: string enum: - llm - embedding + title: ModelType PaginatedRowsResult: type: object properties: @@ -3744,6 +3854,7 @@ components: required: - rows - total_count + title: PaginatedRowsResult ScoringFn: type: object properties: @@ -3781,6 +3892,7 @@ components: - type - metadata - return_type + title: ScoringFn Shield: type: object properties: @@ -3810,6 +3922,7 @@ components: - provider_resource_id - provider_id - type + title: Shield description: >- A safety shield resource that can be used to check content Span: @@ -3845,11 +3958,13 @@ components: - trace_id - name - start_time + title: Span SpanStatus: type: string enum: - ok - error + title: SpanStatus SpanWithStatus: type: object properties: @@ -3885,6 +4000,7 @@ components: - trace_id - name - start_time + title: SpanWithStatus QuerySpanTreeResponse: type: object properties: @@ -3895,6 +4011,7 @@ components: additionalProperties: false required: - data + title: QuerySpanTreeResponse Tool: type: object properties: @@ -3938,12 +4055,14 @@ components: - tool_host - description - parameters + title: Tool ToolHost: type: string enum: - distribution - client - model_context_protocol + title: ToolHost ToolGroup: type: object properties: @@ -3975,6 +4094,7 @@ components: - provider_resource_id - provider_id - type + title: ToolGroup Trace: type: object properties: @@ -3993,8 +4113,10 @@ components: - trace_id - root_span_id - start_time + title: Trace Checkpoint: description: Checkpoint created during training runs + title: Checkpoint PostTrainingJobArtifactsResponse: type: object properties: @@ -4008,6 +4130,7 @@ components: required: - job_uuid - checkpoints + title: PostTrainingJobArtifactsResponse description: Artifacts of a finetuning job. 
PostTrainingJobStatusResponse: type: object @@ -4044,6 +4167,7 @@ components: - job_uuid - status - checkpoints + title: PostTrainingJobStatusResponse description: Status of a finetuning job. ListPostTrainingJobsResponse: type: object @@ -4058,9 +4182,11 @@ components: additionalProperties: false required: - job_uuid + title: PostTrainingJob additionalProperties: false required: - data + title: ListPostTrainingJobsResponse VectorDB: type: object properties: @@ -4086,6 +4212,7 @@ components: - type - embedding_model - embedding_dimension + title: VectorDB HealthInfo: type: object properties: @@ -4094,6 +4221,7 @@ components: additionalProperties: false required: - status + title: HealthInfo RAGDocument: type: object properties: @@ -4124,6 +4252,7 @@ components: - document_id - content - metadata + title: RAGDocument InsertRequest: type: object properties: @@ -4140,6 +4269,7 @@ components: - documents - vector_db_id - chunk_size_in_tokens + title: InsertRequest InsertChunksRequest: type: object properties: @@ -4166,12 +4296,14 @@ components: required: - content - metadata + title: Chunk ttl_seconds: type: integer additionalProperties: false required: - vector_db_id - chunks + title: InsertChunksRequest InvokeToolRequest: type: object properties: @@ -4191,6 +4323,7 @@ components: required: - tool_name - kwargs + title: InvokeToolRequest ToolInvocationResult: type: object properties: @@ -4203,6 +4336,7 @@ components: additionalProperties: false required: - content + title: ToolInvocationResult ListDatasetsResponse: type: object properties: @@ -4213,6 +4347,7 @@ components: additionalProperties: false required: - data + title: ListDatasetsResponse ListModelsResponse: type: object properties: @@ -4223,6 +4358,7 @@ components: additionalProperties: false required: - data + title: ListModelsResponse ProviderInfo: type: object properties: @@ -4237,6 +4373,7 @@ components: - api - provider_id - provider_type + title: ProviderInfo ListProvidersResponse: type: object properties: @@ -4247,6 +4384,7 @@ components: additionalProperties: false required: - data + title: ListProvidersResponse RouteInfo: type: object properties: @@ -4263,6 +4401,7 @@ components: - route - method - provider_types + title: RouteInfo ListRoutesResponse: type: object properties: @@ -4273,6 +4412,7 @@ components: additionalProperties: false required: - data + title: ListRoutesResponse ListScoringFunctionsResponse: type: object properties: @@ -4283,6 +4423,7 @@ components: additionalProperties: false required: - data + title: ListScoringFunctionsResponse ListShieldsResponse: type: object properties: @@ -4293,6 +4434,7 @@ components: additionalProperties: false required: - data + title: ListShieldsResponse ListToolGroupsResponse: type: object properties: @@ -4303,6 +4445,7 @@ components: additionalProperties: false required: - data + title: ListToolGroupsResponse ListToolsResponse: type: object properties: @@ -4313,6 +4456,7 @@ components: additionalProperties: false required: - data + title: ListToolsResponse ListVectorDBsResponse: type: object properties: @@ -4323,6 +4467,7 @@ components: additionalProperties: false required: - data + title: ListVectorDBsResponse Event: oneOf: - $ref: '#/components/schemas/UnstructuredLogEvent' @@ -4343,6 +4488,7 @@ components: - warn - error - critical + title: LogSeverity SpanEndPayload: type: object properties: @@ -4356,6 +4502,7 @@ components: required: - type - status + title: SpanEndPayload SpanStartPayload: type: object properties: @@ -4371,6 +4518,7 @@ components: required: - type - name 
+ title: SpanStartPayload StructuredLogEvent: type: object properties: @@ -4403,6 +4551,7 @@ components: - timestamp - type - payload + title: StructuredLogEvent StructuredLogPayload: oneOf: - $ref: '#/components/schemas/SpanStartPayload' @@ -4447,6 +4596,7 @@ components: - type - message - severity + title: UnstructuredLogEvent LogEventRequest: type: object properties: @@ -4458,6 +4608,7 @@ components: required: - event - ttl_seconds + title: LogEventRequest DPOAlignmentConfig: type: object properties: @@ -4475,6 +4626,7 @@ components: - reward_clip - epsilon - gamma + title: DPOAlignmentConfig DataConfig: type: object properties: @@ -4500,11 +4652,13 @@ components: - batch_size - shuffle - data_format + title: DataConfig DatasetFormat: type: string enum: - instruct - dialog + title: DatasetFormat EfficiencyConfig: type: object properties: @@ -4521,6 +4675,7 @@ components: type: boolean default: false additionalProperties: false + title: EfficiencyConfig OptimizerConfig: type: object properties: @@ -4538,12 +4693,14 @@ components: - lr - weight_decay - num_warmup_steps + title: OptimizerConfig OptimizerType: type: string enum: - adam - adamw - sgd + title: OptimizerType TrainingConfig: type: object properties: @@ -4572,6 +4729,7 @@ components: - max_validation_steps - data_config - optimizer_config + title: TrainingConfig PreferenceOptimizeRequest: type: object properties: @@ -4611,6 +4769,7 @@ components: - training_config - hyperparam_search_config - logger_config + title: PreferenceOptimizeRequest PostTrainingJob: type: object properties: @@ -4619,6 +4778,7 @@ components: additionalProperties: false required: - job_uuid + title: PostTrainingJob DefaultRAGQueryGeneratorConfig: type: object properties: @@ -4633,6 +4793,7 @@ components: required: - type - separator + title: DefaultRAGQueryGeneratorConfig LLMRAGQueryGeneratorConfig: type: object properties: @@ -4649,6 +4810,7 @@ components: - type - model - template + title: LLMRAGQueryGeneratorConfig RAGQueryConfig: type: object properties: @@ -4665,6 +4827,7 @@ components: - query_generator_config - max_tokens_in_context - max_chunks + title: RAGQueryConfig RAGQueryGeneratorConfig: oneOf: - $ref: '#/components/schemas/DefaultRAGQueryGeneratorConfig' @@ -4689,12 +4852,14 @@ components: required: - content - vector_db_ids + title: QueryRequest RAGQueryResult: type: object properties: content: $ref: '#/components/schemas/InterleavedContent' additionalProperties: false + title: RAGQueryResult QueryChunksRequest: type: object properties: @@ -4716,6 +4881,7 @@ components: required: - vector_db_id - query + title: QueryChunksRequest QueryChunksResponse: type: object properties: @@ -4740,6 +4906,7 @@ components: required: - content - metadata + title: Chunk scores: type: array items: @@ -4748,6 +4915,7 @@ components: required: - chunks - scores + title: QueryChunksResponse QueryCondition: type: object properties: @@ -4768,6 +4936,7 @@ components: - key - op - value + title: QueryCondition QueryConditionOp: type: string enum: @@ -4775,6 +4944,7 @@ components: - ne - gt - lt + title: QueryConditionOp QuerySpansResponse: type: object properties: @@ -4785,6 +4955,7 @@ components: additionalProperties: false required: - data + title: QuerySpansResponse QueryTracesResponse: type: object properties: @@ -4795,6 +4966,7 @@ components: additionalProperties: false required: - data + title: QueryTracesResponse RegisterBenchmarkRequest: type: object properties: @@ -4825,6 +4997,7 @@ components: - benchmark_id - dataset_id - scoring_functions + title: 
RegisterBenchmarkRequest RegisterDatasetRequest: type: object properties: @@ -4855,6 +5028,7 @@ components: - dataset_id - dataset_schema - url + title: RegisterDatasetRequest RegisterModelRequest: type: object properties: @@ -4879,6 +5053,7 @@ components: additionalProperties: false required: - model_id + title: RegisterModelRequest RegisterScoringFunctionRequest: type: object properties: @@ -4899,6 +5074,7 @@ components: - scoring_fn_id - description - return_type + title: RegisterScoringFunctionRequest RegisterShieldRequest: type: object properties: @@ -4921,6 +5097,7 @@ components: additionalProperties: false required: - shield_id + title: RegisterShieldRequest RegisterToolGroupRequest: type: object properties: @@ -4944,6 +5121,7 @@ components: required: - toolgroup_id - provider_id + title: RegisterToolGroupRequest RegisterVectorDbRequest: type: object properties: @@ -4961,6 +5139,7 @@ components: required: - vector_db_id - embedding_model + title: RegisterVectorDbRequest RunEvalRequest: type: object properties: @@ -4969,6 +5148,7 @@ components: additionalProperties: false required: - task_config + title: RunEvalRequest RunShieldRequest: type: object properties: @@ -4993,12 +5173,14 @@ components: - shield_id - messages - params + title: RunShieldRequest RunShieldResponse: type: object properties: violation: $ref: '#/components/schemas/SafetyViolation' additionalProperties: false + title: RunShieldResponse SaveSpansToDatasetRequest: type: object properties: @@ -5019,6 +5201,7 @@ components: - attribute_filters - attributes_to_save - dataset_id + title: SaveSpansToDatasetRequest ScoreRequest: type: object properties: @@ -5044,6 +5227,7 @@ components: required: - input_rows - scoring_functions + title: ScoreRequest ScoreResponse: type: object properties: @@ -5054,6 +5238,7 @@ components: additionalProperties: false required: - results + title: ScoreResponse ScoreBatchRequest: type: object properties: @@ -5072,6 +5257,7 @@ components: - dataset_id - scoring_functions - save_results_dataset + title: ScoreBatchRequest ScoreBatchResponse: type: object properties: @@ -5084,6 +5270,7 @@ components: additionalProperties: false required: - results + title: ScoreBatchResponse AlgorithmConfig: oneOf: - $ref: '#/components/schemas/LoraFinetuningConfig' @@ -5126,6 +5313,7 @@ components: - apply_lora_to_output - rank - alpha + title: LoraFinetuningConfig QATFinetuningConfig: type: object properties: @@ -5142,6 +5330,7 @@ components: - type - quantizer_name - group_size + title: QATFinetuningConfig SupervisedFineTuneRequest: type: object properties: @@ -5182,6 +5371,7 @@ components: - hyperparam_search_config - logger_config - model + title: SupervisedFineTuneRequest SyntheticDataGenerateRequest: type: object properties: @@ -5198,6 +5388,7 @@ components: - top_p - top_k_top_p - sigmoid + title: FilteringFunction description: The type of filtering function. model: type: string @@ -5205,6 +5396,7 @@ components: required: - dialogs - filtering_function + title: SyntheticDataGenerateRequest SyntheticDataGenerationResponse: type: object properties: @@ -5233,6 +5425,7 @@ components: additionalProperties: false required: - synthetic_data + title: SyntheticDataGenerationResponse description: >- Response from the synthetic data generation. Batch of (prompt, response, score) tuples that pass the threshold. 
@@ -5244,6 +5437,7 @@ components:
       additionalProperties: false
       required:
         - version
+      title: VersionInfo
 responses: {}
 security:
   - Default: []
diff --git a/llama_stack/strong_typing/schema.py b/llama_stack/strong_typing/schema.py
index ddff7cf82..45c7130ba 100644
--- a/llama_stack/strong_typing/schema.py
+++ b/llama_stack/strong_typing/schema.py
@@ -108,7 +108,9 @@ def get_class_property_docstrings(

 def docstring_to_schema(data_type: type) -> Schema:
     short_description, long_description = get_class_docstrings(data_type)
-    schema: Schema = {}
+    schema: Schema = {
+        "title": python_type_to_name(data_type),
+    }

     description = "\n".join(filter(None, [short_description, long_description]))
     if description:

From 034ece0011ece62d905a2b8a163127a365dd6a8c Mon Sep 17 00:00:00 2001
From: Ashwin Bharambe
Date: Wed, 19 Feb 2025 13:54:04 -0800
Subject: [PATCH 13/14] Ensure that deprecations for fields follow through to
 OpenAPI

---
 docs/_static/llama-stack-spec.html  |  6 ++++--
 docs/_static/llama-stack-spec.yaml  |  2 ++
 llama_stack/strong_typing/schema.py | 17 ++++++++++++++++-
 3 files changed, 22 insertions(+), 3 deletions(-)

diff --git a/docs/_static/llama-stack-spec.html b/docs/_static/llama-stack-spec.html
index 82abc947b..2b6e1d11c 100644
--- a/docs/_static/llama-stack-spec.html
+++ b/docs/_static/llama-stack-spec.html
@@ -2702,7 +2702,8 @@
                         "none"
                     ],
                     "title": "ToolChoice",
-                    "description": "Whether tool use is required or automatic. This is a hint to the model which may not be followed. It depends on the Instruction Following capabilities of the model."
+                    "description": "Whether tool use is required or automatic. This is a hint to the model which may not be followed. It depends on the Instruction Following capabilities of the model.",
+                    "deprecated": true
                 },
                 "tool_prompt_format": {
                     "type": "string",
@@ -2712,7 +2713,8 @@
                         "python_list"
                     ],
                     "title": "ToolPromptFormat",
-                    "description": "Prompt format for calling custom / zero shot tools."
+                    "description": "Prompt format for calling custom / zero shot tools.",
+                    "deprecated": true
                 },
                 "tool_config": {
                     "$ref": "#/components/schemas/ToolConfig"
diff --git a/docs/_static/llama-stack-spec.yaml b/docs/_static/llama-stack-spec.yaml
index 4d13ca565..99300fedf 100644
--- a/docs/_static/llama-stack-spec.yaml
+++ b/docs/_static/llama-stack-spec.yaml
@@ -1644,6 +1644,7 @@ components:
           Whether tool use is required or automatic. This is a hint to the model
           which may not be followed. It depends on the Instruction Following
           capabilities of the model.
+        deprecated: true
       tool_prompt_format:
         type: string
         enum:
           - json
           - function_tag
           - python_list
         title: ToolPromptFormat
         description: >-
           Prompt format for calling custom / zero shot tools.
+        deprecated: true
       tool_config:
         $ref: '#/components/schemas/ToolConfig'
       max_infer_iters:
diff --git a/llama_stack/strong_typing/schema.py b/llama_stack/strong_typing/schema.py
index 45c7130ba..dfc51ea78 100644
--- a/llama_stack/strong_typing/schema.py
+++ b/llama_stack/strong_typing/schema.py
@@ -313,6 +313,17 @@ class JsonSchemaGenerator:
         data_type: TypeLike,
         force_expand: bool = False,
         json_schema_extra: Optional[dict] = None,
+    ) -> Schema:
+        common_info = {}
+        if json_schema_extra and "deprecated" in json_schema_extra:
+            common_info["deprecated"] = json_schema_extra["deprecated"]
+        return self._type_to_schema(data_type, force_expand, json_schema_extra) | common_info
+
+    def _type_to_schema(
+        self,
+        data_type: TypeLike,
+        force_expand: bool = False,
+        json_schema_extra: Optional[dict] = None,
     ) -> Schema:
         """
         Returns the JSON schema associated with a type.
@@ -489,7 +500,11 @@ class JsonSchemaGenerator:
         if "model_fields" in members:
             f = members["model_fields"]
             defaults = {k: finfo.default for k, finfo in f.items()}
-            json_schema_extra = f.get(output_name, None).json_schema_extra
+            if output_name in f:
+                finfo = f[output_name]
+                json_schema_extra = finfo.json_schema_extra or {}
+                if finfo.deprecated:
+                    json_schema_extra["deprecated"] = True

         if is_type_optional(property_type):
             optional_type: type = unwrap_optional_type(property_type)
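Note on the change above: here is a minimal sketch of the same idea in plain Pydantic (assuming v2.7+, where `Field(deprecated=...)` exists). A field declared deprecated exposes that flag on its `FieldInfo`, and the patched generator folds it into the field's `json_schema_extra` so it survives into the OpenAPI output. The model and field names below are illustrative only, not part of the Llama Stack API.

```python
from typing import Optional

from pydantic import BaseModel, Field


class ChatRequest(BaseModel):
    """Illustrative stand-in for a Llama Stack request type."""

    model_id: str
    # Deprecated in favor of a hypothetical nested tool_config object.
    tool_choice: Optional[str] = Field(default=None, deprecated=True)


def field_schema_extras(model: type[BaseModel]) -> dict:
    """Mirror the patch's logic: fold each field's deprecated flag into
    its json_schema_extra dict so a schema generator can emit it."""
    extras = {}
    for name, finfo in model.model_fields.items():
        # json_schema_extra may be None or a callable; only copy real dicts.
        extra = finfo.json_schema_extra if isinstance(finfo.json_schema_extra, dict) else {}
        extra = dict(extra)  # copy so we never mutate FieldInfo state
        if finfo.deprecated:
            extra["deprecated"] = True
        extras[name] = extra
    return extras


print(field_schema_extras(ChatRequest))
# -> {'model_id': {}, 'tool_choice': {'deprecated': True}}
```

The other half of the patch is a common wrapper pattern: the public `type_to_schema` delegates to a private `_type_to_schema` and overlays cross-cutting keys such as `deprecated` via dict union (`|`), so per-type schema construction stays unaware of field-level metadata.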
From 9e03df983e780233ee47866b426fed735ca97f7d Mon Sep 17 00:00:00 2001
From: Alessandro Sangiorgi
Date: Wed, 19 Feb 2025 17:37:25 -0600
Subject: [PATCH 14/14] fix(rag-example): add provider_id to avoid
 llama_stack_client 400 error (#1114)

# What does this PR do?

Add a `provider_id` when registering the vector database so the RAG example no
longer fails with a 400 error from `llama_stack_client`:

`llama_stack_client.BadRequestError: Error code: 400 - {'detail': 'Invalid value: No provider specified and multiple providers available. Please specify a provider_id.'}`

[//]: # (If resolving an issue, uncomment and update the line below)
[//]: # (Closes #[issue-number])

## Test Plan

[Describe the tests you ran to verify your changes with result summaries.
*Provide clear instructions so the plan can be easily re-executed.*]

[//]: # (## Documentation)

---------

Co-authored-by: Xi Yan
---
 docs/source/getting_started/index.md | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/docs/source/getting_started/index.md b/docs/source/getting_started/index.md
index b28b9afa3..554f4354a 100644
--- a/docs/source/getting_started/index.md
+++ b/docs/source/getting_started/index.md
@@ -214,10 +214,16 @@ documents = [
     for i, url in enumerate(urls)
 ]

+vector_providers = [
+    provider for provider in client.providers.list() if provider.api == "vector_io"
+]
+provider_id = vector_providers[0].provider_id  # Use the first available vector provider
+
 # Register a vector database
 vector_db_id = f"test-vector-db-{uuid.uuid4().hex}"
 client.vector_dbs.register(
     vector_db_id=vector_db_id,
+    provider_id=provider_id,
     embedding_model="all-MiniLM-L6-v2",
     embedding_dimension=384,
 )
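One caveat about the snippet above: `vector_providers[0]` raises an `IndexError` when the distribution has no `vector_io` provider at all. If you adapt the example, a slightly more defensive lookup can fail with an actionable message instead. This is a sketch against the same `llama_stack_client` objects used in the patch; the helper name is our own, not part of the library.

```python
from typing import Optional


def pick_vector_provider(client, preferred: Optional[str] = None) -> str:
    """Return the provider_id of a registered vector_io provider,
    preferring `preferred` when it is present."""
    vector_providers = [p for p in client.providers.list() if p.api == "vector_io"]
    if not vector_providers:
        raise RuntimeError(
            "No vector_io provider is registered with this distribution; "
            "add one to the run config before registering a vector database."
        )
    if preferred is not None and any(p.provider_id == preferred for p in vector_providers):
        return preferred
    return vector_providers[0].provider_id
```

Calling `provider_id = pick_vector_provider(client)` before `client.vector_dbs.register(...)` keeps the happy path identical to the docs change while turning the empty-provider case into a clear error.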