diff --git a/docs/_static/llama-stack-spec.html b/docs/_static/llama-stack-spec.html
index 17cf92341..2b6e1d11c 100644
--- a/docs/_static/llama-stack-spec.html
+++ b/docs/_static/llama-stack-spec.html
@@ -2661,7 +2661,8 @@
"required": [
"type",
"config"
- ]
+ ],
+ "title": "AgentCandidate"
},
"AgentConfig": {
"type": "object",
@@ -2697,9 +2698,12 @@
"type": "string",
"enum": [
"auto",
- "required"
+ "required",
+ "none"
],
- "description": "Whether tool use is required or automatic. This is a hint to the model which may not be followed. It depends on the Instruction Following capabilities of the model."
+ "title": "ToolChoice",
+ "description": "Whether tool use is required or automatic. This is a hint to the model which may not be followed. It depends on the Instruction Following capabilities of the model.",
+ "deprecated": true
},
"tool_prompt_format": {
"type": "string",
@@ -2708,7 +2712,9 @@
"function_tag",
"python_list"
],
- "description": "Prompt format for calling custom / zero shot tools."
+ "title": "ToolPromptFormat",
+ "description": "Prompt format for calling custom / zero shot tools.",
+ "deprecated": true
},
"tool_config": {
"$ref": "#/components/schemas/ToolConfig"
@@ -2735,7 +2741,8 @@
"required": [
"model",
"instructions"
- ]
+ ],
+ "title": "AgentConfig"
},
"AgentTool": {
"oneOf": [
@@ -2778,7 +2785,8 @@
"required": [
"name",
"args"
- ]
+ ],
+ "title": "AgentToolGroupWithArgs"
}
]
},
@@ -2789,7 +2797,8 @@
"median",
"categorical_count",
"accuracy"
- ]
+ ],
+ "title": "AggregationFunctionType"
},
"BasicScoringFnParams": {
"type": "object",
@@ -2809,7 +2818,8 @@
"additionalProperties": false,
"required": [
"type"
- ]
+ ],
+ "title": "BasicScoringFnParams"
},
"BenchmarkConfig": {
"type": "object",
@@ -2837,7 +2847,8 @@
"type",
"eval_candidate",
"scoring_params"
- ]
+ ],
+ "title": "BenchmarkConfig"
},
"EvalCandidate": {
"oneOf": [
@@ -2897,6 +2908,7 @@
"type",
"bnf"
],
+ "title": "GrammarResponseFormat",
"description": "Configuration for grammar-guided response generation."
},
"GreedySamplingStrategy": {
@@ -2911,7 +2923,8 @@
"additionalProperties": false,
"required": [
"type"
- ]
+ ],
+ "title": "GreedySamplingStrategy"
},
"ImageContentItem": {
"type": "object",
@@ -2944,6 +2957,7 @@
"type",
"image"
],
+ "title": "ImageContentItem",
"description": "A image content item"
},
"InterleavedContent": {
@@ -3020,6 +3034,7 @@
"type",
"json_schema"
],
+ "title": "JsonSchemaResponseFormat",
"description": "Configuration for JSON schema-guided response generation."
},
"LLMAsJudgeScoringFnParams": {
@@ -3053,7 +3068,8 @@
"required": [
"type",
"judge_model"
- ]
+ ],
+ "title": "LLMAsJudgeScoringFnParams"
},
"ModelCandidate": {
"type": "object",
@@ -3078,7 +3094,8 @@
"type",
"model",
"sampling_params"
- ]
+ ],
+ "title": "ModelCandidate"
},
"RegexParserScoringFnParams": {
"type": "object",
@@ -3104,7 +3121,8 @@
"additionalProperties": false,
"required": [
"type"
- ]
+ ],
+ "title": "RegexParserScoringFnParams"
},
"ResponseFormat": {
"oneOf": [
@@ -3141,7 +3159,8 @@
"additionalProperties": false,
"required": [
"strategy"
- ]
+ ],
+ "title": "SamplingParams"
},
"SamplingStrategy": {
"oneOf": [
@@ -3204,6 +3223,7 @@
"role",
"content"
],
+ "title": "SystemMessage",
"description": "A system message providing instructions or context to the model."
},
"TextContentItem": {
@@ -3225,19 +3245,30 @@
"type",
"text"
],
+ "title": "TextContentItem",
"description": "A text content item"
},
"ToolConfig": {
"type": "object",
"properties": {
"tool_choice": {
- "type": "string",
- "enum": [
- "auto",
- "required"
+ "oneOf": [
+ {
+ "type": "string",
+ "enum": [
+ "auto",
+ "required",
+ "none"
+ ],
+ "title": "ToolChoice",
+ "description": "Whether tool use is required or automatic. This is a hint to the model which may not be followed. It depends on the Instruction Following capabilities of the model."
+ },
+ {
+ "type": "string"
+ }
],
- "description": "(Optional) Whether tool use is required or automatic. Defaults to ToolChoice.auto.",
- "default": "auto"
+ "default": "auto",
+ "description": "(Optional) Whether tool use is automatic, required, or none. Can also specify a tool name to use a specific tool. Defaults to ToolChoice.auto."
},
"tool_prompt_format": {
"type": "string",
@@ -3259,9 +3290,7 @@
}
},
"additionalProperties": false,
- "required": [
- "system_message_behavior"
- ],
+ "title": "ToolConfig",
"description": "Configuration for tool use."
},
"ToolDef": {
@@ -3308,7 +3337,8 @@
"additionalProperties": false,
"required": [
"name"
- ]
+ ],
+ "title": "ToolDef"
},
"ToolParameter": {
"type": "object",
@@ -3355,7 +3385,8 @@
"parameter_type",
"description",
"required"
- ]
+ ],
+ "title": "ToolParameter"
},
"TopKSamplingStrategy": {
"type": "object",
@@ -3373,7 +3404,8 @@
"required": [
"type",
"top_k"
- ]
+ ],
+ "title": "TopKSamplingStrategy"
},
"TopPSamplingStrategy": {
"type": "object",
@@ -3394,7 +3426,8 @@
"additionalProperties": false,
"required": [
"type"
- ]
+ ],
+ "title": "TopPSamplingStrategy"
},
"URL": {
"type": "object",
@@ -3406,7 +3439,8 @@
"additionalProperties": false,
"required": [
"uri"
- ]
+ ],
+ "title": "URL"
},
"DeprecatedEvaluateRowsRequest": {
"type": "object",
@@ -3454,7 +3488,8 @@
"input_rows",
"scoring_functions",
"task_config"
- ]
+ ],
+ "title": "DeprecatedEvaluateRowsRequest"
},
"EvaluateResponse": {
"type": "object",
@@ -3498,7 +3533,8 @@
"required": [
"generations",
"scores"
- ]
+ ],
+ "title": "EvaluateResponse"
},
"ScoringResult": {
"type": "object",
@@ -3561,7 +3597,8 @@
"required": [
"score_rows",
"aggregated_results"
- ]
+ ],
+ "title": "ScoringResult"
},
"Benchmark": {
"type": "object",
@@ -3624,7 +3661,8 @@
"dataset_id",
"scoring_functions",
"metadata"
- ]
+ ],
+ "title": "Benchmark"
},
"JobStatus": {
"type": "string",
@@ -3633,7 +3671,8 @@
"in_progress",
"failed",
"scheduled"
- ]
+ ],
+ "title": "JobStatus"
},
"ListBenchmarksResponse": {
"type": "object",
@@ -3648,7 +3687,8 @@
"additionalProperties": false,
"required": [
"data"
- ]
+ ],
+ "title": "ListBenchmarksResponse"
},
"DeprecatedRegisterEvalTaskRequest": {
"type": "object",
@@ -3702,7 +3742,8 @@
"eval_task_id",
"dataset_id",
"scoring_functions"
- ]
+ ],
+ "title": "DeprecatedRegisterEvalTaskRequest"
},
"DeprecatedRunEvalRequest": {
"type": "object",
@@ -3714,7 +3755,8 @@
"additionalProperties": false,
"required": [
"task_config"
- ]
+ ],
+ "title": "DeprecatedRunEvalRequest"
},
"Job": {
"type": "object",
@@ -3726,7 +3768,8 @@
"additionalProperties": false,
"required": [
"job_id"
- ]
+ ],
+ "title": "Job"
},
"AppendRowsRequest": {
"type": "object",
@@ -3767,7 +3810,8 @@
"required": [
"dataset_id",
"rows"
- ]
+ ],
+ "title": "AppendRowsRequest"
},
"CompletionMessage": {
"type": "object",
@@ -3805,6 +3849,7 @@
"content",
"stop_reason"
],
+ "title": "CompletionMessage",
"description": "A message containing the model's (assistant) response in a chat conversation."
},
"Message": {
@@ -3847,7 +3892,8 @@
"wolfram_alpha",
"photogen",
"code_interpreter"
- ]
+ ],
+ "title": "BuiltinTool"
},
{
"type": "string"
@@ -3926,7 +3972,8 @@
"call_id",
"tool_name",
"arguments"
- ]
+ ],
+ "title": "ToolCall"
},
"ToolDefinition": {
"type": "object",
@@ -3940,7 +3987,8 @@
"wolfram_alpha",
"photogen",
"code_interpreter"
- ]
+ ],
+ "title": "BuiltinTool"
},
{
"type": "string"
@@ -3960,7 +4008,8 @@
"additionalProperties": false,
"required": [
"tool_name"
- ]
+ ],
+ "title": "ToolDefinition"
},
"ToolParamDefinition": {
"type": "object",
@@ -4001,7 +4050,8 @@
"additionalProperties": false,
"required": [
"param_type"
- ]
+ ],
+ "title": "ToolParamDefinition"
},
"ToolResponseMessage": {
"type": "object",
@@ -4025,7 +4075,8 @@
"wolfram_alpha",
"photogen",
"code_interpreter"
- ]
+ ],
+ "title": "BuiltinTool"
},
{
"type": "string"
@@ -4045,6 +4096,7 @@
"tool_name",
"content"
],
+ "title": "ToolResponseMessage",
"description": "A message representing the result of a tool invocation."
},
"UserMessage": {
@@ -4070,6 +4122,7 @@
"role",
"content"
],
+ "title": "UserMessage",
"description": "A message from the user in a chat conversation."
},
"BatchChatCompletionRequest": {
@@ -4100,8 +4153,10 @@
"type": "string",
"enum": [
"auto",
- "required"
+ "required",
+ "none"
],
+ "title": "ToolChoice",
"description": "Whether tool use is required or automatic. This is a hint to the model which may not be followed. It depends on the Instruction Following capabilities of the model."
},
"tool_prompt_format": {
@@ -4111,6 +4166,7 @@
"function_tag",
"python_list"
],
+ "title": "ToolPromptFormat",
"description": "Prompt format for calling custom / zero shot tools."
},
"response_format": {
@@ -4125,14 +4181,16 @@
"description": "How many tokens (for each position) to return log probabilities for."
}
},
- "additionalProperties": false
+ "additionalProperties": false,
+ "title": "LogProbConfig"
}
},
"additionalProperties": false,
"required": [
"model",
"messages_batch"
- ]
+ ],
+ "title": "BatchChatCompletionRequest"
},
"BatchChatCompletionResponse": {
"type": "object",
@@ -4147,7 +4205,8 @@
"additionalProperties": false,
"required": [
"batch"
- ]
+ ],
+ "title": "BatchChatCompletionResponse"
},
"ChatCompletionResponse": {
"type": "object",
@@ -4174,6 +4233,7 @@
"required": [
"completion_message"
],
+ "title": "ChatCompletionResponse",
"description": "Response from a chat completion request."
},
"MetricEvent": {
@@ -4242,7 +4302,8 @@
"metric",
"value",
"unit"
- ]
+ ],
+ "title": "MetricEvent"
},
"TokenLogProbs": {
"type": "object",
@@ -4259,6 +4320,7 @@
"required": [
"logprobs_by_token"
],
+ "title": "TokenLogProbs",
"description": "Log probabilities for generated tokens."
},
"BatchCompletionRequest": {
@@ -4288,14 +4350,16 @@
"description": "How many tokens (for each position) to return log probabilities for."
}
},
- "additionalProperties": false
+ "additionalProperties": false,
+ "title": "LogProbConfig"
}
},
"additionalProperties": false,
"required": [
"model",
"content_batch"
- ]
+ ],
+ "title": "BatchCompletionRequest"
},
"BatchCompletionResponse": {
"type": "object",
@@ -4310,7 +4374,8 @@
"additionalProperties": false,
"required": [
"batch"
- ]
+ ],
+ "title": "BatchCompletionResponse"
},
"CompletionResponse": {
"type": "object",
@@ -4341,6 +4406,7 @@
"content",
"stop_reason"
],
+ "title": "CompletionResponse",
"description": "Response from a completion request."
},
"CancelTrainingJobRequest": {
@@ -4353,7 +4419,8 @@
"additionalProperties": false,
"required": [
"job_uuid"
- ]
+ ],
+ "title": "CancelTrainingJobRequest"
},
"ChatCompletionRequest": {
"type": "object",
@@ -4384,7 +4451,8 @@
"type": "string",
"enum": [
"auto",
- "required"
+ "required",
+ "none"
],
"description": "(Optional) Whether tool use is required or automatic. Defaults to ToolChoice.auto. .. deprecated:: Use tool_config instead."
},
@@ -4426,7 +4494,8 @@
"required": [
"model_id",
"messages"
- ]
+ ],
+ "title": "ChatCompletionRequest"
},
"ChatCompletionResponseEvent": {
"type": "object",
@@ -4466,6 +4535,7 @@
"event_type",
"delta"
],
+ "title": "ChatCompletionResponseEvent",
"description": "An event during chat completion generation."
},
"ChatCompletionResponseStreamChunk": {
@@ -4486,6 +4556,7 @@
"required": [
"event"
],
+ "title": "ChatCompletionResponseStreamChunk",
"description": "A chunk of a streamed chat completion response."
},
"ContentDelta": {
@@ -4526,7 +4597,8 @@
"required": [
"type",
"image"
- ]
+ ],
+ "title": "ImageDelta"
},
"TextDelta": {
"type": "object",
@@ -4544,7 +4616,8 @@
"required": [
"type",
"text"
- ]
+ ],
+ "title": "TextDelta"
},
"ToolCallDelta": {
"type": "object",
@@ -4571,7 +4644,8 @@
"in_progress",
"failed",
"succeeded"
- ]
+ ],
+ "title": "ToolCallParseStatus"
}
},
"additionalProperties": false,
@@ -4579,7 +4653,8 @@
"type",
"tool_call",
"parse_status"
- ]
+ ],
+ "title": "ToolCallDelta"
},
"CompletionRequest": {
"type": "object",
@@ -4621,7 +4696,8 @@
"required": [
"model_id",
"content"
- ]
+ ],
+ "title": "CompletionRequest"
},
"CompletionResponseStreamChunk": {
"type": "object",
@@ -4651,6 +4727,7 @@
"required": [
"delta"
],
+ "title": "CompletionResponseStreamChunk",
"description": "A chunk of a streamed completion response."
},
"CreateAgentRequest": {
@@ -4663,7 +4740,8 @@
"additionalProperties": false,
"required": [
"agent_config"
- ]
+ ],
+ "title": "CreateAgentRequest"
},
"AgentCreateResponse": {
"type": "object",
@@ -4675,7 +4753,8 @@
"additionalProperties": false,
"required": [
"agent_id"
- ]
+ ],
+ "title": "AgentCreateResponse"
},
"CreateAgentSessionRequest": {
"type": "object",
@@ -4687,7 +4766,8 @@
"additionalProperties": false,
"required": [
"session_name"
- ]
+ ],
+ "title": "CreateAgentSessionRequest"
},
"AgentSessionCreateResponse": {
"type": "object",
@@ -4699,7 +4779,8 @@
"additionalProperties": false,
"required": [
"session_id"
- ]
+ ],
+ "title": "AgentSessionCreateResponse"
},
"CreateAgentTurnRequest": {
"type": "object",
@@ -4752,7 +4833,8 @@
"required": [
"content",
"mime_type"
- ]
+ ],
+ "title": "Document"
}
},
"toolgroups": {
@@ -4768,7 +4850,8 @@
"additionalProperties": false,
"required": [
"messages"
- ]
+ ],
+ "title": "CreateAgentTurnRequest"
},
"InferenceStep": {
"type": "object",
@@ -4802,7 +4885,8 @@
"step_id",
"step_type",
"model_response"
- ]
+ ],
+ "title": "InferenceStep"
},
"MemoryRetrievalStep": {
"type": "object",
@@ -4840,7 +4924,8 @@
"step_type",
"vector_db_ids",
"inserted_context"
- ]
+ ],
+ "title": "MemoryRetrievalStep"
},
"SafetyViolation": {
"type": "object",
@@ -4881,7 +4966,8 @@
"required": [
"violation_level",
"metadata"
- ]
+ ],
+ "title": "SafetyViolation"
},
"ShieldCallStep": {
"type": "object",
@@ -4914,7 +5000,8 @@
"turn_id",
"step_id",
"step_type"
- ]
+ ],
+ "title": "ShieldCallStep"
},
"ToolExecutionStep": {
"type": "object",
@@ -4958,7 +5045,8 @@
"step_type",
"tool_calls",
"tool_responses"
- ]
+ ],
+ "title": "ToolExecutionStep"
},
"ToolResponse": {
"type": "object",
@@ -4975,7 +5063,8 @@
"wolfram_alpha",
"photogen",
"code_interpreter"
- ]
+ ],
+ "title": "BuiltinTool"
},
{
"type": "string"
@@ -4991,7 +5080,8 @@
"call_id",
"tool_name",
"content"
- ]
+ ],
+ "title": "ToolResponse"
},
"Turn": {
"type": "object",
@@ -5078,7 +5168,8 @@
"required": [
"content",
"mime_type"
- ]
+ ],
+ "title": "Attachment"
}
},
"started_at": {
@@ -5099,6 +5190,7 @@
"output_message",
"started_at"
],
+ "title": "Turn",
"description": "A single turn in an interaction with an Agentic System."
},
"ViolationLevel": {
@@ -5107,7 +5199,8 @@
"info",
"warn",
"error"
- ]
+ ],
+ "title": "ViolationLevel"
},
"AgentTurnResponseEvent": {
"type": "object",
@@ -5119,7 +5212,8 @@
"additionalProperties": false,
"required": [
"payload"
- ]
+ ],
+ "title": "AgentTurnResponseEvent"
},
"AgentTurnResponseEventPayload": {
"oneOf": [
@@ -5165,7 +5259,8 @@
"tool_execution",
"shield_call",
"memory_retrieval"
- ]
+ ],
+ "title": "StepType"
},
"step_id": {
"type": "string"
@@ -5202,7 +5297,8 @@
"step_type",
"step_id",
"step_details"
- ]
+ ],
+ "title": "AgentTurnResponseStepCompletePayload"
},
"AgentTurnResponseStepProgressPayload": {
"type": "object",
@@ -5219,7 +5315,8 @@
"tool_execution",
"shield_call",
"memory_retrieval"
- ]
+ ],
+ "title": "StepType"
},
"step_id": {
"type": "string"
@@ -5234,7 +5331,8 @@
"step_type",
"step_id",
"delta"
- ]
+ ],
+ "title": "AgentTurnResponseStepProgressPayload"
},
"AgentTurnResponseStepStartPayload": {
"type": "object",
@@ -5251,7 +5349,8 @@
"tool_execution",
"shield_call",
"memory_retrieval"
- ]
+ ],
+ "title": "StepType"
},
"step_id": {
"type": "string"
@@ -5287,7 +5386,8 @@
"event_type",
"step_type",
"step_id"
- ]
+ ],
+ "title": "AgentTurnResponseStepStartPayload"
},
"AgentTurnResponseStreamChunk": {
"type": "object",
@@ -5300,6 +5400,7 @@
"required": [
"event"
],
+ "title": "AgentTurnResponseStreamChunk",
"description": "streamed agent turn completion response."
},
"AgentTurnResponseTurnCompletePayload": {
@@ -5318,7 +5419,8 @@
"required": [
"event_type",
"turn"
- ]
+ ],
+ "title": "AgentTurnResponseTurnCompletePayload"
},
"AgentTurnResponseTurnStartPayload": {
"type": "object",
@@ -5336,7 +5438,8 @@
"required": [
"event_type",
"turn_id"
- ]
+ ],
+ "title": "AgentTurnResponseTurnStartPayload"
},
"EmbeddingsRequest": {
"type": "object",
@@ -5357,7 +5460,8 @@
"required": [
"model_id",
"contents"
- ]
+ ],
+ "title": "EmbeddingsRequest"
},
"EmbeddingsResponse": {
"type": "object",
@@ -5377,6 +5481,7 @@
"required": [
"embeddings"
],
+ "title": "EmbeddingsResponse",
"description": "Response containing generated embeddings."
},
"EvaluateRowsRequest": {
@@ -5425,7 +5530,8 @@
"input_rows",
"scoring_functions",
"task_config"
- ]
+ ],
+ "title": "EvaluateRowsRequest"
},
"Session": {
"type": "object",
@@ -5454,6 +5560,7 @@
"turns",
"started_at"
],
+ "title": "Session",
"description": "A single session of an interaction with an Agentic System."
},
"AgentStepResponse": {
@@ -5488,7 +5595,8 @@
"additionalProperties": false,
"required": [
"step"
- ]
+ ],
+ "title": "AgentStepResponse"
},
"AgentTurnInputType": {
"type": "object",
@@ -5502,7 +5610,8 @@
"additionalProperties": false,
"required": [
"type"
- ]
+ ],
+ "title": "AgentTurnInputType"
},
"ArrayType": {
"type": "object",
@@ -5516,7 +5625,8 @@
"additionalProperties": false,
"required": [
"type"
- ]
+ ],
+ "title": "ArrayType"
},
"BooleanType": {
"type": "object",
@@ -5530,7 +5640,8 @@
"additionalProperties": false,
"required": [
"type"
- ]
+ ],
+ "title": "BooleanType"
},
"ChatCompletionInputType": {
"type": "object",
@@ -5544,7 +5655,8 @@
"additionalProperties": false,
"required": [
"type"
- ]
+ ],
+ "title": "ChatCompletionInputType"
},
"CompletionInputType": {
"type": "object",
@@ -5558,7 +5670,8 @@
"additionalProperties": false,
"required": [
"type"
- ]
+ ],
+ "title": "CompletionInputType"
},
"Dataset": {
"type": "object",
@@ -5621,7 +5734,8 @@
"dataset_schema",
"url",
"metadata"
- ]
+ ],
+ "title": "Dataset"
},
"JsonType": {
"type": "object",
@@ -5635,7 +5749,8 @@
"additionalProperties": false,
"required": [
"type"
- ]
+ ],
+ "title": "JsonType"
},
"NumberType": {
"type": "object",
@@ -5649,7 +5764,8 @@
"additionalProperties": false,
"required": [
"type"
- ]
+ ],
+ "title": "NumberType"
},
"ObjectType": {
"type": "object",
@@ -5663,7 +5779,8 @@
"additionalProperties": false,
"required": [
"type"
- ]
+ ],
+ "title": "ObjectType"
},
"ParamType": {
"oneOf": [
@@ -5726,7 +5843,8 @@
"additionalProperties": false,
"required": [
"type"
- ]
+ ],
+ "title": "StringType"
},
"UnionType": {
"type": "object",
@@ -5740,7 +5858,8 @@
"additionalProperties": false,
"required": [
"type"
- ]
+ ],
+ "title": "UnionType"
},
"Model": {
"type": "object",
@@ -5797,14 +5916,16 @@
"type",
"metadata",
"model_type"
- ]
+ ],
+ "title": "Model"
},
"ModelType": {
"type": "string",
"enum": [
"llm",
"embedding"
- ]
+ ],
+ "title": "ModelType"
},
"PaginatedRowsResult": {
"type": "object",
@@ -5848,7 +5969,8 @@
"required": [
"rows",
"total_count"
- ]
+ ],
+ "title": "PaginatedRowsResult"
},
"ScoringFn": {
"type": "object",
@@ -5910,7 +6032,8 @@
"type",
"metadata",
"return_type"
- ]
+ ],
+ "title": "ScoringFn"
},
"Shield": {
"type": "object",
@@ -5962,6 +6085,7 @@
"provider_id",
"type"
],
+ "title": "Shield",
"description": "A safety shield resource that can be used to check content"
},
"Span": {
@@ -6019,14 +6143,16 @@
"trace_id",
"name",
"start_time"
- ]
+ ],
+ "title": "Span"
},
"SpanStatus": {
"type": "string",
"enum": [
"ok",
"error"
- ]
+ ],
+ "title": "SpanStatus"
},
"SpanWithStatus": {
"type": "object",
@@ -6086,7 +6212,8 @@
"trace_id",
"name",
"start_time"
- ]
+ ],
+ "title": "SpanWithStatus"
},
"QuerySpanTreeResponse": {
"type": "object",
@@ -6101,7 +6228,8 @@
"additionalProperties": false,
"required": [
"data"
- ]
+ ],
+ "title": "QuerySpanTreeResponse"
},
"Tool": {
"type": "object",
@@ -6171,7 +6299,8 @@
"tool_host",
"description",
"parameters"
- ]
+ ],
+ "title": "Tool"
},
"ToolHost": {
"type": "string",
@@ -6179,7 +6308,8 @@
"distribution",
"client",
"model_context_protocol"
- ]
+ ],
+ "title": "ToolHost"
},
"ToolGroup": {
"type": "object",
@@ -6233,7 +6363,8 @@
"provider_resource_id",
"provider_id",
"type"
- ]
+ ],
+ "title": "ToolGroup"
},
"Trace": {
"type": "object",
@@ -6258,10 +6389,12 @@
"trace_id",
"root_span_id",
"start_time"
- ]
+ ],
+ "title": "Trace"
},
"Checkpoint": {
- "description": "Checkpoint created during training runs"
+ "description": "Checkpoint created during training runs",
+ "title": "Checkpoint"
},
"PostTrainingJobArtifactsResponse": {
"type": "object",
@@ -6281,6 +6414,7 @@
"job_uuid",
"checkpoints"
],
+ "title": "PostTrainingJobArtifactsResponse",
"description": "Artifacts of a finetuning job."
},
"PostTrainingJobStatusResponse": {
@@ -6342,6 +6476,7 @@
"status",
"checkpoints"
],
+ "title": "PostTrainingJobStatusResponse",
"description": "Status of a finetuning job."
},
"ListPostTrainingJobsResponse": {
@@ -6359,14 +6494,16 @@
"additionalProperties": false,
"required": [
"job_uuid"
- ]
+ ],
+ "title": "PostTrainingJob"
}
}
},
"additionalProperties": false,
"required": [
"data"
- ]
+ ],
+ "title": "ListPostTrainingJobsResponse"
},
"VectorDB": {
"type": "object",
@@ -6400,7 +6537,8 @@
"type",
"embedding_model",
"embedding_dimension"
- ]
+ ],
+ "title": "VectorDB"
},
"HealthInfo": {
"type": "object",
@@ -6412,7 +6550,8 @@
"additionalProperties": false,
"required": [
"status"
- ]
+ ],
+ "title": "HealthInfo"
},
"RAGDocument": {
"type": "object",
@@ -6473,7 +6612,8 @@
"document_id",
"content",
"metadata"
- ]
+ ],
+ "title": "RAGDocument"
},
"InsertRequest": {
"type": "object",
@@ -6496,7 +6636,8 @@
"documents",
"vector_db_id",
"chunk_size_in_tokens"
- ]
+ ],
+ "title": "InsertRequest"
},
"InsertChunksRequest": {
"type": "object",
@@ -6542,7 +6683,8 @@
"required": [
"content",
"metadata"
- ]
+ ],
+ "title": "Chunk"
}
},
"ttl_seconds": {
@@ -6553,7 +6695,8 @@
"required": [
"vector_db_id",
"chunks"
- ]
+ ],
+ "title": "InsertChunksRequest"
},
"InvokeToolRequest": {
"type": "object",
@@ -6591,7 +6734,8 @@
"required": [
"tool_name",
"kwargs"
- ]
+ ],
+ "title": "InvokeToolRequest"
},
"ToolInvocationResult": {
"type": "object",
@@ -6609,7 +6753,8 @@
"additionalProperties": false,
"required": [
"content"
- ]
+ ],
+ "title": "ToolInvocationResult"
},
"ListDatasetsResponse": {
"type": "object",
@@ -6624,7 +6769,8 @@
"additionalProperties": false,
"required": [
"data"
- ]
+ ],
+ "title": "ListDatasetsResponse"
},
"ListModelsResponse": {
"type": "object",
@@ -6639,7 +6785,8 @@
"additionalProperties": false,
"required": [
"data"
- ]
+ ],
+ "title": "ListModelsResponse"
},
"ProviderInfo": {
"type": "object",
@@ -6659,7 +6806,8 @@
"api",
"provider_id",
"provider_type"
- ]
+ ],
+ "title": "ProviderInfo"
},
"ListProvidersResponse": {
"type": "object",
@@ -6674,7 +6822,8 @@
"additionalProperties": false,
"required": [
"data"
- ]
+ ],
+ "title": "ListProvidersResponse"
},
"RouteInfo": {
"type": "object",
@@ -6697,7 +6846,8 @@
"route",
"method",
"provider_types"
- ]
+ ],
+ "title": "RouteInfo"
},
"ListRoutesResponse": {
"type": "object",
@@ -6712,7 +6862,8 @@
"additionalProperties": false,
"required": [
"data"
- ]
+ ],
+ "title": "ListRoutesResponse"
},
"ListScoringFunctionsResponse": {
"type": "object",
@@ -6727,7 +6878,8 @@
"additionalProperties": false,
"required": [
"data"
- ]
+ ],
+ "title": "ListScoringFunctionsResponse"
},
"ListShieldsResponse": {
"type": "object",
@@ -6742,7 +6894,8 @@
"additionalProperties": false,
"required": [
"data"
- ]
+ ],
+ "title": "ListShieldsResponse"
},
"ListToolGroupsResponse": {
"type": "object",
@@ -6757,7 +6910,8 @@
"additionalProperties": false,
"required": [
"data"
- ]
+ ],
+ "title": "ListToolGroupsResponse"
},
"ListToolsResponse": {
"type": "object",
@@ -6772,7 +6926,8 @@
"additionalProperties": false,
"required": [
"data"
- ]
+ ],
+ "title": "ListToolsResponse"
},
"ListVectorDBsResponse": {
"type": "object",
@@ -6787,7 +6942,8 @@
"additionalProperties": false,
"required": [
"data"
- ]
+ ],
+ "title": "ListVectorDBsResponse"
},
"Event": {
"oneOf": [
@@ -6819,7 +6975,8 @@
"warn",
"error",
"critical"
- ]
+ ],
+ "title": "LogSeverity"
},
"SpanEndPayload": {
"type": "object",
@@ -6837,7 +6994,8 @@
"required": [
"type",
"status"
- ]
+ ],
+ "title": "SpanEndPayload"
},
"SpanStartPayload": {
"type": "object",
@@ -6858,7 +7016,8 @@
"required": [
"type",
"name"
- ]
+ ],
+ "title": "SpanStartPayload"
},
"StructuredLogEvent": {
"type": "object",
@@ -6911,7 +7070,8 @@
"timestamp",
"type",
"payload"
- ]
+ ],
+ "title": "StructuredLogEvent"
},
"StructuredLogPayload": {
"oneOf": [
@@ -6985,7 +7145,8 @@
"type",
"message",
"severity"
- ]
+ ],
+ "title": "UnstructuredLogEvent"
},
"LogEventRequest": {
"type": "object",
@@ -7001,7 +7162,8 @@
"required": [
"event",
"ttl_seconds"
- ]
+ ],
+ "title": "LogEventRequest"
},
"DPOAlignmentConfig": {
"type": "object",
@@ -7025,7 +7187,8 @@
"reward_clip",
"epsilon",
"gamma"
- ]
+ ],
+ "title": "DPOAlignmentConfig"
},
"DataConfig": {
"type": "object",
@@ -7060,14 +7223,16 @@
"batch_size",
"shuffle",
"data_format"
- ]
+ ],
+ "title": "DataConfig"
},
"DatasetFormat": {
"type": "string",
"enum": [
"instruct",
"dialog"
- ]
+ ],
+ "title": "DatasetFormat"
},
"EfficiencyConfig": {
"type": "object",
@@ -7089,7 +7254,8 @@
"default": false
}
},
- "additionalProperties": false
+ "additionalProperties": false,
+ "title": "EfficiencyConfig"
},
"OptimizerConfig": {
"type": "object",
@@ -7113,7 +7279,8 @@
"lr",
"weight_decay",
"num_warmup_steps"
- ]
+ ],
+ "title": "OptimizerConfig"
},
"OptimizerType": {
"type": "string",
@@ -7121,7 +7288,8 @@
"adam",
"adamw",
"sgd"
- ]
+ ],
+ "title": "OptimizerType"
},
"TrainingConfig": {
"type": "object",
@@ -7160,7 +7328,8 @@
"max_validation_steps",
"data_config",
"optimizer_config"
- ]
+ ],
+ "title": "TrainingConfig"
},
"PreferenceOptimizeRequest": {
"type": "object",
@@ -7236,7 +7405,8 @@
"training_config",
"hyperparam_search_config",
"logger_config"
- ]
+ ],
+ "title": "PreferenceOptimizeRequest"
},
"PostTrainingJob": {
"type": "object",
@@ -7248,7 +7418,8 @@
"additionalProperties": false,
"required": [
"job_uuid"
- ]
+ ],
+ "title": "PostTrainingJob"
},
"DefaultRAGQueryGeneratorConfig": {
"type": "object",
@@ -7267,7 +7438,8 @@
"required": [
"type",
"separator"
- ]
+ ],
+ "title": "DefaultRAGQueryGeneratorConfig"
},
"LLMRAGQueryGeneratorConfig": {
"type": "object",
@@ -7289,7 +7461,8 @@
"type",
"model",
"template"
- ]
+ ],
+ "title": "LLMRAGQueryGeneratorConfig"
},
"RAGQueryConfig": {
"type": "object",
@@ -7311,7 +7484,8 @@
"query_generator_config",
"max_tokens_in_context",
"max_chunks"
- ]
+ ],
+ "title": "RAGQueryConfig"
},
"RAGQueryGeneratorConfig": {
"oneOf": [
@@ -7350,7 +7524,8 @@
"required": [
"content",
"vector_db_ids"
- ]
+ ],
+ "title": "QueryRequest"
},
"RAGQueryResult": {
"type": "object",
@@ -7359,7 +7534,8 @@
"$ref": "#/components/schemas/InterleavedContent"
}
},
- "additionalProperties": false
+ "additionalProperties": false,
+ "title": "RAGQueryResult"
},
"QueryChunksRequest": {
"type": "object",
@@ -7400,7 +7576,8 @@
"required": [
"vector_db_id",
"query"
- ]
+ ],
+ "title": "QueryChunksRequest"
},
"QueryChunksResponse": {
"type": "object",
@@ -7443,7 +7620,8 @@
"required": [
"content",
"metadata"
- ]
+ ],
+ "title": "Chunk"
}
},
"scores": {
@@ -7457,7 +7635,8 @@
"required": [
"chunks",
"scores"
- ]
+ ],
+ "title": "QueryChunksResponse"
},
"QueryCondition": {
"type": "object",
@@ -7496,7 +7675,8 @@
"key",
"op",
"value"
- ]
+ ],
+ "title": "QueryCondition"
},
"QueryConditionOp": {
"type": "string",
@@ -7505,7 +7685,8 @@
"ne",
"gt",
"lt"
- ]
+ ],
+ "title": "QueryConditionOp"
},
"QuerySpansResponse": {
"type": "object",
@@ -7520,7 +7701,8 @@
"additionalProperties": false,
"required": [
"data"
- ]
+ ],
+ "title": "QuerySpansResponse"
},
"QueryTracesResponse": {
"type": "object",
@@ -7535,7 +7717,8 @@
"additionalProperties": false,
"required": [
"data"
- ]
+ ],
+ "title": "QueryTracesResponse"
},
"RegisterBenchmarkRequest": {
"type": "object",
@@ -7589,7 +7772,8 @@
"benchmark_id",
"dataset_id",
"scoring_functions"
- ]
+ ],
+ "title": "RegisterBenchmarkRequest"
},
"RegisterDatasetRequest": {
"type": "object",
@@ -7643,7 +7827,8 @@
"dataset_id",
"dataset_schema",
"url"
- ]
+ ],
+ "title": "RegisterDatasetRequest"
},
"RegisterModelRequest": {
"type": "object",
@@ -7689,7 +7874,8 @@
"additionalProperties": false,
"required": [
"model_id"
- ]
+ ],
+ "title": "RegisterModelRequest"
},
"RegisterScoringFunctionRequest": {
"type": "object",
@@ -7718,7 +7904,8 @@
"scoring_fn_id",
"description",
"return_type"
- ]
+ ],
+ "title": "RegisterScoringFunctionRequest"
},
"RegisterShieldRequest": {
"type": "object",
@@ -7761,7 +7948,8 @@
"additionalProperties": false,
"required": [
"shield_id"
- ]
+ ],
+ "title": "RegisterShieldRequest"
},
"RegisterToolGroupRequest": {
"type": "object",
@@ -7805,7 +7993,8 @@
"required": [
"toolgroup_id",
"provider_id"
- ]
+ ],
+ "title": "RegisterToolGroupRequest"
},
"RegisterVectorDbRequest": {
"type": "object",
@@ -7830,7 +8019,8 @@
"required": [
"vector_db_id",
"embedding_model"
- ]
+ ],
+ "title": "RegisterVectorDbRequest"
},
"RunEvalRequest": {
"type": "object",
@@ -7842,7 +8032,8 @@
"additionalProperties": false,
"required": [
"task_config"
- ]
+ ],
+ "title": "RunEvalRequest"
},
"RunShieldRequest": {
"type": "object",
@@ -7887,7 +8078,8 @@
"shield_id",
"messages",
"params"
- ]
+ ],
+ "title": "RunShieldRequest"
},
"RunShieldResponse": {
"type": "object",
@@ -7896,7 +8088,8 @@
"$ref": "#/components/schemas/SafetyViolation"
}
},
- "additionalProperties": false
+ "additionalProperties": false,
+ "title": "RunShieldResponse"
},
"SaveSpansToDatasetRequest": {
"type": "object",
@@ -7925,7 +8118,8 @@
"attribute_filters",
"attributes_to_save",
"dataset_id"
- ]
+ ],
+ "title": "SaveSpansToDatasetRequest"
},
"ScoreRequest": {
"type": "object",
@@ -7976,7 +8170,8 @@
"required": [
"input_rows",
"scoring_functions"
- ]
+ ],
+ "title": "ScoreRequest"
},
"ScoreResponse": {
"type": "object",
@@ -7991,7 +8186,8 @@
"additionalProperties": false,
"required": [
"results"
- ]
+ ],
+ "title": "ScoreResponse"
},
"ScoreBatchRequest": {
"type": "object",
@@ -8021,7 +8217,8 @@
"dataset_id",
"scoring_functions",
"save_results_dataset"
- ]
+ ],
+ "title": "ScoreBatchRequest"
},
"ScoreBatchResponse": {
"type": "object",
@@ -8039,7 +8236,8 @@
"additionalProperties": false,
"required": [
"results"
- ]
+ ],
+ "title": "ScoreBatchResponse"
},
"AlgorithmConfig": {
"oneOf": [
@@ -8101,7 +8299,8 @@
"apply_lora_to_output",
"rank",
"alpha"
- ]
+ ],
+ "title": "LoraFinetuningConfig"
},
"QATFinetuningConfig": {
"type": "object",
@@ -8123,7 +8322,8 @@
"type",
"quantizer_name",
"group_size"
- ]
+ ],
+ "title": "QATFinetuningConfig"
},
"SupervisedFineTuneRequest": {
"type": "object",
@@ -8201,7 +8401,8 @@
"hyperparam_search_config",
"logger_config",
"model"
- ]
+ ],
+ "title": "SupervisedFineTuneRequest"
},
"SyntheticDataGenerateRequest": {
"type": "object",
@@ -8222,6 +8423,7 @@
"top_k_top_p",
"sigmoid"
],
+ "title": "FilteringFunction",
"description": "The type of filtering function."
},
"model": {
@@ -8232,7 +8434,8 @@
"required": [
"dialogs",
"filtering_function"
- ]
+ ],
+ "title": "SyntheticDataGenerateRequest"
},
"SyntheticDataGenerationResponse": {
"type": "object",
@@ -8295,6 +8498,7 @@
"required": [
"synthetic_data"
],
+ "title": "SyntheticDataGenerationResponse",
"description": "Response from the synthetic data generation. Batch of (prompt, response, score) tuples that pass the threshold."
},
"VersionInfo": {
@@ -8307,7 +8511,8 @@
"additionalProperties": false,
"required": [
"version"
- ]
+ ],
+ "title": "VersionInfo"
}
},
"responses": {}
diff --git a/docs/_static/llama-stack-spec.yaml b/docs/_static/llama-stack-spec.yaml
index f63374406..99300fedf 100644
--- a/docs/_static/llama-stack-spec.yaml
+++ b/docs/_static/llama-stack-spec.yaml
@@ -1611,6 +1611,7 @@ components:
required:
- type
- config
+ title: AgentCandidate
AgentConfig:
type: object
properties:
@@ -1637,18 +1638,23 @@ components:
enum:
- auto
- required
+ - none
+ title: ToolChoice
description: >-
Whether tool use is required or automatic. This is a hint to the model
which may not be followed. It depends on the Instruction Following capabilities
of the model.
+ deprecated: true
tool_prompt_format:
type: string
enum:
- json
- function_tag
- python_list
+ title: ToolPromptFormat
description: >-
Prompt format for calling custom / zero shot tools.
+ deprecated: true
tool_config:
$ref: '#/components/schemas/ToolConfig'
max_infer_iters:
@@ -1667,6 +1673,7 @@ components:
required:
- model
- instructions
+ title: AgentConfig
AgentTool:
oneOf:
- type: string
@@ -1688,6 +1695,7 @@ components:
required:
- name
- args
+ title: AgentToolGroupWithArgs
AggregationFunctionType:
type: string
enum:
@@ -1695,6 +1703,7 @@ components:
- median
- categorical_count
- accuracy
+ title: AggregationFunctionType
BasicScoringFnParams:
type: object
properties:
@@ -1709,6 +1718,7 @@ components:
additionalProperties: false
required:
- type
+ title: BasicScoringFnParams
BenchmarkConfig:
type: object
properties:
@@ -1729,6 +1739,7 @@ components:
- type
- eval_candidate
- scoring_params
+ title: BenchmarkConfig
EvalCandidate:
oneOf:
- $ref: '#/components/schemas/ModelCandidate'
@@ -1763,6 +1774,7 @@ components:
required:
- type
- bnf
+ title: GrammarResponseFormat
description: >-
Configuration for grammar-guided response generation.
GreedySamplingStrategy:
@@ -1775,6 +1787,7 @@ components:
additionalProperties: false
required:
- type
+ title: GreedySamplingStrategy
ImageContentItem:
type: object
properties:
@@ -1803,6 +1816,7 @@ components:
required:
- type
- image
+ title: ImageContentItem
description: An image content item
InterleavedContent:
oneOf:
@@ -1846,6 +1860,7 @@ components:
required:
- type
- json_schema
+ title: JsonSchemaResponseFormat
description: >-
Configuration for JSON schema-guided response generation.
LLMAsJudgeScoringFnParams:
@@ -1871,6 +1886,7 @@ components:
required:
- type
- judge_model
+ title: LLMAsJudgeScoringFnParams
ModelCandidate:
type: object
properties:
@@ -1889,6 +1905,7 @@ components:
- type
- model
- sampling_params
+ title: ModelCandidate
RegexParserScoringFnParams:
type: object
properties:
@@ -1907,6 +1924,7 @@ components:
additionalProperties: false
required:
- type
+ title: RegexParserScoringFnParams
ResponseFormat:
oneOf:
- $ref: '#/components/schemas/JsonSchemaResponseFormat'
@@ -1930,6 +1948,7 @@ components:
additionalProperties: false
required:
- strategy
+ title: SamplingParams
SamplingStrategy:
oneOf:
- $ref: '#/components/schemas/GreedySamplingStrategy'
@@ -1971,6 +1990,7 @@ components:
required:
- role
- content
+ title: SystemMessage
description: >-
A system message providing instructions or context to the model.
TextContentItem:
@@ -1989,18 +2009,28 @@ components:
required:
- type
- text
+ title: TextContentItem
description: A text content item
ToolConfig:
type: object
properties:
tool_choice:
- type: string
- enum:
- - auto
- - required
- description: >-
- (Optional) Whether tool use is required or automatic. Defaults to ToolChoice.auto.
+ oneOf:
+ - type: string
+ enum:
+ - auto
+ - required
+ - none
+ title: ToolChoice
+ description: >-
+ Whether tool use is required or automatic. This is a hint to the model
+ which may not be followed. It depends on the Instruction Following
+ capabilities of the model.
+ - type: string
default: auto
+ description: >-
+ (Optional) Whether tool use is automatic, required, or none. Can also
+ specify a tool name to use a specific tool. Defaults to ToolChoice.auto.
tool_prompt_format:
type: string
enum:
@@ -2027,8 +2057,7 @@ components:
where the function definitions should be inserted.
default: append
additionalProperties: false
- required:
- - system_message_behavior
+ title: ToolConfig
description: Configuration for tool use.
ToolDef:
type: object
@@ -2054,6 +2083,7 @@ components:
additionalProperties: false
required:
- name
+ title: ToolDef
ToolParameter:
type: object
properties:
@@ -2080,6 +2110,7 @@ components:
- parameter_type
- description
- required
+ title: ToolParameter
TopKSamplingStrategy:
type: object
properties:
@@ -2093,6 +2124,7 @@ components:
required:
- type
- top_k
+ title: TopKSamplingStrategy
TopPSamplingStrategy:
type: object
properties:
@@ -2108,6 +2140,7 @@ components:
additionalProperties: false
required:
- type
+ title: TopPSamplingStrategy
URL:
type: object
properties:
@@ -2116,6 +2149,7 @@ components:
additionalProperties: false
required:
- uri
+ title: URL
DeprecatedEvaluateRowsRequest:
type: object
properties:
@@ -2142,6 +2176,7 @@ components:
- input_rows
- scoring_functions
- task_config
+ title: DeprecatedEvaluateRowsRequest
EvaluateResponse:
type: object
properties:
@@ -2165,6 +2200,7 @@ components:
required:
- generations
- scores
+ title: EvaluateResponse
ScoringResult:
type: object
properties:
@@ -2194,6 +2230,7 @@ components:
required:
- score_rows
- aggregated_results
+ title: ScoringResult
Benchmark:
type: object
properties:
@@ -2232,6 +2269,7 @@ components:
- dataset_id
- scoring_functions
- metadata
+ title: Benchmark
JobStatus:
type: string
enum:
@@ -2239,6 +2277,7 @@ components:
- in_progress
- failed
- scheduled
+ title: JobStatus
ListBenchmarksResponse:
type: object
properties:
@@ -2249,6 +2288,7 @@ components:
additionalProperties: false
required:
- data
+ title: ListBenchmarksResponse
DeprecatedRegisterEvalTaskRequest:
type: object
properties:
@@ -2279,6 +2319,7 @@ components:
- eval_task_id
- dataset_id
- scoring_functions
+ title: DeprecatedRegisterEvalTaskRequest
DeprecatedRunEvalRequest:
type: object
properties:
@@ -2287,6 +2328,7 @@ components:
additionalProperties: false
required:
- task_config
+ title: DeprecatedRunEvalRequest
Job:
type: object
properties:
@@ -2295,6 +2337,7 @@ components:
additionalProperties: false
required:
- job_id
+ title: Job
AppendRowsRequest:
type: object
properties:
@@ -2316,6 +2359,7 @@ components:
required:
- dataset_id
- rows
+ title: AppendRowsRequest
CompletionMessage:
type: object
properties:
@@ -2352,6 +2396,7 @@ components:
- role
- content
- stop_reason
+ title: CompletionMessage
description: >-
A message containing the model's (assistant) response in a chat conversation.
Message:
@@ -2380,6 +2425,7 @@ components:
- wolfram_alpha
- photogen
- code_interpreter
+ title: BuiltinTool
- type: string
arguments:
type: object
@@ -2411,6 +2457,7 @@ components:
- call_id
- tool_name
- arguments
+ title: ToolCall
ToolDefinition:
type: object
properties:
@@ -2422,6 +2469,7 @@ components:
- wolfram_alpha
- photogen
- code_interpreter
+ title: BuiltinTool
- type: string
description:
type: string
@@ -2432,6 +2480,7 @@ components:
additionalProperties: false
required:
- tool_name
+ title: ToolDefinition
ToolParamDefinition:
type: object
properties:
@@ -2453,6 +2502,7 @@ components:
additionalProperties: false
required:
- param_type
+ title: ToolParamDefinition
ToolResponseMessage:
type: object
properties:
@@ -2474,6 +2524,7 @@ components:
- wolfram_alpha
- photogen
- code_interpreter
+ title: BuiltinTool
- type: string
description: Name of the tool that was called
content:
@@ -2485,6 +2536,7 @@ components:
- call_id
- tool_name
- content
+ title: ToolResponseMessage
description: >-
A message representing the result of a tool invocation.
UserMessage:
@@ -2509,6 +2561,7 @@ components:
required:
- role
- content
+ title: UserMessage
description: >-
A message from the user in a chat conversation.
BatchChatCompletionRequest:
@@ -2533,6 +2586,8 @@ components:
enum:
- auto
- required
+ - none
+ title: ToolChoice
description: >-
Whether tool use is required or automatic. This is a hint to the model
which may not be followed. It depends on the Instruction Following capabilities
@@ -2543,6 +2598,7 @@ components:
- json
- function_tag
- python_list
+ title: ToolPromptFormat
description: >-
Prompt format for calling custom / zero shot tools.
response_format:
@@ -2556,10 +2612,12 @@ components:
description: >-
How many tokens (for each position) to return log probabilities for.
additionalProperties: false
+ title: LogProbConfig
additionalProperties: false
required:
- model
- messages_batch
+ title: BatchChatCompletionRequest
BatchChatCompletionResponse:
type: object
properties:
@@ -2570,6 +2628,7 @@ components:
additionalProperties: false
required:
- batch
+ title: BatchChatCompletionResponse
ChatCompletionResponse:
type: object
properties:
@@ -2589,6 +2648,7 @@ components:
additionalProperties: false
required:
- completion_message
+ title: ChatCompletionResponse
description: Response from a chat completion request.
MetricEvent:
type: object
@@ -2630,6 +2690,7 @@ components:
- metric
- value
- unit
+ title: MetricEvent
TokenLogProbs:
type: object
properties:
@@ -2642,6 +2703,7 @@ components:
additionalProperties: false
required:
- logprobs_by_token
+ title: TokenLogProbs
description: Log probabilities for generated tokens.
BatchCompletionRequest:
type: object
@@ -2665,10 +2727,12 @@ components:
description: >-
How many tokens (for each position) to return log probabilities for.
additionalProperties: false
+ title: LogProbConfig
additionalProperties: false
required:
- model
- content_batch
+ title: BatchCompletionRequest
BatchCompletionResponse:
type: object
properties:
@@ -2679,6 +2743,7 @@ components:
additionalProperties: false
required:
- batch
+ title: BatchCompletionResponse
CompletionResponse:
type: object
properties:
@@ -2702,6 +2767,7 @@ components:
required:
- content
- stop_reason
+ title: CompletionResponse
description: Response from a completion request.
CancelTrainingJobRequest:
type: object
@@ -2711,6 +2777,7 @@ components:
additionalProperties: false
required:
- job_uuid
+ title: CancelTrainingJobRequest
ChatCompletionRequest:
type: object
properties:
@@ -2739,6 +2806,7 @@ components:
enum:
- auto
- required
+ - none
description: >-
(Optional) Whether tool use is required or automatic. Defaults to ToolChoice.auto.
.. deprecated:: Use tool_config instead.
@@ -2787,6 +2855,7 @@ components:
required:
- model_id
- messages
+ title: ChatCompletionRequest
ChatCompletionResponseEvent:
type: object
properties:
@@ -2820,6 +2889,7 @@ components:
required:
- event_type
- delta
+ title: ChatCompletionResponseEvent
description: >-
An event during chat completion generation.
ChatCompletionResponseStreamChunk:
@@ -2835,6 +2905,7 @@ components:
additionalProperties: false
required:
- event
+ title: ChatCompletionResponseStreamChunk
description: >-
A chunk of a streamed chat completion response.
ContentDelta:
@@ -2862,6 +2933,7 @@ components:
required:
- type
- image
+ title: ImageDelta
TextDelta:
type: object
properties:
@@ -2875,6 +2947,7 @@ components:
required:
- type
- text
+ title: TextDelta
ToolCallDelta:
type: object
properties:
@@ -2893,11 +2966,13 @@ components:
- in_progress
- failed
- succeeded
+ title: ToolCallParseStatus
additionalProperties: false
required:
- type
- tool_call
- parse_status
+ title: ToolCallDelta
CompletionRequest:
type: object
properties:
@@ -2938,6 +3013,7 @@ components:
required:
- model_id
- content
+ title: CompletionRequest
CompletionResponseStreamChunk:
type: object
properties:
@@ -2962,6 +3038,7 @@ components:
additionalProperties: false
required:
- delta
+ title: CompletionResponseStreamChunk
description: >-
A chunk of a streamed completion response.
CreateAgentRequest:
@@ -2972,6 +3049,7 @@ components:
additionalProperties: false
required:
- agent_config
+ title: CreateAgentRequest
AgentCreateResponse:
type: object
properties:
@@ -2980,6 +3058,7 @@ components:
additionalProperties: false
required:
- agent_id
+ title: AgentCreateResponse
CreateAgentSessionRequest:
type: object
properties:
@@ -2988,6 +3067,7 @@ components:
additionalProperties: false
required:
- session_name
+ title: CreateAgentSessionRequest
AgentSessionCreateResponse:
type: object
properties:
@@ -2996,6 +3076,7 @@ components:
additionalProperties: false
required:
- session_id
+ title: AgentSessionCreateResponse
CreateAgentTurnRequest:
type: object
properties:
@@ -3026,6 +3107,7 @@ components:
required:
- content
- mime_type
+ title: Document
toolgroups:
type: array
items:
@@ -3035,6 +3117,7 @@ components:
additionalProperties: false
required:
- messages
+ title: CreateAgentTurnRequest
InferenceStep:
type: object
properties:
@@ -3060,6 +3143,7 @@ components:
- step_id
- step_type
- model_response
+ title: InferenceStep
MemoryRetrievalStep:
type: object
properties:
@@ -3088,6 +3172,7 @@ components:
- step_type
- vector_db_ids
- inserted_context
+ title: MemoryRetrievalStep
SafetyViolation:
type: object
properties:
@@ -3109,6 +3194,7 @@ components:
required:
- violation_level
- metadata
+ title: SafetyViolation
ShieldCallStep:
type: object
properties:
@@ -3133,6 +3219,7 @@ components:
- turn_id
- step_id
- step_type
+ title: ShieldCallStep
ToolExecutionStep:
type: object
properties:
@@ -3165,6 +3252,7 @@ components:
- step_type
- tool_calls
- tool_responses
+ title: ToolExecutionStep
ToolResponse:
type: object
properties:
@@ -3178,6 +3266,7 @@ components:
- wolfram_alpha
- photogen
- code_interpreter
+ title: BuiltinTool
- type: string
content:
$ref: '#/components/schemas/InterleavedContent'
@@ -3186,6 +3275,7 @@ components:
- call_id
- tool_name
- content
+ title: ToolResponse
Turn:
type: object
properties:
@@ -3235,6 +3325,7 @@ components:
required:
- content
- mime_type
+ title: Attachment
started_at:
type: string
format: date-time
@@ -3249,6 +3340,7 @@ components:
- steps
- output_message
- started_at
+ title: Turn
description: >-
A single turn in an interaction with an Agentic System.
ViolationLevel:
@@ -3257,6 +3349,7 @@ components:
- info
- warn
- error
+ title: ViolationLevel
AgentTurnResponseEvent:
type: object
properties:
@@ -3265,6 +3358,7 @@ components:
additionalProperties: false
required:
- payload
+ title: AgentTurnResponseEvent
AgentTurnResponseEventPayload:
oneOf:
- $ref: '#/components/schemas/AgentTurnResponseStepStartPayload'
@@ -3294,6 +3388,7 @@ components:
- tool_execution
- shield_call
- memory_retrieval
+ title: StepType
step_id:
type: string
step_details:
@@ -3315,6 +3410,7 @@ components:
- step_type
- step_id
- step_details
+ title: AgentTurnResponseStepCompletePayload
AgentTurnResponseStepProgressPayload:
type: object
properties:
@@ -3329,6 +3425,7 @@ components:
- tool_execution
- shield_call
- memory_retrieval
+ title: StepType
step_id:
type: string
delta:
@@ -3339,6 +3436,7 @@ components:
- step_type
- step_id
- delta
+ title: AgentTurnResponseStepProgressPayload
AgentTurnResponseStepStartPayload:
type: object
properties:
@@ -3353,6 +3451,7 @@ components:
- tool_execution
- shield_call
- memory_retrieval
+ title: StepType
step_id:
type: string
metadata:
@@ -3370,6 +3469,7 @@ components:
- event_type
- step_type
- step_id
+ title: AgentTurnResponseStepStartPayload
AgentTurnResponseStreamChunk:
type: object
properties:
@@ -3378,6 +3478,7 @@ components:
additionalProperties: false
required:
- event
+ title: AgentTurnResponseStreamChunk
description: streamed agent turn completion response.
AgentTurnResponseTurnCompletePayload:
type: object
@@ -3392,6 +3493,7 @@ components:
required:
- event_type
- turn
+ title: AgentTurnResponseTurnCompletePayload
AgentTurnResponseTurnStartPayload:
type: object
properties:
@@ -3405,6 +3507,7 @@ components:
required:
- event_type
- turn_id
+ title: AgentTurnResponseTurnStartPayload
EmbeddingsRequest:
type: object
properties:
@@ -3425,6 +3528,7 @@ components:
required:
- model_id
- contents
+ title: EmbeddingsRequest
EmbeddingsResponse:
type: object
properties:
@@ -3441,6 +3545,7 @@ components:
additionalProperties: false
required:
- embeddings
+ title: EmbeddingsResponse
description: >-
Response containing generated embeddings.
EvaluateRowsRequest:
@@ -3469,6 +3574,7 @@ components:
- input_rows
- scoring_functions
- task_config
+ title: EvaluateRowsRequest
Session:
type: object
properties:
@@ -3489,6 +3595,7 @@ components:
- session_name
- turns
- started_at
+ title: Session
description: >-
A single session of an interaction with an Agentic System.
AgentStepResponse:
@@ -3510,6 +3617,7 @@ components:
additionalProperties: false
required:
- step
+ title: AgentStepResponse
AgentTurnInputType:
type: object
properties:
@@ -3520,6 +3628,7 @@ components:
additionalProperties: false
required:
- type
+ title: AgentTurnInputType
ArrayType:
type: object
properties:
@@ -3530,6 +3639,7 @@ components:
additionalProperties: false
required:
- type
+ title: ArrayType
BooleanType:
type: object
properties:
@@ -3540,6 +3650,7 @@ components:
additionalProperties: false
required:
- type
+ title: BooleanType
ChatCompletionInputType:
type: object
properties:
@@ -3550,6 +3661,7 @@ components:
additionalProperties: false
required:
- type
+ title: ChatCompletionInputType
CompletionInputType:
type: object
properties:
@@ -3560,6 +3672,7 @@ components:
additionalProperties: false
required:
- type
+ title: CompletionInputType
Dataset:
type: object
properties:
@@ -3598,6 +3711,7 @@ components:
- dataset_schema
- url
- metadata
+ title: Dataset
JsonType:
type: object
properties:
@@ -3608,6 +3722,7 @@ components:
additionalProperties: false
required:
- type
+ title: JsonType
NumberType:
type: object
properties:
@@ -3618,6 +3733,7 @@ components:
additionalProperties: false
required:
- type
+ title: NumberType
ObjectType:
type: object
properties:
@@ -3628,6 +3744,7 @@ components:
additionalProperties: false
required:
- type
+ title: ObjectType
ParamType:
oneOf:
- $ref: '#/components/schemas/StringType'
@@ -3663,6 +3780,7 @@ components:
additionalProperties: false
required:
- type
+ title: StringType
UnionType:
type: object
properties:
@@ -3673,6 +3791,7 @@ components:
additionalProperties: false
required:
- type
+ title: UnionType
Model:
type: object
properties:
@@ -3707,11 +3826,13 @@ components:
- type
- metadata
- model_type
+ title: Model
ModelType:
type: string
enum:
- llm
- embedding
+ title: ModelType
PaginatedRowsResult:
type: object
properties:
@@ -3735,6 +3856,7 @@ components:
required:
- rows
- total_count
+ title: PaginatedRowsResult
ScoringFn:
type: object
properties:
@@ -3772,6 +3894,7 @@ components:
- type
- metadata
- return_type
+ title: ScoringFn
Shield:
type: object
properties:
@@ -3801,6 +3924,7 @@ components:
- provider_resource_id
- provider_id
- type
+ title: Shield
description: >-
A safety shield resource that can be used to check content
Span:
@@ -3836,11 +3960,13 @@ components:
- trace_id
- name
- start_time
+ title: Span
SpanStatus:
type: string
enum:
- ok
- error
+ title: SpanStatus
SpanWithStatus:
type: object
properties:
@@ -3876,6 +4002,7 @@ components:
- trace_id
- name
- start_time
+ title: SpanWithStatus
QuerySpanTreeResponse:
type: object
properties:
@@ -3886,6 +4013,7 @@ components:
additionalProperties: false
required:
- data
+ title: QuerySpanTreeResponse
Tool:
type: object
properties:
@@ -3929,12 +4057,14 @@ components:
- tool_host
- description
- parameters
+ title: Tool
ToolHost:
type: string
enum:
- distribution
- client
- model_context_protocol
+ title: ToolHost
ToolGroup:
type: object
properties:
@@ -3966,6 +4096,7 @@ components:
- provider_resource_id
- provider_id
- type
+ title: ToolGroup
Trace:
type: object
properties:
@@ -3984,8 +4115,10 @@ components:
- trace_id
- root_span_id
- start_time
+ title: Trace
Checkpoint:
description: Checkpoint created during training runs
+ title: Checkpoint
PostTrainingJobArtifactsResponse:
type: object
properties:
@@ -3999,6 +4132,7 @@ components:
required:
- job_uuid
- checkpoints
+ title: PostTrainingJobArtifactsResponse
description: Artifacts of a finetuning job.
PostTrainingJobStatusResponse:
type: object
@@ -4035,6 +4169,7 @@ components:
- job_uuid
- status
- checkpoints
+ title: PostTrainingJobStatusResponse
description: Status of a finetuning job.
ListPostTrainingJobsResponse:
type: object
@@ -4049,9 +4184,11 @@ components:
additionalProperties: false
required:
- job_uuid
+ title: PostTrainingJob
additionalProperties: false
required:
- data
+ title: ListPostTrainingJobsResponse
VectorDB:
type: object
properties:
@@ -4077,6 +4214,7 @@ components:
- type
- embedding_model
- embedding_dimension
+ title: VectorDB
HealthInfo:
type: object
properties:
@@ -4085,6 +4223,7 @@ components:
additionalProperties: false
required:
- status
+ title: HealthInfo
RAGDocument:
type: object
properties:
@@ -4115,6 +4254,7 @@ components:
- document_id
- content
- metadata
+ title: RAGDocument
InsertRequest:
type: object
properties:
@@ -4131,6 +4271,7 @@ components:
- documents
- vector_db_id
- chunk_size_in_tokens
+ title: InsertRequest
InsertChunksRequest:
type: object
properties:
@@ -4157,12 +4298,14 @@ components:
required:
- content
- metadata
+ title: Chunk
ttl_seconds:
type: integer
additionalProperties: false
required:
- vector_db_id
- chunks
+ title: InsertChunksRequest
InvokeToolRequest:
type: object
properties:
@@ -4182,6 +4325,7 @@ components:
required:
- tool_name
- kwargs
+ title: InvokeToolRequest
ToolInvocationResult:
type: object
properties:
@@ -4194,6 +4338,7 @@ components:
additionalProperties: false
required:
- content
+ title: ToolInvocationResult
ListDatasetsResponse:
type: object
properties:
@@ -4204,6 +4349,7 @@ components:
additionalProperties: false
required:
- data
+ title: ListDatasetsResponse
ListModelsResponse:
type: object
properties:
@@ -4214,6 +4360,7 @@ components:
additionalProperties: false
required:
- data
+ title: ListModelsResponse
ProviderInfo:
type: object
properties:
@@ -4228,6 +4375,7 @@ components:
- api
- provider_id
- provider_type
+ title: ProviderInfo
ListProvidersResponse:
type: object
properties:
@@ -4238,6 +4386,7 @@ components:
additionalProperties: false
required:
- data
+ title: ListProvidersResponse
RouteInfo:
type: object
properties:
@@ -4254,6 +4403,7 @@ components:
- route
- method
- provider_types
+ title: RouteInfo
ListRoutesResponse:
type: object
properties:
@@ -4264,6 +4414,7 @@ components:
additionalProperties: false
required:
- data
+ title: ListRoutesResponse
ListScoringFunctionsResponse:
type: object
properties:
@@ -4274,6 +4425,7 @@ components:
additionalProperties: false
required:
- data
+ title: ListScoringFunctionsResponse
ListShieldsResponse:
type: object
properties:
@@ -4284,6 +4436,7 @@ components:
additionalProperties: false
required:
- data
+ title: ListShieldsResponse
ListToolGroupsResponse:
type: object
properties:
@@ -4294,6 +4447,7 @@ components:
additionalProperties: false
required:
- data
+ title: ListToolGroupsResponse
ListToolsResponse:
type: object
properties:
@@ -4304,6 +4458,7 @@ components:
additionalProperties: false
required:
- data
+ title: ListToolsResponse
ListVectorDBsResponse:
type: object
properties:
@@ -4314,6 +4469,7 @@ components:
additionalProperties: false
required:
- data
+ title: ListVectorDBsResponse
Event:
oneOf:
- $ref: '#/components/schemas/UnstructuredLogEvent'
@@ -4334,6 +4490,7 @@ components:
- warn
- error
- critical
+ title: LogSeverity
SpanEndPayload:
type: object
properties:
@@ -4347,6 +4504,7 @@ components:
required:
- type
- status
+ title: SpanEndPayload
SpanStartPayload:
type: object
properties:
@@ -4362,6 +4520,7 @@ components:
required:
- type
- name
+ title: SpanStartPayload
StructuredLogEvent:
type: object
properties:
@@ -4394,6 +4553,7 @@ components:
- timestamp
- type
- payload
+ title: StructuredLogEvent
StructuredLogPayload:
oneOf:
- $ref: '#/components/schemas/SpanStartPayload'
@@ -4438,6 +4598,7 @@ components:
- type
- message
- severity
+ title: UnstructuredLogEvent
LogEventRequest:
type: object
properties:
@@ -4449,6 +4610,7 @@ components:
required:
- event
- ttl_seconds
+ title: LogEventRequest
DPOAlignmentConfig:
type: object
properties:
@@ -4466,6 +4628,7 @@ components:
- reward_clip
- epsilon
- gamma
+ title: DPOAlignmentConfig
DataConfig:
type: object
properties:
@@ -4491,11 +4654,13 @@ components:
- batch_size
- shuffle
- data_format
+ title: DataConfig
DatasetFormat:
type: string
enum:
- instruct
- dialog
+ title: DatasetFormat
EfficiencyConfig:
type: object
properties:
@@ -4512,6 +4677,7 @@ components:
type: boolean
default: false
additionalProperties: false
+ title: EfficiencyConfig
OptimizerConfig:
type: object
properties:
@@ -4529,12 +4695,14 @@ components:
- lr
- weight_decay
- num_warmup_steps
+ title: OptimizerConfig
OptimizerType:
type: string
enum:
- adam
- adamw
- sgd
+ title: OptimizerType
TrainingConfig:
type: object
properties:
@@ -4563,6 +4731,7 @@ components:
- max_validation_steps
- data_config
- optimizer_config
+ title: TrainingConfig
PreferenceOptimizeRequest:
type: object
properties:
@@ -4602,6 +4771,7 @@ components:
- training_config
- hyperparam_search_config
- logger_config
+ title: PreferenceOptimizeRequest
PostTrainingJob:
type: object
properties:
@@ -4610,6 +4780,7 @@ components:
additionalProperties: false
required:
- job_uuid
+ title: PostTrainingJob
DefaultRAGQueryGeneratorConfig:
type: object
properties:
@@ -4624,6 +4795,7 @@ components:
required:
- type
- separator
+ title: DefaultRAGQueryGeneratorConfig
LLMRAGQueryGeneratorConfig:
type: object
properties:
@@ -4640,6 +4812,7 @@ components:
- type
- model
- template
+ title: LLMRAGQueryGeneratorConfig
RAGQueryConfig:
type: object
properties:
@@ -4656,6 +4829,7 @@ components:
- query_generator_config
- max_tokens_in_context
- max_chunks
+ title: RAGQueryConfig
RAGQueryGeneratorConfig:
oneOf:
- $ref: '#/components/schemas/DefaultRAGQueryGeneratorConfig'
@@ -4680,12 +4854,14 @@ components:
required:
- content
- vector_db_ids
+ title: QueryRequest
RAGQueryResult:
type: object
properties:
content:
$ref: '#/components/schemas/InterleavedContent'
additionalProperties: false
+ title: RAGQueryResult
QueryChunksRequest:
type: object
properties:
@@ -4707,6 +4883,7 @@ components:
required:
- vector_db_id
- query
+ title: QueryChunksRequest
QueryChunksResponse:
type: object
properties:
@@ -4731,6 +4908,7 @@ components:
required:
- content
- metadata
+ title: Chunk
scores:
type: array
items:
@@ -4739,6 +4917,7 @@ components:
required:
- chunks
- scores
+ title: QueryChunksResponse
QueryCondition:
type: object
properties:
@@ -4759,6 +4938,7 @@ components:
- key
- op
- value
+ title: QueryCondition
QueryConditionOp:
type: string
enum:
@@ -4766,6 +4946,7 @@ components:
- ne
- gt
- lt
+ title: QueryConditionOp
QuerySpansResponse:
type: object
properties:
@@ -4776,6 +4957,7 @@ components:
additionalProperties: false
required:
- data
+ title: QuerySpansResponse
QueryTracesResponse:
type: object
properties:
@@ -4786,6 +4968,7 @@ components:
additionalProperties: false
required:
- data
+ title: QueryTracesResponse
RegisterBenchmarkRequest:
type: object
properties:
@@ -4816,6 +4999,7 @@ components:
- benchmark_id
- dataset_id
- scoring_functions
+ title: RegisterBenchmarkRequest
RegisterDatasetRequest:
type: object
properties:
@@ -4846,6 +5030,7 @@ components:
- dataset_id
- dataset_schema
- url
+ title: RegisterDatasetRequest
RegisterModelRequest:
type: object
properties:
@@ -4870,6 +5055,7 @@ components:
additionalProperties: false
required:
- model_id
+ title: RegisterModelRequest
RegisterScoringFunctionRequest:
type: object
properties:
@@ -4890,6 +5076,7 @@ components:
- scoring_fn_id
- description
- return_type
+ title: RegisterScoringFunctionRequest
RegisterShieldRequest:
type: object
properties:
@@ -4912,6 +5099,7 @@ components:
additionalProperties: false
required:
- shield_id
+ title: RegisterShieldRequest
RegisterToolGroupRequest:
type: object
properties:
@@ -4935,6 +5123,7 @@ components:
required:
- toolgroup_id
- provider_id
+ title: RegisterToolGroupRequest
RegisterVectorDbRequest:
type: object
properties:
@@ -4952,6 +5141,7 @@ components:
required:
- vector_db_id
- embedding_model
+ title: RegisterVectorDbRequest
RunEvalRequest:
type: object
properties:
@@ -4960,6 +5150,7 @@ components:
additionalProperties: false
required:
- task_config
+ title: RunEvalRequest
RunShieldRequest:
type: object
properties:
@@ -4984,12 +5175,14 @@ components:
- shield_id
- messages
- params
+ title: RunShieldRequest
RunShieldResponse:
type: object
properties:
violation:
$ref: '#/components/schemas/SafetyViolation'
additionalProperties: false
+ title: RunShieldResponse
SaveSpansToDatasetRequest:
type: object
properties:
@@ -5010,6 +5203,7 @@ components:
- attribute_filters
- attributes_to_save
- dataset_id
+ title: SaveSpansToDatasetRequest
ScoreRequest:
type: object
properties:
@@ -5035,6 +5229,7 @@ components:
required:
- input_rows
- scoring_functions
+ title: ScoreRequest
ScoreResponse:
type: object
properties:
@@ -5045,6 +5240,7 @@ components:
additionalProperties: false
required:
- results
+ title: ScoreResponse
ScoreBatchRequest:
type: object
properties:
@@ -5063,6 +5259,7 @@ components:
- dataset_id
- scoring_functions
- save_results_dataset
+ title: ScoreBatchRequest
ScoreBatchResponse:
type: object
properties:
@@ -5075,6 +5272,7 @@ components:
additionalProperties: false
required:
- results
+ title: ScoreBatchResponse
AlgorithmConfig:
oneOf:
- $ref: '#/components/schemas/LoraFinetuningConfig'
@@ -5117,6 +5315,7 @@ components:
- apply_lora_to_output
- rank
- alpha
+ title: LoraFinetuningConfig
QATFinetuningConfig:
type: object
properties:
@@ -5133,6 +5332,7 @@ components:
- type
- quantizer_name
- group_size
+ title: QATFinetuningConfig
SupervisedFineTuneRequest:
type: object
properties:
@@ -5173,6 +5373,7 @@ components:
- hyperparam_search_config
- logger_config
- model
+ title: SupervisedFineTuneRequest
SyntheticDataGenerateRequest:
type: object
properties:
@@ -5189,6 +5390,7 @@ components:
- top_p
- top_k_top_p
- sigmoid
+ title: FilteringFunction
description: The type of filtering function.
model:
type: string
@@ -5196,6 +5398,7 @@ components:
required:
- dialogs
- filtering_function
+ title: SyntheticDataGenerateRequest
SyntheticDataGenerationResponse:
type: object
properties:
@@ -5224,6 +5427,7 @@ components:
additionalProperties: false
required:
- synthetic_data
+ title: SyntheticDataGenerationResponse
description: >-
Response from the synthetic data generation. Batch of (prompt, response, score)
tuples that pass the threshold.
@@ -5235,6 +5439,7 @@ components:
additionalProperties: false
required:
- version
+ title: VersionInfo
responses: {}
security:
- Default: []
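The hunks above attach an explicit `title` to each component schema in the generated OpenAPI YAML. As a quick illustration of what that metadata enables downstream, the sketch below (assuming the YAML spec lives at `docs/_static/llama-stack-spec.yaml`, mirroring the HTML spec path, and that PyYAML is installed) lists every schema by its new human-readable title:

```python
# Minimal sketch, not part of this diff: enumerate schema titles.
import yaml  # PyYAML, already a generator dependency per the README below

with open("docs/_static/llama-stack-spec.yaml") as f:  # assumed path
    spec = yaml.safe_load(f)

for name, schema in spec["components"]["schemas"].items():
    # Every schema touched by this diff now carries a `title`.
    print(f"{name}: {schema.get('title', '<untitled>')}")
```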
diff --git a/docs/openapi_generator/README.md b/docs/openapi_generator/README.md
index 9d407905d..e98cfaf1b 100644
--- a/docs/openapi_generator/README.md
+++ b/docs/openapi_generator/README.md
@@ -1,4 +1,4 @@
-The RFC Specification (OpenAPI format) is generated from the set of API endpoints located in `llama_stack/[]/api/endpoints.py` using the `generate.py` utility.
+The RFC Specification (OpenAPI format) is generated from the set of API endpoints located in `llama_stack/distribution/server/endpoints.py` using the `generate.py` utility.
Please install the following packages before running the script:
@@ -6,4 +6,4 @@ Please install the following packages before running the script:
pip install python-openapi json-strong-typing fire PyYAML llama-models
```
-Then simply run `sh run_openapi_generator.sh `
+Then simply run `sh run_openapi_generator.sh`
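The two documented steps can also be driven from Python; the wrapper below is purely illustrative (it assumes it runs from `docs/openapi_generator`, next to `run_openapi_generator.sh`) and is not part of the repository:

```python
# Hypothetical convenience wrapper around the README's documented commands.
import subprocess

subprocess.run(
    ["pip", "install", "python-openapi", "json-strong-typing",
     "fire", "PyYAML", "llama-models"],
    check=True,
)
subprocess.run(["sh", "run_openapi_generator.sh"], check=True)
```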
diff --git a/docs/openapi_generator/strong_typing/__init__.py b/docs/openapi_generator/strong_typing/__init__.py
deleted file mode 100644
index d832dcf6f..000000000
--- a/docs/openapi_generator/strong_typing/__init__.py
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright (c) Meta Platforms, Inc. and affiliates.
-# All rights reserved.
-#
-# This source code is licensed under the terms described in the LICENSE file in
-# the root directory of this source tree.
-
-"""
-Type-safe data interchange for Python data classes.
-
-Provides auxiliary services for working with Python type annotations, converting typed data to and from JSON,
-and generating a JSON schema for a complex type.
-"""
-
-__version__ = "0.3.4"
-__author__ = "Levente Hunyadi"
-__copyright__ = "Copyright 2021-2024, Levente Hunyadi"
-__license__ = "MIT"
-__maintainer__ = "Levente Hunyadi"
-__status__ = "Production"
diff --git a/docs/openapi_generator/strong_typing/auxiliary.py b/docs/openapi_generator/strong_typing/auxiliary.py
deleted file mode 100644
index bfaec0d29..000000000
--- a/docs/openapi_generator/strong_typing/auxiliary.py
+++ /dev/null
@@ -1,230 +0,0 @@
-# Copyright (c) Meta Platforms, Inc. and affiliates.
-# All rights reserved.
-#
-# This source code is licensed under the terms described in the LICENSE file in
-# the root directory of this source tree.
-
-"""
-Type-safe data interchange for Python data classes.
-
-:see: https://github.com/hunyadi/strong_typing
-"""
-
-import dataclasses
-import sys
-from dataclasses import is_dataclass
-from typing import Callable, Dict, Optional, overload, Type, TypeVar, Union
-
-if sys.version_info >= (3, 9):
- from typing import Annotated as Annotated
-else:
- from typing_extensions import Annotated as Annotated
-
-if sys.version_info >= (3, 10):
- from typing import TypeAlias as TypeAlias
-else:
- from typing_extensions import TypeAlias as TypeAlias
-
-if sys.version_info >= (3, 11):
- from typing import dataclass_transform as dataclass_transform
-else:
- from typing_extensions import dataclass_transform as dataclass_transform
-
-T = TypeVar("T")
-
-
-def _compact_dataclass_repr(obj: object) -> str:
- """
- Compact data-class representation where positional arguments are used instead of keyword arguments.
-
- :param obj: A data-class object.
- :returns: A string that matches the pattern `Class(arg1, arg2, ...)`.
- """
-
- if is_dataclass(obj):
- arglist = ", ".join(
- repr(getattr(obj, field.name)) for field in dataclasses.fields(obj)
- )
- return f"{obj.__class__.__name__}({arglist})"
- else:
- return obj.__class__.__name__
-
-
-class CompactDataClass:
- "A data class whose repr() uses positional rather than keyword arguments."
-
- def __repr__(self) -> str:
- return _compact_dataclass_repr(self)
-
-
-@overload
-def typeannotation(cls: Type[T], /) -> Type[T]: ...
-
-
-@overload
-def typeannotation(
- cls: None, *, eq: bool = True, order: bool = False
-) -> Callable[[Type[T]], Type[T]]: ...
-
-
-@dataclass_transform(eq_default=True, order_default=False)
-def typeannotation(
- cls: Optional[Type[T]] = None, *, eq: bool = True, order: bool = False
-) -> Union[Type[T], Callable[[Type[T]], Type[T]]]:
- """
- Returns the same class as was passed in, with dunder methods added based on the fields defined in the class.
-
- :param cls: The data-class type to transform into a type annotation.
- :param eq: Whether to generate functions to support equality comparison.
- :param order: Whether to generate functions to support ordering.
- :returns: A data-class type, or a wrapper for data-class types.
- """
-
- def wrap(cls: Type[T]) -> Type[T]:
- setattr(cls, "__repr__", _compact_dataclass_repr)
- if not dataclasses.is_dataclass(cls):
- cls = dataclasses.dataclass( # type: ignore[call-overload]
- cls,
- init=True,
- repr=False,
- eq=eq,
- order=order,
- unsafe_hash=False,
- frozen=True,
- )
- return cls
-
- # see if decorator is used as @typeannotation or @typeannotation()
- if cls is None:
- # called with parentheses
- return wrap
- else:
- # called without parentheses
- return wrap(cls)
-
-
-@typeannotation
-class Alias:
- "Alternative name of a property, typically used in JSON serialization."
-
- name: str
-
-
-@typeannotation
-class Signed:
- "Signedness of an integer type."
-
- is_signed: bool
-
-
-@typeannotation
-class Storage:
- "Number of bytes the binary representation of an integer type takes, e.g. 4 bytes for an int32."
-
- bytes: int
-
-
-@typeannotation
-class IntegerRange:
- "Minimum and maximum value of an integer. The range is inclusive."
-
- minimum: int
- maximum: int
-
-
-@typeannotation
-class Precision:
- "Precision of a floating-point value."
-
- significant_digits: int
- decimal_digits: int = 0
-
- @property
- def integer_digits(self) -> int:
- return self.significant_digits - self.decimal_digits
-
-
-@typeannotation
-class TimePrecision:
- """
- Precision of a timestamp or time interval.
-
- :param decimal_digits: Number of fractional digits retained in the sub-seconds field for a timestamp.
- """
-
- decimal_digits: int = 0
-
-
-@typeannotation
-class Length:
- "Exact length of a string."
-
- value: int
-
-
-@typeannotation
-class MinLength:
- "Minimum length of a string."
-
- value: int
-
-
-@typeannotation
-class MaxLength:
- "Maximum length of a string."
-
- value: int
-
-
-@typeannotation
-class SpecialConversion:
- "Indicates that the annotated type is subject to custom conversion rules."
-
-
-int8: TypeAlias = Annotated[int, Signed(True), Storage(1), IntegerRange(-128, 127)]
-int16: TypeAlias = Annotated[int, Signed(True), Storage(2), IntegerRange(-32768, 32767)]
-int32: TypeAlias = Annotated[
- int,
- Signed(True),
- Storage(4),
- IntegerRange(-2147483648, 2147483647),
-]
-int64: TypeAlias = Annotated[
- int,
- Signed(True),
- Storage(8),
- IntegerRange(-9223372036854775808, 9223372036854775807),
-]
-
-uint8: TypeAlias = Annotated[int, Signed(False), Storage(1), IntegerRange(0, 255)]
-uint16: TypeAlias = Annotated[int, Signed(False), Storage(2), IntegerRange(0, 65535)]
-uint32: TypeAlias = Annotated[
- int,
- Signed(False),
- Storage(4),
- IntegerRange(0, 4294967295),
-]
-uint64: TypeAlias = Annotated[
- int,
- Signed(False),
- Storage(8),
- IntegerRange(0, 18446744073709551615),
-]
-
-float32: TypeAlias = Annotated[float, Storage(4)]
-float64: TypeAlias = Annotated[float, Storage(8)]
-
-# maps globals of type Annotated[T, ...] defined in this module to their string names
-_auxiliary_types: Dict[object, str] = {}
-module = sys.modules[__name__]
-for var in dir(module):
- typ = getattr(module, var)
- if getattr(typ, "__metadata__", None) is not None:
- # type is Annotated[T, ...]
- _auxiliary_types[typ] = var
-
-
-def get_auxiliary_format(data_type: object) -> Optional[str]:
- "Returns the JSON format string corresponding to an auxiliary type."
-
- return _auxiliary_types.get(data_type)
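The deleted `auxiliary.py` centers on one pattern: `Annotated` aliases (such as `int32`) carrying frozen-dataclass metadata that a schema generator can introspect. The self-contained sketch below shows that pattern; `Storage` is re-declared locally for illustration rather than imported from the removed module:

```python
# Sketch of the Annotated-metadata pattern used by the removed auxiliary.py.
from dataclasses import dataclass
from typing import Annotated, Optional


@dataclass(frozen=True)
class Storage:
    bytes: int  # mirrors the removed Storage annotation (byte width)


int32 = Annotated[int, Storage(4)]


def storage_of(tp: object) -> Optional[int]:
    # Annotated aliases expose their metadata tuple via __metadata__
    for meta in getattr(tp, "__metadata__", ()):
        if isinstance(meta, Storage):
            return meta.bytes
    return None


print(storage_of(int32))  # 4
```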
diff --git a/docs/openapi_generator/strong_typing/classdef.py b/docs/openapi_generator/strong_typing/classdef.py
deleted file mode 100644
index b86940420..000000000
--- a/docs/openapi_generator/strong_typing/classdef.py
+++ /dev/null
@@ -1,460 +0,0 @@
-# Copyright (c) Meta Platforms, Inc. and affiliates.
-# All rights reserved.
-#
-# This source code is licensed under the terms described in the LICENSE file in
-# the root directory of this source tree.
-
-import copy
-import dataclasses
-import datetime
-import decimal
-import enum
-import ipaddress
-import math
-import re
-import sys
-import types
-import typing
-import uuid
-from dataclasses import dataclass
-from typing import Any, Dict, List, Literal, Optional, Tuple, Type, TypeVar, Union
-
-from .auxiliary import (
- Alias,
- Annotated,
- float32,
- float64,
- int16,
- int32,
- int64,
- MaxLength,
- Precision,
-)
-from .core import JsonType, Schema
-from .docstring import Docstring, DocstringParam
-from .inspection import TypeLike
-from .serialization import json_to_object, object_to_json
-
-T = TypeVar("T")
-
-
-@dataclass
-class JsonSchemaNode:
- title: Optional[str]
- description: Optional[str]
-
-
-@dataclass
-class JsonSchemaType(JsonSchemaNode):
- type: str
- format: Optional[str]
-
-
-@dataclass
-class JsonSchemaBoolean(JsonSchemaType):
- type: Literal["boolean"]
- const: Optional[bool]
- default: Optional[bool]
- examples: Optional[List[bool]]
-
-
-@dataclass
-class JsonSchemaInteger(JsonSchemaType):
- type: Literal["integer"]
- const: Optional[int]
- default: Optional[int]
- examples: Optional[List[int]]
- enum: Optional[List[int]]
- minimum: Optional[int]
- maximum: Optional[int]
-
-
-@dataclass
-class JsonSchemaNumber(JsonSchemaType):
- type: Literal["number"]
- const: Optional[float]
- default: Optional[float]
- examples: Optional[List[float]]
- minimum: Optional[float]
- maximum: Optional[float]
- exclusiveMinimum: Optional[float]
- exclusiveMaximum: Optional[float]
- multipleOf: Optional[float]
-
-
-@dataclass
-class JsonSchemaString(JsonSchemaType):
- type: Literal["string"]
- const: Optional[str]
- default: Optional[str]
- examples: Optional[List[str]]
- enum: Optional[List[str]]
- minLength: Optional[int]
- maxLength: Optional[int]
-
-
-@dataclass
-class JsonSchemaArray(JsonSchemaType):
- type: Literal["array"]
- items: "JsonSchemaAny"
-
-
-@dataclass
-class JsonSchemaObject(JsonSchemaType):
- type: Literal["object"]
- properties: Optional[Dict[str, "JsonSchemaAny"]]
- additionalProperties: Optional[bool]
- required: Optional[List[str]]
-
-
-@dataclass
-class JsonSchemaRef(JsonSchemaNode):
- ref: Annotated[str, Alias("$ref")]
-
-
-@dataclass
-class JsonSchemaAllOf(JsonSchemaNode):
- allOf: List["JsonSchemaAny"]
-
-
-@dataclass
-class JsonSchemaAnyOf(JsonSchemaNode):
- anyOf: List["JsonSchemaAny"]
-
-
-@dataclass
-class Discriminator:
- propertyName: str
- mapping: Dict[str, str]
-
-
-@dataclass
-class JsonSchemaOneOf(JsonSchemaNode):
- oneOf: List["JsonSchemaAny"]
- discriminator: Optional[Discriminator]
-
-
-JsonSchemaAny = Union[
- JsonSchemaRef,
- JsonSchemaBoolean,
- JsonSchemaInteger,
- JsonSchemaNumber,
- JsonSchemaString,
- JsonSchemaArray,
- JsonSchemaObject,
- JsonSchemaOneOf,
-]
-
-
-@dataclass
-class JsonSchemaTopLevelObject(JsonSchemaObject):
- schema: Annotated[str, Alias("$schema")]
- definitions: Optional[Dict[str, JsonSchemaAny]]
-
-
-def integer_range_to_type(min_value: float, max_value: float) -> type:
- if min_value >= -(2**15) and max_value < 2**15:
- return int16
- elif min_value >= -(2**31) and max_value < 2**31:
- return int32
- else:
- return int64
-
-
-def enum_safe_name(name: str) -> str:
- name = re.sub(r"\W", "_", name)
- is_dunder = name.startswith("__")
- is_sunder = name.startswith("_") and name.endswith("_")
- if is_dunder or is_sunder: # provide an alternative for dunder and sunder names
- name = f"v{name}"
- return name
-
-
-def enum_values_to_type(
- module: types.ModuleType,
- name: str,
- values: Dict[str, Any],
- title: Optional[str] = None,
- description: Optional[str] = None,
-) -> Type[enum.Enum]:
- enum_class: Type[enum.Enum] = enum.Enum(name, values) # type: ignore
-
- # assign the newly created type to the same module where the defining class is
- enum_class.__module__ = module.__name__
- enum_class.__doc__ = str(
- Docstring(short_description=title, long_description=description)
- )
- setattr(module, name, enum_class)
-
- return enum.unique(enum_class)
-
-
-def schema_to_type(
- schema: Schema, *, module: types.ModuleType, class_name: str
-) -> TypeLike:
- """
- Creates a Python type from a JSON schema.
-
- :param schema: The JSON schema that the types would correspond to.
- :param module: The module in which to create the new types.
- :param class_name: The name assigned to the top-level class.
- """
-
- top_node = typing.cast(
- JsonSchemaTopLevelObject, json_to_object(JsonSchemaTopLevelObject, schema)
- )
- if top_node.definitions is not None:
- for type_name, type_node in top_node.definitions.items():
- type_def = node_to_typedef(module, type_name, type_node)
- if type_def.default is not dataclasses.MISSING:
- raise TypeError("disallowed: `default` for top-level type definitions")
-
- setattr(type_def.type, "__module__", module.__name__)
- setattr(module, type_name, type_def.type)
-
- return node_to_typedef(module, class_name, top_node).type
-
-
-@dataclass
-class TypeDef:
- type: TypeLike
- default: Any = dataclasses.MISSING
-
-
-def json_to_value(target_type: TypeLike, data: JsonType) -> Any:
- if data is not None:
- return json_to_object(target_type, data)
- else:
- return dataclasses.MISSING
-
-
-def node_to_typedef(
- module: types.ModuleType, context: str, node: JsonSchemaNode
-) -> TypeDef:
- if isinstance(node, JsonSchemaRef):
- match_obj = re.match(r"^#/definitions/(\w+)$", node.ref)
- if not match_obj:
- raise ValueError(f"invalid reference: {node.ref}")
-
- type_name = match_obj.group(1)
- return TypeDef(getattr(module, type_name), dataclasses.MISSING)
-
- elif isinstance(node, JsonSchemaBoolean):
- if node.const is not None:
- return TypeDef(Literal[node.const], dataclasses.MISSING)
-
- default = json_to_value(bool, node.default)
- return TypeDef(bool, default)
-
- elif isinstance(node, JsonSchemaInteger):
- if node.const is not None:
- return TypeDef(Literal[node.const], dataclasses.MISSING)
-
- integer_type: TypeLike
- if node.format == "int16":
- integer_type = int16
- elif node.format == "int32":
- integer_type = int32
- elif node.format == "int64":
- integer_type = int64
- else:
- if node.enum is not None:
- integer_type = integer_range_to_type(min(node.enum), max(node.enum))
- elif node.minimum is not None and node.maximum is not None:
- integer_type = integer_range_to_type(node.minimum, node.maximum)
- else:
- integer_type = int
-
- default = json_to_value(integer_type, node.default)
- return TypeDef(integer_type, default)
-
- elif isinstance(node, JsonSchemaNumber):
- if node.const is not None:
- return TypeDef(Literal[node.const], dataclasses.MISSING)
-
- number_type: TypeLike
- if node.format == "float32":
- number_type = float32
- elif node.format == "float64":
- number_type = float64
- else:
- if (
- node.exclusiveMinimum is not None
- and node.exclusiveMaximum is not None
- and node.exclusiveMinimum == -node.exclusiveMaximum
- ):
- integer_digits = round(math.log10(node.exclusiveMaximum))
- else:
- integer_digits = None
-
- if node.multipleOf is not None:
- decimal_digits = -round(math.log10(node.multipleOf))
- else:
- decimal_digits = None
-
- if integer_digits is not None and decimal_digits is not None:
- number_type = Annotated[
- decimal.Decimal,
- Precision(integer_digits + decimal_digits, decimal_digits),
- ]
- else:
- number_type = float
-
- default = json_to_value(number_type, node.default)
- return TypeDef(number_type, default)
-
- elif isinstance(node, JsonSchemaString):
- if node.const is not None:
- return TypeDef(Literal[node.const], dataclasses.MISSING)
-
- string_type: TypeLike
- if node.format == "date-time":
- string_type = datetime.datetime
- elif node.format == "uuid":
- string_type = uuid.UUID
- elif node.format == "ipv4":
- string_type = ipaddress.IPv4Address
- elif node.format == "ipv6":
- string_type = ipaddress.IPv6Address
-
- elif node.enum is not None:
- string_type = enum_values_to_type(
- module,
- context,
- {enum_safe_name(e): e for e in node.enum},
- title=node.title,
- description=node.description,
- )
-
- elif node.maxLength is not None:
- string_type = Annotated[str, MaxLength(node.maxLength)]
- else:
- string_type = str
-
- default = json_to_value(string_type, node.default)
- return TypeDef(string_type, default)
-
- elif isinstance(node, JsonSchemaArray):
- type_def = node_to_typedef(module, context, node.items)
- if type_def.default is not dataclasses.MISSING:
- raise TypeError("disallowed: `default` for array element type")
- list_type = List[(type_def.type,)] # type: ignore
- return TypeDef(list_type, dataclasses.MISSING)
-
- elif isinstance(node, JsonSchemaObject):
- if node.properties is None:
- return TypeDef(JsonType, dataclasses.MISSING)
-
- if node.additionalProperties is None or node.additionalProperties is not False:
- raise TypeError("expected: `additionalProperties` equals `false`")
-
- required = node.required if node.required is not None else []
-
- class_name = context
-
- fields: List[Tuple[str, Any, dataclasses.Field]] = []
- params: Dict[str, DocstringParam] = {}
- for prop_name, prop_node in node.properties.items():
- type_def = node_to_typedef(module, f"{class_name}__{prop_name}", prop_node)
- if prop_name in required:
- prop_type = type_def.type
- else:
- prop_type = Union[(None, type_def.type)]
- fields.append(
- (prop_name, prop_type, dataclasses.field(default=type_def.default))
- )
- prop_desc = prop_node.title or prop_node.description
- if prop_desc is not None:
- params[prop_name] = DocstringParam(prop_name, prop_desc)
-
- fields.sort(key=lambda t: t[2].default is not dataclasses.MISSING)
- if sys.version_info >= (3, 12):
- class_type = dataclasses.make_dataclass(
- class_name, fields, module=module.__name__
- )
- else:
- class_type = dataclasses.make_dataclass(
- class_name, fields, namespace={"__module__": module.__name__}
- )
- class_type.__doc__ = str(
- Docstring(
- short_description=node.title,
- long_description=node.description,
- params=params,
- )
- )
- setattr(module, class_name, class_type)
- return TypeDef(class_type, dataclasses.MISSING)
-
- elif isinstance(node, JsonSchemaOneOf):
- union_defs = tuple(node_to_typedef(module, context, n) for n in node.oneOf)
- if any(d.default is not dataclasses.MISSING for d in union_defs):
- raise TypeError("disallowed: `default` for union member type")
- union_types = tuple(d.type for d in union_defs)
- return TypeDef(Union[union_types], dataclasses.MISSING)
-
- raise NotImplementedError()
-
-
-@dataclass
-class SchemaFlatteningOptions:
- qualified_names: bool = False
- recursive: bool = False
-
-
-def flatten_schema(
- schema: Schema, *, options: Optional[SchemaFlatteningOptions] = None
-) -> Schema:
- top_node = typing.cast(
- JsonSchemaTopLevelObject, json_to_object(JsonSchemaTopLevelObject, schema)
- )
- flattener = SchemaFlattener(options)
- obj = flattener.flatten(top_node)
- return typing.cast(Schema, object_to_json(obj))
-
-
-class SchemaFlattener:
- options: SchemaFlatteningOptions
-
- def __init__(self, options: Optional[SchemaFlatteningOptions] = None) -> None:
- self.options = options or SchemaFlatteningOptions()
-
- def flatten(self, source_node: JsonSchemaObject) -> JsonSchemaObject:
- if source_node.type != "object":
- return source_node
-
- source_props = source_node.properties or {}
- target_props: Dict[str, JsonSchemaAny] = {}
-
- source_reqs = source_node.required or []
- target_reqs: List[str] = []
-
- for name, prop in source_props.items():
- if not isinstance(prop, JsonSchemaObject):
- target_props[name] = prop
- if name in source_reqs:
- target_reqs.append(name)
- continue
-
- if self.options.recursive:
- obj = self.flatten(prop)
- else:
- obj = prop
- if obj.properties is not None:
- if self.options.qualified_names:
- target_props.update(
- (f"{name}.{n}", p) for n, p in obj.properties.items()
- )
- else:
- target_props.update(obj.properties.items())
- if obj.required is not None:
- if self.options.qualified_names:
- target_reqs.extend(f"{name}.{n}" for n in obj.required)
- else:
- target_reqs.extend(obj.required)
-
- target_node = copy.copy(source_node)
- target_node.properties = target_props or None
- target_node.additionalProperties = False
- target_node.required = target_reqs or None
- return target_node
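The `SchemaFlattener` at the tail of the deleted `classdef.py` hoists the properties of nested object schemas into their parent, optionally under dotted "qualified" names. The sketch below re-implements just the qualified-name case at the plain-dictionary level; it illustrates the behavior and is not the removed code:

```python
# Hedged sketch of the qualified-name flattening SchemaFlattener performed.
from typing import Any, Dict


def flatten(schema: Dict[str, Any]) -> Dict[str, Any]:
    # hoist nested object properties into the parent under dotted names
    props: Dict[str, Any] = {}
    required = []
    for name, prop in schema.get("properties", {}).items():
        if prop.get("type") == "object" and prop.get("properties"):
            for sub_name, sub_prop in prop["properties"].items():
                props[f"{name}.{sub_name}"] = sub_prop
            required.extend(f"{name}.{r}" for r in prop.get("required", []))
        else:
            props[name] = prop
            if name in schema.get("required", []):
                required.append(name)
    return {"type": "object", "properties": props,
            "required": required, "additionalProperties": False}


nested = {
    "type": "object",
    "properties": {
        "name": {"type": "string"},
        "address": {
            "type": "object",
            "properties": {"city": {"type": "string"}},
            "required": ["city"],
        },
    },
    "required": ["name"],
}

print(flatten(nested)["required"])  # ['name', 'address.city']
```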
diff --git a/docs/openapi_generator/strong_typing/core.py b/docs/openapi_generator/strong_typing/core.py
deleted file mode 100644
index 501b6a5db..000000000
--- a/docs/openapi_generator/strong_typing/core.py
+++ /dev/null
@@ -1,46 +0,0 @@
-# Copyright (c) Meta Platforms, Inc. and affiliates.
-# All rights reserved.
-#
-# This source code is licensed under the terms described in the LICENSE file in
-# the root directory of this source tree.
-
-"""
-Type-safe data interchange for Python data classes.
-
-:see: https://github.com/hunyadi/strong_typing
-"""
-
-from typing import Dict, List, Union
-
-
-class JsonObject:
- "Placeholder type for an unrestricted JSON object."
-
-
-class JsonArray:
- "Placeholder type for an unrestricted JSON array."
-
-
-# a JSON type with possible `null` values
-JsonType = Union[
- None,
- bool,
- int,
- float,
- str,
- Dict[str, "JsonType"],
- List["JsonType"],
-]
-
-# a JSON type that cannot contain `null` values
-StrictJsonType = Union[
- bool,
- int,
- float,
- str,
- Dict[str, "StrictJsonType"],
- List["StrictJsonType"],
-]
-
-# a meta-type that captures the object type in a JSON schema
-Schema = Dict[str, JsonType]
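`JsonType` from the deleted `core.py` is a recursive alias covering any JSON value, `null` included. A short usage sketch (the alias is copied verbatim from the module above):

```python
# Walking an arbitrary JSON value via the recursive JsonType alias.
from typing import Dict, List, Union

JsonType = Union[None, bool, int, float, str, Dict[str, "JsonType"], List["JsonType"]]


def count_nulls(value: JsonType) -> int:
    # recurse through objects and arrays, counting `null` leaves
    if value is None:
        return 1
    if isinstance(value, dict):
        return sum(count_nulls(v) for v in value.values())
    if isinstance(value, list):
        return sum(count_nulls(v) for v in value)
    return 0


print(count_nulls({"a": None, "b": [None, 1, {"c": None}]}))  # 3
```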
diff --git a/docs/openapi_generator/strong_typing/deserializer.py b/docs/openapi_generator/strong_typing/deserializer.py
deleted file mode 100644
index 5859d3bbe..000000000
--- a/docs/openapi_generator/strong_typing/deserializer.py
+++ /dev/null
@@ -1,959 +0,0 @@
-# Copyright (c) Meta Platforms, Inc. and affiliates.
-# All rights reserved.
-#
-# This source code is licensed under the terms described in the LICENSE file in
-# the root directory of this source tree.
-
-"""
-Type-safe data interchange for Python data classes.
-
-:see: https://github.com/hunyadi/strong_typing
-"""
-
-import abc
-import base64
-import dataclasses
-import datetime
-import enum
-import inspect
-import ipaddress
-import sys
-import typing
-import uuid
-from types import ModuleType
-from typing import (
- Any,
- Callable,
- Dict,
- Generic,
- List,
- Literal,
- NamedTuple,
- Optional,
- Set,
- Tuple,
- Type,
- TypeVar,
- Union,
-)
-
-from .core import JsonType
-from .exception import JsonKeyError, JsonTypeError, JsonValueError
-from .inspection import (
- create_object,
- enum_value_types,
- evaluate_type,
- get_class_properties,
- get_class_property,
- get_resolved_hints,
- is_dataclass_instance,
- is_dataclass_type,
- is_named_tuple_type,
- is_type_annotated,
- is_type_literal,
- is_type_optional,
- TypeLike,
- unwrap_annotated_type,
- unwrap_literal_values,
- unwrap_optional_type,
-)
-from .mapping import python_field_to_json_property
-from .name import python_type_to_str
-
-E = TypeVar("E", bound=enum.Enum)
-T = TypeVar("T")
-R = TypeVar("R")
-K = TypeVar("K")
-V = TypeVar("V")
-
-
-class Deserializer(abc.ABC, Generic[T]):
- "Parses a JSON value into a Python type."
-
- def build(self, context: Optional[ModuleType]) -> None:
- """
- Creates auxiliary parsers that this parser is depending on.
-
- :param context: A module context for evaluating types specified as a string.
- """
-
- @abc.abstractmethod
- def parse(self, data: JsonType) -> T:
- """
- Parses a JSON value into a Python type.
-
- :param data: The JSON value to de-serialize.
- :returns: The Python object that the JSON value de-serializes to.
- """
-
-
-class NoneDeserializer(Deserializer[None]):
- "Parses JSON `null` values into Python `None`."
-
- def parse(self, data: JsonType) -> None:
- if data is not None:
- raise JsonTypeError(
- f"`None` type expects JSON `null` but instead received: {data}"
- )
- return None
-
-
-class BoolDeserializer(Deserializer[bool]):
- "Parses JSON `boolean` values into Python `bool` type."
-
- def parse(self, data: JsonType) -> bool:
- if not isinstance(data, bool):
- raise JsonTypeError(
- f"`bool` type expects JSON `boolean` data but instead received: {data}"
- )
- return bool(data)
-
-
-class IntDeserializer(Deserializer[int]):
- "Parses JSON `number` values into Python `int` type."
-
- def parse(self, data: JsonType) -> int:
- if not isinstance(data, int):
- raise JsonTypeError(
- f"`int` type expects integer data as JSON `number` but instead received: {data}"
- )
- return int(data)
-
-
-class FloatDeserializer(Deserializer[float]):
- "Parses JSON `number` values into Python `float` type."
-
- def parse(self, data: JsonType) -> float:
- if not isinstance(data, float) and not isinstance(data, int):
- raise JsonTypeError(
-                f"`float` type expects data as JSON `number` but instead received: {data}"
- )
- return float(data)
-
-
-class StringDeserializer(Deserializer[str]):
- "Parses JSON `string` values into Python `str` type."
-
- def parse(self, data: JsonType) -> str:
- if not isinstance(data, str):
- raise JsonTypeError(
- f"`str` type expects JSON `string` data but instead received: {data}"
- )
- return str(data)
-
-
-class BytesDeserializer(Deserializer[bytes]):
- "Parses JSON `string` values of Base64-encoded strings into Python `bytes` type."
-
- def parse(self, data: JsonType) -> bytes:
- if not isinstance(data, str):
- raise JsonTypeError(
- f"`bytes` type expects JSON `string` data but instead received: {data}"
- )
- return base64.b64decode(data, validate=True)
-
-
-class DateTimeDeserializer(Deserializer[datetime.datetime]):
- "Parses JSON `string` values representing timestamps in ISO 8601 format to Python `datetime` with time zone."
-
- def parse(self, data: JsonType) -> datetime.datetime:
- if not isinstance(data, str):
- raise JsonTypeError(
- f"`datetime` type expects JSON `string` data but instead received: {data}"
- )
-
- if data.endswith("Z"):
- data = f"{data[:-1]}+00:00" # Python's isoformat() does not support military time zones like "Zulu" for UTC
- timestamp = datetime.datetime.fromisoformat(data)
- if timestamp.tzinfo is None:
- raise JsonValueError(
- f"timestamp lacks explicit time zone designator: {data}"
- )
- return timestamp
-
-
-class DateDeserializer(Deserializer[datetime.date]):
- "Parses JSON `string` values representing dates in ISO 8601 format to Python `date` type."
-
- def parse(self, data: JsonType) -> datetime.date:
- if not isinstance(data, str):
- raise JsonTypeError(
- f"`date` type expects JSON `string` data but instead received: {data}"
- )
-
- return datetime.date.fromisoformat(data)
-
-
-class TimeDeserializer(Deserializer[datetime.time]):
- "Parses JSON `string` values representing time instances in ISO 8601 format to Python `time` type with time zone."
-
- def parse(self, data: JsonType) -> datetime.time:
- if not isinstance(data, str):
- raise JsonTypeError(
- f"`time` type expects JSON `string` data but instead received: {data}"
- )
-
- return datetime.time.fromisoformat(data)
-
-
-class UUIDDeserializer(Deserializer[uuid.UUID]):
- "Parses JSON `string` values of UUID strings into Python `uuid.UUID` type."
-
- def parse(self, data: JsonType) -> uuid.UUID:
- if not isinstance(data, str):
- raise JsonTypeError(
- f"`UUID` type expects JSON `string` data but instead received: {data}"
- )
- return uuid.UUID(data)
-
-
-class IPv4Deserializer(Deserializer[ipaddress.IPv4Address]):
- "Parses JSON `string` values of IPv4 address strings into Python `ipaddress.IPv4Address` type."
-
- def parse(self, data: JsonType) -> ipaddress.IPv4Address:
- if not isinstance(data, str):
- raise JsonTypeError(
- f"`IPv4Address` type expects JSON `string` data but instead received: {data}"
- )
- return ipaddress.IPv4Address(data)
-
-
-class IPv6Deserializer(Deserializer[ipaddress.IPv6Address]):
- "Parses JSON `string` values of IPv6 address strings into Python `ipaddress.IPv6Address` type."
-
- def parse(self, data: JsonType) -> ipaddress.IPv6Address:
- if not isinstance(data, str):
- raise JsonTypeError(
- f"`IPv6Address` type expects JSON `string` data but instead received: {data}"
- )
- return ipaddress.IPv6Address(data)
-
-
-class ListDeserializer(Deserializer[List[T]]):
- "Recursively de-serializes a JSON array into a Python `list`."
-
- item_type: Type[T]
- item_parser: Deserializer
-
- def __init__(self, item_type: Type[T]) -> None:
- self.item_type = item_type
-
- def build(self, context: Optional[ModuleType]) -> None:
- self.item_parser = _get_deserializer(self.item_type, context)
-
- def parse(self, data: JsonType) -> List[T]:
- if not isinstance(data, list):
- type_name = python_type_to_str(self.item_type)
- raise JsonTypeError(
- f"type `List[{type_name}]` expects JSON `array` data but instead received: {data}"
- )
-
- return [self.item_parser.parse(item) for item in data]
-
-
-class DictDeserializer(Deserializer[Dict[K, V]]):
- "Recursively de-serializes a JSON object into a Python `dict`."
-
- key_type: Type[K]
- value_type: Type[V]
- value_parser: Deserializer[V]
-
- def __init__(self, key_type: Type[K], value_type: Type[V]) -> None:
- self.key_type = key_type
- self.value_type = value_type
- self._check_key_type()
-
- def build(self, context: Optional[ModuleType]) -> None:
- self.value_parser = _get_deserializer(self.value_type, context)
-
- def _check_key_type(self) -> None:
- if self.key_type is str:
- return
-
- if issubclass(self.key_type, enum.Enum):
- value_types = enum_value_types(self.key_type)
- if len(value_types) != 1:
- raise JsonTypeError(
- f"type `{self.container_type}` has invalid key type, "
- f"enumerations must have a consistent member value type but several types found: {value_types}"
- )
- value_type = value_types.pop()
-        if value_type is not str:
-            raise JsonTypeError(
-                f"type `{self.container_type}` has invalid enumeration key type, expected `enum.Enum` with string values"
-            )
-        return
-
- raise JsonTypeError(
-            f"type `{self.container_type}` has invalid key type, expected `str` or `enum.Enum` with string values"
- )
-
- @property
- def container_type(self) -> str:
- key_type_name = python_type_to_str(self.key_type)
- value_type_name = python_type_to_str(self.value_type)
- return f"Dict[{key_type_name}, {value_type_name}]"
-
- def parse(self, data: JsonType) -> Dict[K, V]:
- if not isinstance(data, dict):
- raise JsonTypeError(
-                f"type `{self.container_type}` expects JSON `object` data but instead received: {data}"
- )
-
- return dict(
- (self.key_type(key), self.value_parser.parse(value)) # type: ignore[call-arg]
- for key, value in data.items()
- )
-
-
-class SetDeserializer(Deserializer[Set[T]]):
- "Recursively de-serializes a JSON list into a Python `set`."
-
- member_type: Type[T]
- member_parser: Deserializer
-
- def __init__(self, member_type: Type[T]) -> None:
- self.member_type = member_type
-
- def build(self, context: Optional[ModuleType]) -> None:
- self.member_parser = _get_deserializer(self.member_type, context)
-
- def parse(self, data: JsonType) -> Set[T]:
- if not isinstance(data, list):
- type_name = python_type_to_str(self.member_type)
- raise JsonTypeError(
- f"type `Set[{type_name}]` expects JSON `array` data but instead received: {data}"
- )
-
- return set(self.member_parser.parse(item) for item in data)
-
-
-class TupleDeserializer(Deserializer[Tuple[Any, ...]]):
- "Recursively de-serializes a JSON list into a Python `tuple`."
-
- item_types: Tuple[Type[Any], ...]
- item_parsers: Tuple[Deserializer[Any], ...]
-
- def __init__(self, item_types: Tuple[Type[Any], ...]) -> None:
- self.item_types = item_types
-
- def build(self, context: Optional[ModuleType]) -> None:
- self.item_parsers = tuple(
- _get_deserializer(item_type, context) for item_type in self.item_types
- )
-
- @property
- def container_type(self) -> str:
- type_names = ", ".join(
- python_type_to_str(item_type) for item_type in self.item_types
- )
- return f"Tuple[{type_names}]"
-
- def parse(self, data: JsonType) -> Tuple[Any, ...]:
- if not isinstance(data, list) or len(data) != len(self.item_parsers):
- if not isinstance(data, list):
- raise JsonTypeError(
- f"type `{self.container_type}` expects JSON `array` data but instead received: {data}"
- )
- else:
- count = len(self.item_parsers)
- raise JsonValueError(
- f"type `{self.container_type}` expects a JSON `array` of length {count} but received length {len(data)}"
- )
-
- return tuple(
- item_parser.parse(item)
- for item_parser, item in zip(self.item_parsers, data)
- )
-
-
-class UnionDeserializer(Deserializer):
- "De-serializes a JSON value (of any type) into a Python union type."
-
- member_types: Tuple[type, ...]
- member_parsers: Tuple[Deserializer, ...]
-
- def __init__(self, member_types: Tuple[type, ...]) -> None:
- self.member_types = member_types
-
- def build(self, context: Optional[ModuleType]) -> None:
- self.member_parsers = tuple(
- _get_deserializer(member_type, context) for member_type in self.member_types
- )
-
- def parse(self, data: JsonType) -> Any:
- for member_parser in self.member_parsers:
- # iterate over potential types of discriminated union
- try:
- return member_parser.parse(data)
- except (JsonKeyError, JsonTypeError):
- # indicates a required field is missing from JSON dict -OR- the data cannot be cast to the expected type,
- # i.e. we don't have the type that we are looking for
- continue
-
- type_names = ", ".join(
- python_type_to_str(member_type) for member_type in self.member_types
- )
- raise JsonKeyError(
- f"type `Union[{type_names}]` could not be instantiated from: {data}"
- )
-
-
-def get_literal_properties(typ: type) -> Set[str]:
- "Returns the names of all properties in a class that are of a literal type."
-
- return set(
- property_name
- for property_name, property_type in get_class_properties(typ)
- if is_type_literal(property_type)
- )
-
-
-def get_discriminating_properties(types: Tuple[type, ...]) -> Set[str]:
- "Returns a set of properties with literal type that are common across all specified classes."
-
- if not types or not all(isinstance(typ, type) for typ in types):
- return set()
-
- props = get_literal_properties(types[0])
- for typ in types[1:]:
- props = props & get_literal_properties(typ)
-
- return props
-
-
-class TaggedUnionDeserializer(Deserializer):
- "De-serializes a JSON value with one or more disambiguating properties into a Python union type."
-
- member_types: Tuple[type, ...]
- disambiguating_properties: Set[str]
- member_parsers: Dict[Tuple[str, Any], Deserializer]
-
- def __init__(self, member_types: Tuple[type, ...]) -> None:
- self.member_types = member_types
- self.disambiguating_properties = get_discriminating_properties(member_types)
-
- def build(self, context: Optional[ModuleType]) -> None:
- self.member_parsers = {}
- for member_type in self.member_types:
- for property_name in self.disambiguating_properties:
- literal_type = get_class_property(member_type, property_name)
- if not literal_type:
- continue
-
- for literal_value in unwrap_literal_values(literal_type):
- tpl = (property_name, literal_value)
- if tpl in self.member_parsers:
- raise JsonTypeError(
- f"disambiguating property `{property_name}` in type `{self.union_type}` has a duplicate value: {literal_value}"
- )
-
- self.member_parsers[tpl] = _get_deserializer(member_type, context)
-
- @property
- def union_type(self) -> str:
- type_names = ", ".join(
- python_type_to_str(member_type) for member_type in self.member_types
- )
- return f"Union[{type_names}]"
-
- def parse(self, data: JsonType) -> Any:
- if not isinstance(data, dict):
- raise JsonTypeError(
- f"tagged union type `{self.union_type}` expects JSON `object` data but instead received: {data}"
- )
-
- for property_name in self.disambiguating_properties:
- disambiguating_value = data.get(property_name)
- if disambiguating_value is None:
- continue
-
- member_parser = self.member_parsers.get(
- (property_name, disambiguating_value)
- )
- if member_parser is None:
- raise JsonTypeError(
- f"disambiguating property value is invalid for tagged union type `{self.union_type}`: {data}"
- )
-
- return member_parser.parse(data)
-
- raise JsonTypeError(
- f"disambiguating property value is missing for tagged union type `{self.union_type}`: {data}"
- )
-
-
-class LiteralDeserializer(Deserializer):
- "De-serializes a JSON value into a Python literal type."
-
- values: Tuple[Any, ...]
- parser: Deserializer
-
- def __init__(self, values: Tuple[Any, ...]) -> None:
- self.values = values
-
- def build(self, context: Optional[ModuleType]) -> None:
- literal_type_tuple = tuple(type(value) for value in self.values)
- literal_type_set = set(literal_type_tuple)
- if len(literal_type_set) != 1:
- value_names = ", ".join(repr(value) for value in self.values)
- raise TypeError(
- f"type `Literal[{value_names}]` expects consistent literal value types but got: {literal_type_tuple}"
- )
-
- literal_type = literal_type_set.pop()
- self.parser = _get_deserializer(literal_type, context)
-
- def parse(self, data: JsonType) -> Any:
- value = self.parser.parse(data)
- if value not in self.values:
- value_names = ", ".join(repr(value) for value in self.values)
- raise JsonTypeError(
- f"type `Literal[{value_names}]` could not be instantiated from: {data}"
- )
- return value
-
-
-class EnumDeserializer(Deserializer[E]):
- "Returns an enumeration instance based on the enumeration value read from a JSON value."
-
- enum_type: Type[E]
-
- def __init__(self, enum_type: Type[E]) -> None:
- self.enum_type = enum_type
-
- def parse(self, data: JsonType) -> E:
- return self.enum_type(data)
-
-
-class CustomDeserializer(Deserializer[T]):
- "Uses the `from_json` class method in class to de-serialize the object from JSON."
-
- converter: Callable[[JsonType], T]
-
- def __init__(self, converter: Callable[[JsonType], T]) -> None:
- self.converter = converter
-
- def parse(self, data: JsonType) -> T:
- return self.converter(data)
-
-
-class FieldDeserializer(abc.ABC, Generic[T, R]):
- """
- Deserializes a JSON property into a Python object field.
-
- :param property_name: The name of the JSON property to read from a JSON `object`.
- :param field_name: The name of the field in a Python class to write data to.
- :param parser: A compatible deserializer that can handle the field's type.
- """
-
- property_name: str
- field_name: str
- parser: Deserializer[T]
-
- def __init__(
- self, property_name: str, field_name: str, parser: Deserializer[T]
- ) -> None:
- self.property_name = property_name
- self.field_name = field_name
- self.parser = parser
-
- @abc.abstractmethod
- def parse_field(self, data: Dict[str, JsonType]) -> R: ...
-
-
-class RequiredFieldDeserializer(FieldDeserializer[T, T]):
- "Deserializes a JSON property into a mandatory Python object field."
-
- def parse_field(self, data: Dict[str, JsonType]) -> T:
- if self.property_name not in data:
- raise JsonKeyError(
- f"missing required property `{self.property_name}` from JSON object: {data}"
- )
-
- return self.parser.parse(data[self.property_name])
-
-
-class OptionalFieldDeserializer(FieldDeserializer[T, Optional[T]]):
- "Deserializes a JSON property into an optional Python object field with a default value of `None`."
-
- def parse_field(self, data: Dict[str, JsonType]) -> Optional[T]:
- value = data.get(self.property_name)
- if value is not None:
- return self.parser.parse(value)
- else:
- return None
-
-
-class DefaultFieldDeserializer(FieldDeserializer[T, T]):
- "Deserializes a JSON property into a Python object field with an explicit default value."
-
- default_value: T
-
- def __init__(
- self,
- property_name: str,
- field_name: str,
- parser: Deserializer,
- default_value: T,
- ) -> None:
- super().__init__(property_name, field_name, parser)
- self.default_value = default_value
-
- def parse_field(self, data: Dict[str, JsonType]) -> T:
- value = data.get(self.property_name)
- if value is not None:
- return self.parser.parse(value)
- else:
- return self.default_value
-
-
-class DefaultFactoryFieldDeserializer(FieldDeserializer[T, T]):
- "Deserializes a JSON property into an optional Python object field with an explicit default value factory."
-
- default_factory: Callable[[], T]
-
- def __init__(
- self,
- property_name: str,
- field_name: str,
- parser: Deserializer[T],
- default_factory: Callable[[], T],
- ) -> None:
- super().__init__(property_name, field_name, parser)
- self.default_factory = default_factory
-
- def parse_field(self, data: Dict[str, JsonType]) -> T:
- value = data.get(self.property_name)
- if value is not None:
- return self.parser.parse(value)
- else:
- return self.default_factory()
-
-
-class ClassDeserializer(Deserializer[T]):
- "Base class for de-serializing class-like types such as data classes, named tuples and regular classes."
-
- class_type: type
- property_parsers: List[FieldDeserializer]
- property_fields: Set[str]
-
- def __init__(self, class_type: Type[T]) -> None:
- self.class_type = class_type
-
- def assign(self, property_parsers: List[FieldDeserializer]) -> None:
- self.property_parsers = property_parsers
- self.property_fields = set(
- property_parser.property_name for property_parser in property_parsers
- )
-
- def parse(self, data: JsonType) -> T:
- if not isinstance(data, dict):
- type_name = python_type_to_str(self.class_type)
- raise JsonTypeError(
-                f"type `{type_name}` expects JSON `object` data but instead received: {data}"
- )
-
- object_data: Dict[str, JsonType] = typing.cast(Dict[str, JsonType], data)
-
- field_values = {}
- for property_parser in self.property_parsers:
- field_values[property_parser.field_name] = property_parser.parse_field(
- object_data
- )
-
- if not self.property_fields.issuperset(object_data):
- unassigned_names = [
- name for name in object_data if name not in self.property_fields
- ]
- raise JsonKeyError(
- f"unrecognized fields in JSON object: {unassigned_names}"
- )
-
- return self.create(**field_values)
-
- def create(self, **field_values: Any) -> T:
- "Instantiates an object with a collection of property values."
-
- obj: T = create_object(self.class_type)
-
- # use `setattr` on newly created object instance
- for field_name, field_value in field_values.items():
- setattr(obj, field_name, field_value)
- return obj
-
-
-class NamedTupleDeserializer(ClassDeserializer[NamedTuple]):
- "De-serializes a named tuple from a JSON `object`."
-
- def build(self, context: Optional[ModuleType]) -> None:
- property_parsers: List[FieldDeserializer] = [
- RequiredFieldDeserializer(
- field_name, field_name, _get_deserializer(field_type, context)
- )
- for field_name, field_type in get_resolved_hints(self.class_type).items()
- ]
- super().assign(property_parsers)
-
- def create(self, **field_values: Any) -> NamedTuple:
- return self.class_type(**field_values)
-
-
-class DataclassDeserializer(ClassDeserializer[T]):
- "De-serializes a data class from a JSON `object`."
-
- def __init__(self, class_type: Type[T]) -> None:
- if not dataclasses.is_dataclass(class_type):
- raise TypeError("expected: data-class type")
- super().__init__(class_type) # type: ignore[arg-type]
-
- def build(self, context: Optional[ModuleType]) -> None:
- property_parsers: List[FieldDeserializer] = []
- resolved_hints = get_resolved_hints(self.class_type)
- for field in dataclasses.fields(self.class_type):
- field_type = resolved_hints[field.name]
- property_name = python_field_to_json_property(field.name, field_type)
-
- is_optional = is_type_optional(field_type)
- has_default = field.default is not dataclasses.MISSING
- has_default_factory = field.default_factory is not dataclasses.MISSING
-
- if is_optional:
- required_type: Type[T] = unwrap_optional_type(field_type)
- else:
- required_type = field_type
-
- parser = _get_deserializer(required_type, context)
-
- if has_default:
- field_parser: FieldDeserializer = DefaultFieldDeserializer(
- property_name, field.name, parser, field.default
- )
- elif has_default_factory:
- default_factory = typing.cast(Callable[[], Any], field.default_factory)
- field_parser = DefaultFactoryFieldDeserializer(
- property_name, field.name, parser, default_factory
- )
- elif is_optional:
- field_parser = OptionalFieldDeserializer(
- property_name, field.name, parser
- )
- else:
- field_parser = RequiredFieldDeserializer(
- property_name, field.name, parser
- )
-
- property_parsers.append(field_parser)
-
- super().assign(property_parsers)
-
-
-class FrozenDataclassDeserializer(DataclassDeserializer[T]):
- "De-serializes a frozen data class from a JSON `object`."
-
- def create(self, **field_values: Any) -> T:
- "Instantiates an object with a collection of property values."
-
- # create object instance without calling `__init__`
- obj: T = create_object(self.class_type)
-
- # can't use `setattr` on frozen dataclasses, pass member variable values to `__init__`
- obj.__init__(**field_values) # type: ignore
- return obj
-
-
-class TypedClassDeserializer(ClassDeserializer[T]):
- "De-serializes a class with type annotations from a JSON `object` by iterating over class properties."
-
- def build(self, context: Optional[ModuleType]) -> None:
- property_parsers: List[FieldDeserializer] = []
- for field_name, field_type in get_resolved_hints(self.class_type).items():
- property_name = python_field_to_json_property(field_name, field_type)
-
- is_optional = is_type_optional(field_type)
-
- if is_optional:
- required_type: Type[T] = unwrap_optional_type(field_type)
- else:
- required_type = field_type
-
- parser = _get_deserializer(required_type, context)
-
- if is_optional:
- field_parser: FieldDeserializer = OptionalFieldDeserializer(
- property_name, field_name, parser
- )
- else:
- field_parser = RequiredFieldDeserializer(
- property_name, field_name, parser
- )
-
- property_parsers.append(field_parser)
-
- super().assign(property_parsers)
-
-
-def create_deserializer(
- typ: TypeLike, context: Optional[ModuleType] = None
-) -> Deserializer:
- """
- Creates a de-serializer engine to produce a Python object from an object obtained from a JSON string.
-
- When de-serializing a JSON object into a Python object, the following transformations are applied:
-
- * Fundamental types are parsed as `bool`, `int`, `float` or `str`.
- * Date and time types are parsed from the ISO 8601 format with time zone into the corresponding Python type
- `datetime`, `date` or `time`.
- * Byte arrays are read from a string with Base64 encoding into a `bytes` instance.
- * UUIDs are extracted from a UUID string compliant with RFC 4122 into a `uuid.UUID` instance.
- * Enumerations are instantiated with a lookup on enumeration value.
- * Containers (e.g. `list`, `dict`, `set`, `tuple`) are parsed recursively.
- * Complex objects with properties (including data class types) are populated from dictionaries of key-value pairs
- using reflection (enumerating type annotations).
-
- :raises TypeError: A de-serializer engine cannot be constructed for the input type.
- """
-
- if context is None:
- if isinstance(typ, type):
- context = sys.modules[typ.__module__]
-
- return _get_deserializer(typ, context)
-
-
-_CACHE: Dict[Tuple[str, str], Deserializer] = {}
-
-
-def _get_deserializer(typ: TypeLike, context: Optional[ModuleType]) -> Deserializer:
- "Creates or re-uses a de-serializer engine to parse an object obtained from a JSON string."
-
- cache_key = None
-
- if isinstance(typ, (str, typing.ForwardRef)):
- if context is None:
- raise TypeError(f"missing context for evaluating type: {typ}")
-
- if isinstance(typ, str):
- if hasattr(context, typ):
- cache_key = (context.__name__, typ)
- elif isinstance(typ, typing.ForwardRef):
- if hasattr(context, typ.__forward_arg__):
- cache_key = (context.__name__, typ.__forward_arg__)
-
- typ = evaluate_type(typ, context)
-
- typ = unwrap_annotated_type(typ) if is_type_annotated(typ) else typ
-
- if isinstance(typ, type) and typing.get_origin(typ) is None:
- cache_key = (typ.__module__, typ.__name__)
-
- if cache_key is not None:
- deserializer = _CACHE.get(cache_key)
- if deserializer is None:
- deserializer = _create_deserializer(typ)
-
- # store de-serializer immediately in cache to avoid stack overflow for recursive types
- _CACHE[cache_key] = deserializer
-
- if isinstance(typ, type):
- # use type's own module as context for evaluating member types
- context = sys.modules[typ.__module__]
-
- # create any de-serializers this de-serializer is depending on
- deserializer.build(context)
- else:
- # special forms are not always hashable, create a new de-serializer every time
- deserializer = _create_deserializer(typ)
- deserializer.build(context)
-
- return deserializer
-
-
-def _create_deserializer(typ: TypeLike) -> Deserializer:
- "Creates a de-serializer engine to parse an object obtained from a JSON string."
-
- # check for well-known types
- if typ is type(None):
- return NoneDeserializer()
- elif typ is bool:
- return BoolDeserializer()
- elif typ is int:
- return IntDeserializer()
- elif typ is float:
- return FloatDeserializer()
- elif typ is str:
- return StringDeserializer()
- elif typ is bytes:
- return BytesDeserializer()
- elif typ is datetime.datetime:
- return DateTimeDeserializer()
- elif typ is datetime.date:
- return DateDeserializer()
- elif typ is datetime.time:
- return TimeDeserializer()
- elif typ is uuid.UUID:
- return UUIDDeserializer()
- elif typ is ipaddress.IPv4Address:
- return IPv4Deserializer()
- elif typ is ipaddress.IPv6Address:
- return IPv6Deserializer()
-
- # dynamically-typed collection types
- if typ is list:
- raise TypeError("explicit item type required: use `List[T]` instead of `list`")
- if typ is dict:
- raise TypeError(
- "explicit key and value types required: use `Dict[K, V]` instead of `dict`"
- )
- if typ is set:
- raise TypeError("explicit member type required: use `Set[T]` instead of `set`")
- if typ is tuple:
- raise TypeError(
- "explicit item type list required: use `Tuple[T, ...]` instead of `tuple`"
- )
-
- # generic types (e.g. list, dict, set, etc.)
- origin_type = typing.get_origin(typ)
- if origin_type is list:
- (list_item_type,) = typing.get_args(typ) # unpack single tuple element
- return ListDeserializer(list_item_type)
- elif origin_type is dict:
- key_type, value_type = typing.get_args(typ)
- return DictDeserializer(key_type, value_type)
- elif origin_type is set:
- (set_member_type,) = typing.get_args(typ) # unpack single tuple element
- return SetDeserializer(set_member_type)
- elif origin_type is tuple:
- return TupleDeserializer(typing.get_args(typ))
- elif origin_type is Union:
- union_args = typing.get_args(typ)
- if get_discriminating_properties(union_args):
- return TaggedUnionDeserializer(union_args)
- else:
- return UnionDeserializer(union_args)
- elif origin_type is Literal:
- return LiteralDeserializer(typing.get_args(typ))
-
- if not inspect.isclass(typ):
- if is_dataclass_instance(typ):
- raise TypeError(f"dataclass type expected but got instance: {typ}")
- else:
- raise TypeError(f"unable to de-serialize unrecognized type: {typ}")
-
- if issubclass(typ, enum.Enum):
- return EnumDeserializer(typ)
-
- if is_named_tuple_type(typ):
- return NamedTupleDeserializer(typ)
-
- # check if object has custom serialization method
- convert_func = getattr(typ, "from_json", None)
- if callable(convert_func):
- return CustomDeserializer(convert_func)
-
- if is_dataclass_type(typ):
- dataclass_params = getattr(typ, "__dataclass_params__", None)
- if dataclass_params is not None and dataclass_params.frozen:
- return FrozenDataclassDeserializer(typ)
- else:
- return DataclassDeserializer(typ)
-
- return TypedClassDeserializer(typ)
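The core idea of the deleted `deserializer.py` is recursive descent over type annotations: build a parser for a type once, then apply it to each JSON payload. The sketch below compresses that idea into one function covering only dataclasses, `List[T]`, and primitives; the removed module additionally handled unions, tagged unions, enums, dates, parser caching, and more:

```python
# Hedged sketch of recursive-descent deserialization; not the removed code.
import dataclasses
import typing
from typing import Any, List


def parse(tp: Any, data: Any) -> Any:
    origin = typing.get_origin(tp)
    if origin is list:
        # unpack the single List[T] type argument and parse each element
        (item_tp,) = typing.get_args(tp)
        return [parse(item_tp, item) for item in data]
    if dataclasses.is_dataclass(tp):
        # parse each field recursively, then construct the dataclass
        hints = typing.get_type_hints(tp)
        return tp(**{f.name: parse(hints[f.name], data[f.name])
                     for f in dataclasses.fields(tp)})
    if not isinstance(data, tp):
        raise TypeError(f"expected {tp.__name__}, got {data!r}")
    return data


@dataclasses.dataclass
class Point:
    x: int
    y: int


print(parse(List[Point], [{"x": 1, "y": 2}]))  # [Point(x=1, y=2)]
```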
diff --git a/docs/openapi_generator/strong_typing/docstring.py b/docs/openapi_generator/strong_typing/docstring.py
deleted file mode 100644
index 3ef1e5e7a..000000000
--- a/docs/openapi_generator/strong_typing/docstring.py
+++ /dev/null
@@ -1,437 +0,0 @@
-# Copyright (c) Meta Platforms, Inc. and affiliates.
-# All rights reserved.
-#
-# This source code is licensed under the terms described in the LICENSE file in
-# the root directory of this source tree.
-
-"""
-Type-safe data interchange for Python data classes.
-
-:see: https://github.com/hunyadi/strong_typing
-"""
-
-import builtins
-import dataclasses
-import inspect
-import re
-import sys
-import types
-import typing
-from dataclasses import dataclass
-from io import StringIO
-from typing import Any, Callable, Dict, Optional, Protocol, Type, TypeVar
-
-if sys.version_info >= (3, 10):
- from typing import TypeGuard
-else:
- from typing_extensions import TypeGuard
-
-from .inspection import (
- DataclassInstance,
- get_class_properties,
- get_signature,
- is_dataclass_type,
- is_type_enum,
-)
-
-T = TypeVar("T")
-
-
-@dataclass
-class DocstringParam:
- """
- A parameter declaration in a parameter block.
-
- :param name: The name of the parameter.
- :param description: The description text for the parameter.
- """
-
- name: str
- description: str
- param_type: type = inspect.Signature.empty
-
- def __str__(self) -> str:
- return f":param {self.name}: {self.description}"
-
-
-@dataclass
-class DocstringReturns:
- """
- A `returns` declaration extracted from a docstring.
-
- :param description: The description text for the return value.
- """
-
- description: str
- return_type: type = inspect.Signature.empty
-
- def __str__(self) -> str:
- return f":returns: {self.description}"
-
-
-@dataclass
-class DocstringRaises:
- """
- A `raises` declaration extracted from a docstring.
-
- :param typename: The type name of the exception raised.
- :param description: The description associated with the exception raised.
- """
-
- typename: str
- description: str
- raise_type: type = inspect.Signature.empty
-
- def __str__(self) -> str:
- return f":raises {self.typename}: {self.description}"
-
-
-@dataclass
-class Docstring:
- """
- Represents the documentation string (a.k.a. docstring) for a type such as a (data) class or function.
-
- A docstring is broken down into the following components:
- * A short description, which is the first block of text in the documentation string, and ends with a double
- newline or a parameter block.
- * A long description, which is the optional block of text following the short description, and ends with
- a parameter block.
- * A parameter block of named parameter and description string pairs in ReST-style.
- * A `returns` declaration, which adds explanation to the return value.
- * A `raises` declaration, which adds explanation to the exception type raised by the function on error.
-
- When the docstring is attached to a data class, it is understood as the documentation string of the class
- `__init__` method.
-
- :param short_description: The short description text parsed from a docstring.
- :param long_description: The long description text parsed from a docstring.
- :param params: The parameter block extracted from a docstring.
-    :param returns: The returns declaration extracted from a docstring.
-    :param raises: The raises declarations extracted from a docstring, keyed by exception type name.
-    """
-
- short_description: Optional[str] = None
- long_description: Optional[str] = None
- params: Dict[str, DocstringParam] = dataclasses.field(default_factory=dict)
- returns: Optional[DocstringReturns] = None
- raises: Dict[str, DocstringRaises] = dataclasses.field(default_factory=dict)
-
- @property
- def full_description(self) -> Optional[str]:
- if self.short_description and self.long_description:
- return f"{self.short_description}\n\n{self.long_description}"
- elif self.short_description:
- return self.short_description
- else:
- return None
-
- def __str__(self) -> str:
- output = StringIO()
-
- has_description = self.short_description or self.long_description
- has_blocks = self.params or self.returns or self.raises
-
- if has_description:
- if self.short_description and self.long_description:
- output.write(self.short_description)
- output.write("\n\n")
- output.write(self.long_description)
- elif self.short_description:
- output.write(self.short_description)
-
- if has_blocks:
- if has_description:
- output.write("\n")
-
- for param in self.params.values():
- output.write("\n")
- output.write(str(param))
- if self.returns:
- output.write("\n")
- output.write(str(self.returns))
- for raises in self.raises.values():
- output.write("\n")
- output.write(str(raises))
-
- s = output.getvalue()
- output.close()
- return s
-
-
-def is_exception(member: object) -> TypeGuard[Type[BaseException]]:
- return isinstance(member, type) and issubclass(member, BaseException)
-
-
-def get_exceptions(module: types.ModuleType) -> Dict[str, Type[BaseException]]:
- "Returns all exception classes declared in a module."
-
- return {
- name: class_type
- for name, class_type in inspect.getmembers(module, is_exception)
- }
-
-
-class SupportsDoc(Protocol):
- __doc__: Optional[str]
-
-
-def parse_type(typ: SupportsDoc) -> Docstring:
- """
- Parse the docstring of a type into its components.
-
- :param typ: The type whose documentation string to parse.
- :returns: Components of the documentation string.
- """
-
- doc = get_docstring(typ)
- if doc is None:
- return Docstring()
-
- docstring = parse_text(doc)
- check_docstring(typ, docstring)
-
- # assign parameter and return types
- if is_dataclass_type(typ):
- properties = dict(get_class_properties(typing.cast(type, typ)))
-
- for name, param in docstring.params.items():
- param.param_type = properties[name]
-
- elif inspect.isfunction(typ):
- signature = get_signature(typ)
- for name, param in docstring.params.items():
- param.param_type = signature.parameters[name].annotation
- if docstring.returns:
- docstring.returns.return_type = signature.return_annotation
-
- # assign exception types
- defining_module = inspect.getmodule(typ)
- if defining_module:
- context: Dict[str, type] = {}
- context.update(get_exceptions(builtins))
- context.update(get_exceptions(defining_module))
- for exc_name, exc in docstring.raises.items():
- raise_type = context.get(exc_name)
- if raise_type is None:
- type_name = (
- getattr(typ, "__qualname__", None)
- or getattr(typ, "__name__", None)
- or None
- )
- raise TypeError(
- f"doc-string exception type `{exc_name}` is not an exception defined in the context of `{type_name}`"
- )
-
- exc.raise_type = raise_type
-
- return docstring
-
-
-def parse_text(text: str) -> Docstring:
- """
- Parse a ReST-style docstring into its components.
-
- :param text: The documentation string to parse, typically acquired as `type.__doc__`.
- :returns: Components of the documentation string.
- """
-
- if not text:
- return Docstring()
-
- # find block that starts object metadata block (e.g. `:param p:` or `:returns:`)
- text = inspect.cleandoc(text)
- match = re.search("^:", text, flags=re.MULTILINE)
- if match:
- desc_chunk = text[: match.start()]
- meta_chunk = text[match.start() :] # noqa: E203
- else:
- desc_chunk = text
- meta_chunk = ""
-
- # split description text into short and long description
- parts = desc_chunk.split("\n\n", 1)
-
- # ensure short description has no newlines
- short_description = parts[0].strip().replace("\n", " ") or None
-
- # ensure long description preserves its structure (e.g. preformatted text)
- if len(parts) > 1:
- long_description = parts[1].strip() or None
- else:
- long_description = None
-
- params: Dict[str, DocstringParam] = {}
- raises: Dict[str, DocstringRaises] = {}
- returns = None
- for match in re.finditer(
- r"(^:.*?)(?=^:|\Z)", meta_chunk, flags=re.DOTALL | re.MULTILINE
- ):
- chunk = match.group(0)
- if not chunk:
- continue
-
- args_chunk, desc_chunk = chunk.lstrip(":").split(":", 1)
- args = args_chunk.split()
- desc = re.sub(r"\s+", " ", desc_chunk.strip())
-
- if len(args) > 0:
- kw = args[0]
- if len(args) == 2:
- if kw == "param":
- params[args[1]] = DocstringParam(
- name=args[1],
- description=desc,
- )
- elif kw == "raise" or kw == "raises":
- raises[args[1]] = DocstringRaises(
- typename=args[1],
- description=desc,
- )
-
- elif len(args) == 1:
- if kw == "return" or kw == "returns":
- returns = DocstringReturns(description=desc)
-
- return Docstring(
- long_description=long_description,
- short_description=short_description,
- params=params,
- returns=returns,
- raises=raises,
- )
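As a usage sketch, the expected output of `parse_text` on a small ReST-style docstring follows directly from the splitting logic above:

    doc = '''
    Sums two numbers.

    :param a: The first operand.
    :param b: The second operand.
    :returns: The arithmetic sum.
    '''
    ds = parse_text(doc)
    assert ds.short_description == "Sums two numbers."
    assert ds.params["b"].description == "The second operand."
    assert ds.returns is not None
    assert ds.returns.description == "The arithmetic sum."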
-
-
-def has_default_docstring(typ: SupportsDoc) -> bool:
- "Check if class has the auto-generated string assigned by @dataclass."
-
- if not isinstance(typ, type):
- return False
-
- if is_dataclass_type(typ):
- return (
- typ.__doc__ is not None
- and re.match(f"^{re.escape(typ.__name__)}[(].*[)]$", typ.__doc__)
- is not None
- )
-
- if is_type_enum(typ):
- return typ.__doc__ is not None and typ.__doc__ == "An enumeration."
-
- return False
-
-
-def has_docstring(typ: SupportsDoc) -> bool:
- "Check if class has a documentation string other than the auto-generated string assigned by @dataclass."
-
- if has_default_docstring(typ):
- return False
-
- return bool(typ.__doc__)
-
-
-def get_docstring(typ: SupportsDoc) -> Optional[str]:
- if typ.__doc__ is None:
- return None
-
- if has_default_docstring(typ):
- return None
-
- return typ.__doc__
-
-
-def check_docstring(
- typ: SupportsDoc, docstring: Docstring, strict: bool = False
-) -> None:
- """
- Verifies the doc-string of a type.
-
-    :raises TypeError: Raised on a mismatch between doc-string parameters and the function or type signature.
- """
-
- if is_dataclass_type(typ):
- check_dataclass_docstring(typ, docstring, strict)
- elif inspect.isfunction(typ):
- check_function_docstring(typ, docstring, strict)
-
-
-def check_dataclass_docstring(
- typ: Type[DataclassInstance], docstring: Docstring, strict: bool = False
-) -> None:
- """
- Verifies the doc-string of a data-class type.
-
- :param strict: Whether to check if all data-class members have doc-strings.
- :raises TypeError: Raised on a mismatch between doc-string parameters and data-class members.
- """
-
- if not is_dataclass_type(typ):
- raise TypeError("not a data-class type")
-
- properties = dict(get_class_properties(typ))
- class_name = typ.__name__
-
- for name in docstring.params:
- if name not in properties:
- raise TypeError(
- f"doc-string parameter `{name}` is not a member of the data-class `{class_name}`"
- )
-
- if not strict:
- return
-
- for name in properties:
- if name not in docstring.params:
- raise TypeError(
- f"member `{name}` in data-class `{class_name}` is missing its doc-string"
- )
-
-
-def check_function_docstring(
- fn: Callable[..., Any], docstring: Docstring, strict: bool = False
-) -> None:
- """
- Verifies the doc-string of a function or member function.
-
- :param strict: Whether to check if all function parameters and the return type have doc-strings.
- :raises TypeError: Raised on a mismatch between doc-string parameters and function signature.
- """
-
- signature = get_signature(fn)
- func_name = fn.__qualname__
-
- for name in docstring.params:
- if name not in signature.parameters:
- raise TypeError(
- f"doc-string parameter `{name}` is absent from signature of function `{func_name}`"
- )
-
- if (
- docstring.returns is not None
- and signature.return_annotation is inspect.Signature.empty
- ):
- raise TypeError(
- f"doc-string has returns description in function `{func_name}` with no return type annotation"
- )
-
- if not strict:
- return
-
- for name, param in signature.parameters.items():
- # ignore `self` in member function signatures
- if name == "self" and (
- param.kind is inspect.Parameter.POSITIONAL_ONLY
- or param.kind is inspect.Parameter.POSITIONAL_OR_KEYWORD
- ):
- continue
-
- if name not in docstring.params:
- raise TypeError(
- f"function parameter `{name}` in `{func_name}` is missing its doc-string"
- )
-
- if (
- signature.return_annotation is not inspect.Signature.empty
- and docstring.returns is None
- ):
- raise TypeError(
- f"function `{func_name}` has no returns description in its doc-string"
- )
diff --git a/docs/openapi_generator/strong_typing/exception.py b/docs/openapi_generator/strong_typing/exception.py
deleted file mode 100644
index af037cc3c..000000000
--- a/docs/openapi_generator/strong_typing/exception.py
+++ /dev/null
@@ -1,23 +0,0 @@
-# Copyright (c) Meta Platforms, Inc. and affiliates.
-# All rights reserved.
-#
-# This source code is licensed under the terms described in the LICENSE file in
-# the root directory of this source tree.
-
-"""
-Type-safe data interchange for Python data classes.
-
-:see: https://github.com/hunyadi/strong_typing
-"""
-
-
-class JsonKeyError(Exception):
- "Raised when deserialization for a class or union type has failed because a matching member was not found."
-
-
-class JsonValueError(Exception):
- "Raised when (de)serialization of data has failed due to invalid value."
-
-
-class JsonTypeError(Exception):
- "Raised when deserialization of data has failed due to a type mismatch."
diff --git a/docs/openapi_generator/strong_typing/inspection.py b/docs/openapi_generator/strong_typing/inspection.py
deleted file mode 100644
index 41804f12c..000000000
--- a/docs/openapi_generator/strong_typing/inspection.py
+++ /dev/null
@@ -1,1053 +0,0 @@
-# Copyright (c) Meta Platforms, Inc. and affiliates.
-# All rights reserved.
-#
-# This source code is licensed under the terms described in the LICENSE file in
-# the root directory of this source tree.
-
-"""
-Type-safe data interchange for Python data classes.
-
-:see: https://github.com/hunyadi/strong_typing
-"""
-
-import dataclasses
-import datetime
-import enum
-import importlib
-import importlib.machinery
-import importlib.util
-import inspect
-import re
-import sys
-import types
-import typing
-import uuid
-from typing import (
- Any,
- Callable,
- Dict,
- Iterable,
- List,
- Literal,
- NamedTuple,
- Optional,
- Protocol,
- runtime_checkable,
- Set,
- Tuple,
- Type,
- TypeVar,
- Union,
-)
-
-if sys.version_info >= (3, 9):
- from typing import Annotated
-else:
- from typing_extensions import Annotated
-
-if sys.version_info >= (3, 10):
- from typing import TypeGuard
-else:
- from typing_extensions import TypeGuard
-
-S = TypeVar("S")
-T = TypeVar("T")
-K = TypeVar("K")
-V = TypeVar("V")
-
-
-def _is_type_like(data_type: object) -> bool:
- """
- Checks if the object is a type or type-like object (e.g. generic type).
-
- :param data_type: The object to validate.
- :returns: True if the object is a type or type-like object.
- """
-
- if isinstance(data_type, type):
- # a standard type
- return True
- elif typing.get_origin(data_type) is not None:
- # a generic type such as `list`, `dict` or `set`
- return True
- elif hasattr(data_type, "__forward_arg__"):
- # an instance of `ForwardRef`
- return True
- elif data_type is Any:
- # the special form `Any`
- return True
- else:
- return False
-
-
-if sys.version_info >= (3, 9):
- TypeLike = Union[type, types.GenericAlias, typing.ForwardRef, Any]
-
- def is_type_like(
- data_type: object,
- ) -> TypeGuard[TypeLike]:
- """
- Checks if the object is a type or type-like object (e.g. generic type).
-
- :param data_type: The object to validate.
- :returns: True if the object is a type or type-like object.
- """
-
- return _is_type_like(data_type)
-
-else:
- TypeLike = object
-
- def is_type_like(
- data_type: object,
- ) -> bool:
- return _is_type_like(data_type)
-
-
-def evaluate_member_type(typ: Any, cls: type) -> Any:
- """
- Evaluates a forward reference type in a dataclass member.
-
- :param typ: The dataclass member type to convert.
- :param cls: The dataclass in which the member is defined.
- :returns: The evaluated type.
- """
-
- return evaluate_type(typ, sys.modules[cls.__module__])
-
-
-def evaluate_type(typ: Any, module: types.ModuleType) -> Any:
- """
- Evaluates a forward reference type.
-
- :param typ: The type to convert, typically a dataclass member type.
- :param module: The context for the type, i.e. the module in which the member is defined.
- :returns: The evaluated type.
- """
-
- if isinstance(typ, str):
- # evaluate data-class field whose type annotation is a string
- return eval(typ, module.__dict__, locals())
- if isinstance(typ, typing.ForwardRef):
- if sys.version_info >= (3, 9):
- return typ._evaluate(module.__dict__, locals(), recursive_guard=frozenset())
- else:
- return typ._evaluate(module.__dict__, locals())
- else:
- return typ
-
-
-@runtime_checkable
-class DataclassInstance(Protocol):
- __dataclass_fields__: typing.ClassVar[Dict[str, dataclasses.Field]]
-
-
-def is_dataclass_type(typ: Any) -> TypeGuard[Type[DataclassInstance]]:
- "True if the argument corresponds to a data class type (but not an instance)."
-
- typ = unwrap_annotated_type(typ)
- return isinstance(typ, type) and dataclasses.is_dataclass(typ)
-
-
-def is_dataclass_instance(obj: Any) -> TypeGuard[DataclassInstance]:
- "True if the argument corresponds to a data class instance (but not a type)."
-
- return not isinstance(obj, type) and dataclasses.is_dataclass(obj)
-
-
-@dataclasses.dataclass
-class DataclassField:
- name: str
- type: Any
- default: Any
-
- def __init__(
- self, name: str, type: Any, default: Any = dataclasses.MISSING
- ) -> None:
- self.name = name
- self.type = type
- self.default = default
-
-
-def dataclass_fields(cls: Type[DataclassInstance]) -> Iterable[DataclassField]:
- "Generates the fields of a data-class resolving forward references."
-
- for field in dataclasses.fields(cls):
- yield DataclassField(
- field.name, evaluate_member_type(field.type, cls), field.default
- )
-
-
-def dataclass_field_by_name(cls: Type[DataclassInstance], name: str) -> DataclassField:
- "Looks up a field in a data-class by its field name."
-
- for field in dataclasses.fields(cls):
- if field.name == name:
- return DataclassField(field.name, evaluate_member_type(field.type, cls))
-
- raise LookupError(f"field `{name}` missing from class `{cls.__name__}`")
-
-
-def is_named_tuple_instance(obj: Any) -> TypeGuard[NamedTuple]:
- "True if the argument corresponds to a named tuple instance."
-
- return is_named_tuple_type(type(obj))
-
-
-def is_named_tuple_type(typ: Any) -> TypeGuard[Type[NamedTuple]]:
- """
- True if the argument corresponds to a named tuple type.
-
- Calling the function `collections.namedtuple` gives a new type that is a subclass of `tuple` (and no other classes)
- with a member named `_fields` that is a tuple whose items are all strings.
- """
-
- if not isinstance(typ, type):
- return False
-
- typ = unwrap_annotated_type(typ)
-
- b = getattr(typ, "__bases__", None)
- if b is None:
- return False
-
- if len(b) != 1 or b[0] != tuple:
- return False
-
- f = getattr(typ, "_fields", None)
- if not isinstance(f, tuple):
- return False
-
- return all(isinstance(n, str) for n in f)
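As a quick sketch, both the functional and the class-based named-tuple forms satisfy the structural test above:

    import collections
    from typing import NamedTuple

    PointA = collections.namedtuple("PointA", ["x", "y"])

    class PointB(NamedTuple):
        x: int
        y: int

    assert is_named_tuple_type(PointA)
    assert is_named_tuple_type(PointB)
    assert not is_named_tuple_type(tuple)  # plain tuple fails the base-class test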
-
-
-if sys.version_info >= (3, 11):
-
- def is_type_enum(typ: object) -> TypeGuard[Type[enum.Enum]]:
- "True if the specified type is an enumeration type."
-
- typ = unwrap_annotated_type(typ)
- return isinstance(typ, enum.EnumType)
-
-else:
-
- def is_type_enum(typ: object) -> TypeGuard[Type[enum.Enum]]:
- "True if the specified type is an enumeration type."
-
- typ = unwrap_annotated_type(typ)
-
- # use an explicit isinstance(..., type) check to filter out special forms like generics
- return isinstance(typ, type) and issubclass(typ, enum.Enum)
-
-
-def enum_value_types(enum_type: Type[enum.Enum]) -> List[type]:
- """
- Returns all unique value types of the `enum.Enum` type in definition order.
- """
-
- # filter unique enumeration value types by keeping definition order
- return list(dict.fromkeys(type(e.value) for e in enum_type))
-
-
-def extend_enum(
- source: Type[enum.Enum],
-) -> Callable[[Type[enum.Enum]], Type[enum.Enum]]:
- """
- Creates a new enumeration type extending the set of values in an existing type.
-
- :param source: The existing enumeration type to be extended with new values.
- :returns: A new enumeration type with the extended set of values.
- """
-
- def wrap(extend: Type[enum.Enum]) -> Type[enum.Enum]:
- # create new enumeration type combining the values from both types
- values: Dict[str, Any] = {}
- values.update((e.name, e.value) for e in source)
- values.update((e.name, e.value) for e in extend)
- enum_class: Type[enum.Enum] = enum.Enum(extend.__name__, values) # type: ignore
-
- # assign the newly created type to the same module where the extending class is defined
- setattr(enum_class, "__module__", extend.__module__)
- setattr(enum_class, "__doc__", extend.__doc__)
- setattr(sys.modules[extend.__module__], extend.__name__, enum_class)
-
- return enum.unique(enum_class)
-
- return wrap
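A hypothetical use of the decorator; since the combined values dictionary lists the source members first, definition order is preserved:

    import enum

    class Color(enum.Enum):
        RED = "red"
        GREEN = "green"

    @extend_enum(Color)
    class ExtendedColor(enum.Enum):
        BLUE = "blue"

    assert [e.name for e in ExtendedColor] == ["RED", "GREEN", "BLUE"]
    assert ExtendedColor.BLUE.value == "blue"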
-
-
-if sys.version_info >= (3, 10):
-
- def _is_union_like(typ: object) -> bool:
- "True if type is a union such as `Union[T1, T2, ...]` or a union type `T1 | T2`."
-
- return typing.get_origin(typ) is Union or isinstance(typ, types.UnionType)
-
-else:
-
- def _is_union_like(typ: object) -> bool:
- "True if type is a union such as `Union[T1, T2, ...]` or a union type `T1 | T2`."
-
- return typing.get_origin(typ) is Union
-
-
-def is_type_optional(
- typ: object, strict: bool = False
-) -> TypeGuard[Type[Optional[Any]]]:
- """
- True if the type annotation corresponds to an optional type (e.g. `Optional[T]` or `Union[T1,T2,None]`).
-
-    `Optional[T]` is represented as `Union[T, None]` in classic style, and is equivalent to `T | None` in new style.
-
- :param strict: True if only `Optional[T]` qualifies as an optional type but `Union[T1, T2, None]` does not.
- """
-
- typ = unwrap_annotated_type(typ)
-
- if _is_union_like(typ):
- args = typing.get_args(typ)
- if strict and len(args) != 2:
- return False
-
- return type(None) in args
-
- return False
-
-
-def unwrap_optional_type(typ: Type[Optional[T]]) -> Type[T]:
- """
- Extracts the inner type of an optional type.
-
- :param typ: The optional type `Optional[T]`.
- :returns: The inner type `T`.
- """
-
- return rewrap_annotated_type(_unwrap_optional_type, typ)
-
-
-def _unwrap_optional_type(typ: Type[Optional[T]]) -> Type[T]:
- "Extracts the type qualified as optional (e.g. returns `T` for `Optional[T]`)."
-
- # Optional[T] is represented internally as Union[T, None]
- if not _is_union_like(typ):
- raise TypeError("optional type must have un-subscripted type of Union")
-
- # will automatically unwrap Union[T] into T
- return Union[
- tuple(filter(lambda item: item is not type(None), typing.get_args(typ))) # type: ignore
- ]
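A few concrete cases that follow from the definitions above; note that unwrapping `Optional[T]` yields `T` itself:

    from typing import Optional, Union

    assert is_type_optional(Optional[int])
    assert is_type_optional(Union[int, str, None])
    # strict mode accepts only the two-member form Union[T, None]
    assert not is_type_optional(Union[int, str, None], strict=True)
    assert unwrap_optional_type(Optional[int]) is int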
-
-
-def is_type_union(typ: object) -> bool:
- "True if the type annotation corresponds to a union type (e.g. `Union[T1,T2,T3]`)."
-
- typ = unwrap_annotated_type(typ)
- if _is_union_like(typ):
- args = typing.get_args(typ)
- return len(args) > 2 or type(None) not in args
-
- return False
-
-
-def unwrap_union_types(typ: object) -> Tuple[object, ...]:
- """
- Extracts the inner types of a union type.
-
- :param typ: The union type `Union[T1, T2, ...]`.
- :returns: The inner types `T1`, `T2`, etc.
- """
-
- typ = unwrap_annotated_type(typ)
- return _unwrap_union_types(typ)
-
-
-def _unwrap_union_types(typ: object) -> Tuple[object, ...]:
- "Extracts the types in a union (e.g. returns a tuple of types `T1` and `T2` for `Union[T1, T2]`)."
-
- if not _is_union_like(typ):
- raise TypeError("union type must have un-subscripted type of Union")
-
- return typing.get_args(typ)
-
-
-def is_type_literal(typ: object) -> bool:
- "True if the specified type is a literal of one or more constant values, e.g. `Literal['string']` or `Literal[42]`."
-
- typ = unwrap_annotated_type(typ)
- return typing.get_origin(typ) is Literal
-
-
-def unwrap_literal_value(typ: object) -> Any:
- """
- Extracts the single constant value captured by a literal type.
-
- :param typ: The literal type `Literal[value]`.
- :returns: The values captured by the literal type.
- """
-
- args = unwrap_literal_values(typ)
- if len(args) != 1:
- raise TypeError("too many values in literal type")
-
- return args[0]
-
-
-def unwrap_literal_values(typ: object) -> Tuple[Any, ...]:
- """
- Extracts the constant values captured by a literal type.
-
- :param typ: The literal type `Literal[value, ...]`.
- :returns: A tuple of values captured by the literal type.
- """
-
- typ = unwrap_annotated_type(typ)
- return typing.get_args(typ)
-
-
-def unwrap_literal_types(typ: object) -> Tuple[type, ...]:
- """
- Extracts the types of the constant values captured by a literal type.
-
- :param typ: The literal type `Literal[value, ...]`.
- :returns: A tuple of item types `T` such that `type(value) == T`.
- """
-
- return tuple(type(t) for t in unwrap_literal_values(typ))
-
-
-def is_generic_list(typ: object) -> TypeGuard[Type[list]]:
- "True if the specified type is a generic list, i.e. `List[T]`."
-
- typ = unwrap_annotated_type(typ)
- return typing.get_origin(typ) is list
-
-
-def unwrap_generic_list(typ: Type[List[T]]) -> Type[T]:
- """
- Extracts the item type of a list type.
-
- :param typ: The list type `List[T]`.
- :returns: The item type `T`.
- """
-
- return rewrap_annotated_type(_unwrap_generic_list, typ)
-
-
-def _unwrap_generic_list(typ: Type[List[T]]) -> Type[T]:
- "Extracts the item type of a list type (e.g. returns `T` for `List[T]`)."
-
- (list_type,) = typing.get_args(typ) # unpack single tuple element
- return list_type
-
-
-def is_generic_set(typ: object) -> TypeGuard[Type[set]]:
- "True if the specified type is a generic set, i.e. `Set[T]`."
-
- typ = unwrap_annotated_type(typ)
- return typing.get_origin(typ) is set
-
-
-def unwrap_generic_set(typ: Type[Set[T]]) -> Type[T]:
- """
- Extracts the item type of a set type.
-
- :param typ: The set type `Set[T]`.
- :returns: The item type `T`.
- """
-
- return rewrap_annotated_type(_unwrap_generic_set, typ)
-
-
-def _unwrap_generic_set(typ: Type[Set[T]]) -> Type[T]:
- "Extracts the item type of a set type (e.g. returns `T` for `Set[T]`)."
-
- (set_type,) = typing.get_args(typ) # unpack single tuple element
- return set_type
-
-
-def is_generic_dict(typ: object) -> TypeGuard[Type[dict]]:
- "True if the specified type is a generic dictionary, i.e. `Dict[KeyType, ValueType]`."
-
- typ = unwrap_annotated_type(typ)
- return typing.get_origin(typ) is dict
-
-
-def unwrap_generic_dict(typ: Type[Dict[K, V]]) -> Tuple[Type[K], Type[V]]:
- """
- Extracts the key and value types of a dictionary type as a tuple.
-
- :param typ: The dictionary type `Dict[K, V]`.
- :returns: The key and value types `K` and `V`.
- """
-
- return _unwrap_generic_dict(unwrap_annotated_type(typ))
-
-
-def _unwrap_generic_dict(typ: Type[Dict[K, V]]) -> Tuple[Type[K], Type[V]]:
- "Extracts the key and value types of a dict type (e.g. returns (`K`, `V`) for `Dict[K, V]`)."
-
- key_type, value_type = typing.get_args(typ)
- return key_type, value_type
-
-
-def is_type_annotated(typ: TypeLike) -> bool:
- "True if the type annotation corresponds to an annotated type (i.e. `Annotated[T, ...]`)."
-
- return getattr(typ, "__metadata__", None) is not None
-
-
-def get_annotation(data_type: TypeLike, annotation_type: Type[T]) -> Optional[T]:
- """
- Returns the first annotation on a data type that matches the expected annotation type.
-
- :param data_type: The annotated type from which to extract the annotation.
- :param annotation_type: The annotation class to look for.
- :returns: The annotation class instance found (if any).
- """
-
- metadata = getattr(data_type, "__metadata__", None)
- if metadata is not None:
- for annotation in metadata:
- if isinstance(annotation, annotation_type):
- return annotation
-
- return None
-
-
-def unwrap_annotated_type(typ: T) -> T:
- "Extracts the wrapped type from an annotated type (e.g. returns `T` for `Annotated[T, ...]`)."
-
- if is_type_annotated(typ):
- # type is Annotated[T, ...]
- return typing.get_args(typ)[0]
- else:
- # type is a regular type
- return typ
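For example (assuming Python 3.9+ so that `Annotated` is importable from `typing`):

    from typing import Annotated

    assert is_type_annotated(Annotated[int, "units: seconds"])
    assert unwrap_annotated_type(Annotated[int, "units: seconds"]) is int
    assert unwrap_annotated_type(str) is str  # regular types pass through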
-
-
-def rewrap_annotated_type(
- transform: Callable[[Type[S]], Type[T]], typ: Type[S]
-) -> Type[T]:
- """
- Un-boxes, transforms and re-boxes an optionally annotated type.
-
- :param transform: A function that maps an un-annotated type to another type.
- :param typ: A type to un-box (if necessary), transform, and re-box (if necessary).
- """
-
- metadata = getattr(typ, "__metadata__", None)
- if metadata is not None:
- # type is Annotated[T, ...]
- inner_type = typing.get_args(typ)[0]
- else:
- # type is a regular type
- inner_type = typ
-
- transformed_type = transform(inner_type)
-
- if metadata is not None:
- return Annotated[(transformed_type, *metadata)] # type: ignore
- else:
- return transformed_type
-
-
-def get_module_classes(module: types.ModuleType) -> List[type]:
- "Returns all classes declared directly in a module."
-
- def is_class_member(member: object) -> TypeGuard[type]:
- return inspect.isclass(member) and member.__module__ == module.__name__
-
- return [class_type for _, class_type in inspect.getmembers(module, is_class_member)]
-
-
-if sys.version_info >= (3, 9):
-
- def get_resolved_hints(typ: type) -> Dict[str, type]:
- return typing.get_type_hints(typ, include_extras=True)
-
-else:
-
- def get_resolved_hints(typ: type) -> Dict[str, type]:
- return typing.get_type_hints(typ)
-
-
-def get_class_properties(typ: type) -> Iterable[Tuple[str, type]]:
- "Returns all properties of a class."
-
- if is_dataclass_type(typ):
- return ((field.name, field.type) for field in dataclasses.fields(typ))
- else:
- resolved_hints = get_resolved_hints(typ)
- return resolved_hints.items()
-
-
-def get_class_property(typ: type, name: str) -> Optional[type]:
- "Looks up the annotated type of a property in a class by its property name."
-
- for property_name, property_type in get_class_properties(typ):
- if name == property_name:
- return property_type
- return None
-
-
-@dataclasses.dataclass
-class _ROOT:
- pass
-
-
-def get_referenced_types(
- typ: TypeLike, module: Optional[types.ModuleType] = None
-) -> Set[type]:
- """
- Extracts types directly or indirectly referenced by this type.
-
- For example, extract `T` from `List[T]`, `Optional[T]` or `Annotated[T, ...]`, `K` and `V` from `Dict[K,V]`,
- `A` and `B` from `Union[A,B]`.
-
- :param typ: A type or special form.
- :param module: The context in which types are evaluated.
- :returns: Types referenced by the given type or special form.
- """
-
- collector = TypeCollector()
- collector.run(typ, _ROOT, module)
- return collector.references
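A small sketch of the traversal; field types of nested data classes are collected transitively, including types reached through `List[...]` and `Optional[...]`:

    import dataclasses
    from typing import List, Optional

    @dataclasses.dataclass
    class Address:
        street: str

    @dataclasses.dataclass
    class Person:
        name: str
        addresses: List[Address]
        nickname: Optional[str]

    refs = get_referenced_types(Person)
    assert {Person, Address, str} <= refs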
-
-
-class TypeCollector:
- """
- Collects types directly or indirectly referenced by a type.
-
- :param graph: The type dependency graph, linking types to types they depend on.
- """
-
- graph: Dict[type, Set[type]]
-
- @property
- def references(self) -> Set[type]:
- "Types collected by the type collector."
-
- dependencies = set()
- for edges in self.graph.values():
- dependencies.update(edges)
- return dependencies
-
- def __init__(self) -> None:
- self.graph = {_ROOT: set()}
-
- def traverse(self, typ: type) -> None:
- "Finds all dependent types of a type."
-
- self.run(typ, _ROOT, sys.modules[typ.__module__])
-
- def traverse_all(self, types: Iterable[type]) -> None:
- "Finds all dependent types of a list of types."
-
- for typ in types:
- self.traverse(typ)
-
- def run(
- self,
- typ: TypeLike,
- cls: Type[DataclassInstance],
- module: Optional[types.ModuleType],
- ) -> None:
- """
- Extracts types indirectly referenced by this type.
-
- For example, extract `T` from `List[T]`, `Optional[T]` or `Annotated[T, ...]`, `K` and `V` from `Dict[K,V]`,
- `A` and `B` from `Union[A,B]`.
-
- :param typ: A type or special form.
- :param cls: A dataclass type being expanded for dependent types.
- :param module: The context in which types are evaluated.
- """
-
- if typ is type(None) or typ is Any:
- return
-
- if isinstance(typ, type):
- self.graph[cls].add(typ)
-
- if typ in self.graph:
- return
-
- self.graph[typ] = set()
-
- metadata = getattr(typ, "__metadata__", None)
- if metadata is not None:
- # type is Annotated[T, ...]
- arg = typing.get_args(typ)[0]
- return self.run(arg, cls, module)
-
- # type is a forward reference
- if isinstance(typ, str) or isinstance(typ, typing.ForwardRef):
- if module is None:
- raise ValueError("missing context for evaluating types")
-
- evaluated_type = evaluate_type(typ, module)
- return self.run(evaluated_type, cls, module)
-
- # type is a special form
- origin = typing.get_origin(typ)
- if origin in [list, dict, frozenset, set, tuple, Union]:
- for arg in typing.get_args(typ):
- self.run(arg, cls, module)
- return
- elif origin is Literal:
- return
-
- # type is optional or a union type
- if is_type_optional(typ):
- return self.run(unwrap_optional_type(typ), cls, module)
- if is_type_union(typ):
- for union_type in unwrap_union_types(typ):
- self.run(union_type, cls, module)
- return
-
- # type is a regular type
- elif is_dataclass_type(typ) or is_type_enum(typ) or isinstance(typ, type):
- context = sys.modules[typ.__module__]
- if is_dataclass_type(typ):
- for field in dataclass_fields(typ):
- self.run(field.type, typ, context)
- else:
- for field_name, field_type in get_resolved_hints(typ).items():
- self.run(field_type, typ, context)
- return
-
- raise TypeError(f"expected: type-like; got: {typ}")
-
-
-if sys.version_info >= (3, 10):
-
- def get_signature(fn: Callable[..., Any]) -> inspect.Signature:
- "Extracts the signature of a function."
-
- return inspect.signature(fn, eval_str=True)
-
-else:
-
- def get_signature(fn: Callable[..., Any]) -> inspect.Signature:
- "Extracts the signature of a function."
-
- return inspect.signature(fn)
-
-
-def is_reserved_property(name: str) -> bool:
- "True if the name stands for an internal property."
-
- # filter built-in and special properties
- if re.match(r"^__.+__$", name):
- return True
-
- # filter built-in special names
- if name in ["_abc_impl"]:
- return True
-
- return False
-
-
-def create_module(name: str) -> types.ModuleType:
- """
- Creates a new module dynamically at run-time.
-
- :param name: Fully qualified name of the new module (with dot notation).
- """
-
- if name in sys.modules:
- raise KeyError(f"{name!r} already in sys.modules")
-
- spec = importlib.machinery.ModuleSpec(name, None)
- module = importlib.util.module_from_spec(spec)
- sys.modules[name] = module
- if spec.loader is not None:
- spec.loader.exec_module(module)
- return module
-
-
-if sys.version_info >= (3, 10):
-
- def create_data_type(class_name: str, fields: List[Tuple[str, type]]) -> type:
- """
- Creates a new data-class type dynamically.
-
-        :param class_name: The name of the new data-class type.
- :param fields: A list of fields (and their type) that the new data-class type is expected to have.
- :returns: The newly created data-class type.
- """
-
- # has the `slots` parameter
- return dataclasses.make_dataclass(class_name, fields, slots=True)
-
-else:
-
- def create_data_type(class_name: str, fields: List[Tuple[str, type]]) -> type:
- """
- Creates a new data-class type dynamically.
-
-        :param class_name: The name of the new data-class type.
- :param fields: A list of fields (and their type) that the new data-class type is expected to have.
- :returns: The newly created data-class type.
- """
-
- cls = dataclasses.make_dataclass(class_name, fields)
-
- cls_dict = dict(cls.__dict__)
- field_names = tuple(field.name for field in dataclasses.fields(cls))
-
- cls_dict["__slots__"] = field_names
-
- for field_name in field_names:
- cls_dict.pop(field_name, None)
- cls_dict.pop("__dict__", None)
-
- qualname = getattr(cls, "__qualname__", None)
- cls = type(cls)(cls.__name__, (), cls_dict)
- if qualname is not None:
- cls.__qualname__ = qualname
-
- return cls
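For illustration; on either branch the resulting class is slot-based, so assigning attributes outside the declared fields fails:

    Point = create_data_type("Point", [("x", float), ("y", float)])
    p = Point(1.0, 2.0)
    assert (p.x, p.y) == (1.0, 2.0)
    try:
        p.z = 3.0  # no instance __dict__: assignment outside __slots__ fails
    except AttributeError:
        pass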
-
-
-def create_object(typ: Type[T]) -> T:
- "Creates an instance of a type."
-
- if issubclass(typ, Exception):
- # exception types need special treatment
- e = typ.__new__(typ)
- return typing.cast(T, e)
- else:
- return object.__new__(typ)
-
-
-if sys.version_info >= (3, 9):
- TypeOrGeneric = Union[type, types.GenericAlias]
-
-else:
- TypeOrGeneric = object
-
-
-def is_generic_instance(obj: Any, typ: TypeLike) -> bool:
- """
-    Returns whether an object is an instance of a generic class, a standard class, or a subclass thereof.
-
- This function checks the following items recursively:
- * items of a list
- * keys and values of a dictionary
- * members of a set
- * items of a tuple
- * members of a union type
-
- :param obj: The (possibly generic container) object to check recursively.
- :param typ: The expected type of the object.
- """
-
- if isinstance(typ, typing.ForwardRef):
- fwd: typing.ForwardRef = typ
- identifier = fwd.__forward_arg__
- typ = eval(identifier)
- if isinstance(typ, type):
- return isinstance(obj, typ)
- else:
- return False
-
- # generic types (e.g. list, dict, set, etc.)
- origin_type = typing.get_origin(typ)
- if origin_type is list:
- if not isinstance(obj, list):
- return False
- (list_item_type,) = typing.get_args(typ) # unpack single tuple element
- list_obj: list = obj
- return all(is_generic_instance(item, list_item_type) for item in list_obj)
- elif origin_type is dict:
- if not isinstance(obj, dict):
- return False
- key_type, value_type = typing.get_args(typ)
- dict_obj: dict = obj
- return all(
- is_generic_instance(key, key_type)
- and is_generic_instance(value, value_type)
- for key, value in dict_obj.items()
- )
- elif origin_type is set:
- if not isinstance(obj, set):
- return False
- (set_member_type,) = typing.get_args(typ) # unpack single tuple element
- set_obj: set = obj
- return all(is_generic_instance(item, set_member_type) for item in set_obj)
- elif origin_type is tuple:
- if not isinstance(obj, tuple):
- return False
- return all(
- is_generic_instance(item, tuple_item_type)
- for tuple_item_type, item in zip(
- (tuple_item_type for tuple_item_type in typing.get_args(typ)),
- (item for item in obj),
- )
- )
- elif origin_type is Union:
- return any(
- is_generic_instance(obj, member_type)
- for member_type in typing.get_args(typ)
- )
- elif isinstance(typ, type):
- return isinstance(obj, typ)
- else:
- raise TypeError(f"expected `type` but got: {typ}")
-
-
-class RecursiveChecker:
- _pred: Optional[Callable[[type, Any], bool]]
-
- def __init__(self, pred: Callable[[type, Any], bool]) -> None:
- """
- Creates a checker to verify if a predicate applies to all nested member properties of an object recursively.
-
- :param pred: The predicate to test on member properties. Takes a property type and a property value.
- """
-
- self._pred = pred
-
- def pred(self, typ: type, obj: Any) -> bool:
- "Acts as a workaround for the type checker mypy."
-
- assert self._pred is not None
- return self._pred(typ, obj)
-
- def check(self, typ: TypeLike, obj: Any) -> bool:
- """
- Checks if a predicate applies to all nested member properties of an object recursively.
-
- :param typ: The type to recurse into.
- :param obj: The object to inspect recursively. Must be an instance of the given type.
- :returns: True if all member properties pass the filter predicate.
- """
-
- # check for well-known types
- if (
- typ is type(None)
- or typ is bool
- or typ is int
- or typ is float
- or typ is str
- or typ is bytes
- or typ is datetime.datetime
- or typ is datetime.date
- or typ is datetime.time
- or typ is uuid.UUID
- ):
- return self.pred(typing.cast(type, typ), obj)
-
- # generic types (e.g. list, dict, set, etc.)
- origin_type = typing.get_origin(typ)
- if origin_type is list:
- if not isinstance(obj, list):
- raise TypeError(f"expected `list` but got: {obj}")
- (list_item_type,) = typing.get_args(typ) # unpack single tuple element
- list_obj: list = obj
- return all(self.check(list_item_type, item) for item in list_obj)
- elif origin_type is dict:
- if not isinstance(obj, dict):
- raise TypeError(f"expected `dict` but got: {obj}")
- key_type, value_type = typing.get_args(typ)
- dict_obj: dict = obj
- return all(self.check(value_type, item) for item in dict_obj.values())
- elif origin_type is set:
- if not isinstance(obj, set):
- raise TypeError(f"expected `set` but got: {obj}")
- (set_member_type,) = typing.get_args(typ) # unpack single tuple element
- set_obj: set = obj
- return all(self.check(set_member_type, item) for item in set_obj)
- elif origin_type is tuple:
- if not isinstance(obj, tuple):
- raise TypeError(f"expected `tuple` but got: {obj}")
- return all(
- self.check(tuple_item_type, item)
- for tuple_item_type, item in zip(
- (tuple_item_type for tuple_item_type in typing.get_args(typ)),
- (item for item in obj),
- )
- )
- elif origin_type is Union:
- return self.pred(typ, obj) # type: ignore[arg-type]
-
- if not inspect.isclass(typ):
- raise TypeError(f"expected `type` but got: {typ}")
-
- # enumeration type
- if issubclass(typ, enum.Enum):
- if not isinstance(obj, enum.Enum):
- raise TypeError(f"expected `{typ}` but got: {obj}")
- return self.pred(typ, obj)
-
- # class types with properties
- if is_named_tuple_type(typ):
- if not isinstance(obj, tuple):
- raise TypeError(f"expected `NamedTuple` but got: {obj}")
- return all(
- self.check(field_type, getattr(obj, field_name))
- for field_name, field_type in typing.get_type_hints(typ).items()
- )
- elif is_dataclass_type(typ):
- if not isinstance(obj, typ):
- raise TypeError(f"expected `{typ}` but got: {obj}")
- resolved_hints = get_resolved_hints(typ)
- return all(
- self.check(resolved_hints[field.name], getattr(obj, field.name))
- for field in dataclasses.fields(typ)
- )
- else:
- if not isinstance(obj, typ):
- raise TypeError(f"expected `{typ}` but got: {obj}")
- return all(
- self.check(property_type, getattr(obj, property_name))
- for property_name, property_type in get_class_properties(typ)
- )
-
-
-def check_recursive(
- obj: object,
- /,
- *,
- pred: Optional[Callable[[type, Any], bool]] = None,
- type_pred: Optional[Callable[[type], bool]] = None,
- value_pred: Optional[Callable[[Any], bool]] = None,
-) -> bool:
- """
- Checks if a predicate applies to all nested member properties of an object recursively.
-
- :param obj: The object to inspect recursively.
- :param pred: The predicate to test on member properties. Takes a property type and a property value.
- :param type_pred: Constrains the check to properties of an expected type. Properties of other types pass automatically.
- :param value_pred: Verifies a condition on member property values (of an expected type).
- :returns: True if all member properties pass the filter predicate(s).
- """
-
- if type_pred is not None and value_pred is not None:
- if pred is not None:
- raise TypeError(
- "filter predicate not permitted when type and value predicates are present"
- )
-
- type_p: Callable[[Type[T]], bool] = type_pred
- value_p: Callable[[T], bool] = value_pred
- pred = lambda typ, obj: not type_p(typ) or value_p(obj) # noqa: E731
-
- elif value_pred is not None:
- if pred is not None:
- raise TypeError(
- "filter predicate not permitted when value predicate is present"
- )
-
- value_only_p: Callable[[T], bool] = value_pred
- pred = lambda typ, obj: value_only_p(obj) # noqa: E731
-
- elif type_pred is not None:
- raise TypeError("value predicate required when type predicate is present")
-
- elif pred is None:
- pred = lambda typ, obj: True # noqa: E731
-
- return RecursiveChecker(pred).check(type(obj), obj)
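A minimal sketch of the predicate combinations accepted above; here the type predicate narrows the check to `int` properties, and all other property types pass automatically:

    import dataclasses

    @dataclasses.dataclass
    class Config:
        name: str
        retries: int

    assert check_recursive(
        Config(name="svc", retries=3),
        type_pred=lambda typ: typ is int,
        value_pred=lambda value: value >= 0,
    )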
diff --git a/docs/openapi_generator/strong_typing/mapping.py b/docs/openapi_generator/strong_typing/mapping.py
deleted file mode 100644
index 2bc68bb63..000000000
--- a/docs/openapi_generator/strong_typing/mapping.py
+++ /dev/null
@@ -1,42 +0,0 @@
-# Copyright (c) Meta Platforms, Inc. and affiliates.
-# All rights reserved.
-#
-# This source code is licensed under the terms described in the LICENSE file in
-# the root directory of this source tree.
-
-"""
-Type-safe data interchange for Python data classes.
-
-:see: https://github.com/hunyadi/strong_typing
-"""
-
-import keyword
-from typing import Optional
-
-from .auxiliary import Alias
-from .inspection import get_annotation
-
-
-def python_field_to_json_property(
- python_id: str, python_type: Optional[object] = None
-) -> str:
- """
- Map a Python field identifier to a JSON property name.
-
- Authors may use an underscore appended at the end of a Python identifier as per PEP 8 if it clashes with a Python
- keyword: e.g. `in` would become `in_` and `from` would become `from_`. Remove these suffixes when exporting to JSON.
-
- Authors may supply an explicit alias with the type annotation `Alias`, e.g. `Annotated[MyType, Alias("alias")]`.
- """
-
- if python_type is not None:
- alias = get_annotation(python_type, Alias)
- if alias:
- return alias.name
-
- if python_id.endswith("_"):
- id = python_id[:-1]
- if keyword.iskeyword(id):
- return id
-
- return python_id
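A few concrete mappings that follow the rules above; the `Alias("...")` constructor signature is an assumption inferred from how `alias.name` is read here:

    from typing import Annotated

    assert python_field_to_json_property("from_") == "from"   # keyword-clash suffix stripped
    assert python_field_to_json_property("name_") == "name_"  # not a keyword, kept as-is
    assert (
        python_field_to_json_property("user_id", Annotated[str, Alias("userId")])
        == "userId"
    )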
diff --git a/docs/openapi_generator/strong_typing/name.py b/docs/openapi_generator/strong_typing/name.py
deleted file mode 100644
index c883794c0..000000000
--- a/docs/openapi_generator/strong_typing/name.py
+++ /dev/null
@@ -1,188 +0,0 @@
-# Copyright (c) Meta Platforms, Inc. and affiliates.
-# All rights reserved.
-#
-# This source code is licensed under the terms described in the LICENSE file in
-# the root directory of this source tree.
-
-"""
-Type-safe data interchange for Python data classes.
-
-:see: https://github.com/hunyadi/strong_typing
-"""
-
-import typing
-from typing import Any, Literal, Optional, Tuple, Union
-
-from .auxiliary import _auxiliary_types
-from .inspection import (
- is_generic_dict,
- is_generic_list,
- is_type_optional,
- is_type_union,
- TypeLike,
- unwrap_generic_dict,
- unwrap_generic_list,
- unwrap_optional_type,
- unwrap_union_types,
-)
-
-
-class TypeFormatter:
- """
- Type formatter.
-
- :param use_union_operator: Whether to emit union types as `X | Y` as per PEP 604.
- """
-
- use_union_operator: bool
-
- def __init__(self, use_union_operator: bool = False) -> None:
- self.use_union_operator = use_union_operator
-
- def union_to_str(self, data_type_args: Tuple[TypeLike, ...]) -> str:
- if self.use_union_operator:
- return " | ".join(self.python_type_to_str(t) for t in data_type_args)
- else:
- if len(data_type_args) == 2 and type(None) in data_type_args:
- # Optional[T] is represented as Union[T, None]
- origin_name = "Optional"
- data_type_args = tuple(t for t in data_type_args if t is not type(None))
- else:
- origin_name = "Union"
-
- args = ", ".join(self.python_type_to_str(t) for t in data_type_args)
- return f"{origin_name}[{args}]"
-
- def plain_type_to_str(self, data_type: TypeLike) -> str:
- "Returns the string representation of a Python type without metadata."
-
- # return forward references as the annotation string
- if isinstance(data_type, typing.ForwardRef):
- fwd: typing.ForwardRef = data_type
- return fwd.__forward_arg__
- elif isinstance(data_type, str):
- return data_type
-
- origin = typing.get_origin(data_type)
- if origin is not None:
- data_type_args = typing.get_args(data_type)
-
-            if origin is dict:  # Dict[K, V]
- origin_name = "Dict"
- elif origin is list: # List[T]
- origin_name = "List"
- elif origin is set: # Set[T]
- origin_name = "Set"
- elif origin is Union:
- return self.union_to_str(data_type_args)
- elif origin is Literal:
- args = ", ".join(repr(arg) for arg in data_type_args)
- return f"Literal[{args}]"
- else:
- origin_name = origin.__name__
-
- args = ", ".join(self.python_type_to_str(t) for t in data_type_args)
- return f"{origin_name}[{args}]"
-
- return data_type.__name__
-
- def python_type_to_str(self, data_type: TypeLike) -> str:
- "Returns the string representation of a Python type."
-
- if data_type is type(None):
- return "None"
-
- # use compact name for alias types
- name = _auxiliary_types.get(data_type)
- if name is not None:
- return name
-
- metadata = getattr(data_type, "__metadata__", None)
- if metadata is not None:
- # type is Annotated[T, ...]
- metatuple: Tuple[Any, ...] = metadata
- arg = typing.get_args(data_type)[0]
-
- # check for auxiliary types with user-defined annotations
- metaset = set(metatuple)
- for auxiliary_type, auxiliary_name in _auxiliary_types.items():
- auxiliary_arg = typing.get_args(auxiliary_type)[0]
- if arg is not auxiliary_arg:
- continue
-
- auxiliary_metatuple: Optional[Tuple[Any, ...]] = getattr(
- auxiliary_type, "__metadata__", None
- )
- if auxiliary_metatuple is None:
- continue
-
- if metaset.issuperset(auxiliary_metatuple):
- # type is an auxiliary type with extra annotations
- auxiliary_args = ", ".join(
- repr(m) for m in metatuple if m not in auxiliary_metatuple
- )
- return f"Annotated[{auxiliary_name}, {auxiliary_args}]"
-
- # type is an annotated type
- args = ", ".join(repr(m) for m in metatuple)
- return f"Annotated[{self.plain_type_to_str(arg)}, {args}]"
- else:
- # type is a regular type
- return self.plain_type_to_str(data_type)
-
-
-def python_type_to_str(data_type: TypeLike, use_union_operator: bool = False) -> str:
- """
- Returns the string representation of a Python type.
-
- :param use_union_operator: Whether to emit union types as `X | Y` as per PEP 604.
- """
-
- fmt = TypeFormatter(use_union_operator)
- return fmt.python_type_to_str(data_type)
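For example:

    from typing import Dict, Optional, Union

    assert python_type_to_str(Optional[int]) == "Optional[int]"
    assert python_type_to_str(Union[int, str]) == "Union[int, str]"
    assert python_type_to_str(Dict[str, int]) == "Dict[str, int]"
    assert python_type_to_str(Optional[int], use_union_operator=True) == "int | None"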
-
-
-def python_type_to_name(data_type: TypeLike, force: bool = False) -> str:
- """
- Returns the short name of a Python type.
-
- :param force: Whether to produce a name for composite types such as generics.
- """
-
- # use compact name for alias types
- name = _auxiliary_types.get(data_type)
- if name is not None:
- return name
-
- # unwrap annotated types
- metadata = getattr(data_type, "__metadata__", None)
- if metadata is not None:
- # type is Annotated[T, ...]
- arg = typing.get_args(data_type)[0]
- return python_type_to_name(arg)
-
- if force:
- # generic types
- if is_type_optional(data_type, strict=True):
- inner_name = python_type_to_name(unwrap_optional_type(data_type))
- return f"Optional__{inner_name}"
- elif is_generic_list(data_type):
- item_name = python_type_to_name(unwrap_generic_list(data_type))
- return f"List__{item_name}"
- elif is_generic_dict(data_type):
- key_type, value_type = unwrap_generic_dict(data_type)
- key_name = python_type_to_name(key_type)
- value_name = python_type_to_name(value_type)
- return f"Dict__{key_name}__{value_name}"
- elif is_type_union(data_type):
- member_types = unwrap_union_types(data_type)
- member_names = "__".join(
- python_type_to_name(member_type) for member_type in member_types
- )
- return f"Union__{member_names}"
-
- # named system or user-defined type
- if hasattr(data_type, "__name__") and not typing.get_args(data_type):
- return data_type.__name__
-
- raise TypeError(f"cannot assign a simple name to type: {data_type}")
diff --git a/docs/openapi_generator/strong_typing/py.typed b/docs/openapi_generator/strong_typing/py.typed
deleted file mode 100644
index e69de29bb..000000000
diff --git a/docs/openapi_generator/strong_typing/schema.py b/docs/openapi_generator/strong_typing/schema.py
deleted file mode 100644
index 7f44435b8..000000000
--- a/docs/openapi_generator/strong_typing/schema.py
+++ /dev/null
@@ -1,792 +0,0 @@
-# Copyright (c) Meta Platforms, Inc. and affiliates.
-# All rights reserved.
-#
-# This source code is licensed under the terms described in the LICENSE file in
-# the root directory of this source tree.
-
-"""
-Type-safe data interchange for Python data classes.
-
-:see: https://github.com/hunyadi/strong_typing
-"""
-
-import dataclasses
-import datetime
-import decimal
-import enum
-import functools
-import inspect
-import json
-import typing
-import uuid
-from copy import deepcopy
-from typing import (
- Any,
- Callable,
- ClassVar,
- Dict,
- List,
- Literal,
- Optional,
- overload,
- Tuple,
- Type,
- TypeVar,
- Union,
-)
-
-import jsonschema
-from typing_extensions import Annotated
-
-from . import docstring
-from .auxiliary import (
- Alias,
- get_auxiliary_format,
- IntegerRange,
- MaxLength,
- MinLength,
- Precision,
-)
-from .core import JsonArray, JsonObject, JsonType, Schema, StrictJsonType
-from .inspection import (
- enum_value_types,
- get_annotation,
- get_class_properties,
- is_type_enum,
- is_type_like,
- is_type_optional,
- TypeLike,
- unwrap_optional_type,
-)
-from .name import python_type_to_name
-from .serialization import object_to_json
-
-# determines the maximum number of distinct enum members up to which a Dict[EnumType, Any] is converted into a JSON
-# schema with explicitly listed properties (rather than employing a pattern constraint on property names)
-OBJECT_ENUM_EXPANSION_LIMIT = 4
-
-
-T = TypeVar("T")
-
-
-def get_class_docstrings(data_type: type) -> Tuple[Optional[str], Optional[str]]:
- docstr = docstring.parse_type(data_type)
-
- # check if class has a doc-string other than the auto-generated string assigned by @dataclass
- if docstring.has_default_docstring(data_type):
- return None, None
-
- return docstr.short_description, docstr.long_description
-
-
-def get_class_property_docstrings(
- data_type: type, transform_fun: Optional[Callable[[type, str, str], str]] = None
-) -> Dict[str, str]:
- """
- Extracts the documentation strings associated with the properties of a composite type.
-
- :param data_type: The object whose properties to iterate over.
- :param transform_fun: An optional function that maps a property documentation string to a custom tailored string.
- :returns: A dictionary mapping property names to descriptions.
- """
-
- result = {}
- for base in inspect.getmro(data_type):
- docstr = docstring.parse_type(base)
- for param in docstr.params.values():
- if param.name in result:
- continue
-
- if transform_fun:
- description = transform_fun(data_type, param.name, param.description)
- else:
- description = param.description
-
- result[param.name] = description
- return result
-
-
-def docstring_to_schema(data_type: type) -> Schema:
- short_description, long_description = get_class_docstrings(data_type)
- schema: Schema = {}
-
- description = "\n".join(filter(None, [short_description, long_description]))
- if description:
- schema["description"] = description
- return schema
-
-
-def id_from_ref(data_type: Union[typing.ForwardRef, str, type]) -> str:
- "Extracts the name of a possibly forward-referenced type."
-
- if isinstance(data_type, typing.ForwardRef):
- forward_type: typing.ForwardRef = data_type
- return forward_type.__forward_arg__
- elif isinstance(data_type, str):
- return data_type
- else:
- return data_type.__name__
-
-
-def type_from_ref(data_type: Union[typing.ForwardRef, str, type]) -> Tuple[str, type]:
- "Creates a type from a forward reference."
-
- if isinstance(data_type, typing.ForwardRef):
- forward_type: typing.ForwardRef = data_type
- true_type = eval(forward_type.__forward_code__)
- return forward_type.__forward_arg__, true_type
- elif isinstance(data_type, str):
- true_type = eval(data_type)
- return data_type, true_type
- else:
- return data_type.__name__, data_type
-
-
-@dataclasses.dataclass
-class TypeCatalogEntry:
- schema: Optional[Schema]
- identifier: str
- examples: Optional[JsonType] = None
-
-
-class TypeCatalog:
- "Maintains an association of well-known Python types to their JSON schema."
-
- _by_type: Dict[TypeLike, TypeCatalogEntry]
- _by_name: Dict[str, TypeCatalogEntry]
-
- def __init__(self) -> None:
- self._by_type = {}
- self._by_name = {}
-
- def __contains__(self, data_type: TypeLike) -> bool:
- if isinstance(data_type, typing.ForwardRef):
- fwd: typing.ForwardRef = data_type
- name = fwd.__forward_arg__
- return name in self._by_name
- else:
- return data_type in self._by_type
-
- def add(
- self,
- data_type: TypeLike,
- schema: Optional[Schema],
- identifier: str,
- examples: Optional[List[JsonType]] = None,
- ) -> None:
- if isinstance(data_type, typing.ForwardRef):
- raise TypeError("forward references cannot be used to register a type")
-
- if data_type in self._by_type:
- raise ValueError(f"type {data_type} is already registered in the catalog")
-
- entry = TypeCatalogEntry(schema, identifier, examples)
- self._by_type[data_type] = entry
- self._by_name[identifier] = entry
-
- def get(self, data_type: TypeLike) -> TypeCatalogEntry:
- if isinstance(data_type, typing.ForwardRef):
- fwd: typing.ForwardRef = data_type
- name = fwd.__forward_arg__
- return self._by_name[name]
- else:
- return self._by_type[data_type]
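A minimal usage sketch of the catalog:

    import uuid

    catalog = TypeCatalog()
    catalog.add(uuid.UUID, {"type": "string", "format": "uuid"}, "UUID")
    assert uuid.UUID in catalog
    assert catalog.get(uuid.UUID).identifier == "UUID"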
-
-
-@dataclasses.dataclass
-class SchemaOptions:
- definitions_path: str = "#/definitions/"
- use_descriptions: bool = True
- use_examples: bool = True
- property_description_fun: Optional[Callable[[type, str, str], str]] = None
-
-
-class JsonSchemaGenerator:
- "Creates a JSON schema with user-defined type definitions."
-
- type_catalog: ClassVar[TypeCatalog] = TypeCatalog()
- types_used: Dict[str, TypeLike]
- options: SchemaOptions
-
- def __init__(self, options: Optional[SchemaOptions] = None):
- if options is None:
- self.options = SchemaOptions()
- else:
- self.options = options
- self.types_used = {}
-
- @functools.singledispatchmethod
- def _metadata_to_schema(self, arg: object) -> Schema:
- # unrecognized annotation
- return {}
-
- @_metadata_to_schema.register
- def _(self, arg: IntegerRange) -> Schema:
- return {"minimum": arg.minimum, "maximum": arg.maximum}
-
- @_metadata_to_schema.register
- def _(self, arg: Precision) -> Schema:
- return {
- "multipleOf": 10 ** (-arg.decimal_digits),
- "exclusiveMinimum": -(10**arg.integer_digits),
- "exclusiveMaximum": (10**arg.integer_digits),
- }
-
- @_metadata_to_schema.register
- def _(self, arg: MinLength) -> Schema:
- return {"minLength": arg.value}
-
- @_metadata_to_schema.register
- def _(self, arg: MaxLength) -> Schema:
- return {"maxLength": arg.value}
-
- def _with_metadata(
- self, type_schema: Schema, metadata: Optional[Tuple[Any, ...]]
- ) -> Schema:
- if metadata:
- for m in metadata:
- type_schema.update(self._metadata_to_schema(m))
- return type_schema
-
- def _simple_type_to_schema(
- self, typ: TypeLike, json_schema_extra: Optional[dict] = None
- ) -> Optional[Schema]:
-        """
-        Returns the JSON schema associated with a simple, unrestricted type.
-
-        :returns: The schema for a simple type, or `None` if the type is not simple.
-        """
-
- if typ is type(None):
- return {"type": "null"}
- elif typ is bool:
- return {"type": "boolean"}
- elif typ is int:
- return {"type": "integer"}
- elif typ is float:
- return {"type": "number"}
- elif typ is str:
- if json_schema_extra and "contentEncoding" in json_schema_extra:
- return {
- "type": "string",
- "contentEncoding": json_schema_extra["contentEncoding"],
- }
- return {"type": "string"}
- elif typ is bytes:
- return {"type": "string", "contentEncoding": "base64"}
- elif typ is datetime.datetime:
- # 2018-11-13T20:20:39+00:00
- return {
- "type": "string",
- "format": "date-time",
- }
- elif typ is datetime.date:
- # 2018-11-13
- return {"type": "string", "format": "date"}
- elif typ is datetime.time:
- # 20:20:39+00:00
- return {"type": "string", "format": "time"}
- elif typ is decimal.Decimal:
- return {"type": "number"}
- elif typ is uuid.UUID:
- # f81d4fae-7dec-11d0-a765-00a0c91e6bf6
- return {"type": "string", "format": "uuid"}
- elif typ is Any:
- return {
- "oneOf": [
- {"type": "null"},
- {"type": "boolean"},
- {"type": "number"},
- {"type": "string"},
- {"type": "array"},
- {"type": "object"},
- ]
- }
- elif typ is JsonObject:
- return {"type": "object"}
- elif typ is JsonArray:
- return {"type": "array"}
- else:
- # not a simple type
- return None
-
- def type_to_schema(
- self,
- data_type: TypeLike,
- force_expand: bool = False,
- json_schema_extra: Optional[dict] = None,
- ) -> Schema:
- """
- Returns the JSON schema associated with a type.
-
- :param data_type: The Python type whose JSON schema to return.
- :param force_expand: Forces a JSON schema to be returned even if the type is registered in the catalog of known types.
- :returns: The JSON schema associated with the type.
- """
-
- # short-circuit for common simple types
- schema = self._simple_type_to_schema(data_type, json_schema_extra)
- if schema is not None:
- return schema
-
- # types registered in the type catalog of well-known types
- type_catalog = JsonSchemaGenerator.type_catalog
- if not force_expand and data_type in type_catalog:
- # user-defined type
- identifier = type_catalog.get(data_type).identifier
- self.types_used.setdefault(identifier, data_type)
- return {"$ref": f"{self.options.definitions_path}{identifier}"}
-
- # unwrap annotated types
- metadata = getattr(data_type, "__metadata__", None)
- if metadata is not None:
- # type is Annotated[T, ...]
- typ = typing.get_args(data_type)[0]
- schema = self._simple_type_to_schema(typ)
- if schema is not None:
- # recognize well-known auxiliary types
- fmt = get_auxiliary_format(data_type)
- if fmt is not None:
- schema.update({"format": fmt})
- return schema
- else:
- return self._with_metadata(schema, metadata)
-
- else:
- # type is a regular type
- typ = data_type
-
-        if isinstance(typ, (typing.ForwardRef, str)):
- if force_expand:
- identifier, true_type = type_from_ref(typ)
- return self.type_to_schema(true_type, force_expand=True)
- else:
- try:
- identifier, true_type = type_from_ref(typ)
- self.types_used[identifier] = true_type
- except NameError:
- identifier = id_from_ref(typ)
-
- return {"$ref": f"{self.options.definitions_path}{identifier}"}
-
- if is_type_enum(typ):
- enum_type: Type[enum.Enum] = typ
- value_types = enum_value_types(enum_type)
- if len(value_types) != 1:
- raise ValueError(
- f"enumerations must have a consistent member value type but several types found: {value_types}"
- )
- enum_value_type = value_types.pop()
-
- enum_schema: Schema
- if (
- enum_value_type is bool
- or enum_value_type is int
- or enum_value_type is float
- or enum_value_type is str
- ):
- if enum_value_type is bool:
- enum_schema_type = "boolean"
- elif enum_value_type is int:
- enum_schema_type = "integer"
- elif enum_value_type is float:
- enum_schema_type = "number"
- elif enum_value_type is str:
- enum_schema_type = "string"
-
- enum_schema = {
- "type": enum_schema_type,
- "enum": [object_to_json(e.value) for e in enum_type],
- }
- if self.options.use_descriptions:
- enum_schema.update(docstring_to_schema(typ))
- return enum_schema
- else:
- enum_schema = self.type_to_schema(enum_value_type)
- if self.options.use_descriptions:
- enum_schema.update(docstring_to_schema(typ))
- return enum_schema
-
- origin_type = typing.get_origin(typ)
- if origin_type is list:
- (list_type,) = typing.get_args(typ) # unpack single tuple element
- return {"type": "array", "items": self.type_to_schema(list_type)}
- elif origin_type is dict:
- key_type, value_type = typing.get_args(typ)
- if not (key_type is str or key_type is int or is_type_enum(key_type)):
- raise ValueError(
- "`dict` with key type not coercible to `str` is not supported"
- )
-
- dict_schema: Schema
- value_schema = self.type_to_schema(value_type)
- if is_type_enum(key_type):
- enum_values = [str(e.value) for e in key_type]
- if len(enum_values) > OBJECT_ENUM_EXPANSION_LIMIT:
- dict_schema = {
- "propertyNames": {
- "pattern": "^(" + "|".join(enum_values) + ")$"
- },
- "additionalProperties": value_schema,
- }
- else:
- dict_schema = {
- "properties": {value: value_schema for value in enum_values},
- "additionalProperties": False,
- }
- else:
- dict_schema = {"additionalProperties": value_schema}
-
- schema = {"type": "object"}
- schema.update(dict_schema)
- return schema
- elif origin_type is set:
- (set_type,) = typing.get_args(typ) # unpack single tuple element
- return {
- "type": "array",
- "items": self.type_to_schema(set_type),
- "uniqueItems": True,
- }
- elif origin_type is tuple:
- args = typing.get_args(typ)
- return {
- "type": "array",
- "minItems": len(args),
- "maxItems": len(args),
- "prefixItems": [
- self.type_to_schema(member_type) for member_type in args
- ],
- }
- elif origin_type is Union:
- discriminator = None
- if typing.get_origin(data_type) is Annotated:
- discriminator = typing.get_args(data_type)[1].discriminator
- ret = {
- "oneOf": [
- self.type_to_schema(union_type)
- for union_type in typing.get_args(typ)
- ]
- }
- if discriminator:
- # for each union type, we need to read the value of the discriminator
- mapping = {}
- for union_type in typing.get_args(typ):
- props = self.type_to_schema(union_type, force_expand=True)[
- "properties"
- ]
- mapping[props[discriminator]["default"]] = self.type_to_schema(
- union_type
- )["$ref"]
-
- ret["discriminator"] = {
- "propertyName": discriminator,
- "mapping": mapping,
- }
- return ret
- elif origin_type is Literal:
- (literal_value,) = typing.get_args(typ) # unpack value of literal type
- schema = self.type_to_schema(type(literal_value))
- schema["const"] = literal_value
- return schema
- elif origin_type is type:
- (concrete_type,) = typing.get_args(typ) # unpack single tuple element
- return {"const": self.type_to_schema(concrete_type, force_expand=True)}
-
- # dictionary of class attributes
- members = dict(inspect.getmembers(typ, lambda a: not inspect.isroutine(a)))
-
- property_docstrings = get_class_property_docstrings(
- typ, self.options.property_description_fun
- )
- properties: Dict[str, Schema] = {}
- required: List[str] = []
- for property_name, property_type in get_class_properties(typ):
- # rename property if an alias name is specified
- alias = get_annotation(property_type, Alias)
- if alias:
- output_name = alias.name
- else:
- output_name = property_name
-
- defaults = {}
- json_schema_extra = None
- if "model_fields" in members:
- f = members["model_fields"]
- defaults = {k: finfo.default for k, finfo in f.items()}
-                field_info = f.get(output_name)
-                json_schema_extra = field_info.json_schema_extra if field_info else None
-
- if is_type_optional(property_type):
- optional_type: type = unwrap_optional_type(property_type)
- property_def = self.type_to_schema(
- optional_type, json_schema_extra=json_schema_extra
- )
- else:
- property_def = self.type_to_schema(
- property_type, json_schema_extra=json_schema_extra
- )
- required.append(output_name)
-
- # check if attribute has a default value initializer
- if defaults.get(property_name) is not None:
- def_value = defaults[property_name]
- # check if value can be directly represented in JSON
- if isinstance(
- def_value,
- (
- bool,
- int,
- float,
- str,
- enum.Enum,
- datetime.datetime,
- datetime.date,
- datetime.time,
- ),
- ):
- property_def["default"] = object_to_json(def_value)
-
- # add property docstring if available
- property_doc = property_docstrings.get(property_name)
- if property_doc:
- property_def.pop("title", None)
- property_def["description"] = property_doc
-
- properties[output_name] = property_def
-
- schema = {"type": "object"}
- if len(properties) > 0:
- schema["properties"] = typing.cast(JsonType, properties)
- schema["additionalProperties"] = False
- if len(required) > 0:
- schema["required"] = typing.cast(JsonType, required)
- if self.options.use_descriptions:
- schema.update(docstring_to_schema(typ))
- return schema
-
- def _type_to_schema_with_lookup(self, data_type: TypeLike) -> Schema:
- """
- Returns the JSON schema associated with a type that may be registered in the catalog of known types.
-
- :param data_type: The type whose JSON schema we seek.
- :returns: The JSON schema associated with the type.
- """
-
- entry = JsonSchemaGenerator.type_catalog.get(data_type)
- if entry.schema is None:
- type_schema = self.type_to_schema(data_type, force_expand=True)
- else:
- type_schema = deepcopy(entry.schema)
-
- # add descriptive text (if present)
- if self.options.use_descriptions:
- if isinstance(data_type, type) and not isinstance(
- data_type, typing.ForwardRef
- ):
- type_schema.update(docstring_to_schema(data_type))
-
- # add example (if present)
- if self.options.use_examples and entry.examples:
- type_schema["examples"] = entry.examples
-
- return type_schema
-
- def classdef_to_schema(
- self, data_type: TypeLike, force_expand: bool = False
- ) -> Tuple[Schema, Dict[str, Schema]]:
- """
- Returns the JSON schema associated with a type and any nested types.
-
- :param data_type: The type whose JSON schema to return.
- :param force_expand: True if a full JSON schema is to be returned even for well-known types; false if a schema
- reference is to be used for well-known types.
- :returns: A tuple of the JSON schema, and a mapping between nested type names and their corresponding schema.
- """
-
- if not is_type_like(data_type):
- raise TypeError(f"expected a type-like object but got: {data_type}")
-
- self.types_used = {}
- try:
- type_schema = self.type_to_schema(data_type, force_expand=force_expand)
-
- types_defined: Dict[str, Schema] = {}
- while len(self.types_used) > len(types_defined):
- # make a snapshot copy; original collection is going to be modified
- types_undefined = {
- sub_name: sub_type
- for sub_name, sub_type in self.types_used.items()
- if sub_name not in types_defined
- }
-
- # expand undefined types, which may lead to additional types to be defined
- for sub_name, sub_type in types_undefined.items():
- types_defined[sub_name] = self._type_to_schema_with_lookup(sub_type)
-
- type_definitions = dict(sorted(types_defined.items()))
- finally:
- self.types_used = {}
-
- return type_schema, type_definitions
-
-
-class Validator(enum.Enum):
- "Defines constants for JSON schema standards."
-
- Draft7 = jsonschema.Draft7Validator
- Draft201909 = jsonschema.Draft201909Validator
- Draft202012 = jsonschema.Draft202012Validator
- Latest = jsonschema.Draft202012Validator
-
-
-def classdef_to_schema(
- data_type: TypeLike,
- options: Optional[SchemaOptions] = None,
- validator: Validator = Validator.Latest,
-) -> Schema:
-    """
-    Returns the JSON schema corresponding to the given type.
-
-    :param data_type: The Python type used to generate the JSON schema.
-    :param options: Schema generation options, e.g. whether to emit descriptions and examples.
-    :param validator: The JSON schema standard that the generated schema is validated against.
-    :returns: A JSON object that you can serialize to a JSON string with `json.dump` or `json.dumps`.
-    :raises TypeError: Indicates that the generated JSON schema does not validate against the desired meta-schema.
-    """
-
- # short-circuit with an error message when passing invalid data
- if not is_type_like(data_type):
- raise TypeError(f"expected a type-like object but got: {data_type}")
-
- generator = JsonSchemaGenerator(options)
- type_schema, type_definitions = generator.classdef_to_schema(data_type)
-
- class_schema: Schema = {}
- if type_definitions:
- class_schema["definitions"] = typing.cast(JsonType, type_definitions)
- class_schema.update(type_schema)
-
- validator_id = validator.value.META_SCHEMA["$id"]
- try:
- validator.value.check_schema(class_schema)
- except jsonschema.exceptions.SchemaError:
- raise TypeError(
- f"schema does not validate against meta-schema <{validator_id}>"
- )
-
- schema = {"$schema": validator_id}
- schema.update(class_schema)
- return schema
-
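-# Sketch (illustrative, not part of the original module): for a simple
-# dataclass, the generated schema looks roughly as follows.
-#
-#   @dataclass
-#   class Point:
-#       x: int
-#       y: int
-#
-#   classdef_to_schema(Point)
-#   # -> {"$schema": "https://json-schema.org/draft/2020-12/schema",
-#   #     "type": "object",
-#   #     "properties": {"x": {"type": "integer"}, "y": {"type": "integer"}},
-#   #     "additionalProperties": False,
-#   #     "required": ["x", "y"]}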
-
-def validate_object(data_type: TypeLike, json_dict: JsonType) -> None:
- """
- Validates if the JSON dictionary object conforms to the expected type.
-
- :param data_type: The type to match against.
- :param json_dict: A JSON object obtained with `json.load` or `json.loads`.
- :raises jsonschema.exceptions.ValidationError: Indicates that the JSON object cannot represent the type.
- """
-
- schema_dict = classdef_to_schema(data_type)
- jsonschema.validate(
- json_dict, schema_dict, format_checker=jsonschema.FormatChecker()
- )
-
-
-def print_schema(data_type: type) -> None:
- """Pretty-prints the JSON schema corresponding to the type."""
-
- s = classdef_to_schema(data_type)
- print(json.dumps(s, indent=4))
-
-
-def get_schema_identifier(data_type: type) -> Optional[str]:
- if data_type in JsonSchemaGenerator.type_catalog:
- return JsonSchemaGenerator.type_catalog.get(data_type).identifier
- else:
- return None
-
-
-def register_schema(
- data_type: T,
- schema: Optional[Schema] = None,
- name: Optional[str] = None,
- examples: Optional[List[JsonType]] = None,
-) -> T:
-    """
-    Associates a type with a JSON schema definition.
-
-    :param data_type: The type to associate with a JSON schema.
-    :param schema: The schema to associate the type with. Derived automatically if omitted.
-    :param name: The name used for looking up the type. Determined automatically if omitted.
-    :param examples: Sample JSON objects published with the schema definition.
-    :returns: The input type.
-    """
-
- JsonSchemaGenerator.type_catalog.add(
- data_type,
- schema,
- name if name is not None else python_type_to_name(data_type),
- examples,
- )
- return data_type
-
-
-@overload
-def json_schema_type(cls: Type[T], /) -> Type[T]: ...
-
-
-@overload
-def json_schema_type(
- cls: None, *, schema: Optional[Schema] = None
-) -> Callable[[Type[T]], Type[T]]: ...
-
-
-def json_schema_type(
- cls: Optional[Type[T]] = None,
- *,
- schema: Optional[Schema] = None,
- examples: Optional[List[JsonType]] = None,
-) -> Union[Type[T], Callable[[Type[T]], Type[T]]]:
- """Decorator to add user-defined schema definition to a class."""
-
- def wrap(cls: Type[T]) -> Type[T]:
- return register_schema(cls, schema, examples=examples)
-
- # see if decorator is used as @json_schema_type or @json_schema_type()
- if cls is None:
- # called with parentheses
- return wrap
- else:
- # called as @json_schema_type without parentheses
- return wrap(cls)
-
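-# Example usage (an illustrative sketch, not part of the original module):
-#
-#   @json_schema_type
-#   @dataclass
-#   class Person:
-#       name: str
-#
-#   @json_schema_type(schema={"type": "string", "format": "uri"})
-#   class URL:
-#       ...
-#
-# Both forms register the class in `JsonSchemaGenerator.type_catalog`, so
-# subsequent schema generation emits a `$ref` to a named definition instead
-# of expanding the type inline.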
-
-register_schema(JsonObject, name="JsonObject")
-register_schema(JsonArray, name="JsonArray")
-
-register_schema(
- JsonType,
- name="JsonType",
- examples=[
- {
- "property1": None,
- "property2": True,
- "property3": 64,
- "property4": "string",
- "property5": ["item"],
- "property6": {"key": "value"},
- }
- ],
-)
-register_schema(
- StrictJsonType,
- name="StrictJsonType",
- examples=[
- {
- "property1": True,
- "property2": 64,
- "property3": "string",
- "property4": ["item"],
- "property5": {"key": "value"},
- }
- ],
-)
diff --git a/docs/openapi_generator/strong_typing/serialization.py b/docs/openapi_generator/strong_typing/serialization.py
deleted file mode 100644
index 88d8fccad..000000000
--- a/docs/openapi_generator/strong_typing/serialization.py
+++ /dev/null
@@ -1,101 +0,0 @@
-# Copyright (c) Meta Platforms, Inc. and affiliates.
-# All rights reserved.
-#
-# This source code is licensed under the terms described in the LICENSE file in
-# the root directory of this source tree.
-
-"""
-Type-safe data interchange for Python data classes.
-
-:see: https://github.com/hunyadi/strong_typing
-"""
-
-import inspect
-import json
-import sys
-from types import ModuleType
-from typing import Any, Optional, TextIO, TypeVar
-
-from .core import JsonType
-from .deserializer import create_deserializer
-from .inspection import TypeLike
-from .serializer import create_serializer
-
-T = TypeVar("T")
-
-
-def object_to_json(obj: Any) -> JsonType:
- """
- Converts a Python object to a representation that can be exported to JSON.
-
- * Fundamental types (e.g. numeric types) are written as is.
- * Date and time types are serialized in the ISO 8601 format with time zone.
- * A byte array is written as a string with Base64 encoding.
- * UUIDs are written as a UUID string.
- * Enumerations are written as their value.
- * Containers (e.g. `list`, `dict`, `set`, `tuple`) are exported recursively.
-    * Objects with properties (including data class types) are converted to dictionaries of key-value pairs.
- """
-
- typ: type = type(obj)
- generator = create_serializer(typ)
- return generator.generate(obj)
-
-
-def json_to_object(
- typ: TypeLike, data: JsonType, *, context: Optional[ModuleType] = None
-) -> object:
- """
- Creates an object from a representation that has been de-serialized from JSON.
-
- When de-serializing a JSON object into a Python object, the following transformations are applied:
-
- * Fundamental types are parsed as `bool`, `int`, `float` or `str`.
- * Date and time types are parsed from the ISO 8601 format with time zone into the corresponding Python type
- `datetime`, `date` or `time`
- * A byte array is read from a string with Base64 encoding into a `bytes` instance.
- * UUIDs are extracted from a UUID string into a `uuid.UUID` instance.
- * Enumerations are instantiated with a lookup on enumeration value.
- * Containers (e.g. `list`, `dict`, `set`, `tuple`) are parsed recursively.
- * Complex objects with properties (including data class types) are populated from dictionaries of key-value pairs
- using reflection (enumerating type annotations).
-
- :raises TypeError: A de-serializing engine cannot be constructed for the input type.
- :raises JsonKeyError: Deserialization for a class or union type has failed because a matching member was not found.
- :raises JsonTypeError: Deserialization for data has failed due to a type mismatch.
- """
-
- # use caller context for evaluating types if no context is supplied
- if context is None:
- this_frame = inspect.currentframe()
- if this_frame is not None:
- caller_frame = this_frame.f_back
- del this_frame
-
- if caller_frame is not None:
- try:
- context = sys.modules[caller_frame.f_globals["__name__"]]
- finally:
- del caller_frame
-
- parser = create_deserializer(typ, context)
- return parser.parse(data)
-
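-# Round-trip sketch (illustrative; assumes a local dataclass definition):
-#
-#   @dataclass
-#   class Person:
-#       name: str
-#       birth: datetime.date
-#
-#   data = object_to_json(Person("Ada", datetime.date(1815, 12, 10)))
-#   # -> {"name": "Ada", "birth": "1815-12-10"}
-#   person = json_to_object(Person, data)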
-
-def json_dump_string(json_object: JsonType) -> str:
- "Dump an object as a JSON string with a compact representation."
-
- return json.dumps(
- json_object, ensure_ascii=False, check_circular=False, separators=(",", ":")
- )
-
-
-def json_dump(json_object: JsonType, file: TextIO) -> None:
- json.dump(
- json_object,
- file,
- ensure_ascii=False,
- check_circular=False,
- separators=(",", ":"),
- )
- file.write("\n")
diff --git a/docs/openapi_generator/strong_typing/serializer.py b/docs/openapi_generator/strong_typing/serializer.py
deleted file mode 100644
index f1252e374..000000000
--- a/docs/openapi_generator/strong_typing/serializer.py
+++ /dev/null
@@ -1,522 +0,0 @@
-# Copyright (c) Meta Platforms, Inc. and affiliates.
-# All rights reserved.
-#
-# This source code is licensed under the terms described in the LICENSE file in
-# the root directory of this source tree.
-
-"""
-Type-safe data interchange for Python data classes.
-
-:see: https://github.com/hunyadi/strong_typing
-"""
-
-import abc
-import base64
-import datetime
-import enum
-import functools
-import inspect
-import ipaddress
-import sys
-import typing
-import uuid
-from types import FunctionType, MethodType, ModuleType
-from typing import (
- Any,
- Callable,
- Dict,
- Generic,
- List,
- Literal,
- NamedTuple,
- Optional,
- Set,
- Tuple,
- Type,
- TypeVar,
- Union,
-)
-
-from .core import JsonType
-from .exception import JsonTypeError, JsonValueError
-from .inspection import (
- enum_value_types,
- evaluate_type,
- get_class_properties,
- get_resolved_hints,
- is_dataclass_type,
- is_named_tuple_type,
- is_reserved_property,
- is_type_annotated,
- is_type_enum,
- TypeLike,
- unwrap_annotated_type,
-)
-from .mapping import python_field_to_json_property
-
-T = TypeVar("T")
-
-
-class Serializer(abc.ABC, Generic[T]):
- @abc.abstractmethod
- def generate(self, data: T) -> JsonType: ...
-
-
-class NoneSerializer(Serializer[None]):
- def generate(self, data: None) -> None:
- # can be directly represented in JSON
- return None
-
-
-class BoolSerializer(Serializer[bool]):
- def generate(self, data: bool) -> bool:
- # can be directly represented in JSON
- return data
-
-
-class IntSerializer(Serializer[int]):
- def generate(self, data: int) -> int:
- # can be directly represented in JSON
- return data
-
-
-class FloatSerializer(Serializer[float]):
- def generate(self, data: float) -> float:
- # can be directly represented in JSON
- return data
-
-
-class StringSerializer(Serializer[str]):
- def generate(self, data: str) -> str:
- # can be directly represented in JSON
- return data
-
-
-class BytesSerializer(Serializer[bytes]):
- def generate(self, data: bytes) -> str:
- return base64.b64encode(data).decode("ascii")
-
-
-class DateTimeSerializer(Serializer[datetime.datetime]):
- def generate(self, obj: datetime.datetime) -> str:
- if obj.tzinfo is None:
- raise JsonValueError(
- f"timestamp lacks explicit time zone designator: {obj}"
- )
- fmt = obj.isoformat()
- if fmt.endswith("+00:00"):
- fmt = f"{fmt[:-6]}Z" # Python's isoformat() does not support military time zones like "Zulu" for UTC
- return fmt
-
-
-class DateSerializer(Serializer[datetime.date]):
- def generate(self, obj: datetime.date) -> str:
- return obj.isoformat()
-
-
-class TimeSerializer(Serializer[datetime.time]):
- def generate(self, obj: datetime.time) -> str:
- return obj.isoformat()
-
-
-class UUIDSerializer(Serializer[uuid.UUID]):
- def generate(self, obj: uuid.UUID) -> str:
- return str(obj)
-
-
-class IPv4Serializer(Serializer[ipaddress.IPv4Address]):
- def generate(self, obj: ipaddress.IPv4Address) -> str:
- return str(obj)
-
-
-class IPv6Serializer(Serializer[ipaddress.IPv6Address]):
- def generate(self, obj: ipaddress.IPv6Address) -> str:
- return str(obj)
-
-
-class EnumSerializer(Serializer[enum.Enum]):
- def generate(self, obj: enum.Enum) -> Union[int, str]:
- return obj.value
-
-
-class UntypedListSerializer(Serializer[list]):
- def generate(self, obj: list) -> List[JsonType]:
- return [object_to_json(item) for item in obj]
-
-
-class UntypedDictSerializer(Serializer[dict]):
- def generate(self, obj: dict) -> Dict[str, JsonType]:
- if obj and isinstance(next(iter(obj.keys())), enum.Enum):
- iterator = (
- (key.value, object_to_json(value)) for key, value in obj.items()
- )
- else:
- iterator = ((str(key), object_to_json(value)) for key, value in obj.items())
- return dict(iterator)
-
-
-class UntypedSetSerializer(Serializer[set]):
- def generate(self, obj: set) -> List[JsonType]:
- return [object_to_json(item) for item in obj]
-
-
-class UntypedTupleSerializer(Serializer[tuple]):
- def generate(self, obj: tuple) -> List[JsonType]:
- return [object_to_json(item) for item in obj]
-
-
-class TypedCollectionSerializer(Serializer, Generic[T]):
- generator: Serializer[T]
-
- def __init__(self, item_type: Type[T], context: Optional[ModuleType]) -> None:
- self.generator = _get_serializer(item_type, context)
-
-
-class TypedListSerializer(TypedCollectionSerializer[T]):
- def generate(self, obj: List[T]) -> List[JsonType]:
- return [self.generator.generate(item) for item in obj]
-
-
-class TypedStringDictSerializer(TypedCollectionSerializer[T]):
- def __init__(self, value_type: Type[T], context: Optional[ModuleType]) -> None:
- super().__init__(value_type, context)
-
- def generate(self, obj: Dict[str, T]) -> Dict[str, JsonType]:
- return {key: self.generator.generate(value) for key, value in obj.items()}
-
-
-class TypedEnumDictSerializer(TypedCollectionSerializer[T]):
- def __init__(
- self,
- key_type: Type[enum.Enum],
- value_type: Type[T],
- context: Optional[ModuleType],
- ) -> None:
- super().__init__(value_type, context)
-
- value_types = enum_value_types(key_type)
- if len(value_types) != 1:
- raise JsonTypeError(
- f"invalid key type, enumerations must have a consistent member value type but several types found: {value_types}"
- )
-
- value_type = value_types.pop()
- if value_type is not str:
- raise JsonTypeError(
- "invalid enumeration key type, expected `enum.Enum` with string values"
- )
-
- def generate(self, obj: Dict[enum.Enum, T]) -> Dict[str, JsonType]:
- return {key.value: self.generator.generate(value) for key, value in obj.items()}
-
-
-class TypedSetSerializer(TypedCollectionSerializer[T]):
- def generate(self, obj: Set[T]) -> JsonType:
- return [self.generator.generate(item) for item in obj]
-
-
-class TypedTupleSerializer(Serializer[tuple]):
- item_generators: Tuple[Serializer, ...]
-
- def __init__(
- self, item_types: Tuple[type, ...], context: Optional[ModuleType]
- ) -> None:
- self.item_generators = tuple(
- _get_serializer(item_type, context) for item_type in item_types
- )
-
- def generate(self, obj: tuple) -> List[JsonType]:
- return [
- item_generator.generate(item)
- for item_generator, item in zip(self.item_generators, obj)
- ]
-
-
-class CustomSerializer(Serializer):
- converter: Callable[[object], JsonType]
-
- def __init__(self, converter: Callable[[object], JsonType]) -> None:
- self.converter = converter
-
- def generate(self, obj: object) -> JsonType:
- return self.converter(obj)
-
-
-class FieldSerializer(Generic[T]):
- """
- Serializes a Python object field into a JSON property.
-
- :param field_name: The name of the field in a Python class to read data from.
- :param property_name: The name of the JSON property to write to a JSON `object`.
- :param generator: A compatible serializer that can handle the field's type.
- """
-
- field_name: str
- property_name: str
- generator: Serializer
-
- def __init__(
- self, field_name: str, property_name: str, generator: Serializer[T]
- ) -> None:
- self.field_name = field_name
- self.property_name = property_name
- self.generator = generator
-
- def generate_field(self, obj: object, object_dict: Dict[str, JsonType]) -> None:
- value = getattr(obj, self.field_name)
- if value is not None:
- object_dict[self.property_name] = self.generator.generate(value)
-
-
-class TypedClassSerializer(Serializer[T]):
- property_generators: List[FieldSerializer]
-
- def __init__(self, class_type: Type[T], context: Optional[ModuleType]) -> None:
- self.property_generators = [
- FieldSerializer(
- field_name,
- python_field_to_json_property(field_name, field_type),
- _get_serializer(field_type, context),
- )
- for field_name, field_type in get_class_properties(class_type)
- ]
-
- def generate(self, obj: T) -> Dict[str, JsonType]:
- object_dict: Dict[str, JsonType] = {}
- for property_generator in self.property_generators:
- property_generator.generate_field(obj, object_dict)
-
- return object_dict
-
-
-class TypedNamedTupleSerializer(TypedClassSerializer[NamedTuple]):
- def __init__(
- self, class_type: Type[NamedTuple], context: Optional[ModuleType]
- ) -> None:
- super().__init__(class_type, context)
-
-
-class DataclassSerializer(TypedClassSerializer[T]):
- def __init__(self, class_type: Type[T], context: Optional[ModuleType]) -> None:
- super().__init__(class_type, context)
-
-
-class UnionSerializer(Serializer):
- def generate(self, obj: Any) -> JsonType:
- return object_to_json(obj)
-
-
-class LiteralSerializer(Serializer):
- generator: Serializer
-
- def __init__(self, values: Tuple[Any, ...], context: Optional[ModuleType]) -> None:
- literal_type_tuple = tuple(type(value) for value in values)
- literal_type_set = set(literal_type_tuple)
- if len(literal_type_set) != 1:
- value_names = ", ".join(repr(value) for value in values)
- raise TypeError(
- f"type `Literal[{value_names}]` expects consistent literal value types but got: {literal_type_tuple}"
- )
-
- literal_type = literal_type_set.pop()
- self.generator = _get_serializer(literal_type, context)
-
- def generate(self, obj: Any) -> JsonType:
- return self.generator.generate(obj)
-
-
-class UntypedNamedTupleSerializer(Serializer):
- fields: Dict[str, str]
-
- def __init__(self, class_type: Type[NamedTuple]) -> None:
- # named tuples are also instances of tuple
- self.fields = {}
- field_names: Tuple[str, ...] = class_type._fields
- for field_name in field_names:
- self.fields[field_name] = python_field_to_json_property(field_name)
-
- def generate(self, obj: NamedTuple) -> JsonType:
- object_dict = {}
- for field_name, property_name in self.fields.items():
- value = getattr(obj, field_name)
- object_dict[property_name] = object_to_json(value)
-
- return object_dict
-
-
-class UntypedClassSerializer(Serializer):
- def generate(self, obj: object) -> JsonType:
- # iterate over object attributes to get a standard representation
- object_dict = {}
- for name in dir(obj):
- if is_reserved_property(name):
- continue
-
- value = getattr(obj, name)
- if value is None:
- continue
-
- # filter instance methods
- if inspect.ismethod(value):
- continue
-
- object_dict[python_field_to_json_property(name)] = object_to_json(value)
-
- return object_dict
-
-
-def create_serializer(
- typ: TypeLike, context: Optional[ModuleType] = None
-) -> Serializer:
- """
- Creates a serializer engine to produce an object that can be directly converted into a JSON string.
-
- When serializing a Python object into a JSON object, the following transformations are applied:
-
- * Fundamental types (`bool`, `int`, `float` or `str`) are returned as-is.
- * Date and time types (`datetime`, `date` or `time`) produce an ISO 8601 format string with time zone
- (ending with `Z` for UTC).
- * Byte arrays (`bytes`) are written as a string with Base64 encoding.
- * UUIDs (`uuid.UUID`) are written as a UUID string as per RFC 4122.
- * Enumerations yield their enumeration value.
- * Containers (e.g. `list`, `dict`, `set`, `tuple`) are processed recursively.
- * Complex objects with properties (including data class types) generate dictionaries of key-value pairs.
-
- :raises TypeError: A serializer engine cannot be constructed for the input type.
- """
-
- if context is None:
- if isinstance(typ, type):
- context = sys.modules[typ.__module__]
-
- return _get_serializer(typ, context)
-
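-# Sketch (illustrative): serializer engines are constructed once per type and
-# can be reused across objects.
-#
-#   serializer = create_serializer(List[int])
-#   serializer.generate([1, 2, 3])  # -> [1, 2, 3]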
-
-def _get_serializer(typ: TypeLike, context: Optional[ModuleType]) -> Serializer:
- if isinstance(typ, (str, typing.ForwardRef)):
- if context is None:
- raise TypeError(f"missing context for evaluating type: {typ}")
-
- typ = evaluate_type(typ, context)
-
- if isinstance(typ, type):
- return _fetch_serializer(typ)
- else:
- # special forms are not always hashable
- return _create_serializer(typ, context)
-
-
-@functools.lru_cache(maxsize=None)
-def _fetch_serializer(typ: type) -> Serializer:
- context = sys.modules[typ.__module__]
- return _create_serializer(typ, context)
-
-
-def _create_serializer(typ: TypeLike, context: Optional[ModuleType]) -> Serializer:
- # check for well-known types
- if typ is type(None):
- return NoneSerializer()
- elif typ is bool:
- return BoolSerializer()
- elif typ is int:
- return IntSerializer()
- elif typ is float:
- return FloatSerializer()
- elif typ is str:
- return StringSerializer()
- elif typ is bytes:
- return BytesSerializer()
- elif typ is datetime.datetime:
- return DateTimeSerializer()
- elif typ is datetime.date:
- return DateSerializer()
- elif typ is datetime.time:
- return TimeSerializer()
- elif typ is uuid.UUID:
- return UUIDSerializer()
- elif typ is ipaddress.IPv4Address:
- return IPv4Serializer()
- elif typ is ipaddress.IPv6Address:
- return IPv6Serializer()
-
- # dynamically-typed collection types
- if typ is list:
- return UntypedListSerializer()
- elif typ is dict:
- return UntypedDictSerializer()
- elif typ is set:
- return UntypedSetSerializer()
- elif typ is tuple:
- return UntypedTupleSerializer()
-
- # generic types (e.g. list, dict, set, etc.)
- origin_type = typing.get_origin(typ)
- if origin_type is list:
- (list_item_type,) = typing.get_args(typ) # unpack single tuple element
- return TypedListSerializer(list_item_type, context)
- elif origin_type is dict:
- key_type, value_type = typing.get_args(typ)
- if key_type is str:
- return TypedStringDictSerializer(value_type, context)
- elif issubclass(key_type, enum.Enum):
- return TypedEnumDictSerializer(key_type, value_type, context)
- elif origin_type is set:
- (set_member_type,) = typing.get_args(typ) # unpack single tuple element
- return TypedSetSerializer(set_member_type, context)
- elif origin_type is tuple:
- return TypedTupleSerializer(typing.get_args(typ), context)
- elif origin_type is Union:
- return UnionSerializer()
- elif origin_type is Literal:
- return LiteralSerializer(typing.get_args(typ), context)
-
- if is_type_annotated(typ):
- return create_serializer(unwrap_annotated_type(typ))
-
- # check if object has custom serialization method
- convert_func = getattr(typ, "to_json", None)
- if callable(convert_func):
- return CustomSerializer(convert_func)
-
- if is_type_enum(typ):
- return EnumSerializer()
- if is_dataclass_type(typ):
- return DataclassSerializer(typ, context)
- if is_named_tuple_type(typ):
- if getattr(typ, "__annotations__", None):
- return TypedNamedTupleSerializer(typ, context)
- else:
- return UntypedNamedTupleSerializer(typ)
-
- # fail early if caller passes an object with an exotic type
- if (
- not isinstance(typ, type)
- or typ is FunctionType
- or typ is MethodType
- or typ is type
- or typ is ModuleType
- ):
- raise TypeError(f"object of type {typ} cannot be represented in JSON")
-
- if get_resolved_hints(typ):
- return TypedClassSerializer(typ, context)
- else:
- return UntypedClassSerializer()
-
-
-def object_to_json(obj: Any) -> JsonType:
- """
- Converts a Python object to a representation that can be exported to JSON.
-
- * Fundamental types (e.g. numeric types) are written as is.
- * Date and time types are serialized in the ISO 8601 format with time zone.
- * A byte array is written as a string with Base64 encoding.
- * UUIDs are written as a UUID string.
- * Enumerations are written as their value.
- * Containers (e.g. `list`, `dict`, `set`, `tuple`) are exported recursively.
-    * Objects with properties (including data class types) are converted to dictionaries of key-value pairs.
- """
-
- typ: type = type(obj)
- generator = create_serializer(typ)
- return generator.generate(obj)
diff --git a/docs/openapi_generator/strong_typing/slots.py b/docs/openapi_generator/strong_typing/slots.py
deleted file mode 100644
index 564ffa11f..000000000
--- a/docs/openapi_generator/strong_typing/slots.py
+++ /dev/null
@@ -1,29 +0,0 @@
-# Copyright (c) Meta Platforms, Inc. and affiliates.
-# All rights reserved.
-#
-# This source code is licensed under the terms described in the LICENSE file in
-# the root directory of this source tree.
-
-from typing import Any, Dict, Tuple, Type, TypeVar
-
-T = TypeVar("T")
-
-
-class SlotsMeta(type):
- def __new__(
- cls: Type[T], name: str, bases: Tuple[type, ...], ns: Dict[str, Any]
- ) -> T:
- # caller may have already provided slots, in which case just retain them and keep going
- slots: Tuple[str, ...] = ns.get("__slots__", ())
-
- # add fields with type annotations to slots
- annotations: Dict[str, Any] = ns.get("__annotations__", {})
- members = tuple(member for member in annotations.keys() if member not in slots)
-
- # assign slots
- ns["__slots__"] = slots + tuple(members)
- return super().__new__(cls, name, bases, ns) # type: ignore
-
-
-class Slots(metaclass=SlotsMeta):
- pass
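-
-
-# Usage sketch: subclasses inherit the metaclass, so annotated fields become
-# slots automatically and instances reject undeclared attributes.
-#
-#   class Point(Slots):
-#       x: int
-#       y: int
-#
-#   Point.__slots__       # -> ("x", "y")
-#   p = Point(); p.z = 1  # raises AttributeError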
diff --git a/docs/openapi_generator/strong_typing/topological.py b/docs/openapi_generator/strong_typing/topological.py
deleted file mode 100644
index 28bf4bd0f..000000000
--- a/docs/openapi_generator/strong_typing/topological.py
+++ /dev/null
@@ -1,89 +0,0 @@
-# Copyright (c) Meta Platforms, Inc. and affiliates.
-# All rights reserved.
-#
-# This source code is licensed under the terms described in the LICENSE file in
-# the root directory of this source tree.
-
-"""
-Type-safe data interchange for Python data classes.
-
-:see: https://github.com/hunyadi/strong_typing
-"""
-
-from typing import Callable, Dict, Iterable, List, Optional, Set, TypeVar
-
-from .inspection import TypeCollector
-
-T = TypeVar("T")
-
-
-def topological_sort(graph: Dict[T, Set[T]]) -> List[T]:
- """
- Performs a topological sort of a graph.
-
- Nodes with no outgoing edges are first. Nodes with no incoming edges are last.
- The topological ordering is not unique.
-
- :param graph: A dictionary of mappings from nodes to adjacent nodes. Keys and set members must be hashable.
- :returns: The list of nodes in topological order.
- """
-
- # empty list that will contain the sorted nodes (in reverse order)
- ordered: List[T] = []
-
- seen: Dict[T, bool] = {}
-
- def _visit(n: T) -> None:
- status = seen.get(n)
- if status is not None:
- if status: # node has a permanent mark
- return
- else: # node has a temporary mark
- raise RuntimeError(f"cycle detected in graph for node {n}")
-
- seen[n] = False # apply temporary mark
- for m in graph[n]: # visit all adjacent nodes
- if m != n: # ignore self-referencing nodes
- _visit(m)
-
- seen[n] = True # apply permanent mark
- ordered.append(n)
-
- for n in graph.keys():
- _visit(n)
-
- return ordered
-
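-# For example (sketch): edges point from a node to the nodes it depends on,
-# so dependencies appear before their dependents in the result.
-#
-#   topological_sort({"a": {"b", "c"}, "b": {"c"}, "c": set()})
-#   # -> ["c", "b", "a"]
-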
-
-def type_topological_sort(
- types: Iterable[type],
- dependency_fn: Optional[Callable[[type], Iterable[type]]] = None,
-) -> List[type]:
- """
- Performs a topological sort of a list of types.
-
- Types that don't depend on other types (i.e. fundamental types) are first. Types on which no other types depend
- are last. The topological ordering is not unique.
-
- :param types: A list of types (simple or composite).
- :param dependency_fn: Returns a list of additional dependencies for a class (e.g. classes referenced by a foreign key).
- :returns: The list of types in topological order.
- """
-
- if not all(isinstance(typ, type) for typ in types):
- raise TypeError("expected a list of types")
-
- collector = TypeCollector()
- collector.traverse_all(types)
- graph = collector.graph
-
- if dependency_fn:
- new_types: Set[type] = set()
- for source_type, references in graph.items():
- dependent_types = dependency_fn(source_type)
- references.update(dependent_types)
- new_types.update(dependent_types)
- for new_type in new_types:
- graph[new_type] = set()
-
- return topological_sort(graph)
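-
-
-# Sketch: if `class B` holds a field annotated with `class A`, then
-# `type_topological_sort([B])` yields `A` before `B`.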
diff --git a/docs/source/distributions/building_distro.md b/docs/source/distributions/building_distro.md
index 90239cb4e..9cb1a402f 100644
--- a/docs/source/distributions/building_distro.md
+++ b/docs/source/distributions/building_distro.md
@@ -23,7 +23,8 @@ The main points to consider are:
```
llama stack build -h
-usage: llama stack build [-h] [--config CONFIG] [--template TEMPLATE] [--list-templates | --no-list-templates] [--image-type {conda,container,venv}] [--image-name IMAGE_NAME]
+usage: llama stack build [-h] [--config CONFIG] [--template TEMPLATE] [--list-templates]
+ [--image-type {conda,container,venv}] [--image-name IMAGE_NAME] [--print-deps-only]
Build a Llama stack container
@@ -32,14 +33,14 @@ options:
--config CONFIG Path to a config file to use for the build. You can find example configs in llama_stack/distribution/**/build.yaml.
If this argument is not provided, you will be prompted to enter information interactively
--template TEMPLATE Name of the example template config to use for build. You may use `llama stack build --list-templates` to check out the available templates
- --list-templates, --no-list-templates
- Show the available templates for building a Llama Stack distribution (default: False)
+ --list-templates Show the available templates for building a Llama Stack distribution
--image-type {conda,container,venv}
Image Type to use for the build. This can be either conda or container or venv. If not specified, will use the image type from the template config.
--image-name IMAGE_NAME
[for image-type=conda] Name of the conda environment to use for the build. If
not specified, currently active Conda environment will be used. If no Conda
environment is active, you must specify a name.
+ --print-deps-only Print the dependencies for the stack only, without building the stack
```
After this step is complete, a file named `-build.yaml` and template file `-run.yaml` will be generated and saved at the output file path specified at the end of the command.
diff --git a/docs/source/getting_started/index.md b/docs/source/getting_started/index.md
index b28b9afa3..554f4354a 100644
--- a/docs/source/getting_started/index.md
+++ b/docs/source/getting_started/index.md
@@ -214,10 +214,16 @@ documents = [
for i, url in enumerate(urls)
]
+vector_providers = [
+ provider for provider in client.providers.list() if provider.api == "vector_io"
+]
+provider_id = vector_providers[0].provider_id # Use the first available vector provider
+
# Register a vector database
vector_db_id = f"test-vector-db-{uuid.uuid4().hex}"
client.vector_dbs.register(
vector_db_id=vector_db_id,
+ provider_id=provider_id,
embedding_model="all-MiniLM-L6-v2",
embedding_dimension=384,
)
diff --git a/docs/source/references/llama_cli_reference/download_models.md b/docs/source/references/llama_cli_reference/download_models.md
index 3c40f1392..6c791bcb7 100644
--- a/docs/source/references/llama_cli_reference/download_models.md
+++ b/docs/source/references/llama_cli_reference/download_models.md
@@ -39,7 +39,7 @@ You should see a table like this:
```
+----------------------------------+------------------------------------------+----------------+
-| Model Descriptor | Hugging Face Repo | Context Length |
+| Model Descriptor(ID) | Hugging Face Repo | Context Length |
+----------------------------------+------------------------------------------+----------------+
| Llama3.1-8B | meta-llama/Llama-3.1-8B | 128K |
+----------------------------------+------------------------------------------+----------------+
diff --git a/docs/source/references/llama_cli_reference/index.md b/docs/source/references/llama_cli_reference/index.md
index f7ac5fe36..76abce544 100644
--- a/docs/source/references/llama_cli_reference/index.md
+++ b/docs/source/references/llama_cli_reference/index.md
@@ -63,7 +63,7 @@ You should see a table like this:
```
+----------------------------------+------------------------------------------+----------------+
-| Model Descriptor | Hugging Face Repo | Context Length |
+| Model Descriptor(ID) | Hugging Face Repo | Context Length |
+----------------------------------+------------------------------------------+----------------+
| Llama3.1-8B | meta-llama/Llama-3.1-8B | 128K |
+----------------------------------+------------------------------------------+----------------+
diff --git a/docs/zero_to_hero_guide/Tool_Calling101_Using_Together's_Llama_Stack_Server.ipynb b/docs/zero_to_hero_guide/Tool_Calling101_Using_Together_Llama_Stack_Server.ipynb
similarity index 100%
rename from docs/zero_to_hero_guide/Tool_Calling101_Using_Together's_Llama_Stack_Server.ipynb
rename to docs/zero_to_hero_guide/Tool_Calling101_Using_Together_Llama_Stack_Server.ipynb
diff --git a/llama_stack/apis/inference/inference.py b/llama_stack/apis/inference/inference.py
index 433ba3274..a3fb69477 100644
--- a/llama_stack/apis/inference/inference.py
+++ b/llama_stack/apis/inference/inference.py
@@ -182,10 +182,12 @@ class ToolChoice(Enum):
:cvar auto: The model may use tools if it determines that is appropriate.
:cvar required: The model must use tools.
+ :cvar none: The model must not use tools.
"""
auto = "auto"
required = "required"
+ none = "none"
@json_schema_type
@@ -326,7 +328,7 @@ class SystemMessageBehavior(Enum):
class ToolConfig(BaseModel):
"""Configuration for tool use.
- :param tool_choice: (Optional) Whether tool use is required or automatic. Defaults to ToolChoice.auto.
+ :param tool_choice: (Optional) Whether tool use is automatic, required, or none. Can also specify a tool name to use a specific tool. Defaults to ToolChoice.auto.
:param tool_prompt_format: (Optional) Instructs the model how to format tool calls. By default, Llama Stack will attempt to use a format that is best adapted to the model.
- `ToolPromptFormat.json`: The tool calls are formatted as a JSON object.
- `ToolPromptFormat.function_tag`: The tool calls are enclosed in a tag.
@@ -337,9 +339,16 @@ class ToolConfig(BaseModel):
'{{function_definitions}}' to indicate where the function definitions should be inserted.
"""
- tool_choice: Optional[ToolChoice] = Field(default=ToolChoice.auto)
+ tool_choice: Optional[ToolChoice | str] = Field(default=ToolChoice.auto)
tool_prompt_format: Optional[ToolPromptFormat] = Field(default=None)
- system_message_behavior: SystemMessageBehavior = Field(default=SystemMessageBehavior.append)
+ system_message_behavior: Optional[SystemMessageBehavior] = Field(default=SystemMessageBehavior.append)
+
+ def model_post_init(self, __context: Any) -> None:
+ if isinstance(self.tool_choice, str):
+ try:
+ self.tool_choice = ToolChoice[self.tool_choice]
+ except KeyError:
+ pass
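+        # Example (illustrative sketch): member names normalize to the enum,
+        # while other strings are preserved as the name of a specific tool.
+        #
+        #   ToolConfig(tool_choice="auto").tool_choice         # -> ToolChoice.auto
+        #   ToolConfig(tool_choice="get_weather").tool_choice  # -> "get_weather"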
# This is an internally used class
diff --git a/llama_stack/cli/download.py b/llama_stack/cli/download.py
index 8afc6d31d..af86f7243 100644
--- a/llama_stack/cli/download.py
+++ b/llama_stack/cli/download.py
@@ -56,7 +56,7 @@ def setup_download_parser(parser: argparse.ArgumentParser) -> None:
parser.add_argument(
"--model-id",
required=False,
- help="See `llama model list` or `llama model list --show-all` for the list of available models",
+ help="See `llama model list` or `llama model list --show-all` for the list of available models. Specify multiple model IDs with commas, e.g. --model-id Llama3.2-1B,Llama3.2-3B",
)
parser.add_argument(
"--hf-token",
diff --git a/llama_stack/cli/model/list.py b/llama_stack/cli/model/list.py
index 4fe28751e..e6bf2216a 100644
--- a/llama_stack/cli/model/list.py
+++ b/llama_stack/cli/model/list.py
@@ -36,8 +36,8 @@ class ModelList(Subcommand):
from .safety_models import prompt_guard_model_sku
headers = [
- "Model Descriptor",
- "Model ID",
+ "Model Descriptor(ID)",
+ "Hugging Face Repo",
"Context Length",
]
diff --git a/llama_stack/cli/model/verify_download.py b/llama_stack/cli/model/verify_download.py
index b8e6bf173..e7159c0aa 100644
--- a/llama_stack/cli/model/verify_download.py
+++ b/llama_stack/cli/model/verify_download.py
@@ -15,7 +15,7 @@ class ModelVerifyDownload(Subcommand):
self.parser = subparsers.add_parser(
"verify-download",
prog="llama model verify-download",
- description="Verify the downloaded checkpoints' checksums",
+ description="Verify the downloaded checkpoints' checksums for models downloaded from Meta",
formatter_class=argparse.RawTextHelpFormatter,
)
diff --git a/llama_stack/cli/stack/build.py b/llama_stack/cli/stack/build.py
index ca4c0d8ce..7b17a960a 100644
--- a/llama_stack/cli/stack/build.py
+++ b/llama_stack/cli/stack/build.py
@@ -38,9 +38,8 @@ class StackBuild(Subcommand):
self.parser.add_argument(
"--list-templates",
- type=bool,
+ action="store_true",
default=False,
- action=argparse.BooleanOptionalAction,
help="Show the available templates for building a Llama Stack distribution",
)
diff --git a/llama_stack/cli/table.py b/llama_stack/cli/table.py
index 599749231..bf59e6103 100644
--- a/llama_stack/cli/table.py
+++ b/llama_stack/cli/table.py
@@ -4,75 +4,36 @@
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.
-import re
-import textwrap
from typing import Iterable
-from termcolor import cprint
-
-
-def strip_ansi_colors(text):
- ansi_escape = re.compile(r"\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])")
- return ansi_escape.sub("", text)
-
-
-def format_row(row, col_widths):
- def wrap(text, width):
- lines = []
- for line in text.split("\n"):
- if line.strip() == "":
- lines.append("")
- else:
- lines.extend(textwrap.wrap(line, width, break_long_words=False, replace_whitespace=False))
- return lines
-
- wrapped = [wrap(item, width) for item, width in zip(row, col_widths, strict=False)]
- max_lines = max(len(subrow) for subrow in wrapped)
-
- lines = []
- for i in range(max_lines):
- line = []
- for cell_lines, width in zip(wrapped, col_widths, strict=False):
- value = cell_lines[i] if i < len(cell_lines) else ""
- line.append(value + " " * (width - len(strip_ansi_colors(value))))
- lines.append("| " + (" | ".join(line)) + " |")
-
- return "\n".join(lines)
+from rich.console import Console
+from rich.table import Table
def print_table(rows, headers=None, separate_rows: bool = False, sort_by: Iterable[int] = tuple()):
- def itemlen(item):
- return max([len(line) for line in strip_ansi_colors(item).split("\n")])
-
+ # Convert rows and handle None values
rows = [[x or "" for x in row] for row in rows]
+ # Sort rows if sort_by is specified
if sort_by:
rows.sort(key=lambda x: tuple(x[i] for i in sort_by))
- if not headers:
- col_widths = [max(itemlen(item) for item in col) for col in zip(*rows, strict=False)]
- else:
- col_widths = [
- max(
- itemlen(header),
- max(itemlen(item) for item in col),
- )
- for header, col in zip(headers, zip(*rows, strict=False), strict=False)
- ]
- col_widths = [min(w, 80) for w in col_widths]
-
- header_line = "+".join("-" * (width + 2) for width in col_widths)
- header_line = f"+{header_line}+"
+ # Create Rich table
+ table = Table(show_lines=separate_rows)
+ # Add headers if provided
if headers:
- print(header_line)
- cprint(format_row(headers, col_widths), "white", attrs=["bold"])
+ for header in headers:
+ table.add_column(header, style="bold white")
+ else:
+ # Add unnamed columns based on first row
+ for _ in range(len(rows[0]) if rows else 0):
+ table.add_column()
- print(header_line)
+ # Add rows
for row in rows:
- print(format_row(row, col_widths))
- if separate_rows:
- print(header_line)
+ table.add_row(*row)
- if not separate_rows:
- print(header_line)
+ # Print table
+ console = Console()
+ console.print(table)
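+
+
+# Usage sketch (illustrative):
+#
+#   print_table(
+#       [["Llama3.1-8B", "128K"], ["Llama3.2-1B", "128K"]],
+#       headers=["Model Descriptor(ID)", "Context Length"],
+#       sort_by=(0,),
+#   )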
diff --git a/llama_stack/cli/verify_download.py b/llama_stack/cli/verify_download.py
index 47993c361..1229e8601 100644
--- a/llama_stack/cli/verify_download.py
+++ b/llama_stack/cli/verify_download.py
@@ -44,7 +44,7 @@ def setup_verify_download_parser(parser: argparse.ArgumentParser) -> None:
parser.add_argument(
"--model-id",
required=True,
- help="Model ID to verify",
+ help="Model ID to verify (only for models downloaded from Meta)",
)
parser.set_defaults(func=partial(run_verify_cmd, parser=parser))
diff --git a/llama_stack/distribution/library_client.py b/llama_stack/distribution/library_client.py
index a7ef753b9..639e5ee73 100644
--- a/llama_stack/distribution/library_client.py
+++ b/llama_stack/distribution/library_client.py
@@ -13,7 +13,7 @@ import re
from concurrent.futures import ThreadPoolExecutor
from enum import Enum
from pathlib import Path
-from typing import Any, Optional, TypeVar, get_args, get_origin
+from typing import Any, Optional, TypeVar, Union, get_args, get_origin
import httpx
import yaml
@@ -47,6 +47,8 @@ from llama_stack.providers.utils.telemetry.tracing import (
start_trace,
)
+logger = logging.getLogger(__name__)
+
T = TypeVar("T")
@@ -81,12 +83,13 @@ def convert_to_pydantic(annotation: Any, value: Any) -> Any:
return value
origin = get_origin(annotation)
+
if origin is list:
item_type = get_args(annotation)[0]
try:
return [convert_to_pydantic(item_type, item) for item in value]
except Exception:
- print(f"Error converting list {value}")
+ logger.error(f"Error converting list {value} into {item_type}")
return value
elif origin is dict:
@@ -94,17 +97,25 @@ def convert_to_pydantic(annotation: Any, value: Any) -> Any:
try:
return {k: convert_to_pydantic(val_type, v) for k, v in value.items()}
except Exception:
- print(f"Error converting dict {value}")
+ logger.error(f"Error converting dict {value} into {val_type}")
return value
try:
# Handle Pydantic models and discriminated unions
return TypeAdapter(annotation).validate_python(value)
+
except Exception as e:
- cprint(
- f"Warning: direct client failed to convert parameter {value} into {annotation}: {e}",
- "yellow",
- )
+            # TODO: this is a workaround for having Union[str, AgentToolGroup] in the API schema.
+            # We should get rid of any non-discriminated unions in the API schema.
+ if origin is Union:
+ for union_type in get_args(annotation):
+ try:
+ return convert_to_pydantic(union_type, value)
+ except Exception:
+ continue
+            logger.warning(
+                f"Direct client failed to convert parameter {value} into {annotation}: {e}",
+            )
return value
@@ -142,7 +153,7 @@ class LlamaStackAsLibraryClient(LlamaStackClient):
for handler in root_logger.handlers[:]:
root_logger.removeHandler(handler)
- print(f"Removed handler {handler.__class__.__name__} from root logger")
+ logger.info(f"Removed handler {handler.__class__.__name__} from root logger")
def request(self, *args, **kwargs):
if kwargs.get("stream"):
@@ -421,4 +432,5 @@ class AsyncLlamaStackAsLibraryClient(AsyncLlamaStackClient):
if param_name in body:
value = body.get(param_name)
converted_body[param_name] = convert_to_pydantic(param.annotation, value)
+
return converted_body
diff --git a/llama_stack/distribution/routers/routers.py b/llama_stack/distribution/routers/routers.py
index f45975189..9d12c8a40 100644
--- a/llama_stack/distribution/routers/routers.py
+++ b/llama_stack/distribution/routers/routers.py
@@ -128,7 +128,7 @@ class InferenceRouter(Inference):
sampling_params: Optional[SamplingParams] = SamplingParams(),
response_format: Optional[ResponseFormat] = None,
tools: Optional[List[ToolDefinition]] = None,
- tool_choice: Optional[ToolChoice] = ToolChoice.auto,
+ tool_choice: Optional[ToolChoice] = None,
tool_prompt_format: Optional[ToolPromptFormat] = None,
stream: Optional[bool] = False,
logprobs: Optional[LogProbConfig] = None,
@@ -140,20 +140,36 @@ class InferenceRouter(Inference):
if model.model_type == ModelType.embedding:
raise ValueError(f"Model '{model_id}' is an embedding model and does not support chat completions")
if tool_config:
- if tool_choice != tool_config.tool_choice:
+ if tool_choice and tool_choice != tool_config.tool_choice:
raise ValueError("tool_choice and tool_config.tool_choice must match")
- if tool_prompt_format != tool_config.tool_prompt_format:
+ if tool_prompt_format and tool_prompt_format != tool_config.tool_prompt_format:
raise ValueError("tool_prompt_format and tool_config.tool_prompt_format must match")
else:
- tool_config = ToolConfig(
- tool_choice=tool_choice,
- tool_prompt_format=tool_prompt_format,
- )
+ params = {}
+ if tool_choice:
+ params["tool_choice"] = tool_choice
+ if tool_prompt_format:
+ params["tool_prompt_format"] = tool_prompt_format
+ tool_config = ToolConfig(**params)
+
+ tools = tools or []
+ if tool_config.tool_choice == ToolChoice.none:
+ tools = []
+        elif tool_config.tool_choice in (ToolChoice.auto, ToolChoice.required):
+            pass
+ else:
+ # verify tool_choice is one of the tools
+ tool_names = [t.tool_name if isinstance(t.tool_name, str) else t.tool_name.value for t in tools]
+ if tool_config.tool_choice not in tool_names:
+ raise ValueError(f"Tool choice {tool_config.tool_choice} is not one of the tools: {tool_names}")
+
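+        # For example (sketch): tool_choice="get_weather" is accepted only if a
+        # tool named "get_weather" appears in `tools`, while ToolChoice.none
+        # strips all tool definitions from the request.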
params = dict(
model_id=model_id,
messages=messages,
sampling_params=sampling_params,
- tools=tools or [],
+ tools=tools,
tool_choice=tool_choice,
tool_prompt_format=tool_prompt_format,
response_format=response_format,
diff --git a/llama_stack/providers/remote/inference/nvidia/nvidia.py b/llama_stack/providers/remote/inference/nvidia/nvidia.py
index ec00b02f0..fdd464af2 100644
--- a/llama_stack/providers/remote/inference/nvidia/nvidia.py
+++ b/llama_stack/providers/remote/inference/nvidia/nvidia.py
@@ -4,6 +4,7 @@
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.
+import logging
import warnings
from typing import AsyncIterator, List, Optional, Union
+
+logger = logging.getLogger(__name__)
@@ -49,7 +50,7 @@ class NVIDIAInferenceAdapter(Inference, ModelRegistryHelper):
# TODO(mf): filter by available models
ModelRegistryHelper.__init__(self, model_aliases=_MODEL_ALIASES)
- print(f"Initializing NVIDIAInferenceAdapter({config.url})...")
+ logger.info(f"Initializing NVIDIAInferenceAdapter({config.url})...")
if _is_nvidia_hosted(config):
if not config.api_key:
diff --git a/llama_stack/providers/remote/inference/nvidia/utils.py b/llama_stack/providers/remote/inference/nvidia/utils.py
index 0ec80e9dd..7d3f3f27e 100644
--- a/llama_stack/providers/remote/inference/nvidia/utils.py
+++ b/llama_stack/providers/remote/inference/nvidia/utils.py
@@ -4,12 +4,15 @@
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.
+import logging
from typing import Tuple
import httpx
from . import NVIDIAConfig
+logger = logging.getLogger(__name__)
+
def _is_nvidia_hosted(config: NVIDIAConfig) -> bool:
return "integrate.api.nvidia.com" in config.url
@@ -42,7 +45,7 @@ async def check_health(config: NVIDIAConfig) -> None:
RuntimeError: If the server is not running or ready
"""
if not _is_nvidia_hosted(config):
- print("Checking NVIDIA NIM health...")
+ logger.info("Checking NVIDIA NIM health...")
try:
is_live, is_ready = await _get_health(config.url)
if not is_live:
diff --git a/llama_stack/providers/utils/inference/prompt_adapter.py b/llama_stack/providers/utils/inference/prompt_adapter.py
index b7945dee7..2782c661f 100644
--- a/llama_stack/providers/utils/inference/prompt_adapter.py
+++ b/llama_stack/providers/utils/inference/prompt_adapter.py
@@ -31,6 +31,7 @@ from llama_stack.apis.inference import (
SystemMessage,
SystemMessageBehavior,
ToolChoice,
+ ToolDefinition,
UserMessage,
)
from llama_stack.models.llama.datatypes import (
@@ -311,8 +312,6 @@ def response_format_prompt(fmt: Optional[ResponseFormat]):
def augment_messages_for_tools_llama_3_1(
request: ChatCompletionRequest,
) -> List[Message]:
- assert request.tool_config.tool_choice == ToolChoice.auto, "Only `ToolChoice.auto` supported"
-
existing_messages = request.messages
existing_system_message = None
if existing_messages[0].role == Role.system.value:
@@ -352,6 +351,10 @@ def augment_messages_for_tools_llama_3_1(
elif isinstance(existing_system_message.content, list):
sys_content += "\n".join([_process(c) for c in existing_system_message.content])
+ tool_choice_prompt = _get_tool_choice_prompt(request.tool_config.tool_choice, request.tools)
+ if tool_choice_prompt:
+ sys_content += "\n" + tool_choice_prompt
+
messages.append(SystemMessage(content=sys_content))
has_custom_tools = any(isinstance(dfn.tool_name, str) for dfn in request.tools)
@@ -377,8 +380,6 @@ def augment_messages_for_tools_llama_3_1(
def augment_messages_for_tools_llama_3_2(
request: ChatCompletionRequest,
) -> List[Message]:
- assert request.tool_config.tool_choice == ToolChoice.auto, "Only `ToolChoice.auto` supported"
-
existing_messages = request.messages
existing_system_message = None
if existing_messages[0].role == Role.system.value:
@@ -386,7 +387,6 @@ def augment_messages_for_tools_llama_3_2(
assert existing_messages[0].role != Role.system.value, "Should only have 1 system message"
- messages = []
sys_content = ""
custom_tools, builtin_tools = [], []
for t in request.tools:
@@ -395,7 +395,6 @@ def augment_messages_for_tools_llama_3_2(
else:
builtin_tools.append(t)
- tool_template = None
if builtin_tools:
tool_gen = BuiltinToolGenerator()
tool_template = tool_gen.gen(builtin_tools)
@@ -423,8 +422,22 @@ def augment_messages_for_tools_llama_3_2(
):
sys_content += interleaved_content_as_str(existing_system_message.content, sep="\n")
- messages.append(SystemMessage(content=sys_content.strip("\n")))
+ tool_choice_prompt = _get_tool_choice_prompt(request.tool_config.tool_choice, request.tools)
+ if tool_choice_prompt:
+ sys_content += "\n" + tool_choice_prompt
- # Add back existing messages from the request
- messages += existing_messages
+ messages = [SystemMessage(content=sys_content.strip("\n")), *existing_messages]
return messages
+
+
+def _get_tool_choice_prompt(tool_choice: ToolChoice | str, tools: List[ToolDefinition]) -> str:
+ if tool_choice == ToolChoice.auto:
+ return ""
+ elif tool_choice == ToolChoice.required:
+ return "You MUST use one of the provided functions/tools to answer the user query."
+ elif tool_choice == ToolChoice.none:
+ # tools were already dropped by the router when tool_choice is "none"
+ return ""
+ else:
+ # specific tool
+ return f"You MUST use the tool `{tool_choice}` to answer the user query."
diff --git a/llama_stack/strong_typing/auxiliary.py b/llama_stack/strong_typing/auxiliary.py
index fd183da18..cf19d6083 100644
--- a/llama_stack/strong_typing/auxiliary.py
+++ b/llama_stack/strong_typing/auxiliary.py
@@ -77,7 +77,7 @@ def typeannotation(
"""
def wrap(cls: Type[T]) -> Type[T]:
- setattr(cls, "__repr__", _compact_dataclass_repr)
+ cls.__repr__ = _compact_dataclass_repr
if not dataclasses.is_dataclass(cls):
cls = dataclasses.dataclass( # type: ignore[call-overload]
cls,
diff --git a/llama_stack/strong_typing/classdef.py b/llama_stack/strong_typing/classdef.py
index d2d8688e4..5ead886d4 100644
--- a/llama_stack/strong_typing/classdef.py
+++ b/llama_stack/strong_typing/classdef.py
@@ -203,7 +203,7 @@ def schema_to_type(schema: Schema, *, module: types.ModuleType, class_name: str)
if type_def.default is not dataclasses.MISSING:
raise TypeError("disallowed: `default` for top-level type definitions")
- setattr(type_def.type, "__module__", module.__name__)
+ type_def.type.__module__ = module.__name__
setattr(module, type_name, type_def.type)
return node_to_typedef(module, class_name, top_node).type
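The `setattr` → assignment swaps here and in `auxiliary.py`/`inspection.py` look like lint-driven cleanups (ruff's B010 flags `setattr` with a constant attribute name). Plain assignment is equivalent when the attribute is known at write time; `setattr` remains the right tool for dynamic targets:

```python
import sys


class Widget:
    pass


# Constant attribute name: plain assignment, which is what B010 suggests.
Widget.__module__ = __name__

# Dynamic attribute target: setattr() on a module object is still appropriate.
setattr(sys.modules[__name__], "Widget", Widget)
```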
diff --git a/llama_stack/strong_typing/deserializer.py b/llama_stack/strong_typing/deserializer.py
index 4c4ee9d89..fc0f40f83 100644
--- a/llama_stack/strong_typing/deserializer.py
+++ b/llama_stack/strong_typing/deserializer.py
@@ -325,7 +325,7 @@ class TupleDeserializer(Deserializer[Tuple[Any, ...]]):
f"type `{self.container_type}` expects a JSON `array` of length {count} but received length {len(data)}"
)
- return tuple(item_parser.parse(item) for item_parser, item in zip(self.item_parsers, data))
+ return tuple(item_parser.parse(item) for item_parser, item in zip(self.item_parsers, data, strict=False))
class UnionDeserializer(Deserializer):
diff --git a/llama_stack/strong_typing/inspection.py b/llama_stack/strong_typing/inspection.py
index 69bc15597..8bc313021 100644
--- a/llama_stack/strong_typing/inspection.py
+++ b/llama_stack/strong_typing/inspection.py
@@ -263,8 +263,8 @@ def extend_enum(
enum_class: Type[enum.Enum] = enum.Enum(extend.__name__, values) # type: ignore
# assign the newly created type to the same module where the extending class is defined
- setattr(enum_class, "__module__", extend.__module__)
- setattr(enum_class, "__doc__", extend.__doc__)
+ enum_class.__module__ = extend.__module__
+ enum_class.__doc__ = extend.__doc__
setattr(sys.modules[extend.__module__], extend.__name__, enum_class)
return enum.unique(enum_class)
@@ -874,6 +874,7 @@ def is_generic_instance(obj: Any, typ: TypeLike) -> bool:
for tuple_item_type, item in zip(
(tuple_item_type for tuple_item_type in typing.get_args(typ)),
(item for item in obj),
+ strict=False,
)
)
elif origin_type is Union:
@@ -954,6 +955,7 @@ class RecursiveChecker:
for tuple_item_type, item in zip(
(tuple_item_type for tuple_item_type in typing.get_args(typ)),
(item for item in obj),
+ strict=False,
)
)
elif origin_type is Union:
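The recurring `strict=False` arguments are presumably lint-driven as well (ruff's B905 requires an explicit `strict=` on `zip`, Python 3.10+). `strict=False` keeps zip's default silent truncation, whereas `strict=True` would raise on a length mismatch:

```python
# Default behavior, now spelled explicitly: the longer input is truncated.
assert list(zip([1, 2, 3], ["a", "b"], strict=False)) == [(1, "a"), (2, "b")]

# Opting in to strict checking raises instead of truncating.
try:
    list(zip([1, 2, 3], ["a", "b"], strict=True))
except ValueError as exc:
    print(exc)  # zip() argument 2 is shorter than argument 1
```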
diff --git a/llama_stack/strong_typing/schema.py b/llama_stack/strong_typing/schema.py
index ddff7cf82..dfc51ea78 100644
--- a/llama_stack/strong_typing/schema.py
+++ b/llama_stack/strong_typing/schema.py
@@ -108,7 +108,9 @@ def get_class_property_docstrings(
def docstring_to_schema(data_type: type) -> Schema:
short_description, long_description = get_class_docstrings(data_type)
- schema: Schema = {}
+ schema: Schema = {
+ "title": python_type_to_name(data_type),
+ }
description = "\n".join(filter(None, [short_description, long_description]))
if description:
@@ -311,6 +313,17 @@ class JsonSchemaGenerator:
data_type: TypeLike,
force_expand: bool = False,
json_schema_extra: Optional[dict] = None,
+ ) -> Schema:
+ common_info = {}
+ if json_schema_extra and "deprecated" in json_schema_extra:
+ common_info["deprecated"] = json_schema_extra["deprecated"]
+ return self._type_to_schema(data_type, force_expand, json_schema_extra) | common_info
+
+ def _type_to_schema(
+ self,
+ data_type: TypeLike,
+ force_expand: bool = False,
+ json_schema_extra: Optional[dict] = None,
) -> Schema:
"""
Returns the JSON schema associated with a type.
@@ -487,7 +500,11 @@ class JsonSchemaGenerator:
if "model_fields" in members:
f = members["model_fields"]
defaults = {k: finfo.default for k, finfo in f.items()}
- json_schema_extra = f.get(output_name, None).json_schema_extra
+ if output_name in f:
+ finfo = f[output_name]
+ json_schema_extra = finfo.json_schema_extra or {}
+ if finfo.deprecated:
+ json_schema_extra["deprecated"] = True
if is_type_optional(property_type):
optional_type: type = unwrap_optional_type(property_type)
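The `schema.py` changes mirror the spec diff: a pydantic field marked deprecated should surface as `"deprecated": true` in the generated JSON schema. A small sketch of that mapping, assuming pydantic v2.7+ (where `Field(deprecated=...)` and `FieldInfo.deprecated` exist); the model below is a stand-in:

```python
from typing import Optional

from pydantic import BaseModel, Field


class ExampleConfig(BaseModel):  # stand-in model
    model: str
    tool_choice: Optional[str] = Field(default=None, deprecated=True)


finfo = ExampleConfig.model_fields["tool_choice"]
json_schema_extra = dict(finfo.json_schema_extra or {})
if finfo.deprecated:
    json_schema_extra["deprecated"] = True  # carried into the emitted schema

assert json_schema_extra == {"deprecated": True}
```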
diff --git a/llama_stack/strong_typing/serializer.py b/llama_stack/strong_typing/serializer.py
index 5e93e4c4d..4ca4a4119 100644
--- a/llama_stack/strong_typing/serializer.py
+++ b/llama_stack/strong_typing/serializer.py
@@ -216,7 +216,7 @@ class TypedTupleSerializer(Serializer[tuple]):
self.item_generators = tuple(_get_serializer(item_type, context) for item_type in item_types)
def generate(self, obj: tuple) -> List[JsonType]:
- return [item_generator.generate(item) for item_generator, item in zip(self.item_generators, obj)]
+ return [item_generator.generate(item) for item_generator, item in zip(self.item_generators, obj, strict=False)]
class CustomSerializer(Serializer):
diff --git a/tests/client-sdk/agents/test_agents.py b/tests/client-sdk/agents/test_agents.py
index 0369f325b..e5380d357 100644
--- a/tests/client-sdk/agents/test_agents.py
+++ b/tests/client-sdk/agents/test_agents.py
@@ -98,7 +98,6 @@ def agent_config(llama_stack_client, text_model_id):
},
},
toolgroups=[],
- tool_choice="auto",
input_shields=available_shields,
output_shields=available_shields,
enable_session_persistence=False,
@@ -322,6 +321,38 @@ def test_custom_tool(llama_stack_client, agent_config):
assert "get_boiling_point" in logs_str
+def test_tool_choice(llama_stack_client, agent_config):
+ data = [
+ ("required", '{"type": "function"'),
+ ("none", None),
+ ("get_boiling_point", '{"type": "function", "name": "get_boiling_point"'),
+ ]
+ client_tool = TestClientTool()
+ for tool_choice, expected_tool in data:
+ agent_config["tool_config"] = {"tool_choice": tool_choice}
+ agent_config["client_tools"] = [client_tool.get_tool_definition()]
+
+ agent = Agent(llama_stack_client, agent_config, client_tools=(client_tool,))
+ session_id = agent.create_session(f"test-session-{uuid4()}")
+
+ response = agent.create_turn(
+ messages=[
+ {
+ "role": "user",
+ "content": "What is the boiling point of polyjuice?",
+ },
+ ],
+ session_id=session_id,
+ )
+
+ logs = [str(log) for log in EventLogger().log(response) if log is not None]
+ logs_str = "".join(logs)
+ if expected_tool:
+ assert expected_tool in logs_str
+ else:
+ assert '{"type": "function"' not in logs_str
+
+
# TODO: fix this flaky test
def xtest_override_system_message_behavior(llama_stack_client, agent_config):
client_tool = TestClientTool()
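The fixture no longer sets a top-level `tool_choice`; that field is deprecated in favor of `tool_config`, as the new `test_tool_choice` exercises. A minimal before/after for agent configs, with illustrative values:

```python
# before (deprecated top-level field):
agent_config = {"model": "my-model", "instructions": "...", "tool_choice": "auto"}

# after:
agent_config = {
    "model": "my-model",
    "instructions": "...",
    "tool_config": {"tool_choice": "auto"},  # "required", "none", or a tool name
}
```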
diff --git a/tests/client-sdk/inference/test_text_inference.py b/tests/client-sdk/inference/test_text_inference.py
index c931ca255..6a113c463 100644
--- a/tests/client-sdk/inference/test_text_inference.py
+++ b/tests/client-sdk/inference/test_text_inference.py
@@ -247,6 +247,40 @@ def test_text_chat_completion_with_tool_calling_and_streaming(
assert tool_invocation_content == "[get_weather, {'location': 'San Francisco, CA'}]"
+def test_text_chat_completion_with_tool_choice_required(
+ llama_stack_client, text_model_id, get_weather_tool_definition, provider_tool_format, inference_provider_type
+):
+ response = llama_stack_client.inference.chat_completion(
+ model_id=text_model_id,
+ messages=[
+ {"role": "system", "content": "You are a helpful assistant."},
+ {"role": "user", "content": "What's the weather like in San Francisco?"},
+ ],
+ tools=[get_weather_tool_definition],
+ tool_config={"tool_choice": "required", "tool_prompt_format": provider_tool_format},
+ stream=True,
+ )
+ tool_invocation_content = extract_tool_invocation_content(response)
+ assert tool_invocation_content == "[get_weather, {'location': 'San Francisco, CA'}]"
+
+
+def test_text_chat_completion_with_tool_choice_none(
+ llama_stack_client, text_model_id, get_weather_tool_definition, provider_tool_format
+):
+ response = llama_stack_client.inference.chat_completion(
+ model_id=text_model_id,
+ messages=[
+ {"role": "system", "content": "You are a helpful assistant."},
+ {"role": "user", "content": "What's the weather like in San Francisco?"},
+ ],
+ tools=[get_weather_tool_definition],
+ tool_config={"tool_choice": "none", "tool_prompt_format": provider_tool_format},
+ stream=True,
+ )
+ tool_invocation_content = extract_tool_invocation_content(response)
+ assert tool_invocation_content == ""
+
+
def test_text_chat_completion_structured_output(llama_stack_client, text_model_id, inference_provider_type):
class AnswerFormat(BaseModel):
first_name: str