diff --git a/requirements.txt b/requirements.txt
index e3d9f3f5c..cb104b6bd 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,2 +1,4 @@
json-strong-typing
python-openapi
+flask
+requests
diff --git a/source/api_definitions.py b/source/api_definitions.py
index 84ccdd20d..2f6944007 100644
--- a/source/api_definitions.py
+++ b/source/api_definitions.py
@@ -196,6 +196,12 @@ class AgenticSystem(Protocol):
AgenticSystemExecuteResponse, AgenticSystemExecuteResponseStreamChunk
]: ...
+ @webmethod(route="/agentic_system/delete")
+ def delete_agentic_system(
+ self,
+ agent_id: str,
+ ) -> None: ...
+
class LlamaStackEndpoints(Inference, AgenticSystem): ...
diff --git a/source/client.py b/source/client.py
new file mode 100644
index 000000000..222845b91
--- /dev/null
+++ b/source/client.py
@@ -0,0 +1,59 @@
+import requests
+from dataclasses import asdict
+from typing import Union
+from enum import Enum
+
+import json
+
+from model_types import *
+from agentic_system_types import *
+from api_definitions import *
+
+# asdict() leaves Enum members and sets untouched, so teach json.dumps how to encode them.
+class EnumEncoder(json.JSONEncoder):
+ def default(self, obj):
+ if isinstance(obj, Enum):
+ return obj.value
+ elif isinstance(obj, set):
+ return list(obj)
+ return json.JSONEncoder.default(self, obj)
+
+
+class AgenticSystemClient:
+ def __init__(self, base_url: str):
+ self.base_url = base_url
+
+    def create_agentic_system(self, request: AgenticSystemCreateRequest) -> AgenticSystemCreateResponse:
+        response = requests.post(
+            f"{self.base_url}/agentic_system/create",
+            data=json.dumps(asdict(request), cls=EnumEncoder),
+            headers={"Content-Type": "application/json"},
+        )
+        response.raise_for_status()
+        return AgenticSystemCreateResponse(**response.json())
+
+    def execute_agentic_system(
+        self, request: AgenticSystemExecuteRequest
+    ) -> Union[AgenticSystemExecuteResponse, AgenticSystemExecuteResponseStreamChunk]:
+        response = requests.post(
+            f"{self.base_url}/agentic_system/execute",
+            data=json.dumps(asdict(request), cls=EnumEncoder),
+            headers={"Content-Type": "application/json"},
+        )
+        response.raise_for_status()
+        response_json = response.json()
+        # A completed turn marks a non-streaming response; otherwise treat it as a stream chunk.
+        if "turn" in response_json:
+            return AgenticSystemExecuteResponse(**response_json)
+        else:
+            return AgenticSystemExecuteResponseStreamChunk(**response_json)
+
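+    def delete_agentic_system(self, agent_id: str) -> None:
+        # Hypothetical helper for the new /agentic_system/delete route; assumes
+        # the server reads agent_id from the query string, as declared in openapi.yaml.
+        response = requests.delete(
+            f"{self.base_url}/agentic_system/delete",
+            params={"agent_id": agent_id},
+        )
+        response.raise_for_status()
+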
+# Example usage
+if __name__ == "__main__":
+ client = AgenticSystemClient("http://localhost:5000")
+
+ # Create a new agentic system
+ create_request = AgenticSystemCreateRequest(
+ instructions="Your instructions here",
+ model=InstructModel.llama3_8b_chat,
+ )
+ create_response = client.create_agentic_system(create_request)
+ print("Agent ID:", create_response.agent_id)
+
+ # Execute the agentic system
+ execute_request = AgenticSystemExecuteRequest(
+ agent_id=create_response.agent_id,
+ messages=[Message(role="user", content="Tell me a joke")],
+ turn_history=[],
+ stream=False
+ )
+ execute_response = client.execute_agentic_system(execute_request)
+ print("Execute Response:", execute_response)
diff --git a/source/codegen/openapi-generator-cli.jar b/source/codegen/openapi-generator-cli.jar
new file mode 100644
index 000000000..7102a497d
Binary files /dev/null and b/source/codegen/openapi-generator-cli.jar differ
diff --git a/source/create_code.sh b/source/create_code.sh
new file mode 100644
index 000000000..fa927ba7d
--- /dev/null
+++ b/source/create_code.sh
@@ -0,0 +1,14 @@
+#!/bin/bash
+
+set -euo pipefail
+set -x
+
+export JAVA_HOME=/usr/local/java-runtime/impl/11
+
+$JAVA_HOME/bin/java -jar codegen/openapi-generator-cli.jar \
+ generate \
+ -i openapi.yaml \
+ -g python-flask \
+ -o /tmp/foo \
+ --log-to-stderr \
+ --global-property debugModels,debugOperations,debugOpenAPI,debugSupportingFiles
diff --git a/source/openapi.html b/source/openapi.html
index e17db94d8..e52e21643 100644
--- a/source/openapi.html
+++ b/source/openapi.html
@@ -96,6 +96,28 @@
}
}
},
+ "/agentic_system/delete": {
+ "delete": {
+ "responses": {
+ "200": {
+ "description": "OK"
+ }
+ },
+ "tags": [
+ "AgenticSystem"
+ ],
+ "parameters": [
+ {
+ "name": "agent_id",
+ "in": "query",
+ "required": true,
+ "schema": {
+ "type": "string"
+ }
+ }
+ ]
+ }
+ },
"/chat_completion": {
"post": {
"responses": {
@@ -1440,10 +1462,10 @@
],
"tags": [
{
- "name": "Inference"
+ "name": "AgenticSystem"
},
{
- "name": "AgenticSystem"
+ "name": "Inference"
},
{
"name": "ShieldConfig",
diff --git a/source/openapi.yaml b/source/openapi.yaml
index 8282e497b..f43164410 100644
--- a/source/openapi.yaml
+++ b/source/openapi.yaml
@@ -803,6 +803,19 @@ paths:
description: OK
tags:
- AgenticSystem
+ /agentic_system/delete:
+ delete:
+ parameters:
+ - in: query
+ name: agent_id
+ required: true
+ schema:
+ type: string
+ responses:
+ '200':
+ description: OK
+ tags:
+ - AgenticSystem
/agentic_system/execute:
post:
parameters: []
@@ -870,8 +883,8 @@ security:
servers:
- url: http://llama.meta.com
tags:
-- name: Inference
- name: AgenticSystem
+- name: Inference
- description:
name: ShieldConfig
- description:
+    def create_agentic_system(self, request: AgenticSystemCreateRequest) -> AgenticSystemCreateResponse:
+        # Mock implementation
+        return AgenticSystemCreateResponse(agent_id="12345")
+
+    def create_agentic_system_execute(
+        self, request: AgenticSystemExecuteRequest
+    ) -> Union[AgenticSystemExecuteResponse, AgenticSystemExecuteResponseStreamChunk]:
+        # Mock implementation
+        return AgenticSystemExecuteResponse(
+            turn=AgenticSystemTurn(
+                user_messages=[],
+                steps=[],
+                response_message=Message(
+                    role="assistant",
+                    content="Hello, I am an agent. I can help you with your tasks. What can I help you with?",
+                ),
+            ),
+        )
+
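+    def delete_agentic_system(self, agent_id: str) -> None:
+        # Hypothetical mock matching the delete_agentic_system protocol method;
+        # a real implementation would drop whatever state backs agent_id.
+        return None
+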
+agentic_system = AgenticSystemImpl()
+
+@app.route("/agentic_system/create", methods=["POST"])
+def create_agentic_system():
+ data = request.json
+ create_request = AgenticSystemCreateRequest(**data)
+ response = agentic_system.create_agentic_system(create_request)
+ return jsonify(response)
+
+@app.route("/agentic_system/execute", methods=["POST"])
+def create_agentic_system_execute():
+ data = request.json
+ execute_request = AgenticSystemExecuteRequest(**data)
+ response = agentic_system.create_agentic_system_execute(execute_request)
+ return jsonify(response)
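+
+@app.route("/agentic_system/delete", methods=["DELETE"])
+def delete_agentic_system():
+    # Hypothetical route for the new /agentic_system/delete endpoint; assumes
+    # agent_id arrives as a query parameter, as declared in openapi.yaml.
+    agent_id = request.args["agent_id"]
+    agentic_system.delete_agentic_system(agent_id)
+    return ("", 200)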
+
+if __name__ == "__main__":
+ app.run(debug=True)