OpenAI Tip: If you need structured output from the LLM, use OpenAI's Structured Outputs feature and define the expected format with a JSON Schema.
Here's an example:
from openai import OpenAI
import os
client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))
# Define the exact shape you want back.
schema = {
    "name": "acl_parse",
    "schema": {
        "type": "object",
        "properties": {
            "entries": {
                "type": "array",
                "items": {
                    "type": "object",
                    "properties": {
                        "action": {"type": "string"},
                        "protocol": {"type": "string"},
                        "src": {"type": "string"},
                        "dst": {"type": "string"},
                        "port": {"type": "string"},
                    },
                    "required": ["action", "protocol", "src", "dst"],
                },
            }
        },
        "required": ["entries"],
    },
}
acl = """\
permit tcp any host 10.0.0.10 eq 443
deny ip any any
"""
resp = client.chat.completions.create(
    model="gpt-4o-mini",
    messages=[
        {"role": "system", "content": "Parse ACL lines into JSON."},
        {"role": "user", "content": acl},
    ],
    # Constrain the reply to the JSON Schema defined above.
    response_format={"type": "json_schema", "json_schema": schema},
)
print(resp.choices[0].message.content)
# Output:
# {
# "entries": [
# {
# "action": "permit",
# "protocol": "tcp",
# "src": "any",
# "dst": "host 10.0.0.10",
# "port": "eq 443"
# },
# {
# "action": "deny",
# "protocol": "ip",
# "src": "any",
# "dst": "any",
# "port": ""
# }
# ]
# }
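Because the model returns the structured result as a JSON string, you will usually parse it before working with it. A minimal follow-up sketch, assuming the resp object from the example above:

import json

# The structured response arrives as a JSON string; parse it into a dict.
data = json.loads(resp.choices[0].message.content)

# Iterate over the parsed ACL entries, e.g. to review each rule.
for entry in data["entries"]:
    print(entry["action"], entry["protocol"], entry["src"], "->", entry["dst"])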
