Keyboard shortcuts

Press ← or → to navigate between chapters

Press S or / to search in the book

Press ? to show this help

Press Esc to hide this help

Structured Output

Force the model to return JSON matching a specific schema. Guarantees valid, parseable output without prompt engineering tricks.

See the official Structured Outputs guide for schema format and limitations.

Derive JsonSchema on your struct and call parse::<T>(). The SDK auto-generates the schema and deserializes the response.

Requires feature structured: cargo add openai-oxide --features structured

#![allow(unused)]
fn main() {
// Live test for all new features — requires OPENAI_API_KEY
//
// cargo run --example live_features_test --features structured

use futures_util::StreamExt;
use openai_oxide::OpenAI;
use openai_oxide::stream_helpers::ChatStreamEvent;
use openai_oxide::types::chat::{ChatCompletionMessageParam, ChatCompletionRequest, UserContent};

/// Structured answer to a math problem: the worked steps plus the result.
/// `JsonSchema` lets the SDK auto-generate the response schema for `parse::<T>()`.
#[derive(Debug, serde::Deserialize, schemars::JsonSchema)]
struct MathAnswer {
    steps: Vec<Step>,
    final_answer: String,
}

/// One intermediate step of a worked solution.
#[derive(Debug, serde::Deserialize, schemars::JsonSchema)]
struct Step {
    explanation: String,
    output: String,
}

/// Sentiment classification result returned by the model.
/// NOTE(review): `confidence` is presumably in [0.0, 1.0] — confirm against the prompt used.
#[derive(Debug, serde::Deserialize, schemars::JsonSchema)]
struct Sentiment {
    sentiment: String,
    confidence: f64,
}

fn msg(text: &str) -> Vec<ChatCompletionMessageParam> {
    vec![ChatCompletionMessageParam::User {
        content: UserContent::Text(text.into()),
}

Rust — Manual Schema

For full control, construct the schema yourself:

//! Structured output with JSON Schema — model returns a validated JSON object.
//!
//! Run with: `OPENAI_API_KEY=sk-... cargo run --example structured_output`

use openai_oxide::OpenAI;
use openai_oxide::types::chat::*;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Client reads OPENAI_API_KEY from the environment.
    let client = OpenAI::from_env()?;

    // JSON Schema the model is forced to conform to (strict mode).
    let person_schema = serde_json::json!({
        "type": "object",
        "properties": {
            "name": {"type": "string"},
            "age": {"type": "integer"},
            "occupation": {"type": "string"},
            "company": {"type": "string"}
        },
        "required": ["name", "age", "occupation", "company"],
        "additionalProperties": false
    });

    let messages = vec![
        ChatCompletionMessageParam::System {
            content: "Extract structured data from user messages.".into(),
            name: None,
        },
        ChatCompletionMessageParam::User {
            content: UserContent::Text(
                "My name is Alice, I'm 30, and I work as a software engineer at Acme Corp."
                    .into(),
            ),
            name: None,
        },
    ];

    let request = ChatCompletionRequest::new("gpt-4o-mini", messages).response_format(
        ResponseFormat::JsonSchema {
            json_schema: JsonSchema {
                name: "person_info".into(),
                description: Some("Extracted person information".into()),
                schema: Some(person_schema),
                strict: Some(true),
            },
        },
    );

    let response = client.chat().completions().create(request).await?;

    // Fall back to an empty object if the model returned no content.
    let content = response.choices[0]
        .message
        .content
        .as_deref()
        .unwrap_or("{}");
    let parsed: serde_json::Value = serde_json::from_str(content)?;

    println!("Extracted data:");
    for (label, key) in [
        ("Name", "name"),
        ("Age", "age"),
        ("Occupation", "occupation"),
        ("Company", "company"),
    ] {
        println!("  {}: {}", label, parsed[key]);
    }

    Ok(())
}

Node.js (drop-in replacement)

Same syntax as official openai package — change one import:

/**
 * Drop-in replacement for official openai SDK structured output example.
 * Change: `const OpenAI = require('openai')` → `const { OpenAI } = require('openai-oxide/compat')`
 *
 * For Zod support: npm install zod zod-to-json-schema
 */

// ── One-line change from official SDK ──
// const OpenAI = require('openai');
const { OpenAI } = require('../compat');

async function main() {
    const client = new OpenAI();

    // JSON Schema (works without Zod) — shape of a single solution step.
    const stepItemSchema = {
        type: "object",
        properties: {
            explanation: { type: "string" },
            output: { type: "string" },
        },
        required: ["explanation", "output"],
        additionalProperties: false,
    };

    // Top-level response object: an array of steps plus the final answer.
    const MathResponseSchema = {
        type: "object",
        properties: {
            steps: {
                type: "array",
                items: stepItemSchema,
            },
            final_answer: { type: "string" },
        },
        required: ["steps", "final_answer"],
        additionalProperties: false,
    };

    const result = await client.chat.completions.parse({
        model: "gpt-5.4-mini",
        messages: [
            { role: "system", content: "You are a helpful math tutor." },
            { role: "user", content: "solve 8x + 31 = 2" },
        ],
        response_format: {
            type: "json_schema",
            json_schema: {
                name: "MathResponse",
                schema: MathResponseSchema,
                strict: true,
            },
        },
    });

    const { message } = result.choices[0];
    const parsed = JSON.parse(message.content);
    parsed.steps.forEach((step) => {
        console.log(`  ${step.explanation} → ${step.output}`);
    });
    console.log("answer:", parsed.final_answer);
}

main();

Python (drop-in replacement)

Same syntax as official openai package — change one import:

#!/usr/bin/env python3
"""
Drop-in replacement for official openai SDK parsing example.
Change: `from openai import AsyncOpenAI` → `from openai_oxide.compat import AsyncOpenAI`
"""

import asyncio
from typing import List

from pydantic import BaseModel

# ── One-line change from official SDK ──
# from openai import AsyncOpenAI
from openai_oxide.compat import AsyncOpenAI


class Step(BaseModel):
    """One intermediate step of the worked math solution."""

    explanation: str  # natural-language description of the step
    output: str  # the expression produced by this step


class MathResponse(BaseModel):
    """Schema the model's JSON reply is parsed into: steps plus the result."""

    steps: List[Step]  # ordered solution steps
    final_answer: str  # the final solved value


async def main():
    """Ask for a structured MathResponse and print the parsed solution."""
    client = AsyncOpenAI()

    completion = await client.chat.completions.parse(
        model="gpt-5.4-mini",
        messages=[
            {"role": "system", "content": "You are a helpful math tutor."},
            {"role": "user", "content": "solve 8x + 31 = 2"},
        ],
        # Passing the pydantic model makes the SDK build the JSON schema
        # and deserialize the reply into `message.parsed`.
        response_format=MathResponse,
    )

    message = completion.choices[0].message
    # Guard clause: a refusal means there is nothing structured to print.
    if not message.parsed:
        print("refusal:", message.refusal)
        return

    for step in message.parsed.steps:
        print(f"  {step.explanation} → {step.output}")
    print("answer:", message.parsed.final_answer)


asyncio.run(main())

Next Steps