Mirascope

Mirascope is a powerful, flexible, and user-friendly library that simplifies the process of working with LLMs through a unified interface that works across various supported providers, including OpenAI, Anthropic, Mistral, Gemini, Groq, Cohere, LiteLLM, Azure AI, Vertex AI, and Bedrock.

Whether you're generating text, extracting structured information, or developing complex AI-driven agent systems, Mirascope provides the tools you need to streamline your development process and create powerful, robust applications.

30 Second Quickstart

Install Mirascope, specifying the provider(s) you intend to use, and set your API key:

pip install "mirascope[openai]"

export OPENAI_API_KEY=XXXXX
pip install "mirascope[anthropic]"

export ANTHROPIC_API_KEY=XXXXX
pip install "mirascope[mistral]"

export MISTRAL_API_KEY=XXXXX
pip install "mirascope[google]"

export GOOGLE_API_KEY=XXXXX
pip install "mirascope[groq]"

export GROQ_API_KEY=XXXXX
pip install "mirascope[cohere]"

export CO_API_KEY=XXXXX
pip install "mirascope[litellm]"

export OPENAI_API_KEY=XXXXX 
pip install "mirascope[azure]"

export AZURE_INFERENCE_ENDPOINT=XXXXX
export AZURE_INFERENCE_CREDENTIAL=XXXXX
pip install "mirascope[bedrock]"

aws configure
pip install "mirascope[openai]"

set OPENAI_API_KEY=XXXXX
pip install "mirascope[anthropic]"

set ANTHROPIC_API_KEY=XXXXX
pip install "mirascope[mistral]"

set MISTRAL_API_KEY=XXXXX
pip install "mirascope[google]"

set GOOGLE_API_KEY=XXXXX
pip install "mirascope[groq]"

set GROQ_API_KEY=XXXXX
pip install "mirascope[cohere]"

set CO_API_KEY=XXXXX
pip install "mirascope[litellm]"

set OPENAI_API_KEY=XXXXX 
pip install "mirascope[azure]"

set AZURE_INFERENCE_ENDPOINT=XXXXX
set AZURE_INFERENCE_CREDENTIAL=XXXXX
pip install "mirascope[bedrock]"

aws configure
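
If you prefer to configure credentials in code rather than in your shell, you can also set the environment variable from Python before making any calls. A minimal sketch, using OpenAI as the example (adapt the variable name to your provider):

import os

# Set the provider API key for this process only.
# In real projects, load it from a .env file or secrets manager rather than hardcoding it.
os.environ["OPENAI_API_KEY"] = "XXXXX"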

Make your first call to an LLM to extract the title and author of a book from the given text:

Mirascope

from mirascope import llm
from pydantic import BaseModel


class Book(BaseModel):
    title: str
    author: str


@llm.call(provider="openai", model="gpt-4o-mini", response_model=Book)
def extract_book(text: str) -> str:
    return f"Extract {text}"


book = extract_book("The Name of the Wind by Patrick Rothfuss")
print(book)
# Output: title='The Name of the Wind' author='Patrick Rothfuss'

The exact same function works with every other supported provider; only the decorator's provider and model arguments change:

provider="anthropic", model="claude-3-5-sonnet-latest"
provider="mistral", model="mistral-large-latest"
provider="google", model="gemini-2.0-flash"
provider="groq", model="llama-3.1-70b-versatile"
provider="cohere", model="command-r-plus"
provider="litellm", model="gpt-4o-mini"
provider="azure", model="gpt-4o-mini"
provider="bedrock", model="amazon.nova-lite-v1:0"

You can also write prompts using Mirascope's Messages types rather than returning a shorthand string:

from mirascope import Messages, llm
from pydantic import BaseModel


class Book(BaseModel):
    title: str
    author: str


@llm.call(provider="openai", model="gpt-4o-mini", response_model=Book)
def extract_book(text: str) -> Messages.Type:
    return Messages.User(f"Extract {text}")


book = extract_book("The Name of the Wind by Patrick Rothfuss")
print(book)
# Output: title='The Name of the Wind' author='Patrick Rothfuss'

Again, only the decorator's provider and model arguments change when targeting a different provider.

Prompts can also be defined as string templates with the prompt_template decorator:

from mirascope import llm, prompt_template
from pydantic import BaseModel


class Book(BaseModel):
    title: str
    author: str


@llm.call(provider="openai", model="gpt-4o-mini", response_model=Book)
@prompt_template("Extract {text}")
def extract_book(text: str): ...


book = extract_book("The Name of the Wind by Patrick Rothfuss")
print(book)
# Output: title='The Name of the Wind' author='Patrick Rothfuss'

As before, the other providers differ only in the decorator arguments.

Or you can construct the message list explicitly with BaseMessageParam:

from mirascope import BaseMessageParam, llm
from pydantic import BaseModel


class Book(BaseModel):
    title: str
    author: str


@llm.call(provider="openai", model="gpt-4o-mini", response_model=Book)
def extract_book(text: str) -> list[BaseMessageParam]:
    return [BaseMessageParam(role="user", content=f"Extract {text}")]


book = extract_book("The Name of the Wind by Patrick Rothfuss")
print(book)
# Output: title='The Name of the Wind' author='Patrick Rothfuss'

And once more, any supported provider works by swapping the provider and model arguments in the decorator.

Official SDK

For comparison, here is the same extraction implemented directly against each provider's official SDK.

OpenAI

from openai import OpenAI
from pydantic import BaseModel

client = OpenAI()


class Book(BaseModel):
    """An extracted book."""

    title: str
    author: str


def extract_book(text: str) -> Book:
    completion = client.chat.completions.create(
        model="gpt-4o-mini",
        messages=[{"role": "user", "content": f"Extract {text}"}],
        tools=[
            {
                "function": {
                    "name": "Book",
                    "description": "An extracted book.",
                    "parameters": {
                        "properties": {
                            "title": {"type": "string"},
                            "author": {"type": "string"},
                        },
                        "required": ["title", "author"],
                        "type": "object",
                    },
                },
                "type": "function",
            }
        ],
        tool_choice="required",
    )
    if tool_calls := completion.choices[0].message.tool_calls:
        return Book.model_validate_json(tool_calls[0].function.arguments)
    raise ValueError("No tool call found")


book = extract_book("The Name of the Wind by Patrick Rothfuss")
print(book)
# Output: title='The Name of the Wind' author='Patrick Rothfuss'

Anthropic

from anthropic import Anthropic
from pydantic import BaseModel

client = Anthropic()


class Book(BaseModel):
    """An extracted book."""

    title: str
    author: str


def extract_book(text: str) -> Book:
    message = client.messages.create(
        model="claude-3-5-sonnet-latest",
        max_tokens=1024,
        messages=[{"role": "user", "content": f"Extract {text}"}],
        tools=[
            {
                "name": "Book",
                "description": "An extracted book.",
                "input_schema": {
                    "properties": {
                        "title": {"type": "string"},
                        "author": {"type": "string"},
                    },
                    "required": ["title", "author"],
                    "type": "object",
                },
            }
        ],
        tool_choice={"type": "tool", "name": "Book"},
    )
    for block in message.content:
        if block.type == "tool_use" and block.input is not None:
            return Book.model_validate(block.input)
    raise ValueError("No tool call found")


book = extract_book("The Name of the Wind by Patrick Rothfuss")
print(book)
# Output: title='The Name of the Wind' author='Patrick Rothfuss'

Mistral

import os
from typing import cast

from mistralai import Mistral
from pydantic import BaseModel

client = Mistral(api_key=os.environ["MISTRAL_API_KEY"])


class Book(BaseModel):
    """An extracted book."""

    title: str
    author: str


def extract_book(text: str) -> Book:
    completion = client.chat.complete(
        model="mistral-large-latest",
        messages=[{"role": "user", "content": f"Extract {text}"}],
        tools=[
            {
                "function": {
                    "name": "Book",
                    "description": "An extracted book.",
                    "parameters": {
                        "properties": {
                            "title": {"type": "string"},
                            "author": {"type": "string"},
                        },
                        "required": ["title", "author"],
                        "type": "object",
                    },
                },
                "type": "function",
            }
        ],
        tool_choice="any",
    )
    if (
        completion
        and (choices := completion.choices)
        and (tool_calls := choices[0].message.tool_calls)
    ):
        return Book.model_validate_json(cast(str, tool_calls[0].function.arguments))
    raise ValueError("No tool call found")


book = extract_book("The Name of the Wind by Patrick Rothfuss")
print(book)
# Output: title='The Name of the Wind' author='Patrick Rothfuss'

Google

from google.genai import Client
from google.genai.types import FunctionDeclaration, Tool
from proto.marshal.collections import RepeatedComposite
from pydantic import BaseModel

client = Client()


class Book(BaseModel):
    """An extracted book."""

    title: str
    author: str


def extract_book(text: str) -> Book:
    response = client.models.generate_content(
        model="gemini-2.0-flash",
        contents={"parts": [{"text": f"Extract {text}"}]},
        config={
            "tools": [
                Tool(
                    function_declarations=[
                        FunctionDeclaration(
                            **{
                                "name": "Book",
                                "description": "An extracted book.",
                                "parameters": {
                                    "properties": {
                                        "title": {"type": "string"},
                                        "author": {"type": "string"},
                                    },
                                    "required": ["title", "author"],
                                    "type": "object",
                                },
                            }
                        )
                    ]
                )
            ],
            "tool_config": {
                "function_calling_config": {
                    "mode": "any",
                    "allowed_function_names": ["Book"],
                }
            },  # pyright: ignore [reportArgumentType]
        },
    )
    if tool_calls := [
        function_call
        for function_call in (response.function_calls or [])
        if function_call.args
    ]:
        return Book.model_validate(
            {
                k: v if not isinstance(v, RepeatedComposite) else list(v)
                for k, v in (tool_calls[0].args or {}).items()
            }
        )
    raise ValueError("No tool call found")


book = extract_book("The Name of the Wind by Patrick Rothfuss")
print(book)
# Output: title='The Name of the Wind' author='Patrick Rothfuss'

Groq

from groq import Groq
from pydantic import BaseModel

client = Groq()


class Book(BaseModel):
    """An extracted book."""

    title: str
    author: str


def extract_book(text: str) -> Book:
    completion = client.chat.completions.create(
        model="llama-3.1-70b-versatile",
        messages=[{"role": "user", "content": f"Extract {text}"}],
        tools=[
            {
                "function": {
                    "name": "Book",
                    "description": "An extracted book.",
                    "parameters": {
                        "properties": {
                            "title": {"type": "string"},
                            "author": {"type": "string"},
                        },
                        "required": ["title", "author"],
                        "type": "object",
                    },
                },
                "type": "function",
            }
        ],
        tool_choice="required",
    )
    if tool_calls := completion.choices[0].message.tool_calls:
        return Book.model_validate_json(tool_calls[0].function.arguments)
    raise ValueError("No tool call found")


book = extract_book("The Name of the Wind by Patrick Rothfuss")
print(book)
# Output: title='The Name of the Wind' author='Patrick Rothfuss'

Cohere

from cohere import Client
from cohere.types import Tool, ToolParameterDefinitionsValue
from pydantic import BaseModel

client = Client()


class Book(BaseModel):
    """An extracted book."""

    title: str
    author: str


def extract_book(text: str) -> Book:
    response = client.chat(
        model="command-r-plus",
        message=f"Extract {text}",
        tools=[
            Tool(
                name="Book",
                description="An extracted book.",
                parameter_definitions={
                    "title": ToolParameterDefinitionsValue(
                        description=None, type="string", required=True
                    ),
                    "author": ToolParameterDefinitionsValue(
                        description=None, type="string", required=True
                    ),
                },
            )
        ],
    )
    if response.tool_calls:
        return Book.model_validate(response.tool_calls[0].parameters)
    raise ValueError("No tool call found")


book = extract_book("The Name of the Wind by Patrick Rothfuss")
print(book)
# Output: title='The Name of the Wind' author='Patrick Rothfuss'

LiteLLM

from litellm import completion
from pydantic import BaseModel


class Book(BaseModel):
    """An extracted book."""

    title: str
    author: str


def extract_book(text: str) -> Book:
    response = completion(
        model="gpt-4o-mini",
        messages=[{"role": "user", "content": f"Extract {text}"}],
        tools=[
            {
                "function": {
                    "name": "Book",
                    "description": "An extracted book.",
                    "parameters": {
                        "properties": {
                            "title": {"type": "string"},
                            "author": {"type": "string"},
                        },
                        "required": ["title", "author"],
                        "type": "object",
                    },
                },
                "type": "function",
            }
        ],
        tool_choice="required",
    )
    if tool_calls := response.choices[0].message.tool_calls:  # pyright: ignore [reportAttributeAccessIssue]
        return Book.model_validate_json(tool_calls[0].function.arguments)
    raise ValueError("No tool call found")


book = extract_book("The Name of the Wind by Patrick Rothfuss")
print(book)
# Output: title='The Name of the Wind' author='Patrick Rothfuss'

Azure AI

from azure.ai.inference import ChatCompletionsClient
from azure.ai.inference.models import (
    ChatCompletionsToolDefinition,
    ChatRequestMessage,
    FunctionDefinition,
)
from azure.core.credentials import AzureKeyCredential
from pydantic import BaseModel

client = ChatCompletionsClient(
    endpoint="YOUR_ENDPOINT", credential=AzureKeyCredential("YOUR_KEY")
)


class Book(BaseModel):
    """An extracted book."""

    title: str
    author: str


def extract_book(text: str) -> Book:
    completion = client.complete(
        model="gpt-4o-mini",
        messages=[ChatRequestMessage({"role": "user", "content": f"Extract {text}"})],
        tools=[
            ChatCompletionsToolDefinition(
                function=FunctionDefinition(
                    name="Book",
                    description="An extracted book.",
                    parameters={
                        "properties": {
                            "title": {"type": "string"},
                            "author": {"type": "string"},
                        },
                        "required": ["title", "author"],
                        "type": "object",
                    },
                )
            )
        ],
        tool_choice="required",
    )
    if tool_calls := completion.choices[0].message.tool_calls:
        return Book.model_validate_json(tool_calls[0].function.arguments)
    raise ValueError("No tool call found")


book = extract_book("The Name of the Wind by Patrick Rothfuss")
print(book)
# Output: title='The Name of the Wind' author='Patrick Rothfuss'

Bedrock

import boto3
from pydantic import BaseModel

bedrock_client = boto3.client(service_name="bedrock-runtime")


class Book(BaseModel):
    """An extracted book."""

    title: str
    author: str


def extract_book(text: str) -> Book:
    messages = [{"role": "user", "content": [{"text": f"Extract {text}"}]}]
    tool_config = {
        "tools": [
            {
                "toolSpec": {
                    "name": "Book",
                    "description": "An extracted book.",
                    "inputSchema": {
                        "json": {
                            "type": "object",
                            "properties": {
                                "title": {"type": "string"},
                                "author": {"type": "string"},
                            },
                            "required": ["title", "author"],
                        }
                    },
                }
            }
        ],
        "toolChoice": {"type": "tool", "name": "Book"},
    }
    response = bedrock_client.converse(
        modelId="amazon.nova-lite-v1:0",
        messages=messages,
        toolConfig=tool_config,
    )
    output_message = response["output"]["message"]
    messages.append(output_message)
    for content_piece in output_message["content"]:
        if "toolUse" in content_piece and content_piece["toolUse"].get("input"):
            tool_input = content_piece["toolUse"]["input"]
            return Book.model_validate(tool_input)
    raise ValueError("No tool call found")


book = extract_book("The Name of the Wind by Patrick Rothfuss")
print(book)
# Output: title='The Name of the Wind' author='Patrick Rothfuss'

Choose Your Path

Tutorials

  • Quickstart Guide: Comprehensive overview of core features and building blocks
  • Structured Outputs: Explore various techniques for generating structured outputs
  • Dynamic Configuration & Chaining: Examples ranging from basic usage to more complex chaining techniques (sketched briefly below)
  • Tools & Agents: Learn how to define tools for your LLM to build advanced AI agents (sketched briefly below)
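
For a flavor of what the Dynamic Configuration & Chaining tutorial covers, here is a rough sketch of a minimal chain that simply passes one call's output into the next prompt. The summarize and translate functions and their prompts are invented for illustration; see the tutorial for the full patterns.

from mirascope import llm


@llm.call(provider="openai", model="gpt-4o-mini")
def summarize(text: str) -> str:
    return f"Summarize this text: {text}"


@llm.call(provider="openai", model="gpt-4o-mini")
def translate(text: str, language: str) -> str:
    return f"Translate this text to {language}: {text}"


# Chain the calls by feeding one response's content into the next prompt.
summary = summarize("A long passage about the history of epic fantasy...")
translation = translate(summary.content, "French")
print(translation.content)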
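
Similarly, as a preview of the Tools & Agents tutorial, here is a rough sketch of passing a plain Python function as a tool. The get_book_author function and its hardcoded answer are invented for illustration, so treat the tutorial as the authoritative reference.

from mirascope import llm


def get_book_author(title: str) -> str:
    """Returns the author of the book with the given title."""
    # Hypothetical lookup, hardcoded purely for illustration.
    if title == "The Name of the Wind":
        return "Patrick Rothfuss"
    return "Unknown"


@llm.call(provider="openai", model="gpt-4o-mini", tools=[get_book_author])
def identify_author(book: str) -> str:
    return f"Who wrote {book}?"


response = identify_author("The Name of the Wind")
if tool := response.tool:
    # The model chose to call the tool; run it and print the result.
    print(tool.call())
else:
    print(response.content)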

Dive Deeper

  • Learn: In-depth exploration of Mirascope's many features and capabilities
  • Tutorials: Advanced usage patterns and real-world applications
  • Integrations: Integrations with third-party tools for enhanced usage
  • API Reference: Detailed information on classes and functions

We're excited to see what you'll build with Mirascope, and we're here to help! Don't hesitate to reach out :)