Skip to content

mirascope.core.azure.stream

The AzureStream class for convenience around streaming LLM calls.

Usage Documentation

Streams

AzureStream

AzureStream(
    *,
    stream: (
        Generator[
            tuple[
                _BaseCallResponseChunkT, _BaseToolT | None
            ],
            None,
            None,
        ]
        | AsyncGenerator[
            tuple[
                _BaseCallResponseChunkT, _BaseToolT | None
            ],
            None,
        ]
    ),
    metadata: Metadata,
    tool_types: list[type[_BaseToolT]] | None,
    call_response_type: type[_BaseCallResponseT],
    model: str,
    prompt_template: str | None,
    fn_args: dict[str, Any],
    dynamic_config: _BaseDynamicConfigT,
    messages: list[_MessageParamT],
    call_params: _BaseCallParamsT,
    call_kwargs: BaseCallKwargs[_ToolSchemaT]
)

Bases: BaseStream[AzureCallResponse, AzureCallResponseChunk, UserMessage, AssistantMessage, ToolMessage, ChatRequestMessage, AzureTool, ChatCompletionsToolDefinition, AsyncAzureDynamicConfig | AzureDynamicConfig, AzureCallParams, CompletionsFinishReason]

A class for convenience around streaming Azure LLM calls.

Example:

from mirascope.core import prompt_template
from mirascope.core.azure import azure_call


@azure_call("gpt-4o-mini", stream=True)
def recommend_book(genre: str) -> str:
    return f"Recommend a {genre} book"


stream = recommend_book("fantasy")  # returns `AzureStream` instance
for chunk, _ in stream:
    print(chunk.content, end="", flush=True)
Source code in mirascope/core/base/stream.py
def __init__(
    self,
    *,
    stream: Generator[tuple[_BaseCallResponseChunkT, _BaseToolT | None], None, None]
    | AsyncGenerator[
        tuple[_BaseCallResponseChunkT, _BaseToolT | None],
        None,
    ],
    metadata: Metadata,
    tool_types: list[type[_BaseToolT]] | None,
    call_response_type: type[_BaseCallResponseT],
    model: str,
    prompt_template: str | None,
    fn_args: dict[str, Any],
    dynamic_config: _BaseDynamicConfigT,
    messages: list[_MessageParamT],
    call_params: _BaseCallParamsT,
    call_kwargs: BaseCallKwargs[_ToolSchemaT],
) -> None:
    """Initializes an instance of `BaseStream`.

    Captures the (sync or async) generator of `(chunk, tool)` pairs
    together with every piece of call configuration needed to rebuild a
    full call response once the stream has been consumed.
    """
    # Text accumulated from chunks as the stream is iterated.
    self.content = ""
    # The underlying sync or async generator of (chunk, tool) pairs.
    self.stream = stream
    # Record the remaining call configuration verbatim on the instance.
    for attr_name, attr_value in (
        ("metadata", metadata),
        ("tool_types", tool_types),
        ("call_response_type", call_response_type),
        ("model", model),
        ("prompt_template", prompt_template),
        ("fn_args", fn_args),
        ("dynamic_config", dynamic_config),
        ("messages", messages),
        ("call_params", call_params),
        ("call_kwargs", call_kwargs),
    ):
        setattr(self, attr_name, attr_value)
    # Track the (possible) trailing user message separately for convenience.
    self.user_message_param = get_possible_user_message_param(messages)  # pyright: ignore [reportAttributeAccessIssue]

cost property

cost: float | None

Returns the cost of the call.

construct_call_response

construct_call_response() -> AzureCallResponse

Constructs the call response from a consumed AzureStream.

Raises:

| Type | Description |
| --- | --- |
| `ValueError` | Raised if the stream has not yet been consumed. |

Source code in mirascope/core/azure/stream.py
def construct_call_response(self) -> AzureCallResponse:
    """Constructs the call response from a consumed AzureStream.

    Raises:
        ValueError: if the stream has not yet been consumed.
    """
    # `message_param` is only set after the stream has been iterated.
    if not hasattr(self, "message_param"):
        raise ValueError(
            "No stream response, check if the stream has been consumed."
        )
    # Rebuild the assistant message from the accumulated message param.
    reconstructed_message = ChatResponseMessage(
        role=self.message_param["role"],
        content=self.message_param.get("content", ""),
        tool_calls=self.message_param.get("tool_calls", []),
    )
    # Normalize token counts; missing/falsy counts collapse to zero, which
    # also covers the "no usage reported" case with an all-zero usage.
    prompt_tokens = int(self.input_tokens or 0)
    completion_tokens = int(self.output_tokens or 0)
    usage = CompletionsUsage(
        prompt_tokens=prompt_tokens,
        completion_tokens=completion_tokens,
        total_tokens=prompt_tokens + completion_tokens,
    )
    # Default to "stop" when the provider reported no finish reason.
    finish_reason = self.finish_reasons[0] if self.finish_reasons else "stop"
    completion = ChatCompletions(
        id=self.id or "",
        model=self.model,
        choices=[
            ChatChoice(
                finish_reason=finish_reason,
                index=0,
                message=reconstructed_message,
            )
        ],
        created=datetime.datetime.now(),
        usage=usage,
    )
    # Wrap the synthesized completion with the original call configuration.
    return AzureCallResponse(
        metadata=self.metadata,
        response=completion,
        tool_types=self.tool_types,
        prompt_template=self.prompt_template,
        fn_args=self.fn_args or {},
        dynamic_config=self.dynamic_config,
        messages=self.messages,
        call_params=self.call_params,
        call_kwargs=self.call_kwargs,
        user_message_param=self.user_message_param,
        start_time=self.start_time,
        end_time=self.end_time,
    )