mirascope.core.mistral.stream

The MistralStream class, a convenience wrapper around streaming LLM calls.

Usage Documentation: Streams

MistralStream

MistralStream(
    *,
    stream: (
        Generator[
            tuple[
                _BaseCallResponseChunkT, _BaseToolT | None
            ],
            None,
            None,
        ]
        | AsyncGenerator[
            tuple[
                _BaseCallResponseChunkT, _BaseToolT | None
            ],
            None,
        ]
    ),
    metadata: Metadata,
    tool_types: list[type[_BaseToolT]] | None,
    call_response_type: type[_BaseCallResponseT],
    model: str,
    prompt_template: str | None,
    fn_args: dict[str, Any],
    dynamic_config: _BaseDynamicConfigT,
    messages: list[_MessageParamT],
    call_params: _BaseCallParamsT,
    call_kwargs: BaseCallKwargs[_ToolSchemaT]
)

Bases: BaseStream[MistralCallResponse, MistralCallResponseChunk, UserMessage, AssistantMessage, ToolMessage, AssistantMessage | SystemMessage | ToolMessage | UserMessage, MistralTool, dict[str, Any], MistralDynamicConfig, MistralCallParams, FinishReason]

A convenience wrapper class for streaming Mistral LLM calls.

Example:

from mirascope.core.mistral import mistral_call


@mistral_call("mistral-large-latest", stream=True)
def recommend_book(genre: str) -> str:
    return f"Recommend a {genre} book"


stream = recommend_book("fantasy")  # returns `MistralStream` instance
for chunk, _ in stream:
    print(chunk.content, end="", flush=True)
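
When tools are passed to the call, each iteration also yields the constructed tool (if any) alongside the chunk, which is why the stream produces (chunk, tool) tuples. A minimal sketch, assuming a hypothetical format_book function is provided as a tool:

from mirascope.core.mistral import mistral_call


def format_book(title: str, author: str) -> str:
    return f"{title} by {author}"


@mistral_call("mistral-large-latest", stream=True, tools=[format_book])
def recommend_book(genre: str) -> str:
    return f"Recommend a {genre} book"


stream = recommend_book("fantasy")
for chunk, tool in stream:
    if tool:
        print(tool.call())  # invoke the tool with the streamed arguments
    else:
        print(chunk.content, end="", flush=True)
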
Source code in mirascope/core/base/stream.py
def __init__(
    self,
    *,
    stream: Generator[tuple[_BaseCallResponseChunkT, _BaseToolT | None], None, None]
    | AsyncGenerator[
        tuple[_BaseCallResponseChunkT, _BaseToolT | None],
        None,
    ],
    metadata: Metadata,
    tool_types: list[type[_BaseToolT]] | None,
    call_response_type: type[_BaseCallResponseT],
    model: str,
    prompt_template: str | None,
    fn_args: dict[str, Any],
    dynamic_config: _BaseDynamicConfigT,
    messages: list[_MessageParamT],
    call_params: _BaseCallParamsT,
    call_kwargs: BaseCallKwargs[_ToolSchemaT],
) -> None:
    """Initializes an instance of `BaseStream`."""
    self.content = ""
    self.stream = stream
    self.metadata = metadata
    self.tool_types = tool_types
    self.call_response_type = call_response_type
    self.model = model
    self.prompt_template = prompt_template
    self.fn_args = fn_args
    self.dynamic_config = dynamic_config
    self.messages = messages
    self.call_params = call_params
    self.call_kwargs = call_kwargs
    self.user_message_param = get_possible_user_message_param(messages)  # pyright: ignore [reportAttributeAccessIssue]
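
The stream argument accepts either a synchronous or an asynchronous generator, so the same class also backs async streaming. A minimal sketch of async usage, assuming the decorated function is defined with async def:

import asyncio

from mirascope.core.mistral import mistral_call


@mistral_call("mistral-large-latest", stream=True)
async def recommend_book(genre: str) -> str:
    return f"Recommend a {genre} book"


async def main() -> None:
    stream = await recommend_book("fantasy")
    async for chunk, _ in stream:
        print(chunk.content, end="", flush=True)


asyncio.run(main())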

cost property

cost: float | None

Returns the cost of the call.
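
The cost is typically derived from the token usage collected while streaming, so it is only meaningful once the stream has been consumed and may be None if usage information is unavailable. A minimal sketch of reading it, reusing the recommend_book example above:

stream = recommend_book("fantasy")
for chunk, _ in stream:
    print(chunk.content, end="", flush=True)

print(f"\nCost: {stream.cost}")  # may be None if usage info is unavailable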

construct_call_response

construct_call_response() -> MistralCallResponse

Constructs the call response from a consumed MistralStream.

Raises:

ValueError: if the stream has not yet been consumed.

Source code in mirascope/core/mistral/stream.py
def construct_call_response(self) -> MistralCallResponse:
    """Constructs the call response from a consumed MistralStream.

    Raises:
        ValueError: if the stream has not yet been consumed.
    """
    if not hasattr(self, "message_param"):
        raise ValueError(
            "No stream response, check if the stream has been consumed."
        )
    usage = UsageInfo(
        prompt_tokens=int(self.input_tokens or 0),
        completion_tokens=int(self.output_tokens or 0),
        total_tokens=int(self.input_tokens or 0) + int(self.output_tokens or 0),
    )
    finish_reason = cast(FinishReason, (self.finish_reasons or [])[0])
    completion = ChatCompletionResponse(
        id=self.id if self.id else "",
        choices=[
            ChatCompletionChoice(
                finish_reason=finish_reason,
                index=0,
                message=self.message_param,
            )
        ],
        created=0,
        model=self.model,
        object="",
        usage=usage,
    )
    return MistralCallResponse(
        metadata=self.metadata,
        response=completion,
        tool_types=self.tool_types,
        prompt_template=self.prompt_template,
        fn_args=self.fn_args if self.fn_args else {},
        dynamic_config=self.dynamic_config,
        messages=self.messages,
        call_params=self.call_params,
        call_kwargs=self.call_kwargs,
        user_message_param=self.user_message_param,
        start_time=self.start_time,
        end_time=self.end_time,
    )
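
For example, after fully consuming the stream you can reconstruct a MistralCallResponse to work with the final message as if the call had not been streamed. A minimal sketch, reusing the recommend_book example above:

stream = recommend_book("fantasy")
for chunk, _ in stream:
    print(chunk.content, end="", flush=True)

call_response = stream.construct_call_response()
print(call_response.message_param)  # the reconstructed assistant message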