Langfuse
Mirascope provides out-of-the-box integration with Langfuse.
You can install the necessary packages directly or by using the langfuse
extras flag:
You can then use the with_langfuse
decorator to automatically log calls:
# Example: tracing a Bedrock call with Langfuse, using the Messages return style.
from mirascope.core import Messages, bedrock
from mirascope.integrations.langfuse import with_langfuse


@with_langfuse()  # send a trace for each call of this function to Langfuse
@bedrock.call("anthropic.claude-3-haiku-20240307-v1:0")
def recommend_book(genre: str) -> Messages.Type:
    """Ask the model for a book recommendation in the given genre."""
    prompt = f"Recommend a {genre} book."
    return Messages.User(prompt)


print(recommend_book("fantasy"))
# Example: tracing an OpenAI call with Langfuse.
from mirascope.core import BaseMessageParam, openai
from mirascope.integrations.langfuse import with_langfuse


@with_langfuse()  # send a trace for each call of this function to Langfuse
@openai.call("gpt-4o-mini")
def recommend_book(genre: str) -> list[BaseMessageParam]:
    """Ask the model for a book recommendation in the given genre."""
    message = BaseMessageParam(role="user", content=f"Recommend a {genre} book.")
    return [message]


print(recommend_book("fantasy"))
# Example: tracing an Anthropic call with Langfuse.
from mirascope.core import BaseMessageParam, anthropic
from mirascope.integrations.langfuse import with_langfuse


@with_langfuse()  # send a trace for each call of this function to Langfuse
@anthropic.call("claude-3-5-sonnet-20240620")
def recommend_book(genre: str) -> list[BaseMessageParam]:
    """Ask the model for a book recommendation in the given genre."""
    # End the prompt with a period, matching the sibling provider examples.
    return [BaseMessageParam(role="user", content=f"Recommend a {genre} book.")]


print(recommend_book("fantasy"))
# Example: tracing a Mistral call with Langfuse.
from mirascope.core import BaseMessageParam, mistral
from mirascope.integrations.langfuse import with_langfuse


@with_langfuse()  # send a trace for each call of this function to Langfuse
@mistral.call("mistral-large-latest")
def recommend_book(genre: str) -> list[BaseMessageParam]:
    """Ask the model for a book recommendation in the given genre."""
    # End the prompt with a period, matching the sibling provider examples.
    return [BaseMessageParam(role="user", content=f"Recommend a {genre} book.")]


print(recommend_book("fantasy"))
# Example: tracing a Groq call with Langfuse.
from mirascope.core import BaseMessageParam, groq
from mirascope.integrations.langfuse import with_langfuse


@with_langfuse()  # send a trace for each call of this function to Langfuse
@groq.call("llama-3.1-70b-versatile")
def recommend_book(genre: str) -> list[BaseMessageParam]:
    """Ask the model for a book recommendation in the given genre."""
    # End the prompt with a period, matching the sibling provider examples.
    return [BaseMessageParam(role="user", content=f"Recommend a {genre} book.")]


print(recommend_book("fantasy"))
# Example: tracing a Cohere call with Langfuse.
from mirascope.core import BaseMessageParam, cohere
from mirascope.integrations.langfuse import with_langfuse


@with_langfuse()  # send a trace for each call of this function to Langfuse
@cohere.call("command-r-plus")
def recommend_book(genre: str) -> list[BaseMessageParam]:
    """Ask the model for a book recommendation in the given genre."""
    # End the prompt with a period, matching the sibling provider examples.
    return [BaseMessageParam(role="user", content=f"Recommend a {genre} book.")]


print(recommend_book("fantasy"))
# Example: tracing a LiteLLM call with Langfuse.
from mirascope.core import BaseMessageParam, litellm
from mirascope.integrations.langfuse import with_langfuse


@with_langfuse()  # send a trace for each call of this function to Langfuse
@litellm.call("gpt-4o-mini")
def recommend_book(genre: str) -> list[BaseMessageParam]:
    """Ask the model for a book recommendation in the given genre."""
    # End the prompt with a period, matching the sibling provider examples.
    return [BaseMessageParam(role="user", content=f"Recommend a {genre} book.")]


print(recommend_book("fantasy"))
# Example: tracing an Azure OpenAI call with Langfuse.
from mirascope.core import BaseMessageParam, azure
from mirascope.integrations.langfuse import with_langfuse


@with_langfuse()  # send a trace for each call of this function to Langfuse
@azure.call("gpt-4o-mini")
def recommend_book(genre: str) -> list[BaseMessageParam]:
    """Ask the model for a book recommendation in the given genre."""
    # End the prompt with a period, matching the sibling provider examples.
    return [BaseMessageParam(role="user", content=f"Recommend a {genre} book.")]


print(recommend_book("fantasy"))
# Example: tracing a Bedrock call with Langfuse, using the BaseMessageParam style.
from mirascope.core import BaseMessageParam, bedrock
from mirascope.integrations.langfuse import with_langfuse


@with_langfuse()  # send a trace for each call of this function to Langfuse
@bedrock.call("anthropic.claude-3-haiku-20240307-v1:0")
def recommend_book(genre: str) -> list[BaseMessageParam]:
    """Ask the model for a book recommendation in the given genre."""
    message = BaseMessageParam(role="user", content=f"Recommend a {genre} book.")
    return [message]


print(recommend_book("fantasy"))
This will give you:

- A trace around the recommend_book function that captures items like the prompt template, input/output attributes, and more
- A human-readable display of the conversation with the agent
- Details of the response, including the number of tokens used

Example trace
Handling streams
When logging streams, the span will not be logged until the stream has been exhausted. This is a function of how streaming works.
You will also need to set certain call_params
for usage to be tracked with some providers (such as OpenAI).