Mirascope provides out-of-the-box integration with Langfuse.
You can install the necessary packages directly or using the langfuse extras flag:
pip install "mirascope[langfuse]"
You can then use the with_langfuse decorator to automatically log calls:
from mirascope.core import openai
from mirascope.integrations.langfuse import with_langfuse


# `with_langfuse` wraps the decorated call so its inputs/outputs are traced in Langfuse.
@with_langfuse()
@openai.call("gpt-4o-mini")
def recommend_book(genre: str) -> str:
    return f"Recommend a {genre} book."


print(recommend_book("fantasy"))
from mirascope.core import anthropic
from mirascope.integrations.langfuse import with_langfuse


# `with_langfuse` wraps the decorated call so its inputs/outputs are traced in Langfuse.
@with_langfuse()
@anthropic.call("claude-3-5-sonnet-20240620")
def recommend_book(genre: str) -> str:
    return f"Recommend a {genre} book"


print(recommend_book("fantasy"))
from mirascope.core import mistral
from mirascope.integrations.langfuse import with_langfuse


# `with_langfuse` wraps the decorated call so its inputs/outputs are traced in Langfuse.
@with_langfuse()
@mistral.call("mistral-large-latest")
def recommend_book(genre: str) -> str:
    return f"Recommend a {genre} book"


print(recommend_book("fantasy"))
from mirascope.core import gemini
from mirascope.integrations.langfuse import with_langfuse


# `with_langfuse` wraps the decorated call so its inputs/outputs are traced in Langfuse.
@with_langfuse()
@gemini.call("gemini-1.5-flash")
def recommend_book(genre: str) -> str:
    return f"Recommend a {genre} book"


print(recommend_book("fantasy"))
from mirascope.core import groq
from mirascope.integrations.langfuse import with_langfuse


# `with_langfuse` wraps the decorated call so its inputs/outputs are traced in Langfuse.
@with_langfuse()
@groq.call("llama-3.1-70b-versatile")
def recommend_book(genre: str) -> str:
    return f"Recommend a {genre} book"


print(recommend_book("fantasy"))
from mirascope.core import cohere
from mirascope.integrations.langfuse import with_langfuse


# `with_langfuse` wraps the decorated call so its inputs/outputs are traced in Langfuse.
@with_langfuse()
@cohere.call("command-r-plus")
def recommend_book(genre: str) -> str:
    return f"Recommend a {genre} book"


print(recommend_book("fantasy"))
from mirascope.core import litellm
from mirascope.integrations.langfuse import with_langfuse


# `with_langfuse` wraps the decorated call so its inputs/outputs are traced in Langfuse.
@with_langfuse()
@litellm.call("gpt-4o-mini")
def recommend_book(genre: str) -> str:
    return f"Recommend a {genre} book"


print(recommend_book("fantasy"))
from mirascope.core import azure
from mirascope.integrations.langfuse import with_langfuse


# `with_langfuse` wraps the decorated call so its inputs/outputs are traced in Langfuse.
@with_langfuse()
@azure.call("gpt-4o-mini")
def recommend_book(genre: str) -> str:
    return f"Recommend a {genre} book"


print(recommend_book("fantasy"))
from mirascope.core import vertex
from mirascope.integrations.langfuse import with_langfuse


# `with_langfuse` wraps the decorated call so its inputs/outputs are traced in Langfuse.
@with_langfuse()
@vertex.call("gemini-1.5-flash")
def recommend_book(genre: str) -> str:
    return f"Recommend a {genre} book"


print(recommend_book("fantasy"))
from mirascope.core import bedrock
from mirascope.integrations.langfuse import with_langfuse


# `with_langfuse` wraps the decorated call so its inputs/outputs are traced in Langfuse.
@with_langfuse()
@bedrock.call("anthropic.claude-3-haiku-20240307-v1:0")
def recommend_book(genre: str) -> str:
    return f"Recommend a {genre} book."


print(recommend_book("fantasy"))
from mirascope.core import Messages, openai
from mirascope.integrations.langfuse import with_langfuse


# Messages.Type return: the function builds the message list explicitly.
@with_langfuse()
@openai.call("gpt-4o-mini")
def recommend_book(genre: str) -> Messages.Type:
    return Messages.User(f"Recommend a {genre} book.")


print(recommend_book("fantasy"))
from mirascope.core import Messages, anthropic
from mirascope.integrations.langfuse import with_langfuse


# Messages.Type return: the function builds the message list explicitly.
@with_langfuse()
@anthropic.call("claude-3-5-sonnet-20240620")
def recommend_book(genre: str) -> Messages.Type:
    return Messages.User(f"Recommend a {genre} book")


print(recommend_book("fantasy"))
from mirascope.core import Messages, mistral
from mirascope.integrations.langfuse import with_langfuse


# Messages.Type return: the function builds the message list explicitly.
@with_langfuse()
@mistral.call("mistral-large-latest")
def recommend_book(genre: str) -> Messages.Type:
    return Messages.User(f"Recommend a {genre} book")


print(recommend_book("fantasy"))
from mirascope.core import Messages, gemini
from mirascope.integrations.langfuse import with_langfuse


# Messages.Type return: the function builds the message list explicitly.
@with_langfuse()
@gemini.call("gemini-1.5-flash")
def recommend_book(genre: str) -> Messages.Type:
    return Messages.User(f"Recommend a {genre} book")


print(recommend_book("fantasy"))
from mirascope.core import Messages, groq
from mirascope.integrations.langfuse import with_langfuse


# Messages.Type return: the function builds the message list explicitly.
@with_langfuse()
@groq.call("llama-3.1-70b-versatile")
def recommend_book(genre: str) -> Messages.Type:
    return Messages.User(f"Recommend a {genre} book")


print(recommend_book("fantasy"))
from mirascope.core import Messages, cohere
from mirascope.integrations.langfuse import with_langfuse


# Messages.Type return: the function builds the message list explicitly.
@with_langfuse()
@cohere.call("command-r-plus")
def recommend_book(genre: str) -> Messages.Type:
    return Messages.User(f"Recommend a {genre} book")


print(recommend_book("fantasy"))
from mirascope.core import Messages, litellm
from mirascope.integrations.langfuse import with_langfuse


# Messages.Type return: the function builds the message list explicitly.
@with_langfuse()
@litellm.call("gpt-4o-mini")
def recommend_book(genre: str) -> Messages.Type:
    return Messages.User(f"Recommend a {genre} book")


print(recommend_book("fantasy"))
from mirascope.core import Messages, azure
from mirascope.integrations.langfuse import with_langfuse


# Messages.Type return: the function builds the message list explicitly.
@with_langfuse()
@azure.call("gpt-4o-mini")
def recommend_book(genre: str) -> Messages.Type:
    return Messages.User(f"Recommend a {genre} book")


print(recommend_book("fantasy"))
from mirascope.core import Messages, vertex
from mirascope.integrations.langfuse import with_langfuse


# Messages.Type return: the function builds the message list explicitly.
@with_langfuse()
@vertex.call("gemini-1.5-flash")
def recommend_book(genre: str) -> Messages.Type:
    return Messages.User(f"Recommend a {genre} book")


print(recommend_book("fantasy"))
from mirascope.core import Messages, bedrock
from mirascope.integrations.langfuse import with_langfuse


# Messages.Type return: the function builds the message list explicitly.
@with_langfuse()
@bedrock.call("anthropic.claude-3-haiku-20240307-v1:0")
def recommend_book(genre: str) -> Messages.Type:
    return Messages.User(f"Recommend a {genre} book.")


print(recommend_book("fantasy"))
from mirascope.core import openai, prompt_template
from mirascope.integrations.langfuse import with_langfuse


# String-template variant: the prompt lives in the decorator, so the body is empty.
@with_langfuse()
@openai.call("gpt-4o-mini")
@prompt_template("Recommend a {genre} book")
def recommend_book(genre: str): ...


print(recommend_book("fantasy"))
from mirascope.core import anthropic, prompt_template
from mirascope.integrations.langfuse import with_langfuse


# String-template variant: the prompt lives in the decorator, so the body is empty.
@with_langfuse()
@anthropic.call("claude-3-5-sonnet-20240620")
@prompt_template("Recommend a {genre} book")
def recommend_book(genre: str): ...


print(recommend_book("fantasy"))
from mirascope.core import mistral, prompt_template
from mirascope.integrations.langfuse import with_langfuse


# String-template variant: the prompt lives in the decorator, so the body is empty.
@with_langfuse()
@mistral.call("mistral-large-latest")
@prompt_template("Recommend a {genre} book")
def recommend_book(genre: str): ...


print(recommend_book("fantasy"))
from mirascope.core import gemini, prompt_template
from mirascope.integrations.langfuse import with_langfuse


# String-template variant: the prompt lives in the decorator, so the body is empty.
@with_langfuse()
@gemini.call("gemini-1.5-flash")
@prompt_template("Recommend a {genre} book")
def recommend_book(genre: str): ...


print(recommend_book("fantasy"))
from mirascope.core import groq, prompt_template
from mirascope.integrations.langfuse import with_langfuse


# String-template variant: the prompt lives in the decorator, so the body is empty.
@with_langfuse()
@groq.call("llama-3.1-70b-versatile")
@prompt_template("Recommend a {genre} book")
def recommend_book(genre: str): ...


print(recommend_book("fantasy"))
from mirascope.core import cohere, prompt_template
from mirascope.integrations.langfuse import with_langfuse


# String-template variant: the prompt lives in the decorator, so the body is empty.
@with_langfuse()
@cohere.call("command-r-plus")
@prompt_template("Recommend a {genre} book")
def recommend_book(genre: str): ...


print(recommend_book("fantasy"))
from mirascope.core import litellm, prompt_template
from mirascope.integrations.langfuse import with_langfuse


# String-template variant: the prompt lives in the decorator, so the body is empty.
@with_langfuse()
@litellm.call("gpt-4o-mini")
@prompt_template("Recommend a {genre} book")
def recommend_book(genre: str): ...


print(recommend_book("fantasy"))
from mirascope.core import azure, prompt_template
from mirascope.integrations.langfuse import with_langfuse


# String-template variant: the prompt lives in the decorator, so the body is empty.
@with_langfuse()
@azure.call("gpt-4o-mini")
@prompt_template("Recommend a {genre} book")
def recommend_book(genre: str): ...


print(recommend_book("fantasy"))
from mirascope.core import prompt_template, vertex
from mirascope.integrations.langfuse import with_langfuse


# String-template variant: the prompt lives in the decorator, so the body is empty.
@with_langfuse()
@vertex.call("gemini-1.5-flash")
@prompt_template("Recommend a {genre} book")
def recommend_book(genre: str): ...


print(recommend_book("fantasy"))
from mirascope.core import bedrock, prompt_template
from mirascope.integrations.langfuse import with_langfuse


# String-template variant: the prompt lives in the decorator, so the body is empty.
@with_langfuse()
@bedrock.call("anthropic.claude-3-haiku-20240307-v1:0")
@prompt_template("Recommend a {genre} book")
def recommend_book(genre: str): ...


print(recommend_book("fantasy"))
from mirascope.core import BaseMessageParam, openai
from mirascope.integrations.langfuse import with_langfuse


# BaseMessageParam variant: return the raw message list with explicit roles.
@with_langfuse()
@openai.call("gpt-4o-mini")
def recommend_book(genre: str) -> list[BaseMessageParam]:
    return [BaseMessageParam(role="user", content=f"Recommend a {genre} book.")]


print(recommend_book("fantasy"))
from mirascope.core import BaseMessageParam, anthropic
from mirascope.integrations.langfuse import with_langfuse


# BaseMessageParam variant: return the raw message list with explicit roles.
@with_langfuse()
@anthropic.call("claude-3-5-sonnet-20240620")
def recommend_book(genre: str) -> list[BaseMessageParam]:
    return [BaseMessageParam(role="user", content=f"Recommend a {genre} book")]


print(recommend_book("fantasy"))
from mirascope.core import BaseMessageParam, mistral
from mirascope.integrations.langfuse import with_langfuse


# BaseMessageParam variant: return the raw message list with explicit roles.
@with_langfuse()
@mistral.call("mistral-large-latest")
def recommend_book(genre: str) -> list[BaseMessageParam]:
    return [BaseMessageParam(role="user", content=f"Recommend a {genre} book")]


print(recommend_book("fantasy"))
from mirascope.core import BaseMessageParam, gemini
from mirascope.integrations.langfuse import with_langfuse


# BaseMessageParam variant: return the raw message list with explicit roles.
@with_langfuse()
@gemini.call("gemini-1.5-flash")
def recommend_book(genre: str) -> list[BaseMessageParam]:
    return [BaseMessageParam(role="user", content=f"Recommend a {genre} book")]


print(recommend_book("fantasy"))
from mirascope.core import BaseMessageParam, groq
from mirascope.integrations.langfuse import with_langfuse


# BaseMessageParam variant: return the raw message list with explicit roles.
@with_langfuse()
@groq.call("llama-3.1-70b-versatile")
def recommend_book(genre: str) -> list[BaseMessageParam]:
    return [BaseMessageParam(role="user", content=f"Recommend a {genre} book")]


print(recommend_book("fantasy"))
from mirascope.core import BaseMessageParam, cohere
from mirascope.integrations.langfuse import with_langfuse


# BaseMessageParam variant: return the raw message list with explicit roles.
@with_langfuse()
@cohere.call("command-r-plus")
def recommend_book(genre: str) -> list[BaseMessageParam]:
    return [BaseMessageParam(role="user", content=f"Recommend a {genre} book")]


print(recommend_book("fantasy"))
from mirascope.core import BaseMessageParam, litellm
from mirascope.integrations.langfuse import with_langfuse


# BaseMessageParam variant: return the raw message list with explicit roles.
@with_langfuse()
@litellm.call("gpt-4o-mini")
def recommend_book(genre: str) -> list[BaseMessageParam]:
    return [BaseMessageParam(role="user", content=f"Recommend a {genre} book")]


print(recommend_book("fantasy"))
from mirascope.core import BaseMessageParam, azure
from mirascope.integrations.langfuse import with_langfuse


# BaseMessageParam variant: return the raw message list with explicit roles.
@with_langfuse()
@azure.call("gpt-4o-mini")
def recommend_book(genre: str) -> list[BaseMessageParam]:
    return [BaseMessageParam(role="user", content=f"Recommend a {genre} book")]


print(recommend_book("fantasy"))
from mirascope.core import BaseMessageParam, vertex
from mirascope.integrations.langfuse import with_langfuse


# BaseMessageParam variant: return the raw message list with explicit roles.
@with_langfuse()
@vertex.call("gemini-1.5-flash")
def recommend_book(genre: str) -> list[BaseMessageParam]:
    return [BaseMessageParam(role="user", content=f"Recommend a {genre} book")]


print(recommend_book("fantasy"))
from mirascope.core import BaseMessageParam, bedrock
from mirascope.integrations.langfuse import with_langfuse


# BaseMessageParam variant: return the raw message list with explicit roles.
@with_langfuse()
@bedrock.call("anthropic.claude-3-haiku-20240307-v1:0")
def recommend_book(genre: str) -> list[BaseMessageParam]:
    return [BaseMessageParam(role="user", content=f"Recommend a {genre} book.")]


print(recommend_book("fantasy"))
This will give you:
A trace around the recommend_book function that captures items like the prompt template, input/output attributes, and more.
Human-readable display of the conversation with the agent
Details of the response, including the number of tokens used
Example trace

Handling streams
When logging streams, the span will not be logged until the stream has been exhausted. This is a function of how streaming works.
You will also need to set certain call_params for usage to be tracked for certain providers (such as OpenAI).