Support for additional LLM generators in Langfuse integration
At the moment, the Langfuse integration only supports the OpenAI generators, but there has been demand in the community for tracing other models, such as Anthropic's Claude LLMs.
Below is an example from Ardaric on Discord:
```python
import os

from dotenv import load_dotenv
from haystack import Pipeline
from haystack.components.builders import PromptBuilder
from haystack_integrations.components.generators.anthropic import AnthropicGenerator

extract_question_prompt = "{{batch}} | How old is John?"


def make_pipeline():
    # Imported inside the function so that load_dotenv() has already run and
    # HAYSTACK_CONTENT_TRACING_ENABLED is set before the connector is imported.
    from haystack_integrations.components.connectors.langfuse import LangfuseConnector

    pipe = Pipeline()
    extract_prompt_builder = PromptBuilder(template=extract_question_prompt)
    extractor_llm = AnthropicGenerator(model="claude-3-haiku-20240307")
    pipe.add_component("tracer", LangfuseConnector("Test Logging"))
    pipe.add_component(instance=extract_prompt_builder, name="extract_prompt_builder")
    pipe.add_component(instance=extractor_llm, name="extractor_llm")
    pipe.connect("extract_prompt_builder", "extractor_llm")
    return pipe


if __name__ == "__main__":
    load_dotenv()
    # Debug output to verify the Langfuse env vars are set
    # (avoid printing the secret key outside local debugging).
    print(
        os.getenv("LANGFUSE_SECRET_KEY"),
        os.getenv("LANGFUSE_PUBLIC_KEY"),
        os.getenv("LANGFUSE_HOST"),
        os.getenv("HAYSTACK_CONTENT_TRACING_ENABLED"),
    )
    # os.environ['LANGFUSE_DEBUG'] = 'True'
    pipe = make_pipeline()
    result = pipe.run({"extract_prompt_builder": {"batch": "John is 34 years old"}})
    print(result)
```
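
For the integration itself, one possible direction is to replace the hard-coded OpenAI check in the Langfuse tracer with a dispatch table keyed on the traced component's type. The sketch below is illustrative only: the parser helpers and the exact usage-field names per provider are assumptions, not the integration's current internals.

```python
# A minimal sketch of dispatching usage-metadata parsing by generator type
# instead of hard-coding OpenAI. Type names and field shapes are assumptions.
from typing import Any, Callable, Dict, Optional


def _parse_openai_meta(meta: Dict[str, Any]) -> Optional[Dict[str, Any]]:
    # OpenAI responses carry token counts under meta["usage"].
    return meta.get("usage")


def _parse_anthropic_meta(meta: Dict[str, Any]) -> Optional[Dict[str, Any]]:
    # Anthropic reports input/output tokens; normalize them to the
    # prompt/completion naming used elsewhere (assumed shape).
    usage = meta.get("usage") or {}
    return {
        "prompt_tokens": usage.get("input_tokens"),
        "completion_tokens": usage.get("output_tokens"),
    }


# Keyed on the value of the "haystack.component.type" span tag.
_USAGE_PARSERS: Dict[str, Callable[[Dict[str, Any]], Optional[Dict[str, Any]]]] = {
    "OpenAIGenerator": _parse_openai_meta,
    "OpenAIChatGenerator": _parse_openai_meta,
    "AnthropicGenerator": _parse_anthropic_meta,
    "AnthropicChatGenerator": _parse_anthropic_meta,
}


def extract_usage(component_type: str, meta: Dict[str, Any]) -> Optional[Dict[str, Any]]:
    """Return normalized usage info for a traced generator, or None if unsupported."""
    parser = _USAGE_PARSERS.get(component_type)
    return parser(meta) if parser else None
```

With something like this in place, supporting a new generator would mean registering one parser rather than touching the tracer's core logic.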