llama-stack
'HuggingFaceLLM' object has no attribute '_llm_type'
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, AwqConfig
from llama_index.llms.huggingface import HuggingFaceLLM  # on older llama-index versions: from llama_index.llms import HuggingFaceLLM

model_id = "hugging-quants/Meta-Llama-3.1-70B-Instruct-AWQ-INT4"

system_prompt = "..."  # actual system prompt omitted here
query_wrapper_prompt = "..."  # actual query wrapper prompt omitted here

llm = HuggingFaceLLM(
    context_window=8192,  # 4096
    max_new_tokens=512,
    generate_kwargs={"temperature": 0, "do_sample": False},
    system_prompt=system_prompt,
    query_wrapper_prompt=query_wrapper_prompt,
    tokenizer_name=model_id,
    model_name=model_id,
    device_map="auto",
    tokenizer_kwargs={"max_length": 8192},  # 4096
)
from pandasai.llm.langchain import LangchainLLM
import pandas as pd
from pandasai import SmartDataframe
langchain_llm = LangchainLLM(langchain_llm=llm)
df = pd.read_csv("data/deneme.csv")
smart_df = SmartDataframe(df, config={"llm": langchain_llm})
smart_df.chat(query="question?")
I want to use pandasai with llama-index's HuggingFaceLLM to create graphs, but I get the error below:
"Unfortunately, I was not able to get your answers, because of the following error:\n\n'HuggingFaceLLM' object has no attribute '_llm_type'\n"