# Read the data
import pandas as pd

df = pd.read_csv('./Automobile_data.csv')
#df = df.drop(columns = ['normalized-losses','symboling'], axis = 1)

# Build one "column: value" text chunk per dataset row for embedding.
# FIX: the original hard-coded range(26) (with a stale comment claiming
# "first 8 columns") and built the string with quadratic `+=`; iterate the
# actual columns and join instead, so the code survives schema changes
# (e.g. re-enabling the commented-out drop above).
context_data = [
    " ".join(f"{col}: {val}" for col, val in row.items()) + " "
    for _, row in df.iterrows()
]

import os

# Get the secret key from the environment (never hard-code credentials).
groq_key = os.environ.get('groq_API_Keys')

## LLM used for RAG
from langchain_groq import ChatGroq
llm = ChatGroq(model="llama-3.1-70b-versatile", api_key=groq_key)

## Embedding model
from langchain_huggingface import HuggingFaceEmbeddings
embed_model = HuggingFaceEmbeddings(model_name="mixedbread-ai/mxbai-embed-large-v1")

# Create the vector store, index the per-row texts, and expose a retriever.
from langchain_chroma import Chroma
vectorstore = Chroma(
    collection_name="car_dataset_store",
    embedding_function=embed_model,
    persist_directory="./",
)
vectorstore.add_texts(context_data)
retriever = vectorstore.as_retriever()

from langchain_core.prompts import PromptTemplate

template = ("""You are a car expert. Use the provided context to answer the question. If you don't know the answer, say so. Explain your answer in detail. Do not discuss the context in your response; just provide the answer directly. 
Context: {context} Question: {question} Answer:""")
rag_prompt = PromptTemplate.from_template(template)

from langchain_core.output_parsers import StrOutputParser
from langchain_core.runnables import RunnablePassthrough

# LCEL pipeline: retrieve context for the question, fill the prompt,
# call the LLM, and parse the reply down to a plain string.
rag_chain = (
    {"context": retriever, "question": RunnablePassthrough()}
    | rag_prompt
    | llm
    | StrOutputParser()
)

import gradio as gr


def rag_memory_stream(message, history):
    """Stream the RAG chain's answer incrementally for gr.ChatInterface.

    Parameters
    ----------
    message : str
        The user's latest question.
    history : list
        Chat history supplied by gr.ChatInterface (unused here).

    Yields
    ------
    str
        The progressively growing answer text; on failure, a single
        user-facing error message.

    Notes
    -----
    FIX: removed the leftover `print(f"Processing: ...")` debug output
    that ran once per streamed token. The broad `except Exception` is
    deliberate — this is the top-level UI boundary, and the error is
    surfaced to the user rather than swallowed.
    """
    partial_text = ""
    try:
        for new_text in rag_chain.stream(message):
            partial_text += new_text
            yield partial_text
    except Exception as e:
        yield f"An error occurred: {str(e)}"


def process_preferences(make, budget, fuel_type):
    """Format the user's car preferences into a Markdown recommendation blurb.

    Parameters
    ----------
    make : str
        Preferred manufacturer (from the dropdown).
    budget : float
        Budget in USD (from the slider).
    fuel_type : str
        Preferred fuel type (from the radio group).

    Returns
    -------
    str
        A Markdown-formatted summary/recommendation string.
    """
    return (
        f"You've selected:\n"
        f"- **Preferred Make**: {make}\n"
        f"- **Budget**: ${budget}\n"
        f"- **Fuel Type**: {fuel_type}\n\n"
        f"Based on your preferences, I recommend exploring the latest models of {make} "
        f"that fit your budget and offer {fuel_type.lower()} options!"
    )


# Examples and app information shown in the UI.
examples = ['I need a car', 'What is the make and fuel type of a car?']
description = "An advanced chatbot that helps you choose the right car based on your preferences and budget."
title = "Car Expert :) Let Me Help You Find the Perfect Ride!"
# Sky-blue theme: blue/green hues, light-blue page background, black text.
sky_theme = gr.themes.Base(primary_hue="blue", secondary_hue="green").set(
    body_background_fill="#87CEEB",  # Sky blue background
    body_text_color="#000000",       # Black text
)

# Top-level layout: four tabs — Chat, Car Preferences, Upload Documents, Help.
with gr.Blocks(theme=sky_theme) as demo:
    gr.Markdown(f"# {title}")
    gr.Markdown(description)

    with gr.Tabs():
        # Chat tab: streaming RAG conversation backed by rag_memory_stream.
        with gr.Tab("Chat"):
            chat = gr.ChatInterface(
                fn=rag_memory_stream,
                type="messages",
                examples=examples,
                fill_height=True,
            )

        # Preferences tab: structured inputs feeding process_preferences.
        with gr.Tab("Car Preferences"):
            gr.Markdown("### Provide your preferences to get tailored advice:")
            make_dd = gr.Dropdown(
                choices=["Toyota", "Honda", "BMW", "Tesla", "Ford"],
                label="Preferred Make",
                info="Choose the car manufacturer you prefer.",
            )
            budget_slider = gr.Slider(
                minimum=5000,
                maximum=100000,
                step=500,
                label="Budget (in USD)",
                info="Select your budget range.",
            )
            fuel_radio = gr.Radio(
                choices=["Gasoline", "Diesel", "Electric", "Hybrid"],
                label="Fuel Type",
                info="Choose the type of fuel you prefer.",
            )
            submit_btn = gr.Button("Submit Preferences")
            recommendation_box = gr.Textbox(
                label="Recommendation",
                placeholder="Your recommendations will appear here...",
            )
            # Clicking the button formats the selections into the textbox.
            submit_btn.click(
                process_preferences,
                inputs=[make_dd, budget_slider, fuel_radio],
                outputs=recommendation_box,
            )

        # Upload tab. NOTE(review): the uploaded file is collected but no
        # handler consumes it yet — confirm whether ingestion is planned.
        with gr.Tab("Upload Documents"):
            gr.Markdown("### Upload any related documents for personalized suggestions:")
            uploads = gr.File(label="Upload Car Listings or Preferences")

        # Help tab: static usage notes and contact info.
        with gr.Tab("Help"):
            gr.Markdown("### Need Assistance?")
            gr.Markdown(
                """
                - Use the **Chat** tab to ask questions about cars.
                - Fill in your **Car Preferences** for tailored recommendations.
                - Upload files in the **Upload Documents** tab.
                - Contact support at: support@carexpert.com """
            )
            gr.Markdown("### About")
            gr.Markdown(
                """
                This chatbot is powered by LangChain and Groq API for real-time AI interactions.
                Designed to provide personalized car-buying assistance!
                """
            )

# Launch the app only when executed as a script.
if __name__ == "__main__":
    demo.launch()