|
import gradio as gr |
|
import os |
|
from huggingface_hub import login |
|
from dotenv import load_dotenv |
|
from embedding import embeddings |
|
from db.chroma import load_and_setup_db, search_cases |
|
from chat.hermes_llm import ChatManager |
|
|
|
|
|
# Load environment variables (HUGGINGFACEHUB_API_TOKEN, VECTOR_DB_PATH) from a .env file.
load_dotenv()




# Authenticate against the Hugging Face Hub so gated models/embeddings can be pulled.
# NOTE(review): if HUGGINGFACEHUB_API_TOKEN is unset, os.getenv returns None and
# login() will fail at startup — confirm the .env file is always present.
# add_to_git_credential=True also persists the token into the git credential store.
login(token=os.getenv("HUGGINGFACEHUB_API_TOKEN"), add_to_git_credential=True)




# Path to the persisted Chroma vector database (may be None if unset — verify).
VECTOR_DB_PATH = os.getenv("VECTOR_DB_PATH")

# Open (or initialize) the vector store with the shared embedding function.
vector_store = load_and_setup_db(VECTOR_DB_PATH, embeddings)

# Low temperature keeps answers close to the retrieved case text.
legal_chat = ChatManager(temperature=0.1)
|
|
|
def process_query(query, chat_history):
    """Handle one chat turn: retrieve the closest case document and answer.

    Args:
        query: The user's question as typed into the textbox.
        chat_history: List of (user, bot) message tuples shown in the chatbot.

    Returns:
        A ("", updated_history) pair — the empty string clears the input box
        while the history refreshes the chatbot component.
    """
    try:
        # Top-1 retrieval against the module-level Chroma store.
        matches = search_cases(vectorstore=vector_store, query=query, k=1)
        if matches:
            reply = legal_chat.get_response(matches[0]['content'], query=query)
        else:
            reply = "No Document match"
        chat_history.append((query, reply))
        return "", chat_history
    except Exception as err:
        # UI boundary: show the failure as a bot message instead of crashing Gradio.
        return "", chat_history + [(query, f"Error: {str(err)}")]
|
|
|
|
|
# Assemble the Gradio UI: a chatbot pane above a query textbox + send button.
with gr.Blocks(title="Legal Chat Assistant") as demo:

    gr.Markdown("# Legal Chat Assistant")

    gr.Markdown("Ask questions about legal cases and get AI-powered responses.")



    # History is a list of (user, bot) tuples — matches what process_query appends.
    chatbot = gr.Chatbot(

        [],

        elem_id="chatbot",

        bubble_full_width=False,

        height=400

    )



    # Input row: wide textbox (scale=4) next to a narrower send button (scale=1).
    with gr.Row():

        query_input = gr.Textbox(

            placeholder="Enter your query here...",

            show_label=False,

            scale=4

        )

        submit_btn = gr.Button("Send", scale=1)







    # Both the button click and pressing Enter in the textbox trigger the same
    # handler; process_query returns ("", history) so the textbox is cleared.
    submit_btn.click(

        process_query,

        inputs=[query_input, chatbot],

        outputs=[query_input, chatbot]

    )

    query_input.submit(

        process_query,

        inputs=[query_input, chatbot],

        outputs=[query_input, chatbot]

    )
|
|
|
if __name__ == "__main__":

    # share=True publishes a temporary public Gradio link in addition to localhost.
    demo.launch(share=True)