# LegalAlly/src/buildgraph.py
import sys

from langgraph.checkpoint.memory import MemorySaver
from langgraph.graph import END, StateGraph, START

from src.graph import (
    GraphState, web_search, retrieve, grade_documents, generate,
    transform_query, route_question, decide_to_generate,
    grade_generation_v_documents_and_question,
)

# In-memory checkpointer so conversation state persists across workflow runs.
memory = MemorySaver()
try:
    print("Initializing StateGraph...")
    workflow = StateGraph(GraphState)
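
    # Each node wraps a function from src.graph that takes the current
    # GraphState and returns an updated state.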
print("Adding nodes to the graph...")
workflow.add_node("web_search", web_search)
workflow.add_node("retrieve", retrieve)
workflow.add_node("grade_documents", grade_documents)
workflow.add_node("generate", generate)
workflow.add_node("transform_query", transform_query)
print("Nodes added successfully.")
print("Building graph edges...")
    workflow.add_conditional_edges(
        START,
        route_question,
        {
            "web_search": "web_search",
            "vectorstore": "retrieve",
        },
    )
workflow.add_edge("web_search", "generate")
workflow.add_edge("retrieve", "grade_documents")
    workflow.add_conditional_edges(
        "grade_documents",
        decide_to_generate,
        {
            "transform_query": "transform_query",
            "generate": "generate",
        },
    )
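    # Rewritten queries loop back into retrieval; generated answers are graded
    # against the documents and the question before the graph ends.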
workflow.add_edge("transform_query", "retrieve")
workflow.add_conditional_edges(
"generate",
grade_generation_v_documents_and_question,
{
"not supported": "generate",
"useful": END,
"not useful": "transform_query",
},
)
print("Graph edges built successfully.")
print("Compiling the workflow...")
app = workflow.compile(checkpointer=memory)
print("Workflow compiled successfully.")
except Exception as e:
print(f"Error building the graph: {e}")
sys.exit(1)


def run_workflow(question, config):
    try:
        print(f"Running workflow for question: {question}")
        # Retrieve the previous checkpoint from memory; MemorySaver returns a
        # Checkpoint dict whose saved graph state lives under "channel_values".
        previous_state = memory.get(config)
        prior_history = (
            previous_state.get("channel_values", {}).get("chat_history", [])
            if previous_state else []
        )
        # Initialize the input state, carrying the chat history forward.
        input_state = {
            "question": question,
            "chat_history": prior_history,
        }
        final_output = None
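        # Stream the graph; each chunk maps the name of the node that just ran
        # to its state update. Keep the update from the "generate" node.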
        for output in app.stream(input_state, config):
            for key, value in output.items():
                print(f"Node '{key}':")
                if key == "generate":
                    final_output = value

        if final_output is None:
            return {"generation": "I'm sorry, I couldn't generate a response. Could you please rephrase your question?"}
        elif isinstance(final_output, dict) and "generation" in final_output:
            return {"generation": str(final_output["generation"])}
        elif isinstance(final_output, str):
            return {"generation": final_output}
        else:
            return {"generation": str(final_output)}
    except Exception as e:
        print(f"Error running the workflow: {e}")
        import traceback
        traceback.print_exc()
        return {"generation": "I encountered an error while processing your question. Please try again."}
if __name__ == "__main__":
    config = {"configurable": {"thread_id": "test_thread"}}
    while True:
        question = input("Enter your question (or 'quit' to exit): ")
        if question.lower() == 'quit':
            break
        result = run_workflow(question, config)
        print("Chatbot:", result["generation"])