import sys
from pprint import pprint

from langgraph.checkpoint.memory import MemorySaver
from langgraph.graph import END, START, StateGraph

from src.graph import *  # GraphState plus the node and router functions

# In-memory checkpointer: persists graph state per thread_id between runs.
memory = MemorySaver()

try:
    print("Initializing StateGraph...")
    workflow = StateGraph(GraphState)

    print("Adding nodes to the graph...")
    workflow.add_node("web_search", web_search)
    workflow.add_node("retrieve", retrieve)
    workflow.add_node("grade_documents", grade_documents)
    workflow.add_node("generate", generate)
    workflow.add_node("transform_query", transform_query)
    print("Nodes added successfully.")

    print("Building graph edges...")
    workflow.add_conditional_edges(
        START,
        route_question,
        {
            "web_search": "web_search",
            "vectorstore": "retrieve",
        },
    )
    workflow.add_edge("web_search", "generate")
    workflow.add_edge("retrieve", "grade_documents")
    workflow.add_conditional_edges(
        "grade_documents",
        decide_to_generate,
        {
            "transform_query": "transform_query",
            "generate": "generate",
        },
    )
    workflow.add_edge("transform_query", "retrieve")
    workflow.add_conditional_edges(
        "generate",
        grade_generation_v_documents_and_question,
        {
            "not supported": "generate",
            "useful": END,
            "not useful": "transform_query",
        },
    )
    print("Graph edges built successfully.")

    print("Compiling the workflow...")
    app = workflow.compile(checkpointer=memory)
    print("Workflow compiled successfully.")

except Exception as e:
    print(f"Error building the graph: {e}")
    sys.exit(1)

def run_workflow(question, config):
    try:
        print(f"Running workflow for question: {question}")
        
        # Retrieve the previous state from memory. A LangGraph checkpoint
        # stores the saved state under "channel_values"; fall back to an
        # empty history on the first turn of this thread.
        previous_state = memory.get(config)
        chat_history = []
        if previous_state:
            chat_history = previous_state.get("channel_values", {}).get("chat_history", [])

        # Initialize the input state for this turn
        input_state = {
            "question": question,
            "chat_history": chat_history,
        }
        
        final_output = None
        # app.stream() yields one {node_name: state_update} mapping per step;
        # keep the last update produced by the generation node.
        for output in app.stream(input_state, config):
            for key, value in output.items():
                print(f"Node '{key}':")
                pprint(value)
                if key == "generate":
                    final_output = value
        
        if final_output is None:
            return {"generation": "I'm sorry, I couldn't generate a response. Could you please rephrase your question?"}
        elif isinstance(final_output, dict) and "generation" in final_output:
            return {"generation": str(final_output["generation"])}
        elif isinstance(final_output, str):
            return {"generation": final_output}
        else:
            return {"generation": str(final_output)}
    except Exception as e:
        print(f"Error running the workflow: {e}")
        import traceback
        traceback.print_exc()
        return {"generation": "I encountered an error while processing your question. Please try again."}

if __name__ == "__main__":
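    # A fixed thread_id lets the MemorySaver checkpointer carry conversation
    # state across turns of this REPL loop.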
    config = {"configurable": {"thread_id": "test_thread"}}
    while True:
        question = input("Enter your question (or 'quit' to exit): ")
        if question.strip().lower() == 'quit':
            break
        result = run_workflow(question, config)
        print("Chatbot:", result["generation"])