File size: 2,682 Bytes
4321f1b
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
import os
import gradio as gr

from langchain.llms import AzureOpenAI
from langchain.chat_models import AzureChatOpenAI
from langchain.chains import SQLDatabaseChain
from langchain.agents import Tool, AgentType, initialize_agent
from langchain.memory import ConversationBufferMemory
from langchain.prompts.prompt import PromptTemplate

# FIX: the package is `python-dotenv` and is imported as `dotenv` —
# `from load_dotenv import load_dotenv` raises ModuleNotFoundError.
from dotenv import load_dotenv
from sqlalchemy import MetaData, create_engine, inspect, select, text

# The file we created above
from loader import load_database

# Load the .env file so the Azure/OpenAI keys are available via os.getenv
load_dotenv()

# FIX: the chat API version must be in the environment *before* the Azure
# clients are constructed — langchain reads it at instantiation time, so
# setting it afterwards (as the original did) had no effect.
os.environ["OPENAI_API_VERSION"] = os.getenv('OPENAI_CHAT_API_VERSION')

# Set up the completion and chat llms (optional, experiment with this!)
llm = AzureOpenAI(deployment_name="your_deployment_name",
                  model_name="text-davinci-003")
chat_llm = AzureChatOpenAI(deployment_name="gpt-35-turbo", temperature=0.1)

# Create the engine and ingest the data into a local SQLite database
engine = create_engine('sqlite:///db', echo=True)
db = load_database(engine)

# OR — if the database already exists somewhere, point the engine at it
# instead, for example:
#
#   engine = create_engine("postgresql+psycopg2://scott:tiger@localhost:5432/mydatabase")
#   db = load_database(engine)
#
# FIX: left commented out — the original executed this unconditionally with a
# placeholder URL, clobbering the SQLite engine above and crashing on connect.

# Create an inspector object to inspect the database
inspector = inspect(engine)

# Get the list of table names
table_names = inspector.get_table_names()

# Create an SQLDatabaseChain over the ingested database.
# use_query_checker asks the LLM to sanity-check generated SQL before running it.
sql_chain = SQLDatabaseChain.from_llm(llm, db,
                  verbose=True, use_query_checker=True)


# FIX: the original referenced undefined `car_db` / `bike_db` (NameError at
# import time). Both chains now target `db`; to query separate databases,
# load each one (e.g. car_db = load_database(car_engine)) and pass it here —
# TODO confirm which databases these chains were meant to wrap.
one_sql_chain = SQLDatabaseChain.from_llm(llm, db,
                  verbose=True, use_query_checker=True)

two_sql_chain = SQLDatabaseChain.from_llm(llm, db,
                  verbose=True, use_query_checker=True)



memory = ConversationBufferMemory(memory_key="chat_history", 
         return_messages=True)

tools = [one_sql_tool, two_sql_tool]

conversational_agent = initialize_agent(
    agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION,
    tools=tools,
    llm=llm,
    verbose=True,
    max_iterations=3,
    memory=memory,
    dialect='ansi',
    early_stopping_method="generate",
    handle_parsing_errors=True,
)

# Define a simple query function that runs the query agent and returns the response.
# FIX: renamed from `query_fnr` — the Gradio Interface below is wired to
# `query_fn`, so the typo'd name raised NameError at startup; nothing
# referenced `query_fnr`.
def query_fn(input_text):
    """Send *input_text* to the conversational agent and return its reply."""
    response = conversational_agent.run(input=input_text)
    return response

# Build the UI.
# FIX: the `gr.inputs` / `gr.outputs` namespaces were removed in Gradio 3.x;
# components are constructed directly from the top-level `gr` namespace.
iface = gr.Interface(
    fn=query_fn,
    inputs=gr.Textbox(label="Enter your query"),
    outputs=gr.Textbox(label="Query Result"),
    title="Domain-specific chatbot",
)

# Launch the UI locally only (share=False keeps it off the public gradio link)
iface.launch(share=False, server_port=8080)