AjayKr09 committed
Commit 392151a (verified)
1 Parent(s): df8000b

Update pages/1_Interactive Chat Bot.py

Files changed (1)
  1. pages/1_Interactive Chat Bot.py +52 -52
pages/1_Interactive Chat Bot.py CHANGED
@@ -1,52 +1,52 @@
- import streamlit as st
- import os
- from langchain_core.prompts import ChatPromptTemplate
- from langchain_google_genai import ChatGoogleGenerativeAI
- from langchain_core.prompts import MessagesPlaceholder
- from langchain.memory import ConversationBufferWindowMemory
- from operator import itemgetter
- from langchain_core.runnables import RunnableLambda, RunnablePassthrough
-
- # Set the API key for Google Generative AI
- os.environ['GOOGLE_API_KEY'] = 'AIzaSyBHPIIk4-BOgXvnQ2_o6c2wTGpY2ByRIDs'
-
- # Initialize the Google Generative AI model
- model_gemini = ChatGoogleGenerativeAI(model='gemini-pro', temperature=0, max_output_tokens=500, convert_system_message_to_human=True)
-
- # Define the prompt
- prompt = ChatPromptTemplate.from_messages(
-     [
-         ('system', 'you are a good assistant.'),
-         MessagesPlaceholder(variable_name='history'),
-         ("human", "{input}")
-     ]
- )
-
- # Initialize memory in session state
- if 'memory' not in st.session_state:
-     st.session_state.memory = ConversationBufferWindowMemory(k=10, return_messages=True)
-
- # Define the chain
- chain = (RunnablePassthrough.assign(history=RunnableLambda(st.session_state.memory.load_memory_variables) | itemgetter("history")) |
-          prompt | model_gemini)
-
- # Streamlit app
- st.title("Interactive Chatbot")
-
- # Initialize session state for user input
- if 'user_input' not in st.session_state:
-     st.session_state.user_input = ""
-
- # Input from user
- user_input = st.text_area("User: ", st.session_state.user_input, height=100)
-
- if st.button("Submit"):
-     response = chain.invoke({"input": user_input})
-     st.write(f"Assistant: {response.content}")
-     st.session_state.memory.save_context({"input": user_input}, {"output": response.content})
-     st.session_state.user_input = ""  # Clear the input box
-
- # Display chat history
- if st.checkbox("Show Chat History"):
-     chat_history = st.session_state.memory.load_memory_variables({})
-     st.write(chat_history)
 
+ import streamlit as st
+ import os
+ from langchain_core.prompts import ChatPromptTemplate
+ from langchain_google_genai import ChatGoogleGenerativeAI
+ from langchain_core.prompts import MessagesPlaceholder
+ from langchain.memory import ConversationBufferWindowMemory
+ from operator import itemgetter
+ from langchain_core.runnables import RunnableLambda, RunnablePassthrough
+
+ # Set the API key for Google Generative AI
+ os.environ['GOOGLE_API_KEY'] = 'AIzaSyBHPIIk4-BOgXvnQ2_o6c2wTGpY2ByRIDs'
+
+ # Initialize the Google Generative AI model
+ model_gemini = ChatGoogleGenerativeAI(model='gemini-pro', temperature=0, max_output_tokens=500, convert_system_message_to_human=True)
+
+ # Define the prompt
+ prompt = ChatPromptTemplate.from_messages(
+     [
+         ('system', 'you are a good assistant.'),
+         MessagesPlaceholder(variable_name='history'),
+         ("human", "{input}")
+     ]
+ )
+
+ # Initialize memory in session state
+ if 'memory' not in st.session_state:
+     st.session_state.memory = ConversationBufferWindowMemory(k=10, return_messages=True)
+
+ # Define the chain
+ chain = (RunnablePassthrough.assign(history=RunnableLambda(st.session_state.memory.load_memory_variables) | itemgetter("history")) |
+          prompt | model_gemini)
+
+ # Streamlit app
+ st.title("Interactive Chatbot")
+
+ # Initialize session state for user input
+ if 'user_input' not in st.session_state:
+     st.session_state.user_input = ""
+
+ # Input from user
+ user_input = st.text_area("User: ", st.session_state.user_input, height=100)
+
+ if st.button("Submit"):
+     response = chain.invoke({"input": user_input})
+     st.write(f"Assistant: {response.content}")
+     st.session_state.memory.save_context({"input": user_input}, {"output": response.content})
+     st.session_state.user_input = ""  # Clear the input box
+
+ # Display chat history
+ if st.checkbox("Show Chat History"):
+     chat_history = st.session_state.memory.load_memory_variables({})
+     st.write(chat_history)
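
The committed script wires ConversationBufferWindowMemory into an LCEL chain through RunnablePassthrough.assign. Below is a minimal sketch of that pattern outside Streamlit, with the chain stopped at the prompt so no Gemini API key is needed; the sample turns ("Hello" / "Hi there!") are invented purely for illustration, and it assumes the langchain and langchain-core packages the file already imports are installed.

from operator import itemgetter

from langchain.memory import ConversationBufferWindowMemory
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain_core.runnables import RunnableLambda, RunnablePassthrough

prompt = ChatPromptTemplate.from_messages(
    [
        ('system', 'you are a good assistant.'),
        MessagesPlaceholder(variable_name='history'),
        ("human", "{input}")
    ]
)

# Window memory keeps only the last k=10 exchanges; return_messages=True yields
# message objects, which is what MessagesPlaceholder expects.
memory = ConversationBufferWindowMemory(k=10, return_messages=True)
memory.save_context({"input": "Hello"}, {"output": "Hi there!"})  # one prior turn

# .assign adds a 'history' key to the incoming dict before it reaches the prompt;
# in the app, model_gemini is piped onto the end of this chain.
chain = (
    RunnablePassthrough.assign(
        history=RunnableLambda(memory.load_memory_variables) | itemgetter("history")
    )
    | prompt
)

print(chain.invoke({"input": "What did I just say?"}).to_messages())
# [SystemMessage(...), HumanMessage('Hello'), AIMessage('Hi there!'), HumanMessage('What did I just say?')]

Because Streamlit reruns the whole script on every interaction, the app keeps the memory object in st.session_state so the rolling window survives reruns instead of being rebuilt empty each time.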