"""UI for the Math Helper prototype for exploring safety engineering concepts and LLMs.

Using Streamlit to provide a Python-based front end.
"""
# Libraries for human interaction and visualization.
import logging
import os

import streamlit as st

from helpers.chat import basicChat, guidedMM, mmChat
from helpers.constant import *

# Log to a local file so prototype sessions can be reviewed after the fact.
logging.basicConfig(filename='app.log', level=logging.INFO)
logger = logging.getLogger(__name__)
def update_model(name):
    """Bind the session's model to the user's radio choice.

    "Llama" selects the LLAMA backend; any other value falls back to QWEN.
    """
    st.session_state.model = LLAMA if name == "Llama" else QWEN
if "model" not in st.session_state: | |
st.session_state.model = LLAMA | |
if "systemPrompt" not in st.session_state: | |
st.session_state.systemPrompt = "Model" | |
st.set_page_config(page_title="IMSA Math Helper v0.1")
st.title("IMSA Math Helper v0.1")

with st.sidebar:
    # Model picker — update_model maps the label onto the concrete model id.
    chosen_model = st.radio("Please select the model:", options=["Llama","QWEN"])
    update_model(chosen_model)
    logger.info(f"Model changed to {chosen_model}.")

    # Control persona steers which system prompt the chat helpers use.
    persona = st.radio("Designate a control persona:",options=["Model","Tutor"])
    st.session_state.systemPrompt = persona

st.subheader(f"This experience is currently running on {st.session_state.model}")
# Initialize chat history, then replay it so the conversation survives
# Streamlit's rerun-on-every-interaction model.
if "messages" not in st.session_state:
    st.session_state.messages = []
for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        st.markdown(message["content"])

# Camera capture is opt-in: the widget stays disabled until the box is checked.
enable = st.checkbox("Enable camera")
picture = st.camera_input("Take a picture of your math work", disabled=not enable)

if picture is not None:
    # Fix: ensure the target directory exists — open() raises FileNotFoundError
    # on a fresh deployment where tempDir has never been created.
    os.makedirs("tempDir", exist_ok=True)
    # Build the path once and reuse it for both the write and the chat call
    # (previously the writer and mmChat used two differently spelled paths).
    image_path = os.path.join("tempDir", "picture.png")
    with open(image_path, "wb") as f:
        f.write(picture.getbuffer())
    # Hand the saved snapshot to the multimodal chat flow.
    mmChat(image_path)
else:
    # No picture taken: fall back to the text-only chat loop.
    basicChat()