# pdf-chatbot / app.py
import tempfile
import PyPDF2
import streamlit as st
from transformers import GPT2LMHeadModel, GPT2Tokenizer

# Load the pre-trained GPT-2 model and tokenizer
tokenizer = GPT2Tokenizer.from_pretrained("openai-community/gpt2")
model = GPT2LMHeadModel.from_pretrained("openai-community/gpt2")
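
# A minimal sketch (not in the original app): wrapping the model load in
# st.cache_resource lets Streamlit reruns reuse the weights instead of
# reloading them. Assumes Streamlit >= 1.18; load_gpt2 is a hypothetical
# helper name. If adopted, call load_gpt2() in place of the module-level
# loads above.
@st.cache_resource
def load_gpt2():
    tok = GPT2Tokenizer.from_pretrained("openai-community/gpt2")
    mdl = GPT2LMHeadModel.from_pretrained("openai-community/gpt2")
    return tok, mdl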

def extract_text_from_pdf(file_path):
    """Extract the text of every page from a PDF file."""
    text = ""
    with open(file_path, "rb") as f:
        # PyPDF2 >= 3.0 removed PdfFileReader/getPage/extractText;
        # use the current PdfReader API instead.
        reader = PyPDF2.PdfReader(f)
        for page in reader.pages:
            # extract_text() can return None for image-only pages.
            text += page.extract_text() or ""
    return text

def generate_response(user_input):
    input_ids = tokenizer.encode(user_input, return_tensors="pt")
    # do_sample=True lets temperature take effect; pad_token_id avoids the
    # warning GPT-2 emits because it has no pad token by default.
    output = model.generate(input_ids, max_length=100, num_return_sequences=1,
                            do_sample=True, temperature=0.7,
                            pad_token_id=tokenizer.eos_token_id)
    response = tokenizer.decode(output[0], skip_special_tokens=True)
    return response
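
# A hedged sketch (not part of the original flow): to condition the reply on
# the uploaded PDF, one option is to prepend a truncated slice of the extracted
# text to the prompt. GPT-2's context window is 1024 tokens, so the combined
# prompt is truncated here; generate_response_with_context and the 900-token
# budget are illustrative assumptions, not the original app's behavior.
def generate_response_with_context(user_input, context):
    prompt = context + "\n\nQuestion: " + user_input + "\nAnswer:"
    input_ids = tokenizer.encode(prompt, return_tensors="pt",
                                 truncation=True, max_length=900)
    output = model.generate(input_ids, max_new_tokens=100, do_sample=True,
                            temperature=0.7, pad_token_id=tokenizer.eos_token_id)
    # Decode only the newly generated tokens, not the echoed prompt.
    return tokenizer.decode(output[0][input_ids.shape[1]:], skip_special_tokens=True)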

def main():
    st.title("PDF Chatbot")
    pdf_file = st.file_uploader("Upload a PDF file", type=["pdf"], accept_multiple_files=False)
    if pdf_file is not None:
        # Persist the upload to a temporary file so it can be reopened by path.
        with tempfile.NamedTemporaryFile(delete=False) as tmp_file:
            tmp_file.write(pdf_file.read())
            file_path = tmp_file.name
        st.success("PDF file successfully uploaded and stored temporarily.")
        pdf_text = extract_text_from_pdf(file_path)
        st.text_area("PDF Content", pdf_text)
    else:
        st.markdown("No PDF file uploaded yet.")
    user_input = st.text_input("You:", "")
    if st.button("Send"):
        response = generate_response(user_input)
        st.text_area("Chatbot:", response)

if __name__ == "__main__":
    main()
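
# To run locally (assumed environment): install the dependencies the imports
# require, e.g. `pip install streamlit PyPDF2 transformers torch`, then launch
# the app with `streamlit run app.py`.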