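"""Streamlit app for chatting with uploaded PDF and DOCX files using LangChain, FAISS, and OpenAI."""
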
import streamlit as st
import os
from PyPDF2 import PdfReader
import docx
from langchain.chat_models import ChatOpenAI
from langchain.text_splitter import CharacterTextSplitter
from langchain.embeddings.huggingface import HuggingFaceEmbeddings
from langchain.vectorstores import FAISS
from langchain.chains import ConversationalRetrievalChain
from langchain.memory import ConversationBufferMemory
from streamlit_chat import message
from langchain.callbacks import get_openai_callback
from dotenv import load_dotenv
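
# Streamlit entry point: collect uploads and an OpenAI API key in the sidebar,
# build the conversational retrieval chain when "Process" is clicked, then run the chat loop.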
def main():
    load_dotenv()
    st.set_page_config(page_title="Chat With files")
    st.header("ChatPDF helps you ask questions about your PDFs 🤓")

    # Initialize session state on first run.
    if "conversation" not in st.session_state:
        st.session_state.conversation = None
    if "chat_history" not in st.session_state:
        st.session_state.chat_history = None
    if "processComplete" not in st.session_state:
        st.session_state.processComplete = None

    with st.sidebar:
        uploaded_files = st.file_uploader("Upload your file", type=['pdf', 'docx'], accept_multiple_files=True)
        openai_api_key = st.text_input("OpenAI API Key", key="chatbot_api_key", type="password")
        process = st.button("Process")

    if process:
        if not openai_api_key:
            st.info("Please add your OpenAI API key to continue.")
            st.stop()
        files_text = get_files_text(uploaded_files)
        text_chunks = get_text_chunks(files_text)
        vectorstore = get_vectorstore(text_chunks)
        st.session_state.conversation = get_conversation_chain(vectorstore, openai_api_key)
        st.session_state.processComplete = True

    if st.session_state.processComplete:
        user_question = st.chat_input("Chat with your file")
        if user_question:
            handle_userinput(user_question)
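
# Read every uploaded file and concatenate the extracted text, dispatching on file extension.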
def get_files_text(uploaded_files):
    text = ""
    for uploaded_file in uploaded_files:
        split_tup = os.path.splitext(uploaded_file.name)
        file_extension = split_tup[1]
        if file_extension == ".pdf":
            text += get_pdf_text(uploaded_file)
        elif file_extension == ".docx":
            text += get_docx_text(uploaded_file)
        else:
            text += get_csv_text(uploaded_file)
    return text
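
# Extract plain text from every page of a PDF with PyPDF2.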
def get_pdf_text(pdf):
    pdf_reader = PdfReader(pdf)
    text = ""
    for page in pdf_reader.pages:
        # extract_text() can return None for pages with no extractable text.
        text += page.extract_text() or ""
    return text
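
# Extract text from a DOCX file by joining the text of all paragraphs.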
def get_docx_text(file):
    doc = docx.Document(file)
    all_text = []
    for docpara in doc.paragraphs:
        all_text.append(docpara.text)
    text = ' '.join(all_text)
    return text
def get_csv_text(file):
    # Placeholder: CSV parsing is not implemented (the uploader only accepts .pdf and .docx).
    return "a"
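
# Split the raw text into overlapping chunks for embedding (900 characters with 100-character overlap).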
def get_text_chunks(text):
    text_splitter = CharacterTextSplitter(
        separator="\n",
        chunk_size=900,
        chunk_overlap=100,
        length_function=len
    )
    chunks = text_splitter.split_text(text)
    return chunks
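
# Embed the chunks with HuggingFace sentence embeddings and index them in an in-memory FAISS store.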
def get_vectorstore(text_chunks):
    embeddings = HuggingFaceEmbeddings()
    knowledge_base = FAISS.from_texts(text_chunks, embeddings)
    return knowledge_base
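
# Wire the FAISS retriever, a gpt-3.5-turbo chat model, and conversation memory into a ConversationalRetrievalChain.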
def get_conversation_chain(vectorstore, openai_api_key):
    llm = ChatOpenAI(openai_api_key=openai_api_key, model_name='gpt-3.5-turbo', temperature=0)
    memory = ConversationBufferMemory(memory_key='chat_history', return_messages=True)
    conversation_chain = ConversationalRetrievalChain.from_llm(
        llm=llm,
        retriever=vectorstore.as_retriever(),
        memory=memory
    )
    return conversation_chain

# Send the user question to the conversation chain, store the updated chat history,
# and render it along with token usage and cost for the call.
def handle_userinput(user_question):
    # Track token usage and cost for this request via the OpenAI callback.
    with get_openai_callback() as cb:
        response = st.session_state.conversation({'question': user_question})
    st.session_state.chat_history = response['chat_history']

    response_container = st.container()
    with response_container:
        # The chat history alternates between user (even index) and assistant (odd index) messages.
        for i, messages in enumerate(st.session_state.chat_history):
            if i % 2 == 0:
                message(messages.content, is_user=True, key=str(i))
            else:
                message(messages.content, key=str(i))
        st.write(f"Total Tokens: {cb.total_tokens}, Prompt Tokens: {cb.prompt_tokens}, Completion Tokens: {cb.completion_tokens}, Total Cost (USD): ${cb.total_cost}")

if __name__ == '__main__':
    main()