Update app.py
app.py CHANGED
@@ -1,42 +1,5 @@
-
-import os
-from utils.database import initialize_vector_db, add_to_collection, get_vector_db
-from utils.rag_utils import process_pdf, get_groq_response
-
-# Configuration
-os.environ["TOKENIZERS_PARALLELISM"] = "false"
-PERSISTENT_DB_DIR = "chroma_db_storage"
-PDF_STORAGE_DIR = "data/pdf_files"
-
-# Initialize directories
-os.makedirs(PDF_STORAGE_DIR, exist_ok=True)
-os.makedirs(PERSISTENT_DB_DIR, exist_ok=True)
-
-# Initialize session state
-if 'vector_db' not in st.session_state:
-    st.session_state.vector_db = initialize_vector_db(PERSISTENT_DB_DIR)
-
-# PDF Processing
-def handle_pdf_upload():
-    st.subheader("📄 Document Management")
-    uploaded_file = st.file_uploader("Upload PDF", type="pdf")
-
-    if uploaded_file:
-        # Save PDF permanently
-        pdf_path = os.path.join(PDF_STORAGE_DIR, uploaded_file.name)
-        with open(pdf_path, "wb") as f:
-            f.write(uploaded_file.getbuffer())
-
-        # Process and store in vector DB
-        with st.spinner(f"Processing {uploaded_file.name}..."):
-            try:
-                chunks = process_pdf(pdf_path)
-                add_to_collection(chunks)
-                st.success(f"✅ Added {len(chunks)} chunks from {uploaded_file.name}")
-            except Exception as e:
-                st.error(f"Error: {str(e)}")
-
-# Chat Interface
+# ... (previous imports remain the same)
+
 def handle_chat():
     st.subheader("💬 Document Chat")
 
@@ -59,21 +22,11 @@ def handle_chat():
             response = get_groq_response(
                 prompt,
                 get_vector_db(),
-                model_name="
+                model_name="llama3-70b-8192"  # Updated model
             )
             st.markdown(response)
-            st.session_state.messages.append(
+            st.session_state.messages.append(
+                {"role": "assistant", "content": response}
+            )
         except Exception as e:
-            st.error(f"Error: {str(e)}")
-
-# Main App
-def main():
-    st.title("📚 Smart Document Assistant")
-    tab1, tab2 = st.tabs(["Manage Documents", "Chat"])
-    with tab1:
-        handle_pdf_upload()
-    with tab2:
-        handle_chat()
-
-if __name__ == "__main__":
-    main()
+            st.error(f"Please check the model name in rag_utils.py. Error: {str(e)}")
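The added st.session_state.messages.append({...}) only has an effect if handle_chat also initializes and replays that history, which happens in the part of the function elided from this diff. A generic sketch of the standard Streamlit chat pattern those lines usually follow (not a claim about the exact elided code):

# Generic Streamlit chat-history pattern; the elided portion of handle_chat is
# assumed to look roughly like this.
import streamlit as st

if "messages" not in st.session_state:
    st.session_state.messages = []  # chat history persists across reruns

for msg in st.session_state.messages:  # replay earlier turns on every rerun
    with st.chat_message(msg["role"]):
        st.markdown(msg["content"])

if prompt := st.chat_input("Ask about your documents"):
    st.session_state.messages.append({"role": "user", "content": prompt})
    with st.chat_message("user"):
        st.markdown(prompt)
    # ...the assistant response is generated and appended as shown in the diff above.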