# ... (previous imports remain the same)
import streamlit as st  # assumed import; the original file elides its import block
from rag_utils import get_groq_response, get_vector_db  # assumed helpers, per the error message below


def handle_chat():
    st.subheader("💬 Document Chat")

    # Initialize the chat history once per session
    if "messages" not in st.session_state:
        st.session_state.messages = []

    # Replay the existing conversation
    for message in st.session_state.messages:
        with st.chat_message(message["role"]):
            st.markdown(message["content"])

    # Handle a new user prompt
    if prompt := st.chat_input("Ask about your documents"):
        st.session_state.messages.append({"role": "user", "content": prompt})
        with st.chat_message("user"):
            st.markdown(prompt)

        with st.chat_message("assistant"):
            with st.spinner("Thinking..."):
                try:
                    response = get_groq_response(
                        prompt,
                        get_vector_db(),
                        model_name="llama3-70b-8192",  # Updated model
                    )
                    st.markdown(response)
                    st.session_state.messages.append(
                        {"role": "assistant", "content": response}
                    )
                except Exception as e:
                    st.error(f"Please check the model name in rag_utils.py. Error: {str(e)}")