Divyansh12 committed
Commit cee9704 · verified · 1 Parent(s): f5ba2c4

Create app.py

Files changed (1)
  app.py +75 -0
app.py ADDED
@@ -0,0 +1,75 @@
+ import streamlit as st
+ from llama_cpp import Llama
+
+ # Initialize and cache the model
+ @st.cache_resource
+ def load_model():
+     return Llama.from_pretrained(
+         repo_id="Divyansh12/check",
+         filename="unsloth.F16.gguf",
+         verbose=True,
+         n_ctx=32768,
+         n_threads=2,
+         chat_format="chatml"
+     )
+
+ # Load the model only once
+ llm = load_model()
+
+ # Define the function to get responses from the model
+ def respond(message, history):
+     messages = []
+
+     for user_message, assistant_message in history:
+         if user_message:
+             messages.append({"role": "user", "content": user_message})
+         if assistant_message:
+             messages.append({"role": "assistant", "content": assistant_message})
+
+     messages.append({"role": "user", "content": message})
+
+     response = ""
+     # Stream the response from the model
+     response_stream = llm.create_chat_completion(
+         messages=messages,
+         stream=True,
+         max_tokens=512,   # Use a default value for simplicity
+         temperature=0.7,  # Use a default value for simplicity
+         top_p=0.95        # Use a default value for simplicity
+     )
+
+     # Collect the response chunks into a single string
+     for chunk in response_stream:
+         if len(chunk['choices'][0]["delta"]) != 0 and "content" in chunk['choices'][0]["delta"]:
+             response += chunk['choices'][0]["delta"]["content"]
+
+     return response  # Return the full response
+
+ # Streamlit UI
+ st.title("Simple Chatbot")
+ st.write("### Interact with the chatbot!")
+
+ # User input field
+ user_message = st.text_area("Your Message:", "")
+
+ # Chat history
+ if 'history' not in st.session_state:
+     st.session_state.history = []
+
+ # Button to send the message
+ if st.button("Send"):
+     if user_message:  # Check if the user has entered a message
+         # Get the response from the model
+         response = respond(user_message, st.session_state.history)
+
+         # Add the user message and model response to the history
+         st.session_state.history.append((user_message, response))
+
+         # Note: reassigning this local variable does not actually clear the text_area widget
+         user_message = ""
+
+ # Display the chat history
+ st.write("### Chat History")
+ for user_msg, assistant_msg in st.session_state.history:
+     st.write(f"**User:** {user_msg}")
+     st.write(f"**Assistant:** {assistant_msg}")
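For a quick sanity check outside the Streamlit UI, the same streaming call that respond() relies on can be exercised directly. The snippet below is a minimal sketch and is not part of this commit; it assumes llama-cpp-python (with huggingface_hub support) is installed and that the Divyansh12/check GGUF file is reachable, exactly as configured in app.py. The app itself is launched with the usual "streamlit run app.py".

    from llama_cpp import Llama

    # Same model configuration as app.py (assumed downloadable in this environment)
    llm = Llama.from_pretrained(
        repo_id="Divyansh12/check",
        filename="unsloth.F16.gguf",
        n_ctx=32768,
        n_threads=2,
        chat_format="chatml"
    )

    # Stream a single-turn completion and assemble the chunks, as respond() does
    reply = ""
    for chunk in llm.create_chat_completion(
        messages=[{"role": "user", "content": "Hello, who are you?"}],
        stream=True,
        max_tokens=128,
    ):
        delta = chunk["choices"][0]["delta"]
        if "content" in delta:
            reply += delta["content"]

    print(reply)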