# LLM-Test / app.py
import gradio as gr
from langchain.llms import HuggingFaceHub
import os

# Set the Hugging Face API token in your environment variables before launching:
# os.environ["HUGGINGFACEHUB_API_TOKEN"] = "YOUR_TOKEN"
# Initialize the language model
llm = HuggingFaceHub(repo_id="tiiuae/falcon-7b-instruct", model_kwargs={"temperature":0.6})
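# Optional, hedged sketch (not part of the original file): further generation
# settings could likely be passed through model_kwargs as well, e.g. a
# max_new_tokens limit for longer replies; the exact parameters accepted
# depend on the hosted model's text-generation endpoint.
# llm = HuggingFaceHub(
#     repo_id="tiiuae/falcon-7b-instruct",
#     model_kwargs={"temperature": 0.6, "max_new_tokens": 200},
# )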
def chat(user_message):
    # Generate a response from the language model
    response = llm(user_message)
    # Return the response
    return response
# Create a Gradio interface
iface = gr.Interface(fn=chat, inputs="text", outputs="text")
# Launch the interface
iface.launch()