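# Earlier smolagents version of Alfred, kept commented out for reference.
# The active LangGraph implementation starts after this block.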
# import gradio as gr
# import random
# from smolagents import GradioUI, CodeAgent, HfApiModel

# # Import our custom tools from their modules
# from tools import DuckDuckGoSearchTool, WeatherInfoTool, HubStatsTool
# from retriever import load_guest_dataset

# # Initialize the Hugging Face model
# model = HfApiModel()

# # Initialize the web search tool
# search_tool = DuckDuckGoSearchTool()

# # Initialize the weather tool
# weather_info_tool = WeatherInfoTool()

# # Initialize the Hub stats tool
# hub_stats_tool = HubStatsTool()

# # Load the guest dataset and initialize the guest info tool
# guest_info_tool = load_guest_dataset()

# # Create Alfred with all the tools
# alfred = CodeAgent(
#     tools=[guest_info_tool, weather_info_tool, hub_stats_tool, search_tool],
#     model=model,
#     add_base_tools=True,  # Add any additional base tools
#     planning_interval=3   # Enable planning every 3 steps
# )

# if __name__ == "__main__":
#     GradioUI(alfred).launch()
import os

from typing import TypedDict, Annotated
from langgraph.graph.message import add_messages
from langchain_core.messages import AnyMessage, HumanMessage, AIMessage
from langgraph.prebuilt import ToolNode, tools_condition
from langgraph.graph import START, StateGraph
from langchain_huggingface import HuggingFaceEndpoint, ChatHuggingFace

from tools import DuckDuckGoSearchRun, weather_info_tool, hub_stats_tool
from retriever import guest_info_tool
# Initialize the web search tool
search_tool = DuckDuckGoSearchRun()
# Read the Hugging Face API token from the environment
# (assumes HUGGINGFACEHUB_API_TOKEN is set as a Space secret or shell variable).
HUGGINGFACEHUB_API_TOKEN = os.getenv("HUGGINGFACEHUB_API_TOKEN")

# Generate the chat interface, including the tools
llm = HuggingFaceEndpoint(
    repo_id="Qwen/Qwen2.5-Coder-32B-Instruct",
    huggingfacehub_api_token=HUGGINGFACEHUB_API_TOKEN,
)
chat = ChatHuggingFace(llm=llm, verbose=True)

tools = [guest_info_tool, search_tool, weather_info_tool, hub_stats_tool]
chat_with_tools = chat.bind_tools(tools)
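
# Quick sanity check (a sketch, not part of the agent loop; uncomment to try): when the bound
# model decides a tool is needed, the returned AIMessage carries that request in `.tool_calls`
# rather than in `.content`.
# print(chat_with_tools.invoke([HumanMessage(content="What's the weather like in Paris?")]).tool_calls)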

# Generate the AgentState and Agent graph
class AgentState(TypedDict):
    messages: Annotated[list[AnyMessage], add_messages]


def assistant(state: AgentState):
    return {
        "messages": [chat_with_tools.invoke(state["messages"])],
    }
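
# Note on state updates: the `add_messages` reducer on AgentState appends returned messages
# to the running history instead of overwriting it, so each assistant reply and each tool
# result accumulates in state["messages"] across graph steps.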

## The graph
builder = StateGraph(AgentState)

# Define nodes: these do the work
builder.add_node("assistant", assistant)
builder.add_node("tools", ToolNode(tools))

# Define edges: these determine how the control flow moves
builder.add_edge(START, "assistant")
builder.add_conditional_edges(
    "assistant",
    # If the latest message requires a tool, route to tools
    # Otherwise, provide a direct response
    tools_condition,
)
builder.add_edge("tools", "assistant")

alfred = builder.compile()
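
# Optional debugging sketch (assumes a notebook environment with IPython installed):
# render the compiled graph to confirm the assistant <-> tools loop.
# from IPython.display import Image
# Image(alfred.get_graph().draw_mermaid_png())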

if __name__ == "__main__":
    # GradioUI is a smolagents class, so invoke the compiled LangGraph agent directly with an example query.
    response = alfred.invoke({"messages": [HumanMessage(content="Tell me about our guest named 'Lady Ada Lovelace'.")]})
    print(response["messages"][-1].content)