Update app.py
app.py CHANGED
@@ -27,24 +27,14 @@ class BasicAgent:
         fixed_answer = "This is a default answer."
         print(f"Agent returning fixed answer: {fixed_answer}")
         return fixed_answer
-## TOOL AND MODEL
-search_tool = DuckDuckGoSearchTool()
-from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
-from smolagents import ToolCallingAgent
-from smolagents.models import PipelineModel
-# Load small instruction-tuned model
-tokenizer = AutoTokenizer.from_pretrained("microsoft/phi-2")
-model = AutoModelForCausalLM.from_pretrained("microsoft/phi-2")
-pipe = pipeline("text-generation", model=model, tokenizer=tokenizer)
-# Wrap for smolagents
-llm = PipelineModel(pipe)
 
 
-#from huggingface_hub import login
-#import os
-#login(token=os.environ["HUGGINGFACEHUB_API_TOKEN"])
 
+from huggingface_hub import login
+import os
+login(token=os.environ["HUGGINGFACEHUB_API_TOKEN"])
 
+search_tool = DuckDuckGoSearchTool()
 def run_and_submit_all(profile: gr.OAuthProfile | None):
     """
     Fetches all questions, runs the BasicAgent on them, submits all answers.
@@ -54,7 +44,7 @@ def run_and_submit_all(profile: gr.OAuthProfile | None):
     try:
         agent = CodeAgent(
             tools=[search_tool],
-            model=
+            model=InferenceClientModel(mistralai/Magistral-Small-2506),
             max_steps=5,
             verbosity_level=2
         )
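Note on the added model line: as committed, the repo id is passed without quotes (InferenceClientModel(mistralai/Magistral-Small-2506)), which Python parses as arithmetic on undefined names and fails with a NameError. Below is a minimal sketch of what this setup presumably intends, assuming smolagents exposes InferenceClientModel with a model_id parameter and that CodeAgent and DuckDuckGoSearchTool are imported from smolagents elsewhere in app.py; the repo id "mistralai/Magistral-Small-2506" is taken from the diff as-is.

import os

from huggingface_hub import login
from smolagents import CodeAgent, DuckDuckGoSearchTool, InferenceClientModel

# Authenticate against the Hub using the Space secret referenced in the diff.
login(token=os.environ["HUGGINGFACEHUB_API_TOKEN"])

# Web search tool plus a hosted inference model wrapper for the agent.
search_tool = DuckDuckGoSearchTool()
model = InferenceClientModel(model_id="mistralai/Magistral-Small-2506")  # repo id from the diff, quoted

agent = CodeAgent(
    tools=[search_tool],
    model=model,
    max_steps=5,
    verbosity_level=2,
)

The only behavioural difference from the committed hunk is quoting the repo id and passing it via model_id; the tool list, step limit, and verbosity mirror the diff.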