jkorstad committed on
Commit 6cf05c1 · 1 Parent(s): 9d51f35

Fix app for HF Space deployment: update model in README, fix logging, remove unused litellm, remove verification script

README.md CHANGED
@@ -18,7 +18,7 @@ This Hugging Face Space hosts SmolMCP, a powerful AI agent capable of using othe
 
 * **Framework:** Gradio
 * **Agent:** Hugging Face SmolAgent (CodeAgent)
-* **Model:** GLM4.6 (or other models supported by LiteLLM)
+* **Model:** Qwen/Qwen2.5-Coder-32B-Instruct
 * **Infrastructure:** Hugging Face ZeroGPU
 
-This project is built by Jules, your AI software engineer.
+This project is built by Jules, your AI software engineer.
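For reference, the stack listed above maps onto smolagents roughly as in the sketch below. This is a minimal illustration, not the exact code in app.py: the arguments actually passed to `InferenceClientModel(` are cut off by the diff, so the `model_id` keyword and the zero-argument tool constructors are assumptions.

```python
# Minimal sketch of the agent setup described in the README.
# Assumptions: InferenceClientModel accepts a model_id keyword, the
# HUGGING_FACE_HUB_TOKEN secret is picked up from the environment, and the
# custom tools in hf_tools take no constructor arguments.
from smolagents import CodeAgent, WebSearchTool, InferenceClientModel
from hf_tools import SearchHfSpacesTool, CallHfSpaceApiTool

# Model served through the Hugging Face Inference API, as named in the README.
model = InferenceClientModel(model_id="Qwen/Qwen2.5-Coder-32B-Instruct")

# CodeAgent writes and executes small Python snippets to call its tools.
agent = CodeAgent(
    tools=[WebSearchTool(), SearchHfSpacesTool(), CallHfSpaceApiTool()],
    model=model,
)
```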
app.py CHANGED
@@ -6,13 +6,13 @@ from smolagents import CodeAgent, WebSearchTool, InferenceClientModel
 from hf_tools import SearchHfSpacesTool, CallHfSpaceApiTool
 
 # Set up logging
-logging.basicConfig(filename='app_detailed.log', level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
+logging.basicConfig(filename='app.log', level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
 logging.info("Application starting...")
 
 try:
     # --- Agent Configuration ---
 
-    # 1. Set up the model
+    # 1. Set up the model
     # We will use the Hugging Face Inference API.
     # The user needs to set the HUGGING_FACE_HUB_TOKEN in the Space's secrets.
     model = InferenceClientModel(
@@ -30,7 +30,7 @@ agent = CodeAgent(
         # stream_outputs=True, # Streaming is not yet supported in this Gradio setup
     )
 
-    # --- Gradio Interface ---
+    # --- Gradio Interface ---
 
     @spaces.GPU
     def run_agent(prompt):
@@ -46,15 +46,15 @@ def run_agent(prompt):
            return f"An error occurred: {e}"
 
     # Define the Gradio interface
-    iface = gr.Interface(
-        fn=run_agent,
-        inputs=gr.Textbox(lines=4, label="Your Prompt", placeholder="Enter your request for the agent..."),
-        outputs=gr.Markdown(label="Agent's Response"),
-        title="SmolMCP: A Hugging Face Agent",
-        description="This agent can use tools to answer your questions. Enter your Hugging Face Hub token in the settings to allow the agent to use Hugging Face tools.",
-    )
+    iface = gr.Interface(
+        fn=run_agent,
+        inputs=gr.Textbox(lines=4, label="Your Prompt", placeholder="Enter your request for the agent..."),
+        outputs=gr.Markdown(label="Agent's Response"),
+        title="SmolMCP: A Hugging Face Agent",
+        description="This agent can use tools to answer your questions. Enter your Hugging Face Hub token in the settings to allow the agent to use Hugging Face tools.",
+    )
 
-    # Launch the Gradio app
+    # Launch the Gradio app
     logging.info("Launching Gradio app...")
     iface.launch()
     logging.info("Gradio app launched.")
@@ -62,4 +62,4 @@ iface = gr.Interface(
 except Exception as e:
     logging.error(f"An unhandled exception occurred during setup: {e}", exc_info=True)
     # Also print to stderr so it's visible if the app is run in the foreground
-    print(f"An unhandled exception occurred during setup: {e}")
+    print(f"An unhandled exception occurred during setup: {e}")
jules-scratch/verification/verify_app.py DELETED
@@ -1,11 +0,0 @@
-from playwright.sync_api import sync_playwright
-
-def run(playwright):
-    browser = playwright.chromium.launch()
-    page = browser.new_page()
-    page.goto("http://localhost:7860")
-    page.screenshot(path="jules-scratch/verification/verification.png")
-    browser.close()
-
-with sync_playwright() as playwright:
-    run(playwright)
requirements.txt CHANGED
@@ -1,8 +1,7 @@
 gradio
 smolagents
 huggingface_hub
-litellm
 lxml
 beautifulsoup4
 spaces
-transformers
+transformers