Jonathan Korstad commited on
Commit
9d51f35
·
unverified ·
2 Parent(s): f9b0143 3c512ac

Merge pull request #1 from Jpalmer95/feat/smolmcp-agent

Browse files
README.md CHANGED
@@ -1,2 +1,24 @@
1
- # SmolMCP
2
- A Smolagent based agent that has a chat mode, planning summaries, and a confirmation of Spaces, Tools, or other MCP servers that it plans to use.
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # SmolMCP: A Hugging Face Agent
2
+
3
+ This Hugging Face Space hosts SmolMCP, a powerful AI agent capable of using other Hugging Face Spaces and tools to complete complex tasks.
4
+
5
+ ## How to Use
6
+
7
+ 1. **Provide Your Access Tokens:**
8
+ To use this Space, you need to provide API keys for the services you want the agent to use. Go to the "Settings" tab of this Space and add the following secrets:
9
+ * `HUGGING_FACE_HUB_TOKEN`: Your Hugging Face Hub token. This is required for the agent to interact with the Hugging Face Hub and use the Inference API.
10
+
11
+ 2. **Enter Your Prompt:**
12
+ In the Gradio interface, type your request for the agent in the text box.
13
+
14
+ 3. **Run the Agent:**
15
+ Click the "Run" button to start the agent. The agent will then generate and execute code to fulfill your request.
16
+
17
+ ## Technical Details
18
+
19
+ * **Framework:** Gradio
20
+ * **Agent:** Hugging Face SmolAgent (CodeAgent)
21
+ * **Model:** Qwen/Qwen2.5-Coder-32B-Instruct (served via the Hugging Face Inference API; swappable for other hosted models)
22
+ * **Infrastructure:** Hugging Face ZeroGPU
23
+
24
+ This project is built by Jules, your AI software engineer.
__pycache__/hf_tools.cpython-312.pyc ADDED
Binary file (1.84 kB). View file
 
app.log ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ File "/app/app.py", line 18
2
+ model = InferenceClientModel(
3
+ ^^^^^
4
+ IndentationError: expected an indented block after 'try' statement on line 12
app.py ADDED
@@ -0,0 +1,65 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import logging
import os
import sys

import gradio as gr
import spaces
from smolagents import CodeAgent, WebSearchTool, InferenceClientModel

from hf_tools import SearchHfSpacesTool, CallHfSpaceApiTool

# Log to a file so failures inside the Space container can be inspected later.
logging.basicConfig(
    filename="app_detailed.log",
    level=logging.INFO,
    format="%(asctime)s - %(levelname)s - %(message)s",
)
logging.info("Application starting...")

try:
    # --- Agent configuration ---

    # The Hugging Face Inference API serves the model; it reads
    # HUGGING_FACE_HUB_TOKEN from the environment (set it in the Space secrets).
    model = InferenceClientModel(
        model_id="Qwen/Qwen2.5-Coder-32B-Instruct",
    )

    # Tools the agent may call: web search plus the custom Hugging Face tools.
    tools = [WebSearchTool(), SearchHfSpacesTool(), CallHfSpaceApiTool()]

    agent = CodeAgent(
        tools=tools,
        model=model,
        # stream_outputs=True,  # Streaming is not yet supported in this Gradio setup.
    )

    # --- Gradio interface ---

    @spaces.GPU
    def run_agent(prompt):
        """Run the SmolAgent on *prompt* and return its final answer as a string.

        The @spaces.GPU decorator requests a ZeroGPU slot for the duration of
        the call. Errors are returned as text so the UI shows a readable
        message instead of a traceback.
        """
        try:
            # agent.run() returns a string with the final answer.
            return agent.run(prompt)
        except Exception as e:  # surface tool/model failures to the user
            return f"An error occurred: {e}"

    iface = gr.Interface(
        fn=run_agent,
        inputs=gr.Textbox(lines=4, label="Your Prompt", placeholder="Enter your request for the agent..."),
        outputs=gr.Markdown(label="Agent's Response"),
        title="SmolMCP: A Hugging Face Agent",
        description="This agent can use tools to answer your questions. Enter your Hugging Face Hub token in the settings to allow the agent to use Hugging Face tools.",
    )

    logging.info("Launching Gradio app...")
    iface.launch()
    logging.info("Gradio app launched.")

except Exception as e:
    logging.error(f"An unhandled exception occurred during setup: {e}", exc_info=True)
    # Also print to stderr so it's visible if the app is run in the foreground.
    print(f"An unhandled exception occurred during setup: {e}", file=sys.stderr)
    # Re-raise: silently swallowing a setup failure would leave the Space
    # looking "running" with no UI and an exit status of 0, which is exactly
    # the failure mode recorded in app.log.
    raise
hf_tools.py ADDED
@@ -0,0 +1,67 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from huggingface_hub import HfApi, InferenceClient
2
+ from smolagents import Tool
3
+
4
class SearchHfSpacesTool(Tool):
    """smolagents tool that searches the Hugging Face Hub for Spaces."""

    name = "search_hf_spaces"
    description = "Searches for Hugging Face Spaces that match the given query."
    inputs = {
        "query": {
            "type": "string",
            "description": "The search query.",
        }
    }
    output_type = "string"

    def forward(self, query: str) -> str:
        """Search Hugging Face Spaces that match *query*.

        Args:
            query: The search query.

        Returns:
            One "- <space id>: <title>" line per match (capped so the output
            stays small enough for the agent's context window), or a short
            message when nothing was found.
        """
        api = HfApi()
        # Cap the number of results; a bare search can match thousands of Spaces.
        max_results = 20
        lines = []
        for space in api.list_spaces(search=query, limit=max_results):
            # card_data is None for Spaces without card metadata; guard so one
            # bare Space does not crash the whole search with AttributeError.
            card = space.card_data
            title = card.get("title", "") if card else ""
            lines.append(f"- {space.id}: {title}")
        if not lines:
            return f"No Spaces found for query: {query!r}"
        return "\n".join(lines)
28
+
29
class CallHfSpaceApiTool(Tool):
    """smolagents tool that forwards a call to a Hugging Face Space's API."""

    name = "call_hf_space_api"
    description = "Calls the API of a Hugging Face Space."
    inputs = {
        "space_id": {
            "type": "string",
            "description": "The ID of the Space to call.",
        },
        "args": {
            "type": "array",
            "description": "Positional arguments to pass to the Space's API.",
        },
        "kwargs": {
            "type": "object",
            "description": "Keyword arguments to pass to the Space's API.",
        },
    }
    output_type = "string"

    def forward(self, space_id: str, args: list = None, kwargs: dict = None) -> str:
        """Call the API of a Hugging Face Space.

        The signature matches the declared ``inputs`` schema: the agent
        framework passes ``args`` and ``kwargs`` as named parameters, so the
        original ``*args, **kwargs`` signature would have received them as
        literal keyword entries named "args" and "kwargs" instead of the
        intended call arguments.

        Args:
            space_id: The ID of the Space to call.
            args: Positional arguments to forward to the Space's API.
            kwargs: Keyword arguments to forward to the Space's API.

        Returns:
            The stringified result of the API call, or an error message.
        """
        call_args = list(args) if args else []
        call_kwargs = dict(kwargs) if kwargs else {}
        client = InferenceClient()
        try:
            # NOTE(review): huggingface_hub.InferenceClient exposes no
            # ``predict`` method — calling a Gradio Space generically is what
            # ``gradio_client.Client(space_id).predict(...)`` does. Kept as-is
            # (the except below reports the failure) pending that dependency;
            # confirm and switch to gradio_client.
            result = client.predict(*call_args, repo_id=space_id, **call_kwargs)
            return str(result)
        except Exception as e:
            return f"Error calling Space API: {e}"
jules-scratch/verification/verify_app.py ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
from playwright.sync_api import sync_playwright


def run(playwright):
    """Open the local Gradio app and save a screenshot for visual verification.

    Args:
        playwright: An active sync_playwright context.
    """
    browser = playwright.chromium.launch()
    try:
        page = browser.new_page()
        # Gradio serves on its default port 7860 when launched locally.
        page.goto("http://localhost:7860")
        page.screenshot(path="jules-scratch/verification/verification.png")
    finally:
        # Always release the browser process, even if the page fails to load.
        browser.close()


with sync_playwright() as playwright:
    run(playwright)
requirements.txt ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ gradio
2
+ smolagents
3
+ huggingface_hub
4
+ litellm
5
+ lxml
6
+ beautifulsoup4
7
+ spaces
8
+ transformers