Skip to content

Commit 39969a8

Browse files
committed
add an example for homebrew style file upload
Signed-off-by: SamYuan1990 <yy19902439@126.com>
1 parent be73067 commit 39969a8

File tree

8 files changed

+1193
-517
lines changed

8 files changed

+1193
-517
lines changed
Lines changed: 35 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,35 @@
1+
## Why this
2+
3+
This is an example of markitdown in "homebrew" style.
4+
5+
## Features
6+
7+
An MCP server runs on my laptop and an agent runs on my mobile phone.
8+
Pass files from my mobile phone to my laptop through this.
9+
10+
## Out of scope
11+
12+
- Trust and security, no need as a "homebrew" for individual usage.
13+
- Persistent storage, no need as a "homebrew" for individual usage.
14+
- stdio transport, since files are shared among different devices over the network.
15+
16+
## Prerequisites
17+
18+
See `examples/servers/homebrewupload`
19+
20+
## Installation, Usage and Example
21+
22+
```bash
23+
# todo
24+
# Navigate to the client directory
25+
cd examples/clients/homebrewupload-client
26+
27+
## default: tested with DeepSeek as the LLM provider
28+
export ANTHROPIC_BASE_URL=https://api.deepseek.com/anthropic
29+
export ANTHROPIC_API_KEY=<Your_API_KEY>
30+
31+
# Start the MCP client
32+
uv run python main.py
33+
```
34+
35+
Lines changed: 170 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,170 @@
1+
import asyncio
import base64
import json
import logging
import os
from contextlib import AsyncExitStack
from typing import Optional

import requests
from anthropic import Anthropic
from dotenv import load_dotenv
from mcp.client.session import ClientSession
from mcp.client.sse import sse_client
13+
14+
# Configure the root logger: timestamped, INFO-level messages.
logging.basicConfig(
    format="%(asctime)s - %(levelname)s - %(message)s", level=logging.INFO
)

load_dotenv()  # load environment variables from .env (e.g. ANTHROPIC_API_KEY)
21+
class MCPClient:
    """Minimal MCP client for the "homebrew" file-upload example.

    Uploads a local file to an MCP server over HTTP, then drives an
    Anthropic-compatible LLM (DeepSeek via the Anthropic SDK) that can call
    the server's tools (e.g. ``convert_to_markdown``) against the uploaded
    copy.
    """

    def __init__(self):
        # Initialize session and client objects.
        self.session: Optional[ClientSession] = None
        self.exit_stack = AsyncExitStack()
        self.anthropic = Anthropic()
        # Context managers are stored so they stay alive for the client's
        # lifetime (they are entered on the exit stack in connect_to_server).
        self._streams_context = None
        self._session_context = None

    async def connect_to_server(self) -> bool:
        """Connect to the MCP server running on localhost:3001 (SSE).

        Returns:
            True on success; False on failure (the client is closed so it is
            never left half-connected).
        """
        try:
            # Enter both context managers on the exit stack so they stay
            # alive until close() is called.
            self._streams_context = sse_client(url="http://localhost:3001/sse")
            streams = await self.exit_stack.enter_async_context(self._streams_context)

            self._session_context = ClientSession(*streams)
            self.session = await self.exit_stack.enter_async_context(self._session_context)

            # Perform the MCP initialization handshake.
            await self.session.initialize()

            # List available tools to verify the connection works end to end.
            print("Initialized SSE client...")
            print("Listing tools...")
            response = await self.session.list_tools()
            tools = response.tools
            print("\nConnected to server with tools:", [tool.name for tool in tools])

            return True
        except Exception as e:
            logging.error(f"Failed to connect to server: {e}")
            await self.close()
            return False

    async def process_chat(
        self,
        file_path: Optional[str] = None,
    ) -> str:
        """Process a single chat turn around one uploaded file.

        Uploads the file at *file_path* to the MCP server, asks the model to
        convert it to markdown, and executes any tool calls the model makes.

        Args:
            file_path: Path to a local file to upload (e.g. "./test.pdf").

        Returns:
            The concatenated model/tool text output, or an error description.
        """
        if not file_path:
            # open(None) would raise TypeError; report it up front instead.
            return "file handle error: no file path provided"

        # The server only sees the uploaded copy, so refer to the file by its
        # bare name under the server's upload directory.
        # NOTE(review): assumes the server stores uploads under /tmp —
        # confirm against the server implementation.
        filename = os.path.basename(file_path)
        file_uri = f"file:///tmp/{filename}"

        messages = []
        user_content = (
            f"please help make file into markdown format, file path {file_uri}, "
            "you are free to use convert_to_markdown tool, the file will upload "
            "to MCP server in secure."
        )

        try:
            # Send the raw file as base64 so the server can materialize it.
            with open(file_path, "rb") as f:
                file_content = base64.b64encode(f.read()).decode("utf-8")
            upload_response = requests.post(
                "http://localhost:3001/upload",
                json={"filename": filename, "file_content_base64": file_content},
            )
            # Surface a failed upload instead of silently continuing; the
            # raised HTTPError is caught and reported just below.
            upload_response.raise_for_status()
        except Exception as e:
            logging.info(f"file handle error: {str(e)}")
            return f"file handle error: {str(e)}"

        messages.append({"role": "user", "content": user_content})

        # Advertise the server's tools to the model.
        response = await self.session.list_tools()
        available_tools = [
            {
                "name": tool.name,
                "description": tool.description,
                "input_schema": tool.inputSchema,
            }
            for tool in response.tools
        ]

        response = self.anthropic.messages.create(
            model="deepseek-chat",
            max_tokens=1000,
            messages=messages,
            tools=available_tools,
        )
        final_text = []
        for content in response.content:
            if content.type == "text":
                final_text.append(content.text)
            elif content.type == "tool_use":
                tool_name = content.name
                # Always point the tool at the uploaded copy on the server;
                # the model's own content.input may reference a path that
                # exists only on this client device.
                tool_args = file_uri

                # Execute the tool call on the MCP server.
                try:
                    final_text.append(f"[invoke tool {tool_name}]")
                    result = await self.session.call_tool(tool_name, arguments={"uri": tool_args})
                    logging.info(result)

                    # Record the assistant's tool request in the transcript...
                    messages.append(
                        {
                            "role": "assistant",
                            "content": [{"type": "tool_use", **content.dict()}],
                        }
                    )

                    # ...and the tool's result, so the model can use it.
                    messages.append(
                        {
                            "role": "user",
                            "content": [
                                {
                                    "type": "tool_result",
                                    "tool_use_id": content.id,
                                    "content": result.content,
                                }
                            ],
                        }
                    )

                    # Ask the model to continue now that it has the result.
                    next_response = self.anthropic.messages.create(
                        model="deepseek-chat",
                        max_tokens=1000,
                        messages=messages,
                    )

                    # Append the model's final answer.
                    for next_content in next_response.content:
                        if next_content.type == "text":
                            final_text.append(next_content.text)

                except Exception as e:
                    final_text.append(f"tool invoke {tool_name} error: {str(e)}")

        return "\n".join(final_text)

    async def close(self):
        """Properly close all connections held on the exit stack."""
        await self.exit_stack.aclose()
149+
async def main():
    """Connect to the local MCP server, run one file-conversion chat, clean up."""
    client = MCPClient()
    try:
        logging.info("Connecting to server...")
        if await client.connect_to_server():
            # Keep the connection alive for a while to test
            await asyncio.sleep(2)
            logging.info(await client.process_chat("./test.pdf"))
        else:
            logging.error("Failed to connect to server")
    except Exception as e:
        logging.error(f"Error in main: {e}")
    finally:
        logging.info("Closing client...")
        await client.close()
        logging.info("Client closed successfully")
169+
# Script entry point: run the async client workflow to completion.
if __name__ == "__main__":
    asyncio.run(main())
Lines changed: 14 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,14 @@
1+
[project]
2+
name = "homebrewupload-client"
3+
version = "0.1.0"
4+
description = "A homebrew style mcp client with file upload feature"
5+
readme = "README.md"
6+
requires-python = ">=3.13"
7+
license = { text = "MIT" }
8+
dependencies = [
    "anthropic>=0.72.1",
    "mcp",
    "python-dotenv",
    "requests",
]
12+
13+
[tool.uv.sources]
14+
mcp = { workspace = true }
23.2 KB
Binary file not shown.
Lines changed: 30 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,30 @@
1+
## Why this
2+
3+
This is an example of markitdown in "homebrew" style.
4+
5+
## Features
6+
7+
An MCP server runs on my laptop and an agent runs on my mobile phone.
8+
Pass files from my mobile phone to my laptop through this.
9+
10+
## Out of scope
11+
12+
- Trust and security, no need as a "homebrew" for individual usage.
13+
- Persistent storage, no need as a "homebrew" for individual usage.
14+
- stdio transport, since files are shared among different devices over the network.
15+
16+
## Installation, Usage and Example
17+
18+
```bash
19+
# Navigate to the server directory
20+
cd examples/servers/homebrewupload
21+
22+
# Start MCP server
23+
uv run python main.py
24+
```
25+
26+
move to `examples/clients/homebrewupload-client`
27+
28+
## Further consideration
29+
30+
If we run this as a container on Kubernetes, a service mesh or similar infrastructure can handle security concerns such as authentication and authorization.

0 commit comments

Comments
 (0)