最近手搓的代码,弄一次之后,果然记忆和理解就不一样了。
这次除了本地的MCP SERVER,还弄了HTTP SSE的远程MCP SERVER。
方案就是用 ollama + qwen2.5。但网上的教程涉及的消息格式有时不一样,所以实践时还是要多探索。有的人写的代码水平高,有的重复性就高了,要会鉴别。
一,本地MCPstdio通信代码
.env的环境变量文件内容
BASE_URL=http://localhost:11434/v1/
MODEL=qwen2.5:1.5b
OPENAI_API_KEY=ollama
本地SERVER,简单请求。
import json
import httpx
from typing import Any
from mcp.server.fastmcp import FastMCP
# Initialize the MCP server instance; tools below register on it via @mcp.tool().
mcp = FastMCP('WeatherServer')
# OpenWeather API configuration.
OPENWEATHER_API_BASE = 'http://api.openweathermap.org/data/2.5/weather'
# SECURITY NOTE(review): this API key is hardcoded (and published with the post);
# it should be loaded from an environment variable and the exposed key revoked.
API_KEY = '581703c33f8a098307b8c7f6f219cbe5'
# Sent as the User-Agent header on outgoing requests.
USER_AGENT = 'weather-app/1.0'
async def fetch_weather(city):
    """Fetch the current weather for *city* from the OpenWeather API.

    Returns the decoded JSON payload on success, or a dict with an
    'error' key describing the failure.
    """
    params = {
        'q': city,
        'appid': API_KEY,
        'units': 'metric',  # Celsius
        'lang': 'zh_cn'     # Chinese weather descriptions
    }
    # Bug fix: `headers` was referenced below but never defined, raising
    # NameError on every call. Build it from the module-level USER_AGENT.
    headers = {'User-Agent': USER_AGENT}
    async with httpx.AsyncClient() as client:
        try:
            response = await client.get(OPENWEATHER_API_BASE, params=params, headers=headers, timeout=30.0)
            response.raise_for_status()
            return response.json()
        except httpx.HTTPStatusError as e:
            return {'error': f'HTTP错误: {e.response.status_code}'}
        except Exception as e:
            return {'error': f'请求失败: {str(e)}'}
def format_weather(data):
    """Format an OpenWeather JSON payload into a human-readable summary.

    *data* may be a dict or a JSON string. Returns the formatted string on
    success, or a dict with an 'error' key when the payload cannot be parsed
    or itself reports an error.
    """
    if isinstance(data, str):
        try:
            data = json.loads(data)
        except Exception as e:
            return {'error': f'无法解析天气数据: {str(e)}'}
    if 'error' in data:
        # Bug fix: propagate the actual error detail instead of discarding it.
        return {'error': f'天气数据包含错误: {data["error"]}'}
    city = data.get('name', '未知')
    country = data.get('sys', {}).get('country', '未知')
    temp = data.get('main', {}).get('temp', 'N/A')
    humidity = data.get('main', {}).get('humidity', 'N/A')
    wind_speed = data.get('wind', {}).get('speed', 'N/A')
    weather_list = data.get('weather', [{}])
    description = weather_list[0].get('description', '未知')
    # Label bug fixes: '.C' -> '°C', and '网速' (network speed) -> '风速'
    # (wind speed) — the value comes from data['wind']['speed'].
    return (
        f'{city}, {country}\n'
        f'温度:{temp}°C\n'
        f'湿度:{humidity}%\n'
        f'风速:{wind_speed}m/s\n'
        f'天气:{description}\n'
    )
@mcp.tool()
def write_to_text(file_name, content):
    """Write *content* to *file_name* (UTF-8) and return a status message."""
    try:
        with open(file_name, 'w', encoding='utf-8') as fp:
            fp.write(content)
    except Exception as exc:
        return f'写入文件失败:{str(exc)}'
    return f'成功写入文件{file_name}'
@mcp.tool()
async def query_weather(city):
    """Return a (currently mocked) weather report for *city*.

    The real implementation is disabled for offline testing:
    # data = await fetch_weather(city)
    # return format_weather(data)
    """
    mock_lines = [
        f'{city}, "北京"',
        '温度:25.5°C',
        '湿度:30%',
        '网速:3.0m/s',
        '天气:适合出行,秋高气爽',
        '',  # keep the trailing newline of the original report
    ]
    return '\n'.join(mock_lines)
if __name__ == '__main__':
    # Serve over stdio so an MCP client can spawn this script as a subprocess.
    mcp.run(transport='stdio')
本地请求的 client 连接是比较难的部分,因为 server 进程是由 client 启动(带起)的。
import asyncio
import os
import json
import sys
from pickle import FALSE
from typing import Optional
from zoneinfo import available_timezones
from openai import OpenAI
from dotenv import load_dotenv
from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client
from contextlib import AsyncExitStack
load_dotenv()
class MCPClient:
    """MCP client that drives a stdio MCP server with an OpenAI-compatible LLM."""

    def __init__(self):
        """Read connection settings from the environment and create the LLM client."""
        self.stdio = None
        self.write = None
        self.openai_api_key = os.getenv('OPENAI_API_KEY')
        self.base_url = os.getenv('BASE_URL')
        self.model = os.getenv('MODEL')
        if not self.openai_api_key:
            raise ValueError('没找到API KEY,请在.env文件中设置')
        self.client = OpenAI(api_key=self.openai_api_key, base_url=self.base_url)
        self.session: Optional[ClientSession] = None
        self.exit_stack = AsyncExitStack()

    def get_response(self, messages, tools):
        """Request one chat completion, advertising *tools* for function calling."""
        response = self.client.chat.completions.create(
            model=self.model,
            max_tokens=1000,
            messages=messages,
            tools=tools,
        )
        return response

    async def get_tools(self):
        """List the connected server's tools in OpenAI function-calling format."""
        response = await self.session.list_tools()
        available_tools = [{
            'type': 'function',
            'function': {
                'name': tool.name,
                'description': tool.description,
                'input_schema': tool.inputSchema
            }
        } for tool in response.tools]
        return available_tools

    async def process_query(self, query):
        """Process *query* with the LLM, invoking MCP tools when requested (Function Calling)."""
        messages = [
            {
                'role': 'user',
                'content': query
            }
        ]
        available_tools = await self.get_tools()
        response = self.get_response(messages, available_tools)
        tool_results = []
        final_text = []
        for choice in response.choices:
            message = choice.message
            if not message.tool_calls:
                # Plain answer — no tool requested.
                final_text.append(message.content)
                continue
            tool_name = message.tool_calls[0].function.name
            tool_args = json.loads(message.tool_calls[0].function.arguments)
            print(f'\n\n[Calling tool {tool_name} with args {tool_args}]\n\n')
            try:
                result = await self.session.call_tool(tool_name, tool_args)
                print(f'\n工具调用返回结果:{result.content}')
                if hasattr(result, 'content'):
                    if isinstance(result.content, list):
                        content = '\n'.join(str(item) for item in result.content)
                        print(f'将列表转换为字符串:{content}')
                    else:
                        content = str(result.content)
                else:
                    content = str(result)
                tool_results.append({'call': tool_name, 'result': content})
                # Bug fix: ensure_ascii was passed pickle.FALSE (the pickle
                # protocol constant b'I00\n', which is truthy), so non-ASCII
                # text was still escaped. Use the builtin False.
                final_text.append(f'[调用工具{tool_name} 参数:{json.dumps(tool_args, ensure_ascii=False)}]')
                # Bug fix: the old check `hasattr(message.content, "text")` is
                # never true for a plain string, so the assistant turn was
                # always dropped from the history.
                if message.content:
                    messages.append({
                        'role': 'assistant',
                        'content': message.content
                    })
                # NOTE(review): the tool output is fed back as a 'user' turn —
                # a simplified format that ollama accepts; the strict OpenAI
                # protocol would use a 'tool' role with a tool_call_id.
                messages.append({
                    'role': 'user',
                    'content': content
                })
                print('获取下一个LLM响应...')
                response = self.get_response(messages, available_tools)
                final_text.append(response.choices[0].message.content or '')
            except Exception as e:
                print(f'工具调用异常: {str(e)}')
        return '\n'.join(final_text)

    async def connect_to_server(self, server_script_path):
        """Spawn the MCP server script over stdio and list its available tools."""
        is_python = server_script_path.endswith('.py')
        # Bug fix: endswith('js') also matched names like 'nojs'; require '.js'.
        is_js = server_script_path.endswith('.js')
        if not (is_js or is_python):
            raise ValueError('服务器脚本必须是.py或.js文件')
        command = 'python' if is_python else 'node'
        server_params = StdioServerParameters(
            command=command,
            args=[server_script_path],
            env=None
        )
        # Enter both async contexts on the exit stack so cleanup() closes them.
        self.stdio, self.write = await self.exit_stack.enter_async_context(stdio_client(server_params))
        self.session = await self.exit_stack.enter_async_context(ClientSession(self.stdio, self.write))
        await self.session.initialize()
        response = await self.session.list_tools()
        tools = response.tools
        print('\n已连接到服务器,工具列表如下:', [tool.name for tool in tools])

    async def chat_loop(self):
        """Run the interactive chat loop until the user types 'quit'."""
        print('\nMCP客户端已启动,输入quit退出')
        while True:
            try:
                query = input('\nQuery: ').strip()
                if query.lower() == 'quit':
                    break
                print('\n处理查询中...')
                response = await self.process_query(query)
                # Bug fix: the label was hardcoded to '[qwen2:2b]', which is not
                # the configured model; show the model actually in use.
                print(f'\n[{self.model}] {response}')
            except Exception as e:
                print(f'\n发生错误:{str(e)}')

    async def cleanup(self):
        """Close every async context (session and stdio transport)."""
        await self.exit_stack.aclose()
async def main():
    """CLI entry point: spawn the given MCP server script, then start the chat loop."""
    if len(sys.argv) < 2:
        print('Usage: python client.py <path_to_server_script>')
        sys.exit(1)
    mcp_client = MCPClient()
    try:
        await mcp_client.connect_to_server(sys.argv[1])
        await mcp_client.chat_loop()
    finally:
        # Always release the stdio transport, even after an error.
        await mcp_client.cleanup()
if __name__ == '__main__':
    # Fix: `sys` is already imported at the top of the file; the duplicate
    # local `import sys` here was redundant and has been removed.
    asyncio.run(main())
二,SSE通信代码
这次有两个server,18080端口的应用,以及18081端口的应用。
1,
import argparse
import mcp
from mcp.server.fastmcp import FastMCP
from starlette.applications import Starlette
from mcp.server.sse import SseServerTransport
from starlette.requests import Request
from starlette.routing import Mount, Route
from mcp.server import Server
import logging
import uvicorn
# Server name, also reused as the logger name.
MCP_SERVER_NAME = 'Cg-Math-Mcp-Sse'
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
)
logger = logging.getLogger(MCP_SERVER_NAME)
# NOTE(review): this rebinds the name `mcp`, shadowing the `mcp` module
# imported above — works, but a different variable name would be clearer.
mcp = FastMCP(MCP_SERVER_NAME)
@mcp.tool()
def add(a: float, b: float) -> float:
    """Return the sum of a and b."""
    return a + b

@mcp.tool()
def subtract(a: float, b: float) -> float:
    """Return a minus b."""
    return a - b

@mcp.tool()
def multiply(a: float, b: float) -> float:
    """Return the product of a and b."""
    return a * b

@mcp.tool()
def devide(a: float, b: float) -> float:
    """Return a divided by b; raises ValueError when b is zero.

    NOTE(review): 'devide' is a typo for 'divide', but it is the registered
    tool name exposed to clients — renaming would change the public tool id.
    """
    if b == 0:
        raise ValueError('Division by zero is not allowed.')
    return a / b
def create_starlette_app(mcp_server: Server, *, debug: bool = False) -> Starlette:
    """Build a Starlette app exposing *mcp_server* over SSE.

    GET /sse opens the event stream; POST /messages/ delivers client messages.
    """
    transport = SseServerTransport('/messages/')

    async def handle_sse(request: Request) -> None:
        # NOTE: request._send is private Starlette API, required by
        # SseServerTransport.connect_sse(); may break across upgrades.
        async with transport.connect_sse(
            request.scope,
            request.receive,
            request._send,
        ) as streams:
            read_stream, write_stream = streams
            init_options = mcp_server.create_initialization_options()
            await mcp_server.run(read_stream, write_stream, init_options)

    routes = [
        Route('/sse', endpoint=handle_sse),
        Mount('/messages/', app=transport.handle_post_message),
    ]
    return Starlette(debug=debug, routes=routes)
if __name__ == '__main__':
    # Parse CLI options first, then start the ASGI server.
    parser = argparse.ArgumentParser(description='Run MCP SSE-based server')
    parser.add_argument('--host', default='0.0.0.0', help='host to bind to')
    parser.add_argument('--port', type=int, default=18080, help='port to listen to')
    args = parser.parse_args()
    # FastMCP keeps the underlying low-level Server in a private attribute.
    mcp_server = mcp._mcp_server
    starlette_app = create_starlette_app(mcp_server, debug=True)
    uvicorn.run(starlette_app, host=args.host, port=args.port)
2,
import argparse
import mcp
from mcp.server.fastmcp import FastMCP
from starlette.applications import Starlette
from mcp.server.sse import SseServerTransport
from starlette.requests import Request
from starlette.routing import Mount, Route
from mcp.server import Server
import logging
import uvicorn
# Server name, also reused as the logger name.
MCP_SERVER_NAME = 'Cg-Modulo-Mcp-Sse'
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
)
logger = logging.getLogger(MCP_SERVER_NAME)
# NOTE(review): rebinds `mcp`, shadowing the imported `mcp` module.
mcp = FastMCP(MCP_SERVER_NAME)
@mcp.tool()
def modulo(a: float, b: float) -> float:
    """Return a modulo b.

    Raises ValueError when b is zero.
    """
    if b == 0:
        # Bug fix: the original message 'division is not zero.' was garbled;
        # use a clear message consistent with the math server's divide tool.
        raise ValueError('Modulo by zero is not allowed.')
    return a % b
def create_starlette_app(mcp_server: Server, *, debug: bool = False) -> Starlette:
    """Wrap *mcp_server* in a Starlette ASGI app served over SSE."""
    sse_transport = SseServerTransport('/messages/')

    async def handle_sse(request: Request) -> None:
        # request._send is private Starlette API needed by connect_sse().
        async with sse_transport.connect_sse(
            request.scope,
            request.receive,
            request._send,
        ) as (reader, writer):
            await mcp_server.run(reader, writer, mcp_server.create_initialization_options())

    return Starlette(
        debug=debug,
        routes=[
            Route('/sse', endpoint=handle_sse),
            Mount('/messages/', app=sse_transport.handle_post_message),
        ],
    )
if __name__ == '__main__':
    # Parse CLI options first, then start the ASGI server.
    parser = argparse.ArgumentParser(description='Run MCP SSE-based server')
    parser.add_argument('--host', default='0.0.0.0', help='host to bind to')
    parser.add_argument('--port', type=int, default=18081, help='port to listen to')
    args = parser.parse_args()
    # FastMCP keeps the underlying low-level Server in a private attribute.
    mcp_server = mcp._mcp_server
    starlette_app = create_starlette_app(mcp_server, debug=True)
    uvicorn.run(starlette_app, host=args.host, port=args.port)
客户端
import asyncio
import json
import os, sys
from typing import List
from httpx import stream
from mcp import ClientSession
from mcp.client.sse import sse_client
from openai import AsyncOpenAI
class MCPClient:
    """Client that connects to several SSE MCP servers and drives them with an LLM."""

    def __init__(self, model_name, base_url, api_key, server_urls):
        """Store connection settings; sessions are opened in initialize_sessions()."""
        self.model_name = model_name
        self.server_urls = server_urls
        self.sessions = {}      # server_id -> (session, session_context, stream_context)
        self.tool_mapping = {}  # prefixed tool name -> (session, original tool name)
        self.client = AsyncOpenAI(base_url=base_url, api_key=api_key)

    async def initialize_sessions(self):
        """Open an SSE session to every server URL and index its tools.

        Tool names are prefixed with 'server{i}_' so identically named tools
        on different servers do not collide.
        """
        for i, server_url in enumerate(self.server_urls):
            server_id = f'server{i}'
            # Contexts are entered manually and kept so cleanup() can close
            # them later from the same task.
            stream_context = sse_client(url=server_url)
            streams = await stream_context.__aenter__()
            session_context = ClientSession(*streams)
            session = await session_context.__aenter__()
            await session.initialize()
            self.sessions[server_id] = (session, session_context, stream_context)
            response = await session.list_tools()
            for tool in response.tools:
                prefixed_name = f'{server_id}_{tool.name}'
                self.tool_mapping[prefixed_name] = (session, tool.name)
            print(f'已连接到{server_url},工具列表: {[tool.name for tool in response.tools]}')

    async def cleanup(self):
        """Close every session and its underlying SSE stream."""
        for server_id, (session, session_context, streams_context) in self.sessions.items():
            await session_context.__aexit__(None, None, None)
            await streams_context.__aexit__(None, None, None)
        print('所有会话已清理.')

    async def process_query(self, query):
        """Run *query* through the LLM, executing tool calls until none remain."""
        messages = [{'role': 'user', 'content': query}]
        # Advertise every tool from every connected server, prefixed per server.
        available_tools = []
        for server_id, (session, _, _) in self.sessions.items():
            response = await session.list_tools()
            for tool in response.tools:
                prefixed_name = f'{server_id}_{tool.name}'
                available_tools.append({
                    'type': 'function',
                    'function': {
                        'name': prefixed_name,
                        'description': tool.description,
                        'parameters': tool.inputSchema
                    },
                })
        response = await self.client.chat.completions.create(
            model=self.model_name,
            messages=messages,
            tools=available_tools,
        )
        final_text = []
        message = response.choices[0].message
        final_text.append(message.content or '')
        while message.tool_calls:
            for tool_call in message.tool_calls:
                prefixed_name = tool_call.function.name
                if prefixed_name in self.tool_mapping:
                    session, original_tool_name = self.tool_mapping[prefixed_name]
                    tool_args = json.loads(tool_call.function.arguments)
                    try:
                        result = await session.call_tool(original_tool_name, tool_args)
                        # Bug fix: the old code built a dict in the except branch
                        # and later read `.content` on it (AttributeError), and
                        # indexed result['content'] on the success object.
                        # Normalize both paths to a plain string up front.
                        result_content = str(result.content)
                    except Exception as e:
                        result_content = f'调用工具{original_tool_name}出错:{str(e)}'
                        print(result_content)
                    final_text.append(f'[调用工具{prefixed_name}参数:{tool_args}]')
                    final_text.append(f'工具结果:{result_content}')
                    # Record the assistant's tool call and the tool result in
                    # the strict OpenAI tool-calling message format.
                    messages.extend([
                        {
                            'role': 'assistant',
                            'tool_calls': [{
                                'id': tool_call.id,
                                'type': 'function',
                                'function': {
                                    'name': prefixed_name,
                                    # Bug fix: json.dump() writes to a file object
                                    # and raised TypeError here; dumps() returns
                                    # the JSON string.
                                    'arguments': json.dumps(tool_args)
                                }
                            }],
                        },
                        {
                            'role': 'tool',
                            'tool_call_id': tool_call.id,
                            'content': result_content
                        },
                    ])
                else:
                    print(f'工具{prefixed_name}未找到')
                    final_text.append(f'工具{prefixed_name}未找到')
            # Ask the LLM to continue with the tool results in context.
            response = await self.client.chat.completions.create(
                model=self.model_name,
                messages=messages,
                tools=available_tools,
            )
            message = response.choices[0].message
            if message.content:
                final_text.append(message.content)
        return '\n'.join(final_text)

    async def chat_loop(self):
        """Interactive chat loop; typing 'quit' exits."""
        print('\nMCP客户端已启动,输入你的问题,输入quit退出。')
        while True:
            try:
                query = input('\n问题: ').strip()
                if query.lower() == 'quit':
                    break
                response = await self.process_query(query)
                print('\n' + response)
            except Exception as e:
                print(f'\n发生错误:{str(e)}')
async def main():
    """Wire up the SSE client from environment variables and run the chat loop."""
    config = {
        'model_name': os.getenv('MODEL', 'qwen2.5:1.5b'),
        'base_url': os.getenv('BASE_URL', 'http://localhost:11434/v1/'),
        'api_key': os.getenv('OPENAI_API_KEY', 'ollama'),
        'server_urls': [
            'http://localhost:18080/sse',
            'http://localhost:18081/sse',
        ],
    }
    client = MCPClient(**config)
    try:
        await client.initialize_sessions()
        await client.chat_loop()
    finally:
        # Always close the SSE sessions, even after an error.
        await client.cleanup()
if __name__ == '__main__':
    # asyncio.run creates the event loop and drives main() to completion.
    asyncio.run(main())
三,截图
有时还是感觉不太好用,有不稳定的情况。
image.png
image.png