MCP Server Code Notes

I hand-rolled this code recently, and sure enough, after building it once my memory and understanding are on a different level.
Besides the local MCP server, this time I also built a remote MCP server over HTTP SSE.

The setup is ollama + qwen2.5. The tutorials online sometimes differ in the message formats they use, so you still have to experiment quite a bit in practice. Some people's code is well written, some is just repetitive, so learn to tell the difference.
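
For reference, the shape that varies most across tutorials is the OpenAI-style tool-calling message sequence, which the ollama /v1 endpoint accepts for tool-capable models like qwen2.5. A purely illustrative sketch; the tool name and arguments here are made up:

# Illustrative only: the assistant's tool call, then the 'tool' message with the result
messages = [
    {'role': 'user', 'content': 'What is the weather in Beijing?'},
    {   # what the model returns when it decides to call a tool
        'role': 'assistant',
        'content': None,
        'tool_calls': [{
            'id': 'call_0',
            'type': 'function',
            'function': {'name': 'query_weather', 'arguments': '{"city": "Beijing"}'},
        }],
    },
    {   # what the client appends after executing the tool, before asking the model again
        'role': 'tool',
        'tool_call_id': 'call_0',
        'content': 'Beijing, CN, 25.5°C, ...',
    },
]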

1. Local MCP stdio communication code

Contents of the .env environment variable file:

BASE_URL=http://localhost:11434/v1/
MODEL=qwen2.5:1.5b
OPENAI_API_KEY=ollama
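
Before wiring in MCP, it helps to sanity-check that the ollama OpenAI-compatible endpoint answers with these settings. A minimal sketch, assuming ollama is running locally and qwen2.5:1.5b has been pulled:

# Quick check of the ollama /v1 endpoint using the same .env values
import os
from dotenv import load_dotenv
from openai import OpenAI

load_dotenv()
client = OpenAI(api_key=os.getenv('OPENAI_API_KEY'), base_url=os.getenv('BASE_URL'))
reply = client.chat.completions.create(
    model=os.getenv('MODEL'),
    messages=[{'role': 'user', 'content': 'Say hello in one sentence.'}],
)
print(reply.choices[0].message.content)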

The local server, handling a simple request:

import json
import httpx
from typing import Any
from mcp.server.fastmcp import FastMCP

# Initialize the MCP server
mcp = FastMCP('WeatherServer')

# OpenWeather API configuration
OPENWEATHER_API_BASE = 'http://api.openweathermap.org/data/2.5/weather'
API_KEY = '581703c33f8a098307b8c7f6f219cbe5'
USER_AGENT = 'weather-app/1.0'

async def fetch_weather(city):
    """Fetch the current weather for a city from the OpenWeather API."""
    params = {
        'q': city,
        'appid': API_KEY,
        'units': 'metric',
        'lang': 'zh_cn'
    }
    headers = {'User-Agent': USER_AGENT}

    async with httpx.AsyncClient() as client:
        try:
            response = await client.get(OPENWEATHER_API_BASE, params=params, headers=headers, timeout=30.0)
            response.raise_for_status()
            return response.json()
        except httpx.HTTPStatusError as e:
            return {'error': f'HTTP error: {e.response.status_code}'}
        except Exception as e:
            return {'error': f'Request failed: {str(e)}'}

def format_weather(data):
    """Format the OpenWeather JSON response as a readable string."""
    if isinstance(data, str):
        try:
            data = json.loads(data)
        except Exception as e:
            return {'error': f'Could not parse weather data: {str(e)}'}
    if 'error' in data:
        return {'error': data['error']}

    city = data.get('name', 'unknown')
    country = data.get('sys', {}).get('country', 'unknown')
    temp = data.get('main', {}).get('temp', 'N/A')
    humidity = data.get('main', {}).get('humidity', 'N/A')
    wind_speed = data.get('wind', {}).get('speed', 'N/A')

    weather_list = data.get('weather', [{}])
    description = weather_list[0].get('description', 'unknown')

    return (
        f'{city}, {country}\n'
        f'Temperature: {temp}°C\n'
        f'Humidity: {humidity}%\n'
        f'Wind speed: {wind_speed}m/s\n'
        f'Weather: {description}\n'
    )

@mcp.tool()
def write_to_text(file_name, content):
    """Write the given content to a text file."""
    try:
        with open(file_name, 'w', encoding='utf-8') as f:
            f.write(content)
        return f'Successfully wrote to file {file_name}'
    except Exception as e:
        return f'Failed to write file: {str(e)}'

@mcp.tool()
async def query_weather(city):
    """Query the weather for a city (currently returns canned data for testing)."""
    # Real implementation:
    # data = await fetch_weather(city)
    # return format_weather(data)
    return (
        f'{city}\n'
        'Temperature: 25.5°C\n'
        'Humidity: 30%\n'
        'Wind speed: 3.0m/s\n'
        'Weather: clear autumn day, good for going out\n'
    )

if __name__ == '__main__':
    mcp.run(transport='stdio')
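
Before hooking this server up to a client, the weather helpers can be tested on their own, outside MCP. A minimal sketch, assuming the server above is saved as weather_server.py (a file name I'm assuming) and the OpenWeather API key is valid:

# Standalone test of the weather helpers, bypassing MCP entirely
import asyncio
from weather_server import fetch_weather, format_weather  # hypothetical module name

async def main():
    data = await fetch_weather('Beijing')
    print(format_weather(data))

asyncio.run(main())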

The local client is the harder part, because it is the client that spawns the server process over stdio.

import asyncio
import os
import json
import sys
from typing import Optional
from contextlib import AsyncExitStack

from openai import OpenAI
from dotenv import load_dotenv

from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client

load_dotenv()

class MCPClient:
    def __init__(self):
        """初始化MCP客户端"""
        self.stdio = None
        self.write = None
        self.openai_api_key = os.getenv('OPENAI_API_KEY')
        self.base_url = os.getenv('BASE_URL')
        self.model = os.getenv('MODEL')

        if not self.openai_api_key:
            raise ValueError('API key not found; set OPENAI_API_KEY in the .env file')

        self.client = OpenAI(api_key=self.openai_api_key, base_url=self.base_url)
        self.session: Optional[ClientSession] = None
        self.exit_stack = AsyncExitStack()

    def get_response(self, messages, tools):
        response = self.client.chat.completions.create(
            model=self.model,
            max_tokens=1000,
            messages=messages,
            tools=tools,
        )
        return response

    async def get_tools(self):
        """Convert MCP tool metadata to the OpenAI function-calling tool format."""
        response = await self.session.list_tools()
        available_tools = [{
            'type': 'function',
            'function': {
                'name': tool.name,
                'description': tool.description,
                'parameters': tool.inputSchema
            }
        } for tool in response.tools]
        return available_tools

    async def process_query(self, query):
        """Process a query with the LLM and call available MCP tools (function calling)."""
        messages = [
            {
                'role': 'user',
                'content': query
            }
        ]
        available_tools = await self.get_tools()
        response = self.get_response(messages, available_tools)
        tool_results = []
        final_text = []
        for choice in response.choices:
            message = choice.message
            is_function_call = message.tool_calls

            if not is_function_call:
                final_text.append(message.content)
            else:
                tool_name = message.tool_calls[0].function.name
                tool_args = json.loads(message.tool_calls[0].function.arguments)
                print(f'\n\n[Calling tool {tool_name} with args {tool_args}]\n\n')
                try:
                    result = await self.session.call_tool(tool_name, tool_args)
                    print(f'\nTool call returned: {result.content}')
                    content = None
                    if hasattr(result, 'content'):
                        if isinstance(result.content, list):
                            content = '\n'.join(str(item) for item in result.content)
                            print(f'Converted list result to string: {content}')
                        else:
                            content = str(result.content)
                    else:
                        content = str(result)
                    tool_results.append({'call': tool_name, 'result': content})
                    final_text.append(f'[Called tool {tool_name} with args {json.dumps(tool_args, ensure_ascii=False)}]')

                    if message.content:
                        messages.append({
                            'role': 'assistant',
                            'content': message.content
                        })
                    messages.append({
                        'role': 'user',
                        'content': content
                    })
                    print('Getting the next LLM response...')
                    response = self.get_response(messages, available_tools)
                    content = response.choices[0].message.content or ''
                    final_text.append(content)
                except Exception as e:
                    print(f'Tool call failed: {str(e)}')
        return '\n'.join(final_text)

    async def connect_to_server(self, server_script_path):
        """Connect to the MCP server and list its available tools."""
        is_python = server_script_path.endswith('.py')
        is_js = server_script_path.endswith('.js')
        if not (is_js or is_python):
            raise ValueError('Server script must be a .py or .js file')

        command = 'python' if is_python else 'node'
        server_params = StdioServerParameters(
            command=command,
            args=[server_script_path],
            env=None
        )

        self.stdio, self.write = await self.exit_stack.enter_async_context(stdio_client(server_params))
        self.session = await self.exit_stack.enter_async_context(ClientSession(self.stdio, self.write))
        await self.session.initialize()

        response = await self.session.list_tools()
        tools = response.tools
        print('\nConnected to server. Available tools:', [tool.name for tool in tools])

    async def chat_loop(self):
        """Run the interactive chat loop."""
        print('\nMCP client started. Type quit to exit.')

        while True:
            try:
                query = input('\nQuery: ').strip()
                if query.lower() == 'quit':
                    break
                print('\nProcessing query...')
                response = await self.process_query(query)
                print(f'\n[{self.model}] {response}')
            except Exception as e:
                print(f'\nError: {str(e)}')

    async def cleanup(self):
        await self.exit_stack.aclose()

async def main():
    if len(sys.argv) < 2:
        print('Usage: python client.py <path_to_server_script>')
        sys.exit(1)

    client = MCPClient()
    try:
        await client.connect_to_server(sys.argv[1])
        await client.chat_loop()
    finally:
        await client.cleanup()

if __name__ == '__main__':
    asyncio.run(main())
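
One note on the client above: result.content returned by call_tool is a list of content objects (usually TextContent), so str(item) prints the whole object rather than just its text. A tidier extraction helper, as a minimal sketch:

# Extract plain text from an MCP CallToolResult; TextContent items carry a .text field
def result_to_text(result) -> str:
    parts = []
    for item in result.content:
        text = getattr(item, 'text', None)
        parts.append(text if text is not None else str(item))
    return '\n'.join(parts)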

2. SSE communication code

This time there are two servers: one app on port 18080 and one on port 18081.
1. The math server (port 18080)

import argparse
import logging

import uvicorn
from starlette.applications import Starlette
from starlette.requests import Request
from starlette.routing import Mount, Route

from mcp.server import Server
from mcp.server.fastmcp import FastMCP
from mcp.server.sse import SseServerTransport

MCP_SERVER_NAME = 'Cg-Math-Mcp-Sse'

logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
)

logger = logging.getLogger(MCP_SERVER_NAME)

mcp = FastMCP(MCP_SERVER_NAME)

@mcp.tool()
def add(a: float, b: float) -> float:
    """Add two numbers."""
    return a + b

@mcp.tool()
def subtract(a: float, b: float) -> float:
    """Subtract b from a."""
    return a - b

@mcp.tool()
def multiply(a: float, b: float) -> float:
    """Multiply two numbers."""
    return a * b

@mcp.tool()
def divide(a: float, b: float) -> float:
    """Divide a by b, rejecting division by zero."""
    if b == 0:
        raise ValueError('Division by zero is not allowed.')
    return a / b

def create_starlette_app(mcp_server: Server, *, debug: bool = False) -> Starlette:
    """Wrap the MCP server in a Starlette app exposing an SSE endpoint."""
    sse = SseServerTransport('/messages/')

    async def handle_sse(request: Request) -> None:
        async with sse.connect_sse(
            request.scope,
            request.receive,
            request._send,
        ) as (read_stream, write_stream):
            await mcp_server.run(
                read_stream,
                write_stream,
                mcp_server.create_initialization_options(),
            )
    return Starlette(
        debug=debug,
        routes=[
            Route('/sse', endpoint=handle_sse),
            Mount('/messages/', app=sse.handle_post_message),
        ],
    )

if __name__ == '__main__':
    mcp_server = mcp._mcp_server
    parser = argparse.ArgumentParser(description='Run MCP SSE-based server')
    parser.add_argument('--host', default='0.0.0.0', help='host to bind to')
    parser.add_argument('--port', type=int, default=18080, help='port to listen to')
    args = parser.parse_args()

    starlette_app = create_starlette_app(mcp_server, debug=True)
    uvicorn.run(starlette_app, host=args.host, port=args.port)
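
With the math server running, a quick way to confirm the SSE endpoint is reachable and the tools are registered is to list them over the same client API that the full client below uses. A minimal sketch, assuming the server is already listening on port 18080:

# Connect over SSE, initialize an MCP session, and print the tool names
import asyncio
from mcp import ClientSession
from mcp.client.sse import sse_client

async def main():
    async with sse_client(url='http://localhost:18080/sse') as (read_stream, write_stream):
        async with ClientSession(read_stream, write_stream) as session:
            await session.initialize()
            tools = await session.list_tools()
            print([tool.name for tool in tools.tools])

asyncio.run(main())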

2. The modulo server (port 18081)

import argparse
import logging

import uvicorn
from starlette.applications import Starlette
from starlette.requests import Request
from starlette.routing import Mount, Route

from mcp.server import Server
from mcp.server.fastmcp import FastMCP
from mcp.server.sse import SseServerTransport

MCP_SERVER_NAME = 'Cg-Modulo-Mcp-Sse'

logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
)

logger = logging.getLogger(MCP_SERVER_NAME)

mcp = FastMCP(MCP_SERVER_NAME)

@mcp.tool()
def modulo(a: int, b: int) -> int:
    """Return a modulo b, rejecting modulo by zero."""
    if b == 0:
        raise ValueError('Modulo by zero is not allowed.')
    return a % b


def create_starlette_app(mcp_server: Server, *, debug: bool = False) -> Starlette:
    """Wrap the MCP server in a Starlette app exposing an SSE endpoint."""
    sse = SseServerTransport('/messages/')

    async def handle_sse(request: Request) -> None:
        async with sse.connect_sse(
            request.scope,
            request.receive,
            request._send,
        ) as (read_stream, write_stream):
            await mcp_server.run(
                read_stream,
                write_stream,
                mcp_server.create_initialization_options(),
            )
    return Starlette(
        debug=debug,
        routes=[
            Route('/sse', endpoint=handle_sse),
            Mount('/messages/', app=sse.handle_post_message),
        ],
    )

if __name__ == '__main__':
    mcp_server = mcp._mcp_server
    parser = argparse.ArgumentParser(description='Run MCP SSE-based server')
    parser.add_argument('--host', default='0.0.0.0', help='host to bind to')
    parser.add_argument('--port', type=int, default=18081, help='port to listen to')
    args = parser.parse_args()

    starlette_app = create_starlette_app(mcp_server, debug=True)
    uvicorn.run(starlette_app, host=args.host, port=args.port)

The client:

import asyncio
import json
import os

from mcp import ClientSession
from mcp.client.sse import sse_client
from openai import AsyncOpenAI

class MCPClient:
    def __init__(self, model_name, base_url, api_key, server_urls):
        self.model_name = model_name
        self.server_urls = server_urls
        self.sessions = {}      # server_id -> (session, session_context, stream_context)
        self.tool_mapping = {}  # prefixed tool name -> (session, original tool name)
        self.client = AsyncOpenAI(base_url=base_url, api_key=api_key)

    async def initialize_sessions(self):
        """Open an SSE connection and MCP session to each server and index its tools."""
        for i, server_url in enumerate(self.server_urls):
            server_id = f'server{i}'
            stream_context = sse_client(url=server_url)
            streams = await stream_context.__aenter__()
            session_context = ClientSession(*streams)
            session = await session_context.__aenter__()
            await session.initialize()

            self.sessions[server_id] = (session, session_context, stream_context)

            response = await session.list_tools()
            for tool in response.tools:
                prefixed_name = f'{server_id}_{tool.name}'
                self.tool_mapping[prefixed_name] = (session, tool.name)
            print(f'Connected to {server_url}, tools: {[tool.name for tool in response.tools]}')

    async def cleanup(self):
        """Close all MCP sessions and SSE streams."""
        for server_id, (session, session_context, stream_context) in self.sessions.items():
            await session_context.__aexit__(None, None, None)
            await stream_context.__aexit__(None, None, None)
        print('All sessions cleaned up.')

    async def process_query(self, query):
        """Send one query to the LLM and execute any tool calls across the connected servers."""
        messages = [{'role': 'user', 'content': query}]

        available_tools = []
        for server_id, (session, _, _) in self.sessions.items():
            response = await session.list_tools()
            for tool in response.tools:
                prefixed_name = f'{server_id}_{tool.name}'
                available_tools.append({
                    'type': 'function',
                    'function': {
                        'name': prefixed_name,
                        'description': tool.description,
                        'parameters': tool.inputSchema
                    },
                })
        response = await self.client.chat.completions.create(
            model=self.model_name,
            messages=messages,
            tools=available_tools,
        )
        final_text = []
        message = response.choices[0].message
        final_text.append(message.content or '')

        while message.tool_calls:
            for tool_call in message.tool_calls:
                prefixed_name = tool_call.function.name
                if prefixed_name in self.tool_mapping:
                    session, original_tool_name = self.tool_mapping[prefixed_name]
                    tool_args = json.loads(tool_call.function.arguments)
                    try:
                        result = await session.call_tool(original_tool_name, tool_args)
                        result_content = result.content
                    except Exception as e:
                        result_content = f'Error calling tool {original_tool_name}: {str(e)}'
                        print(result_content)
                    final_text.append(f'[Called tool {prefixed_name} with args {tool_args}]')
                    final_text.append(f'Tool result: {result_content}')
                    messages.extend([
                        {
                            'role': 'assistant',
                            'tool_calls': [{
                                'id': tool_call.id,
                                'type': 'function',
                                'function': {
                                    'name': prefixed_name,
                                    'arguments': json.dumps(tool_args)
                                }
                            }],
                        },
                        {
                            'role': 'tool',
                            'tool_call_id': tool_call.id,
                            'content': str(result_content)
                        },
                    ])
                else:
                    print(f'Tool {prefixed_name} not found')
                    final_text.append(f'Tool {prefixed_name} not found')
            response = await self.client.chat.completions.create(
                model=self.model_name,
                messages=messages,
                tools=available_tools,
            )
            message = response.choices[0].message
            if message.content:
                final_text.append(message.content)
        return '\n'.join(final_text)

    async def chat_loop(self):
        """Run the interactive chat loop."""
        print('\nMCP client started. Type your question, or quit to exit.')
        while True:
            try:
                query = input('\nQuestion: ').strip()
                if query.lower() == 'quit':
                    break
                response = await self.process_query(query)
                print('\n' + response)
            except Exception as e:
                print(f'\nError: {str(e)}')

async def main():
    model_name = os.getenv('MODEL', 'qwen2.5:1.5b')
    base_url = os.getenv('BASE_URL', 'http://localhost:11434/v1/')
    api_key = os.getenv('OPENAI_API_KEY', 'ollama')

    server_urls = [
        'http://localhost:18080/sse',
        'http://localhost:18081/sse',
    ]

    client = MCPClient(
        model_name=model_name,
        base_url=base_url,
        api_key=api_key,
        server_urls=server_urls
    )

    try:
        await client.initialize_sessions()
        await client.chat_loop()
    finally:
        await client.cleanup()

if __name__ == '__main__':
    asyncio.run(main())
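
For a non-interactive smoke test, the same class can be driven with a single query instead of the chat loop. A minimal sketch, assuming both SSE servers are already running and the client above is saved as sse_client.py (a module name I'm assuming):

# One-shot query instead of the interactive chat loop
import asyncio
from sse_client import MCPClient  # hypothetical module name for the client above

async def demo():
    client = MCPClient(
        model_name='qwen2.5:1.5b',
        base_url='http://localhost:11434/v1/',
        api_key='ollama',
        server_urls=['http://localhost:18080/sse', 'http://localhost:18081/sse'],
    )
    try:
        await client.initialize_sessions()
        print(await client.process_query('What is (17 + 5) mod 7?'))
    finally:
        await client.cleanup()

asyncio.run(demo())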

3. Screenshots

It still doesn't always feel smooth to use; there are occasional bouts of instability.

