MCP 是一个由 Anthropic 发起并开源的协议,旨在标准化 AI 模型(尤其是 LLM)与外部数据源和工具交互的方式。可以将其理解为 AI 应用的 "USB-C 接口",让不同的 LLM 能够以统一的方式连接和使用各种外部资源。高德地图已经推出了 MCP 服务器的解决方案,这意味着 DeepSeek 等 LLM 能够实现对地图的操作,包括路径规划、地点搜索等,这将进一步地扩展 LLM 的能力,不久将会出现更多基于 DeepSeek 的导航应用。未来也会出现更多 MCP 服务,让 AI 操作更多的软件,实现更多的功能。
@mcp.tool()
def get_time() -> str:
    """Return the current system time as a string."""
    return str(datetime.datetime.now())


@mcp.tool()
def calculate_bmi(weight_kg: float, height_m: float) -> float:
    """Compute BMI from weight (kg) and height (m)."""
    return weight_kg / (height_m ** 2)
服务器中可用的工具: ['get_time', 'calculate_bmi']
MCP 客户端启动
输入 /bye 退出
>>> 今天天气怎么样
我无法获取天气信息,因为我没有相关的工具。如果您需要了解天气情况,可以查看天气预报应用或网站。
>>> 现在几点了
```json
{
"tool": "get_time", "arguments": {}
}
```
[提示]:正在执行函数
[执行结果]: meta=None content=[TextContent(type='text', text='2025-04-06 22:59:58.898244', annotations=None)] isError=False
现在的时间是2025年4月6日晚上10点59分。
>>> 我想知道我的身高和体重是否符合标准
请告诉我您的体重(kg)和身高(m),我可以帮您计算BMI指数来判断是否符合标准。例如:"我身高1.75米,体重70公斤"。
>>> 身高180,体重80
```json
{
"tool": "calculate_bmi",
"arguments": {
"weight_kg": 80,
"height_m": 1.80
}
}
```
[提示]:正在执行函数
[执行结果]: meta=None content=[TextContent(type='text', text='24.691358024691358', annotations=None)] isError=False
您的BMI指数是24.69,属于正常范围(18.5-24.9为正常)。您的身高体重比例是健康的。
1
环境配置
pip install uv
uv add <依赖名称>
uv pip install <依赖名称>
uv run 文件名.py
uv init mcp-server-demo
cd mcp-server-demo
uv add "mcp[cli]"
pip install "mcp[cli]"
2
MCP服务端
from mcp.server.fastmcp import FastMCP
import datetime

# Create the MCP server instance (default name/settings).
mcp = FastMCP()


@mcp.tool()
def get_time() -> str:
    """Return the current system time as a string."""
    return str(datetime.datetime.now())


@mcp.tool()
def calculate_bmi(weight_kg: float, height_m: float) -> float:
    """Compute BMI from weight (kg) and height (m)."""
    return weight_kg / (height_m ** 2)


if __name__ == "__main__":
    # Serve over stdio so a client can spawn this script as a subprocess.
    mcp.run(transport='stdio')
3
MCP客户端
# Methods of MCPClient (excerpt from the full client below).

async def connect_to_server(self, server_script_path: str):
    """Connect to the MCP server and register its tools in the system prompt.

    Args:
        server_script_path: Path to the server script to spawn over stdio.
    """
    server_params = StdioServerParameters(
        command="python", args=[server_script_path], env=None
    )
    self.stdio, self.write = await self.exit_stack.enter_async_context(
        stdio_client(server_params)
    )
    self.session = await self.exit_stack.enter_async_context(
        ClientSession(self.stdio, self.write)
    )
    await self.session.initialize()
    # List available tools
    response = await self.session.list_tools()
    tools = response.tools
    print("\n服务器中可用的工具:", [tool.name for tool in tools])
    tools_description = "\n".join([format_tools_for_llm(tool) for tool in tools])
    # System prompt steering the LLM to answer either directly or with a
    # bare JSON tool-call object that execute_tool() can parse.
    system_prompt = (
        "You are a helpful assistant with access to these tools:\n\n"
        f"{tools_description}\n"
        "Choose the appropriate tool based on the user's question. "
        "If no tool is needed, reply directly.\n\n"
        "IMPORTANT: When you need to use a tool, you must ONLY respond with "
        "the exact JSON object format below, nothing else:\n"
        "{\n"
        '    "tool": "tool-name",\n'
        '    "arguments": {\n'
        '        "argument-name": "value"\n'
        "    }\n"
        "}\n\n"
        "After receiving a tool's response:\n"
        "1. Transform the raw data into a natural, conversational response\n"
        "2. Keep responses concise but informative\n"
        "3. Focus on the most relevant information\n"
        "4. Use appropriate context from the user's question\n"
        "5. Avoid simply repeating the raw data\n\n"
        "Please use only the tools that are explicitly defined above."
    )
    self.messages.append({"role": "system", "content": system_prompt})

async def chat_loop(self):
    """Run an interactive chat loop until the user types /bye."""
    print("MCP 客户端启动")
    print("输入 /bye 退出")
    while True:
        prompt = input(">>> ").strip()
        if prompt.lower() == '/bye':
            break
        llm_response = await self.chat(prompt)
        print(llm_response)
        result = await self.execute_tool(llm_response)
        # A changed result means a tool ran; feed its output back to the LLM.
        if result != llm_response:
            self.messages.append({"role": "assistant", "content": llm_response})
            final_response = await self.chat(result, "system")
            print(final_response)
            self.messages.append({"role": "assistant", "content": final_response})
        else:
            self.messages.append({"role": "assistant", "content": llm_response})
完整代码
from mcp.server.fastmcp import FastMCP
import datetime

# Create the MCP server instance (default name/settings).
mcp = FastMCP()


@mcp.tool()
def get_time() -> str:
    """Return the current system time as a string."""
    return str(datetime.datetime.now())


@mcp.tool()
def calculate_bmi(weight_kg: float, height_m: float) -> float:
    """Compute BMI from weight (kg) and height (m)."""
    return weight_kg / (height_m ** 2)


if __name__ == "__main__":
    # Serve over stdio so a client can spawn this script as a subprocess.
    mcp.run(transport='stdio')
import asyncio
import sys
from typing import Optional
from contextlib import AsyncExitStack

from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client
from dotenv import load_dotenv
from openai import AsyncOpenAI, OpenAI
import json

load_dotenv()  # load environment variables from .env


def format_tools_for_llm(tool) -> str:
    """Format one tool for inclusion in the LLM system prompt.

    Returns:
        A text block with the tool's name, description, and arguments.
    """
    args_desc = []
    if "properties" in tool.inputSchema:
        for param_name, param_info in tool.inputSchema["properties"].items():
            arg_desc = (
                f"- {param_name}: {param_info.get('description', 'No description')}"
            )
            if param_name in tool.inputSchema.get("required", []):
                arg_desc += " (required)"
            args_desc.append(arg_desc)
    return (
        f"Tool: {tool.name}\n"
        f"Description: {tool.description}\n"
        f"Arguments:\n{chr(10).join(args_desc)}"
    )


class MCPClient:
    """Minimal MCP client that lets a DeepSeek model call server tools."""

    def __init__(self):
        self.session: Optional[ClientSession] = None
        self.exit_stack = AsyncExitStack()
        # NOTE(review): load_dotenv() is called above but the key is hardcoded;
        # prefer reading it from the environment (e.g. os.getenv("DEEPSEEK_API_KEY")).
        self.client = AsyncOpenAI(
            base_url="https://api.deepseek.com",
            api_key="<你的apikey>",
        )
        self.model = "deepseek-chat"
        self.messages = []

    async def connect_to_server(self, server_script_path: str):
        """Connect to the MCP server and register its tools in the system prompt.

        Args:
            server_script_path: Path to the server script to spawn over stdio.
        """
        server_params = StdioServerParameters(
            command="python", args=[server_script_path], env=None
        )
        self.stdio, self.write = await self.exit_stack.enter_async_context(
            stdio_client(server_params)
        )
        self.session = await self.exit_stack.enter_async_context(
            ClientSession(self.stdio, self.write)
        )
        await self.session.initialize()
        # List available tools
        response = await self.session.list_tools()
        tools = response.tools
        print("\n服务器中可用的工具:", [tool.name for tool in tools])
        tools_description = "\n".join([format_tools_for_llm(tool) for tool in tools])
        # System prompt steering the LLM to answer either directly or with a
        # bare JSON tool-call object that execute_tool() can parse.
        system_prompt = (
            "You are a helpful assistant with access to these tools:\n\n"
            f"{tools_description}\n"
            "Choose the appropriate tool based on the user's question. "
            "If no tool is needed, reply directly.\n\n"
            "IMPORTANT: When you need to use a tool, you must ONLY respond with "
            "the exact JSON object format below, nothing else:\n"
            "{\n"
            '    "tool": "tool-name",\n'
            '    "arguments": {\n'
            '        "argument-name": "value"\n'
            "    }\n"
            "}\n\n"
            '"```json" is not allowed\n'
            "After receiving a tool's response:\n"
            "1. Transform the raw data into a natural, conversational response\n"
            "2. Keep responses concise but informative\n"
            "3. Focus on the most relevant information\n"
            "4. Use appropriate context from the user's question\n"
            "5. Avoid simply repeating the raw data\n\n"
            "Please use only the tools that are explicitly defined above."
        )
        self.messages.append({"role": "system", "content": system_prompt})

    async def chat(self, prompt, role="user"):
        """Append a message with the given role and return the LLM's reply."""
        self.messages.append({"role": role, "content": prompt})
        response = await self.client.chat.completions.create(
            model=self.model,
            messages=self.messages,
        )
        return response.choices[0].message.content

    async def execute_tool(self, llm_response: str):
        """Process the LLM response and execute tools if needed.

        Args:
            llm_response: The response from the LLM.

        Returns:
            The result of tool execution, or the original response when the
            reply is not a tool call.
        """
        try:
            # Strip optional markdown fencing before parsing the tool call.
            tool_call = json.loads(
                llm_response.replace("```json\n", "").replace("```", "")
            )
            if "tool" in tool_call and "arguments" in tool_call:
                response = await self.session.list_tools()
                tools = response.tools
                if any(tool.name == tool_call["tool"] for tool in tools):
                    try:
                        print("[提示]:正在执行函数")
                        result = await self.session.call_tool(
                            tool_call["tool"], tool_call["arguments"]
                        )
                        # Defensive branch: call_tool returns a CallToolResult,
                        # not a dict, so this progress path is effectively dead
                        # code today — TODO confirm against the mcp SDK.
                        if isinstance(result, dict) and "progress" in result:
                            progress = result["progress"]
                            total = result["total"]
                            percentage = (progress / total) * 100
                            print(f"Progress: {progress}/{total} ({percentage:.1f}%)")
                        print(f"[执行结果]: {result}")
                        return f"Tool execution result: {result}"
                    except Exception as e:
                        error_msg = f"Error executing tool: {str(e)}"
                        print(error_msg)
                        return error_msg
                return f"No server found with tool: {tool_call['tool']}"
            return llm_response
        except json.JSONDecodeError:
            # Plain-text reply, not a tool call — pass it through unchanged.
            return llm_response

    async def chat_loop(self):
        """Run an interactive chat loop until the user types /bye."""
        print("MCP 客户端启动")
        print("输入 /bye 退出")
        while True:
            prompt = input(">>> ").strip()
            if prompt.lower() == '/bye':
                break
            llm_response = await self.chat(prompt)
            print(llm_response)
            result = await self.execute_tool(llm_response)
            # A changed result means a tool ran; feed its output back to the LLM.
            if result != llm_response:
                self.messages.append({"role": "assistant", "content": llm_response})
                final_response = await self.chat(result, "system")
                print(final_response)
                self.messages.append({"role": "assistant", "content": final_response})
            else:
                self.messages.append({"role": "assistant", "content": llm_response})


async def main():
    if len(sys.argv) < 2:
        print("Usage: uv run client.py <path_to_server_script>")
        sys.exit(1)
    client = MCPClient()
    try:
        await client.connect_to_server(sys.argv[1])
        await client.chat_loop()
    finally:
        # Close the stdio transport and session cleanly on exit.
        await client.exit_stack.aclose()


if __name__ == "__main__":
    asyncio.run(main())

# Run with: uv run client.py ./server.py
| 欢迎光临 链载Ai (https://www.lianzai.com/) | Powered by Discuz! X3.5 |