您现在的位置是:网站首页> C#技术
C# AI编程技术收集
- C#技术
- 2025-10-13
- 403人已阅读
C# AI编程技术收集
三个MCP分别以 stdio、SSE、streamable 三种方式部署,使大模型能够调用它们
C#使用LangChain调用大模型包括MCP
- 实现MCP Server:可以基于C#的gRPC或HTTP框架(如ASP.NET Core)实现MCP Server,定义工具接口和实现逻辑。
- 实现MCP Client:使用C#的gRPC或HTTP客户端库与MCP Server通信,调用工具。
- 集成LangChain.NET:将MCP Client与LangChain.NET结合,创建自定义工具类,使其能够在LangChain.NET中使用。
- 调用大模型:使用LangChain.NET的模型调用功能,结合MCP工具进行复杂任务处理。
以下是一个简化的示例代码结构:
// MCP Server示例(仅供参考)
using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Hosting;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Hosting;
// ASP.NET Core startup that wires the gRPC pipeline for the MCP server.
public class Startup
{
    // Register the gRPC framework services with the DI container.
    public void ConfigureServices(IServiceCollection services) => services.AddGrpc();

    // Map the MCP gRPC service endpoint onto the request pipeline.
    public void Configure(IApplicationBuilder app, IWebHostEnvironment env)
    {
        app.UseRouting();
        app.UseEndpoints(endpoints => endpoints.MapGrpcService<MCPService>());
    }
}
// gRPC implementation of the MCP tool-execution endpoint.
public class MCPService : MCP.MCPBase
{
    // Handles one tool invocation and returns its result.
    // (The original used `async` with no await; Task.FromResult is equivalent.)
    public override Task<ToolResponse> ExecuteTool(ToolRequest request, ServerCallContext context)
    {
        // Tool execution logic goes here; a placeholder result is returned.
        return Task.FromResult(new ToolResponse { Result = "工具执行结果" });
    }
}
// MCP Client示例(仅供参考)
using Grpc.Net.Client;
// Thin wrapper around the generated MCP gRPC stub.
// Owns the underlying channel, so callers should dispose it when done.
public class MCPClient : System.IDisposable
{
    private readonly GrpcChannel _channel;
    private readonly MCP.MCPClient _client;

    // serverAddress: base address of the MCP gRPC server, e.g. "https://host:port".
    public MCPClient(string serverAddress)
    {
        _channel = GrpcChannel.ForAddress(serverAddress);
        _client = new MCP.MCPClient(_channel);
    }

    // Invokes the named tool on the server and returns its textual result.
    public async Task<string> ExecuteToolAsync(string toolName, Dictionary<string, object> parameters)
    {
        var request = new ToolRequest
        {
            ToolName = toolName,
            Parameters = { parameters }
        };
        var response = await _client.ExecuteToolAsync(request);
        return response.Result;
    }

    // Fix: the original leaked the GrpcChannel; disposing releases the connection.
    public void Dispose() => _channel.Dispose();
}
// LangChain.NET集成示例(仅供参考)
using LangChain.NET;
public class Program
{
    // Demo entry point: wires an LLM, an MCP client and two MCP-backed tools
    // into a ReAct agent, then runs a single query.
    public static async Task Main(string[] args)
    {
        // Initialize the LangChain model.
        var model = new OpenAIModel("your_openai_api_key");

        // Initialize the MCP client.
        var client = new MCPClient("https://mcp-server-address");

        // Expose two remote MCP tools to the agent.
        var tools = new List<ITool>
        {
            new MCPTool(client, "math", "执行数学计算"),
            new MCPTool(client, "weather", "获取天气信息")
        };

        // Build the agent and execute one query.
        var agent = new ReactAgent(model, tools);
        var answer = await agent.AskAsync("请计算3+5的值");
        Console.WriteLine(answer);
    }
}
// Adapts a remote MCP tool so it can be used as a LangChain ITool.
public class MCPTool : ITool
{
    private readonly MCPClient _mcpClient;
    private readonly string _toolName;
    private readonly string _description;

    // Human-readable description surfaced to the agent.
    public string Description => _description;

    public MCPTool(MCPClient mcpClient, string toolName, string description)
    {
        _mcpClient = mcpClient;
        _toolName = toolName;
        _description = description;
    }

    // Forwards the input to the remote tool under the "input" parameter key.
    public async Task<object> ExecuteAsync(object input)
    {
        var args = new Dictionary<string, object> { { "input", input } };
        return await _mcpClient.ExecuteToolAsync(_toolName, args);
    }
}
大模型调用MCP的代码
using System;
using System.Collections.Generic;
using System.Net.Http;
using System.Text;
using System.Text.Json;
using System.Threading.Tasks;
using System.IO;
namespace EnhancedOllamaMcpExample
{
// Ollama API请求模型
// Request payload for Ollama's /api/generate endpoint.
// NOTE(review): System.Text.Json serializes these PascalCase names as-is by
// default, while the Ollama API expects lowercase keys ("model", "prompt", ...);
// serialize with a camel-case naming policy or [JsonPropertyName] attributes.
public class OllamaRequest
{
    public string Model { get; set; }          // Model tag, e.g. "qwen".
    public string Prompt { get; set; }         // Full prompt text sent to the model.
    public bool Stream { get; set; } = false;  // false = one complete response.
    public Dictionary<string, object> Options { get; set; } = new Dictionary<string, object>(); // Sampling options such as "temperature".
}
// Ollama API响应模型
// Response payload from Ollama's /api/generate endpoint.
// NOTE(review): Ollama returns lowercase/snake_case keys ("model", "created_at",
// "response", "done"); case-sensitive default binding will leave these null —
// deserialize with PropertyNameCaseInsensitive = true.
public class OllamaResponse
{
    public string Model { get; set; }       // Echo of the model tag used.
    public string Created_At { get; set; }  // Timestamp string from "created_at".
    public string Response { get; set; }    // Generated text.
    public bool Done { get; set; }          // True when generation is complete.
}
// MCP指令模型
// One tool call extracted from the model's <mcp>...</mcp> JSON block.
public class McpCommand
{
    public string Action { get; set; }  // Operation name, e.g. "readfile"; the prompt emits lowercase "action".
    public Dictionary<string, string> Parameters { get; set; } = new Dictionary<string, string>(); // Named string arguments for the action.
}
class EnhancedOllamaMcpClient
{
private readonly HttpClient _httpClient;      // Reused HTTP client bound to the Ollama base address.
private readonly string _ollamaBaseUrl;       // e.g. "http://localhost:11434".
private readonly string _modelName;           // Ollama model tag to query.
private readonly string _mcpInstructions;     // MCP usage instructions appended to every prompt.
// Creates a client bound to a local Ollama server and prepares the MCP usage
// instructions that teach the model how to emit <mcp>...</mcp> tool calls.
// ollamaBaseUrl: Ollama HTTP endpoint; modelName: model tag to query.
public EnhancedOllamaMcpClient(string ollamaBaseUrl = "http://localhost:11434", string modelName = "qwen")
{
    _ollamaBaseUrl = ollamaBaseUrl;
    _modelName = modelName;
    _httpClient = new HttpClient();
    _httpClient.BaseAddress = new Uri(_ollamaBaseUrl);
    // Detailed MCP usage instructions telling the model how to call tools.
    // The verbatim string below is part of the runtime prompt — kept byte-for-byte.
    _mcpInstructions = @"
当你需要完成以下任务时,请使用MCP指令:
1. 读取本地文件内容
2. 写入内容到本地文件
3. 获取系统信息
4. 执行其他需要外部资源的操作
MCP指令格式:
<mcp>
[
{
""action"": ""操作名称"",
""parameters"": {
""参数1"": ""值1"",
""参数2"": ""值2""
}
}
]
</mcp>
支持的操作及参数:
- readfile: {""path"": ""文件路径""}
- writefile: {""path"": ""文件路径"", ""content"": ""文件内容""}
- getsysteminfo: 无参数
";
}
// 发送提示到Ollama并获取响应
// Sends a prompt to Ollama's /api/generate endpoint and returns the model's
// complete (non-streamed) response text, or an error message on failure.
public async Task<string> SendPromptToModelAsync(string prompt)
{
    try
    {
        var requestBody = new OllamaRequest
        {
            Model = _modelName,
            Prompt = prompt,
            Stream = false,
            Options = new Dictionary<string, object>
            {
                { "temperature", 0.7 }
            }
        };
        // Fix: the Ollama API uses lowercase JSON keys ("model", "prompt",
        // "response"...). Default System.Text.Json settings emit PascalCase on
        // serialize and bind case-sensitively on deserialize, so the request
        // would be ignored and Response would stay null. Use a camel-case
        // naming policy plus case-insensitive matching.
        var jsonOptions = new JsonSerializerOptions
        {
            PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
            PropertyNameCaseInsensitive = true
        };
        var jsonRequest = JsonSerializer.Serialize(requestBody, jsonOptions);
        var content = new StringContent(jsonRequest, Encoding.UTF8, "application/json");
        var response = await _httpClient.PostAsync("/api/generate", content);
        response.EnsureSuccessStatusCode();
        var jsonResponse = await response.Content.ReadAsStringAsync();
        var ollamaResponse = JsonSerializer.Deserialize<OllamaResponse>(jsonResponse, jsonOptions);
        return ollamaResponse?.Response ?? string.Empty;
    }
    catch (Exception ex)
    {
        // Best-effort: surface the failure as text instead of throwing.
        return $"与模型通信出错: {ex.Message}";
    }
}
// 解析模型响应中的MCP指令
// Extracts and parses MCP commands from the <mcp>...</mcp> section of a model
// response. Returns true when at least one command was parsed; on any parse
// failure it logs and returns false with an empty list.
public bool TryExtractMcpCommands(string modelResponse, out List<McpCommand> commands)
{
    commands = new List<McpCommand>();
    const string mcpStartTag = "<mcp>";
    const string mcpEndTag = "</mcp>";
    int startIndex = modelResponse.IndexOf(mcpStartTag, StringComparison.OrdinalIgnoreCase);
    if (startIndex == -1)
        return false;
    int endIndex = modelResponse.IndexOf(mcpEndTag, startIndex + mcpStartTag.Length, StringComparison.OrdinalIgnoreCase);
    if (endIndex == -1)
        return false;
    // Extract the JSON payload between the tags.
    string mcpContent = modelResponse.Substring(
        startIndex + mcpStartTag.Length,
        endIndex - (startIndex + mcpStartTag.Length)
    ).Trim();
    try
    {
        // Fix: the prompt instructs the model to emit lowercase keys
        // ("action", "parameters"); the original case-sensitive binding left
        // Action null, crashing later in ExecuteMcpCommands. Parse
        // case-insensitively, consistent with MultiModeMcpClient.
        var options = new JsonSerializerOptions { PropertyNameCaseInsensitive = true };
        commands = JsonSerializer.Deserialize<List<McpCommand>>(mcpContent, options);
        return commands != null && commands.Count > 0;
    }
    catch (Exception ex)
    {
        Console.WriteLine($"解析MCP指令失败: {ex.Message}");
        return false;
    }
}
// 执行MCP指令并返回结果
// Dispatches each parsed MCP command to its handler and aggregates the
// per-command results into one report string (one line per command).
public async Task<string> ExecuteMcpCommands(List<McpCommand> commands)
{
    var executionResults = new List<string>();
    foreach (var command in commands)
    {
        Console.WriteLine($"\n[MCP执行器] 收到模型的MCP指令: {command.Action}");
        // Fix: null-propagate and use the invariant culture so a missing
        // Action or a culture-sensitive lowercase (e.g. Turkish 'I') cannot
        // break the dispatch; null/unknown falls through to the default arm.
        string result = command.Action?.ToLowerInvariant() switch
        {
            "readfile" => await ExecuteFileRead(command.Parameters),
            "writefile" => await ExecuteFileWrite(command.Parameters),
            "getsysteminfo" => await ExecuteSystemInfoRetrieval(),
            _ => $"不支持的MCP指令: {command.Action}"
        };
        executionResults.Add($"指令 '{command.Action}' 执行结果: {result}");
    }
    return string.Join("\n", executionResults);
}
// 执行文件读取操作
// Reads a local text file for the model, truncating long content to 500 chars
// so the result fits back into the follow-up prompt.
private async Task<string> ExecuteFileRead(Dictionary<string, string> parameters)
{
    if (!parameters.TryGetValue("path", out string filePath))
        return "错误: 缺少文件路径参数";
    try
    {
        if (!File.Exists(filePath))
            return $"错误: 文件不存在 - {filePath}";
        string text = await File.ReadAllTextAsync(filePath);
        if (text.Length <= 500)
            return text;
        return $"{text.Substring(0, 500)}...";
    }
    catch (Exception ex)
    {
        return $"读取失败: {ex.Message}";
    }
}
// 执行文件写入操作
// Writes model-supplied content to a local file, creating the parent
// directory first when it does not exist.
private async Task<string> ExecuteFileWrite(Dictionary<string, string> parameters)
{
    if (!parameters.TryGetValue("path", out string filePath))
        return "错误: 缺少文件路径参数";
    if (!parameters.TryGetValue("content", out string content))
        return "错误: 缺少文件内容参数";
    try
    {
        // Ensure the target directory exists before writing.
        string directory = Path.GetDirectoryName(filePath);
        bool directoryMissing = !string.IsNullOrEmpty(directory) && !Directory.Exists(directory);
        if (directoryMissing)
            Directory.CreateDirectory(directory);
        await File.WriteAllTextAsync(filePath, content);
        return $"成功写入文件: {filePath}";
    }
    catch (Exception ex)
    {
        return $"写入失败: {ex.Message}";
    }
}
// 执行系统信息获取操作
// Collects basic environment facts; wrapped in a Task so its signature
// matches the other async MCP handlers.
private Task<string> ExecuteSystemInfoRetrieval()
{
    string[] facts =
    {
        $"操作系统: {Environment.OSVersion}",
        $"处理器核心数: {Environment.ProcessorCount}",
        $"当前工作目录: {Environment.CurrentDirectory}",
        $"系统版本: {Environment.OSVersion.VersionString}"
    };
    return Task.FromResult(string.Join("\n", facts));
}
// 处理带MCP调用的完整对话流程
// Runs one full conversation turn: asks the model whether tools are needed,
// executes any MCP commands it emits, then feeds the results back to the
// model to obtain a natural-language final answer.
public async Task<string> ProcessConversationTurn(string userQuery)
{
    // 1. Build the full prompt, including the MCP usage instructions.
    string prompt = $"用户问: {userQuery}\n" +
    "请分析是否需要调用工具来回答这个问题。\n" +
    "如果需要,请使用MCP指令格式。如果不需要,可以直接回答。\n" +
    _mcpInstructions;
    // 2. Send the prompt and let the model decide whether to call MCP.
    Console.WriteLine("\n[系统] 正在向模型发送查询,等待模型响应...");
    string modelInitialResponse = await SendPromptToModelAsync(prompt);
    Console.WriteLine($"\n[模型原始响应]\n{modelInitialResponse}\n");
    // 3. Check whether the model chose to use MCP.
    if (TryExtractMcpCommands(modelInitialResponse, out var mcpCommands))
    {
        Console.WriteLine($"[系统] 检测到模型发起了 {mcpCommands.Count} 个MCP调用");
        // 4. Execute the MCP commands locally.
        string executionResults = await ExecuteMcpCommands(mcpCommands);
        Console.WriteLine($"[MCP执行结果]\n{executionResults}\n");
        // 5. Return the execution results to the model for the final answer.
        string finalPrompt = $"你之前发起的MCP指令已经执行,结果如下:\n{executionResults}\n" +
        "请根据这些结果,用自然语言回答用户的原始问题。";
        return await SendPromptToModelAsync(finalPrompt);
    }
    // No MCP call detected: return the model's direct answer.
    return modelInitialResponse;
}
// 启动交互式对话
// Interactive REPL: reads user questions from the console and prints the
// model's answers until the user types "exit".
public async Task StartInteractiveConversation()
{
    Console.WriteLine($"=== Ollama MCP交互对话 (模型: {_modelName}) ===");
    Console.WriteLine("输入你的问题,或输入 'exit' 退出对话");
    Console.WriteLine("示例问题:");
    Console.WriteLine("- 请读取当前目录下的test.txt文件");
    Console.WriteLine("- 帮我创建一个名为note.txt的文件,内容是'Hello MCP'");
    Console.WriteLine("- 我的系统信息是什么?");
    while (true)
    {
        Console.Write("\n你: ");
        // NOTE(review): ReadLine() may return null on EOF; string.Equals
        // handles null, but a null input then flows into the prompt as empty.
        string userInput = Console.ReadLine();
        if (string.Equals(userInput, "exit", StringComparison.OrdinalIgnoreCase))
        {
            Console.WriteLine("对话结束,再见!");
            break;
        }
        string response = await ProcessConversationTurn(userInput);
        Console.WriteLine($"\n模型: {response}");
    }
}
}
class Program
{
    // Entry point: connect to the local Ollama service and start the REPL.
    static async Task Main(string[] args)
    {
        var client = new EnhancedOllamaMcpClient(
            ollamaBaseUrl: "http://localhost:11434",
            modelName: "qwen" // swap in another supported model such as "llama3" or "gemma"
        );
        await client.StartInteractiveConversation();
    }
}
}
三个MCP分别以 stdio、SSE、streamable 三种方式部署,使大模型能够调用它们
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Text;
using System.Text.Json;
using System.Threading.Tasks;
using System.IO;
using System.Threading;
namespace MultiModeMcpExample
{
// MCP部署模式枚举
// Transport/deployment mode for an MCP tool call.
public enum McpMode
{
    Stdio, // talk to a local process over standard input/output
    Sse, // receive pushed events over Server-Sent Events
    Streamable // exchange data over a continuous HTTP stream
}
// 扩展的MCP指令模型,包含部署模式信息
// MCP command extended with the deployment mode and a streaming flag.
// NOTE(review): the prompt has the model emit ""mode"": ""stdio"" as a string;
// binding that string to McpMode requires JsonStringEnumConverter — confirm
// the deserializer registers it.
public class AdvancedMcpCommand
{
    public string Action { get; set; } // Operation name, e.g. "runprogram".
    public McpMode Mode { get; set; } = McpMode.Stdio; // Defaults to stdio mode.
    public Dictionary<string, string> Parameters { get; set; } = new Dictionary<string, string>(); // Named string arguments.
    public bool Stream { get; set; } = false; // Whether a streaming response is requested.
}
// Ollama API模型定义
// Request payload for Ollama's /api/generate endpoint.
// NOTE(review): the Ollama API expects lowercase JSON keys; serialize with a
// camel-case naming policy (default serialization emits PascalCase).
public class OllamaRequest
{
    public string Model { get; set; }          // Model tag, e.g. "qwen".
    public string Prompt { get; set; }         // Full prompt text.
    public bool Stream { get; set; } = false;  // false = one complete response.
    public Dictionary<string, object> Options { get; set; } = new Dictionary<string, object>(); // Sampling options, e.g. "temperature", "num_ctx".
}
// Response payload from Ollama's /api/generate endpoint.
// NOTE(review): Ollama returns lowercase/snake_case keys; deserialize with
// PropertyNameCaseInsensitive = true or these properties stay null.
public class OllamaResponse
{
    public string Model { get; set; }       // Echo of the model tag used.
    public string Created_At { get; set; }  // Timestamp string from "created_at".
    public string Response { get; set; }    // Generated text.
    public bool Done { get; set; }          // True when generation is complete.
}
class MultiModeMcpClient
{
private readonly HttpClient _httpClient;    // Reused HTTP client bound to the Ollama base address.
private readonly string _ollamaBaseUrl;     // e.g. "http://localhost:11434".
private readonly string _modelName;         // Ollama model tag to query.
private readonly string _mcpInstructions;   // Multi-mode MCP usage instructions appended to every prompt.
// Creates a client bound to a local Ollama server; the 5-minute timeout
// accommodates slow local generation. Also prepares the multi-mode MCP
// instructions that teach the model the <mcp>...</mcp> call format.
public MultiModeMcpClient(string ollamaBaseUrl = "http://localhost:11434", string modelName = "qwen")
{
    _ollamaBaseUrl = ollamaBaseUrl;
    _modelName = modelName;
    _httpClient = new HttpClient();
    _httpClient.BaseAddress = new Uri(_ollamaBaseUrl);
    _httpClient.Timeout = TimeSpan.FromMinutes(5);
    // Detailed multi-mode MCP usage instructions. The verbatim string below
    // is part of the runtime prompt — kept byte-for-byte.
    _mcpInstructions = @"
当需要调用外部工具时,请使用MCP指令。支持三种部署方式:
1. stdio: 通过标准输入输出与本地程序通信
2. sse: 通过Server-Sent Events接收实时推送
3. streamable: 通过流式传输进行持续数据交换
MCP指令格式:
<mcp>
[
{
""action"": ""操作名称"",
""mode"": ""stdio|sse|streamable"",
""stream"": true|false, // 是否需要流式响应
""parameters"": {
""参数1"": ""值1"",
""参数2"": ""值2""
}
}
]
</mcp>
支持的操作示例:
- 运行本地程序: {""action"": ""runprogram"", ""mode"": ""stdio"", ""parameters"": {""path"": ""程序路径"", ""args"": ""参数""}}
- 订阅实时数据: {""action"": ""subscribedata"", ""mode"": ""sse"", ""parameters"": {""url"": ""SSE服务地址""}}
- 传输大文件: {""action"": ""transferfile"", ""mode"": ""streamable"", ""stream"": true, ""parameters"": {""url"": ""流服务地址"", ""path"": ""文件路径""}}
";
}
// 发送提示到Ollama模型
// Sends a prompt to Ollama's /api/generate endpoint and returns the model's
// complete (non-streamed) response text, or an error message on failure.
public async Task<string> SendPromptToModelAsync(string prompt)
{
    try
    {
        var requestBody = new OllamaRequest
        {
            Model = _modelName,
            Prompt = prompt,
            Stream = false,
            Options = new Dictionary<string, object>
            {
                { "temperature", 0.7 },
                { "num_ctx", 8192 }
            }
        };
        // Fix: the Ollama API uses lowercase JSON keys. Default serialization
        // emits PascalCase and deserialization binds case-sensitively, so the
        // request would be ignored and Response would stay null. Use a
        // camel-case naming policy plus case-insensitive matching.
        var jsonOptions = new JsonSerializerOptions
        {
            PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
            PropertyNameCaseInsensitive = true
        };
        var jsonRequest = JsonSerializer.Serialize(requestBody, jsonOptions);
        var content = new StringContent(jsonRequest, Encoding.UTF8, "application/json");
        var response = await _httpClient.PostAsync("/api/generate", content);
        response.EnsureSuccessStatusCode();
        var jsonResponse = await response.Content.ReadAsStringAsync();
        var ollamaResponse = JsonSerializer.Deserialize<OllamaResponse>(jsonResponse, jsonOptions);
        return ollamaResponse?.Response ?? string.Empty;
    }
    catch (Exception ex)
    {
        // Best-effort: surface the failure as text instead of throwing.
        return $"与模型通信出错: {ex.Message}";
    }
}
// 解析模型响应中的MCP指令
// Extracts and parses multi-mode MCP commands from the <mcp>...</mcp> section
// of a model response. Returns true when at least one command was parsed.
public bool TryExtractMcpCommands(string modelResponse, out List<AdvancedMcpCommand> commands)
{
    commands = new List<AdvancedMcpCommand>();
    const string mcpStartTag = "<mcp>";
    const string mcpEndTag = "</mcp>";
    int startIndex = modelResponse.IndexOf(mcpStartTag, StringComparison.OrdinalIgnoreCase);
    if (startIndex == -1)
        return false;
    int endIndex = modelResponse.IndexOf(mcpEndTag, startIndex + mcpStartTag.Length, StringComparison.OrdinalIgnoreCase);
    if (endIndex == -1)
        return false;
    // Extract the JSON payload between the tags.
    string mcpContent = modelResponse.Substring(
        startIndex + mcpStartTag.Length,
        endIndex - (startIndex + mcpStartTag.Length)
    ).Trim();
    try
    {
        var options = new JsonSerializerOptions
        {
            PropertyNameCaseInsensitive = true,
            // Fix: the prompt has the model write ""mode"": ""stdio"" as a
            // string, but System.Text.Json cannot bind a string to the McpMode
            // enum by default — the whole payload failed to parse and every
            // MCP call was silently dropped. Register the string-enum
            // converter (case-insensitive by default).
            Converters = { new System.Text.Json.Serialization.JsonStringEnumConverter() }
        };
        commands = JsonSerializer.Deserialize<List<AdvancedMcpCommand>>(mcpContent, options);
        return commands != null && commands.Count > 0;
    }
    catch (Exception ex)
    {
        Console.WriteLine($"解析MCP指令失败: {ex.Message}");
        return false;
    }
}
// 执行MCP指令(根据不同模式路由)
// Routes each command to the handler for its declared transport mode and
// collects the per-command results into one report (blank-line separated).
public async Task<string> ExecuteMcpCommands(List<AdvancedMcpCommand> commands)
{
    var results = new List<string>();
    foreach (var command in commands)
    {
        Console.WriteLine($"\n[MCP执行器] 处理指令: {command.Action} (模式: {command.Mode})");
        string result;
        switch (command.Mode)
        {
            case McpMode.Stdio:
                result = await ExecuteStdioCommand(command);
                break;
            case McpMode.Sse:
                result = await ExecuteSseCommand(command);
                break;
            case McpMode.Streamable:
                result = await ExecuteStreamableCommand(command);
                break;
            default:
                result = $"不支持的MCP模式: {command.Mode}";
                break;
        }
        results.Add($"[{command.Mode}:{command.Action}] 结果: {result}");
    }
    return string.Join("\n\n", results);
}
#region STDIO模式处理(标准输入输出)
// Launches a local program over stdio, optionally feeding it input, and
// returns its (truncated) standard output. Only "runprogram" is supported.
private async Task<string> ExecuteStdioCommand(AdvancedMcpCommand command)
{
    // Fix: ordinal, null-safe comparison (original ToLower() NREs on a null Action).
    if (!string.Equals(command.Action, "runprogram", StringComparison.OrdinalIgnoreCase))
        return $"stdio模式仅支持runprogram操作,收到: {command.Action}";
    if (!command.Parameters.TryGetValue("path", out string programPath))
        return "缺少程序路径参数 (path)";
    if (!File.Exists(programPath))
        return $"程序不存在: {programPath}";
    try
    {
        var processStartInfo = new ProcessStartInfo
        {
            FileName = programPath,
            Arguments = command.Parameters.TryGetValue("args", out string args) ? args : "",
            RedirectStandardInput = true,
            RedirectStandardOutput = true,
            RedirectStandardError = true,
            UseShellExecute = false,
            CreateNoWindow = true,
            StandardOutputEncoding = Encoding.UTF8,
            StandardErrorEncoding = Encoding.UTF8
        };
        using (var process = new Process { StartInfo = processStartInfo })
        {
            process.Start();
            // Forward any model-supplied input to the child's stdin.
            if (command.Parameters.TryGetValue("input", out string input) && !string.IsNullOrEmpty(input))
            {
                await process.StandardInput.WriteLineAsync(input);
                await process.StandardInput.FlushAsync();
            }
            // Fix: always close stdin. The original closed it only when input
            // was supplied, so programs reading stdin to EOF would block until
            // the 5-second timeout killed them.
            process.StandardInput.Close();
            // Drain stdout/stderr concurrently to avoid pipe-buffer deadlocks.
            var outputTask = process.StandardOutput.ReadToEndAsync();
            var errorTask = process.StandardError.ReadToEndAsync();
            // Wait for exit with a 5-second timeout.
            if (!await Task.Run(() => process.WaitForExit(5000)))
            {
                process.Kill();
                return "程序执行超时";
            }
            string output = await outputTask;
            string error = await errorTask;
            if (process.ExitCode != 0)
                return $"程序执行错误 (代码: {process.ExitCode}): {error}";
            // Cap the output so it fits back into the model prompt.
            return output.Length > 1000 ? output.Substring(0, 1000) + "...[输出被截断]" : output;
        }
    }
    catch (Exception ex)
    {
        return $"stdio命令执行失败: {ex.Message}";
    }
}
#endregion
#region SSE模式处理(Server-Sent Events)
// Subscribes to a Server-Sent Events endpoint and returns the received
// "data:" payloads. Non-streaming calls sample at most 3 events; everything
// stops after a 10-second timeout. Only "subscribedata" is supported.
private async Task<string> ExecuteSseCommand(AdvancedMcpCommand command)
{
    // Fix: ordinal, null-safe comparison (original ToLower() NREs on a null Action).
    if (!string.Equals(command.Action, "subscribedata", StringComparison.OrdinalIgnoreCase))
        return $"sse模式仅支持subscribedata操作,收到: {command.Action}";
    if (!command.Parameters.TryGetValue("url", out string sseUrl))
        return "缺少SSE服务地址参数 (url)";
    try
    {
        using (var request = new HttpRequestMessage(HttpMethod.Get, sseUrl))
        {
            request.Headers.Accept.Add(new MediaTypeWithQualityHeaderValue("text/event-stream"));
            request.Headers.CacheControl = new CacheControlHeaderValue { NoCache = true };
            using (var response = await _httpClient.SendAsync(
                request,
                HttpCompletionOption.ResponseHeadersRead,
                CancellationToken.None))
            {
                response.EnsureSuccessStatusCode();
                using (var stream = await response.Content.ReadAsStreamAsync())
                using (var reader = new StreamReader(stream))
                // Fix: the CancellationTokenSource was never disposed; its
                // CancelAfter timer would linger after this method returned.
                using (var cancellationTokenSource = new CancellationTokenSource())
                {
                    var eventData = new List<string>();
                    // Abort the subscription after 10 seconds.
                    cancellationTokenSource.CancelAfter(10000);
                    // Non-streaming requests sample at most 3 events.
                    int maxEvents = command.Stream ? int.MaxValue : 3;
                    int eventCount = 0;
                    try
                    {
                        while (!reader.EndOfStream && !cancellationTokenSource.Token.IsCancellationRequested)
                        {
                            // NOTE: ReadLineAsync(CancellationToken) requires .NET 7+.
                            string line = await reader.ReadLineAsync(cancellationTokenSource.Token);
                            if (string.IsNullOrWhiteSpace(line))
                                continue;
                            // SSE wire format: "data: {payload}".
                            if (line.StartsWith("data:", StringComparison.OrdinalIgnoreCase))
                            {
                                string data = line.Substring(5).Trim();
                                eventData.Add(data);
                                Console.WriteLine($"[SSE接收] {data}");
                                eventCount++;
                                if (eventCount >= maxEvents)
                                    break;
                            }
                        }
                    }
                    catch (OperationCanceledException)
                    {
                        eventData.Add("[SSE接收超时]");
                    }
                    return eventData.Count > 0
                        ? $"共接收 {eventData.Count} 条SSE事件:\n{string.Join("\n", eventData)}"
                        : "未接收到SSE事件数据";
                }
            }
        }
    }
    catch (Exception ex)
    {
        return $"SSE命令执行失败: {ex.Message}";
    }
}
#endregion
#region Streamable模式处理(流式传输)
// Transfers a file over a streaming HTTP connection. The "direction"
// parameter selects upload; anything else downloads. Only "transferfile"
// is supported in this mode.
private async Task<string> ExecuteStreamableCommand(AdvancedMcpCommand command)
{
    // Fix: ordinal, null-safe comparison (original ToLower() NREs on a null Action).
    if (!string.Equals(command.Action, "transferfile", StringComparison.OrdinalIgnoreCase))
        return $"streamable模式仅支持transferfile操作,收到: {command.Action}";
    if (!command.Parameters.TryGetValue("url", out string streamUrl))
        return "缺少流服务地址参数 (url)";
    if (!command.Parameters.TryGetValue("path", out string filePath))
        return "缺少文件路径参数 (path)";
    try
    {
        // Upload when the model explicitly asks for it; otherwise download.
        bool isUpload = command.Parameters.TryGetValue("direction", out string dir) &&
                        dir.Equals("upload", StringComparison.OrdinalIgnoreCase);
        if (isUpload)
        {
            if (!File.Exists(filePath))
                return $"上传文件不存在: {filePath}";
            using (var fileStream = File.OpenRead(filePath))
            using (var content = new StreamContent(fileStream))
            {
                content.Headers.ContentType = new MediaTypeHeaderValue("application/octet-stream");
                content.Headers.ContentLength = fileStream.Length;
                // Fix: dispose the HttpResponseMessage — the original leaked
                // the response returned by PostAsync.
                using (var response = await _httpClient.PostAsync(streamUrl, content))
                {
                    response.EnsureSuccessStatusCode();
                    return $"文件上传成功: {filePath} (大小: {FormatFileSize(fileStream.Length)})";
                }
            }
        }
        else
        {
            using (var response = await _httpClient.GetAsync(streamUrl, HttpCompletionOption.ResponseHeadersRead))
            {
                response.EnsureSuccessStatusCode();
                // Make sure the target directory exists.
                string directory = Path.GetDirectoryName(filePath);
                if (!string.IsNullOrEmpty(directory) && !Directory.Exists(directory))
                    Directory.CreateDirectory(directory);
                using (var stream = await response.Content.ReadAsStreamAsync())
                using (var fileStream = File.Create(filePath))
                {
                    var buffer = new byte[8192];
                    int bytesRead;
                    long totalBytes = 0;
                    // Copy in 8 KiB chunks, optionally reporting progress.
                    while ((bytesRead = await stream.ReadAsync(buffer, 0, buffer.Length)) > 0)
                    {
                        await fileStream.WriteAsync(buffer, 0, bytesRead);
                        totalBytes += bytesRead;
                        if (command.Stream)
                            Console.WriteLine($"[文件下载] 已接收: {FormatFileSize(totalBytes)}");
                    }
                    return $"文件下载成功: {filePath} (大小: {FormatFileSize(totalBytes)})";
                }
            }
        }
    }
    catch (Exception ex)
    {
        return $"流式传输命令执行失败: {ex.Message}";
    }
}
// 格式化文件大小显示
// Renders a byte count with the largest unit below the next threshold
// (B, KB, MB, GB), one decimal place for fractional units.
private string FormatFileSize(long bytes)
{
    return bytes switch
    {
        < 1024 => $"{bytes} B",
        < 1048576 => $"{bytes / 1024.0:F1} KB",
        < 1073741824 => $"{bytes / 1048576.0:F1} MB",
        _ => $"{bytes / 1073741824.0:F1} GB"
    };
}
#endregion
// 处理带MCP调用的对话轮次
// Runs one full conversation turn: asks the model whether external tools are
// needed, executes any MCP commands it emits (routing by mode), then feeds
// the results back to the model for a natural-language final answer.
public async Task<string> ProcessConversationTurn(string userQuery)
{
    // 1. Build the prompt, including the multi-mode MCP instructions.
    string prompt = $"用户问: {userQuery}\n" +
    "请分析是否需要调用外部工具。如果需要,请根据任务类型选择合适的MCP模式:\n" +
    "- 本地程序交互: 使用stdio模式\n" +
    "- 实时数据订阅: 使用sse模式\n" +
    "- 大文件传输: 使用streamable模式\n" +
    _mcpInstructions;
    // 2. Send the prompt to the model.
    Console.WriteLine("\n[系统] 等待模型响应...");
    string modelInitialResponse = await SendPromptToModelAsync(prompt);
    Console.WriteLine($"\n[模型原始响应]\n{modelInitialResponse}\n");
    // 3. Check for and handle MCP commands.
    if (TryExtractMcpCommands(modelInitialResponse, out var mcpCommands))
    {
        Console.WriteLine($"[系统] 检测到 {mcpCommands.Count} 个MCP指令,开始执行...");
        // 4. Execute the MCP commands locally.
        string executionResults = await ExecuteMcpCommands(mcpCommands);
        Console.WriteLine($"[MCP执行结果]\n{executionResults}\n");
        // 5. Feed the results back for the final answer.
        string finalPrompt = $"你发起的MCP指令已执行,结果如下:\n{executionResults}\n" +
        "请根据结果用自然语言回答用户问题。";
        return await SendPromptToModelAsync(finalPrompt);
    }
    // No MCP call detected: return the model's direct answer.
    return modelInitialResponse;
}
// 启动交互式对话
// Interactive REPL: reads user questions from the console and prints the
// model's answers until the user types "exit".
public async Task StartInteractiveConversation()
{
    Console.WriteLine($"=== 多模式MCP客户端 (模型: {_modelName}) ===");
    Console.WriteLine("支持stdio、sse和streamable三种MCP部署方式");
    Console.WriteLine("输入问题或 'exit' 退出");
    Console.WriteLine("示例问题:");
    Console.WriteLine("- 用stdio模式运行notepad.exe");
    Console.WriteLine("- 用sse模式订阅http://localhost:8080/events的实时数据");
    Console.WriteLine("- 用streamable模式下载http://example.com/file.zip到本地downloads文件夹");
    while (true)
    {
        Console.Write("\n你: ");
        // NOTE(review): ReadLine() may return null on EOF; string.Equals
        // handles null, but a null input then flows into the prompt as empty.
        string userInput = Console.ReadLine();
        if (string.Equals(userInput, "exit", StringComparison.OrdinalIgnoreCase))
        {
            Console.WriteLine("对话结束");
            break;
        }
        string response = await ProcessConversationTurn(userInput);
        Console.WriteLine($"\n模型: {response}");
    }
}
}
class Program
{
    // Entry point: connect to the local Ollama service and start the REPL.
    static async Task Main(string[] args)
    {
        var multiModeClient = new MultiModeMcpClient(
            ollamaBaseUrl: "http://localhost:11434",
            modelName: "qwen" // any other locally installed model also works
        );
        await multiModeClient.StartInteractiveConversation();
    }
}
}
下一篇:C#开发技术收集目录结构