-
Notifications
You must be signed in to change notification settings - Fork 656
Expand file tree
/
Copy pathSampleLlmTool.cs
More file actions
38 lines (34 loc) · 1.48 KB
/
SampleLlmTool.cs
File metadata and controls
38 lines (34 loc) · 1.48 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
using ModelContextProtocol.Protocol;
using ModelContextProtocol.Server;
using System.ComponentModel;
namespace EverythingServer.Tools;
[McpServerToolType]
public class SampleLlmTool
{
    /// <summary>
    /// MCP tool demonstrating the protocol's sampling feature: forwards a prompt
    /// through <paramref name="server"/> via <c>SampleAsync</c> and returns the
    /// text of the first text content block in the response.
    /// </summary>
    /// <param name="server">The MCP server whose sampling capability is used.</param>
    /// <param name="prompt">The prompt to send to the LLM.</param>
    /// <param name="maxTokens">Maximum number of tokens to generate; must be positive.</param>
    /// <param name="cancellationToken">Token used to cancel the sampling request.</param>
    /// <returns>A string of the form <c>"LLM sampling result: …"</c>.</returns>
    /// <exception cref="ArgumentNullException"><paramref name="server"/> is null.</exception>
    /// <exception cref="ArgumentOutOfRangeException"><paramref name="maxTokens"/> is zero or negative.</exception>
    [McpServerTool(Name = "sampleLLM"), Description("Samples from an LLM using MCP's sampling feature")]
    public static async Task<string> SampleLLM(
        McpServer server,
        [Description("The prompt to send to the LLM")] string prompt,
        [Description("Maximum number of tokens to generate")] int maxTokens,
        CancellationToken cancellationToken)
    {
        // Fail fast with clear exceptions instead of an NRE inside SampleAsync
        // or a pointless client round-trip with a nonsensical token budget.
        ArgumentNullException.ThrowIfNull(server);
        ArgumentOutOfRangeException.ThrowIfNegativeOrZero(maxTokens);

        var samplingParams = CreateRequestSamplingParams(prompt ?? string.Empty, "sampleLLM", maxTokens);
        var sampleResult = await server.SampleAsync(samplingParams, cancellationToken: cancellationToken);

        // The response may carry several content blocks; surface only the first
        // text block (null — and thus an empty suffix — when none is present).
        var text = sampleResult.Content.OfType<TextContentBlock>().FirstOrDefault()?.Text;
        return $"LLM sampling result: {text}";
    }

    /// <summary>
    /// Builds the <c>CreateMessageRequestParams</c> for a sampling request with a
    /// single user message, a fixed system prompt, and this-server context inclusion.
    /// </summary>
    /// <param name="context">Free-form context text embedded in the user message.</param>
    /// <param name="uri">Identifier embedded in the message text. NOTE(review): the
    /// caller passes the tool name ("sampleLLM") here, not a resource URI, so the
    /// parameter name is misleading — kept for interface stability.</param>
    /// <param name="maxTokens">Upper bound on tokens the client may generate.</param>
    private static CreateMessageRequestParams CreateRequestSamplingParams(string context, string uri, int maxTokens = 100)
    {
        return new CreateMessageRequestParams
        {
            Messages = [new SamplingMessage
            {
                Role = Role.User,
                Content = [new TextContentBlock { Text = $"Resource {uri} context: {context}" }],
            }],
            SystemPrompt = "You are a helpful test server.",
            MaxTokens = maxTokens,
            Temperature = 0.7f,
            IncludeContext = ContextInclusion.ThisServer
        };
    }
}