2 changes: 1 addition & 1 deletion samples/EverythingServer/Program.cs
@@ -75,7 +75,7 @@ await ctx.Server.SampleAsync([
new ChatMessage(ChatRole.System, "You are a helpful test server"),
new ChatMessage(ChatRole.User, $"Resource {uri}, context: A new subscription was started"),
],
-options: new ChatOptions
+chatOptions: new ChatOptions
{
MaxOutputTokens = 100,
Temperature = 0.7f,
2 changes: 1 addition & 1 deletion samples/EverythingServer/Tools/SampleLlmTool.cs
@@ -15,7 +15,7 @@ public static async Task<string> SampleLLM(
CancellationToken cancellationToken)
{
var samplingParams = CreateRequestSamplingParams(prompt ?? string.Empty, "sampleLLM", maxTokens);
-var sampleResult = await server.SampleAsync(samplingParams, cancellationToken);
+var sampleResult = await server.SampleAsync(samplingParams, cancellationToken: cancellationToken);

return $"LLM sampling result: {sampleResult.Content.OfType<TextContentBlock>().FirstOrDefault()?.Text}";
}
2 changes: 1 addition & 1 deletion samples/TestServerWithHosting/Tools/SampleLlmTool.cs
@@ -18,7 +18,7 @@ public static async Task<string> SampleLLM(
CancellationToken cancellationToken)
{
var samplingParams = CreateRequestSamplingParams(prompt ?? string.Empty, "sampleLLM", maxTokens);
-var sampleResult = await thisServer.SampleAsync(samplingParams, cancellationToken);
+var sampleResult = await thisServer.SampleAsync(samplingParams, cancellationToken: cancellationToken);

return $"LLM sampling result: {sampleResult.Content.OfType<TextContentBlock>().FirstOrDefault()?.Text}";
}
108 changes: 63 additions & 45 deletions src/ModelContextProtocol.Core/Client/McpClient.Methods.cs

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion src/ModelContextProtocol.Core/Client/McpClientPrompt.cs
@@ -96,6 +96,6 @@ public async ValueTask<GetPromptResult> GetAsync(
arguments as IReadOnlyDictionary<string, object?> ??
arguments?.ToDictionary();

-return await _client.GetPromptAsync(ProtocolPrompt.Name, argDict, serializerOptions, cancellationToken: cancellationToken).ConfigureAwait(false);
+return await _client.GetPromptAsync(ProtocolPrompt.Name, argDict, new RequestOptions() { JsonSerializerOptions = serializerOptions }, cancellationToken).ConfigureAwait(false);
}
}
4 changes: 2 additions & 2 deletions src/ModelContextProtocol.Core/Client/McpClientResource.cs
@@ -78,10 +78,10 @@ public McpClientResource(McpClient client, Resource resource)
/// <returns>A <see cref="ValueTask{ReadResourceResult}"/> containing the resource's result with content and messages.</returns>
/// <remarks>
/// <para>
-/// This is a convenience method that internally calls <see cref="McpClient.ReadResourceAsync(string, CancellationToken)"/>.
+/// This is a convenience method that internally calls <see cref="McpClient.ReadResourceAsync(string, RequestOptions, CancellationToken)"/>.
/// </para>
/// </remarks>
public ValueTask<ReadResourceResult> ReadAsync(
CancellationToken cancellationToken = default) =>
-_client.ReadResourceAsync(Uri, cancellationToken);
+_client.ReadResourceAsync(Uri, cancellationToken: cancellationToken);
}
src/ModelContextProtocol.Core/Client/McpClientResourceTemplate.cs
@@ -83,5 +83,5 @@ public McpClientResourceTemplate(McpClient client, ResourceTemplate resourceTemp
public ValueTask<ReadResourceResult> ReadAsync(
IReadOnlyDictionary<string, object?> arguments,
CancellationToken cancellationToken = default) =>
-_client.ReadResourceAsync(UriTemplate, arguments, cancellationToken);
+_client.ReadResourceAsync(UriTemplate, arguments, cancellationToken: cancellationToken);
}
21 changes: 16 additions & 5 deletions src/ModelContextProtocol.Core/Client/McpClientTool.cs
@@ -128,7 +128,11 @@ internal McpClientTool(
protected async override ValueTask<object?> InvokeCoreAsync(
AIFunctionArguments arguments, CancellationToken cancellationToken)
{
-CallToolResult result = await CallAsync(arguments, _progress, JsonSerializerOptions, cancellationToken).ConfigureAwait(false);
+var options = JsonSerializerOptions is null ? null : new RequestOptions()
+{
+    JsonSerializerOptions = JsonSerializerOptions,
+};
+CallToolResult result = await CallAsync(arguments, _progress, options, cancellationToken).ConfigureAwait(false);

// We want to translate the result content into AIContent, using AIContent as the exchange types, so
// that downstream IChatClients can specialize handling based on the content (e.g. sending image content
@@ -163,8 +167,8 @@ result.StructuredContent is null &&
/// value will result in a progress token being included in the call, and any resulting progress notifications during the operation
/// routed to this instance.
/// </param>
/// <param name="serializerOptions">
/// The JSON serialization options governing argument serialization. If <see langword="null"/>, the default serialization options are used.
/// <param name="options">
/// Optional request options including metadata, serialization settings, and progress tracking.
/// </param>
/// <param name="cancellationToken">The <see cref="CancellationToken"/> to monitor for cancellation requests. The default is <see cref="CancellationToken.None"/>.</param>
/// <returns>
@@ -191,9 +195,16 @@ result.StructuredContent is null &&
public ValueTask<CallToolResult> CallAsync(
IReadOnlyDictionary<string, object?>? arguments = null,
IProgress<ProgressNotificationValue>? progress = null,
-JsonSerializerOptions? serializerOptions = null,
+RequestOptions? options = null,
CancellationToken cancellationToken = default) =>
-_client.CallToolAsync(ProtocolTool.Name, arguments, progress, serializerOptions, cancellationToken);
+_client.CallToolAsync(
+    ProtocolTool.Name,
+    arguments,
+    progress,
+    options ?? new RequestOptions() {
+        JsonSerializerOptions = JsonSerializerOptions
+    },
+    cancellationToken);

/// <summary>
/// Creates a new instance of the tool but modified to return the specified name from its <see cref="Name"/> property.
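Usage sketch (not part of this diff): callers that previously passed JsonSerializerOptions directly to CallAsync now wrap them in a RequestOptions instance. The tool, serializer options, and cancellation token variables below are assumed for illustration.

    // Hypothetical: "tool" is an McpClientTool, "myJsonOptions" a JsonSerializerOptions, "ct" a CancellationToken.
    CallToolResult result = await tool.CallAsync(
        new Dictionary<string, object?> { ["message"] = "Hello MCP!" },
        null,                                                          // no progress reporting
        new RequestOptions { JsonSerializerOptions = myJsonOptions },  // replaces the old serializerOptions parameter
        ct);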
1 change: 1 addition & 0 deletions src/ModelContextProtocol.Core/McpJsonUtilities.cs
@@ -132,6 +132,7 @@ internal static bool IsValidMcpToolSchema(JsonElement element)
[JsonSerializable(typeof(ListRootsResult))]
[JsonSerializable(typeof(ListToolsRequestParams))]
[JsonSerializable(typeof(ListToolsResult))]
+[JsonSerializable(typeof(PingRequestParams))]
[JsonSerializable(typeof(PingResult))]
[JsonSerializable(typeof(ReadResourceRequestParams))]
[JsonSerializable(typeof(ReadResourceResult))]
3 changes: 3 additions & 0 deletions src/ModelContextProtocol.Core/McpSession.Methods.cs
@@ -150,6 +150,7 @@ internal Task SendNotificationAsync<TParameters>(
/// </summary>
/// <param name="progressToken">The token that identifies the operation for which progress is being reported.</param>
/// <param name="progress">The progress update to send, containing information such as percentage complete or status message.</param>
/// <param name="options">Optional request options including metadata, serialization settings, and progress tracking.</param>
/// <param name="cancellationToken">The <see cref="CancellationToken"/> to monitor for cancellation requests. The default is <see cref="CancellationToken.None"/>.</param>
/// <returns>A task representing the completion of the notification operation (not the operation being tracked).</returns>
/// <exception cref="ArgumentNullException">The current session instance is <see langword="null"/>.</exception>
@@ -166,6 +167,7 @@ internal Task SendNotificationAsync<TParameters>(
public Task NotifyProgressAsync(
ProgressToken progressToken,
ProgressNotificationValue progress,
+RequestOptions? options = null,
CancellationToken cancellationToken = default)
{
return SendNotificationAsync(
@@ -174,6 +176,7 @@ public Task NotifyProgressAsync(
{
ProgressToken = progressToken,
Progress = progress,
+Meta = options?.Meta,
},
McpJsonUtilities.JsonContext.Default.ProgressNotificationParams,
cancellationToken);
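Illustrative call (not part of this diff): NotifyProgressAsync now accepts an optional RequestOptions whose Meta is copied onto the outgoing ProgressNotificationParams. The session, token, and metadata values below are assumptions.

    // Hypothetical: "session" is an McpSession, "token" the ProgressToken of the tracked request, "ct" a CancellationToken.
    await session.NotifyProgressAsync(
        token,
        new ProgressNotificationValue { Progress = 50, Total = 100 },
        new RequestOptions { Meta = new JsonObject { ["traceId"] = "abc-123" } },  // JsonObject from System.Text.Json.Nodes
        ct);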
11 changes: 11 additions & 0 deletions src/ModelContextProtocol.Core/Protocol/PingRequestParams.cs
@@ -0,0 +1,11 @@
namespace ModelContextProtocol.Protocol;

/// <summary>
/// Represents the parameters used with a <see cref="RequestMethods.Ping"/> request to verify
/// server connectivity.
/// </summary>
/// <remarks>
/// The server responds with a <see cref="PingResult"/>.
/// See the <see href="https://github.com/modelcontextprotocol/specification/blob/main/schema/">schema</see> for details.
/// </remarks>
public sealed class PingRequestParams : RequestParams;
18 changes: 1 addition & 17 deletions src/ModelContextProtocol.Core/Protocol/RequestParams.cs
@@ -26,7 +26,7 @@ private protected RequestParams()
public JsonObject? Meta { get; set; }

/// <summary>
-/// Gets or sets an opaque token that will be attached to any subsequent progress notifications.
+/// Gets the opaque token that will be attached to any subsequent progress notifications.
/// </summary>
[JsonIgnore]
public ProgressToken? ProgressToken
@@ -48,21 +48,5 @@ public ProgressToken? ProgressToken

return null;
}
-set
-{
-    if (value is null)
-    {
-        Meta?.Remove("progressToken");
-    }
-    else
-    {
-        (Meta ??= [])["progressToken"] = value.Value.Token switch
-        {
-            string s => JsonValue.Create(s),
-            long l => JsonValue.Create(l),
-            _ => throw new InvalidOperationException("ProgressToken must be a string or a long.")
-        };
-    }
-}
}
}
6 changes: 3 additions & 3 deletions src/ModelContextProtocol.Core/README.md
@@ -50,7 +50,7 @@ foreach (var tool in await client.ListToolsAsync())
var result = await client.CallToolAsync(
"echo",
new Dictionary<string, object?>() { ["message"] = "Hello MCP!" },
-cancellationToken:CancellationToken.None);
+cancellationToken: CancellationToken.None);

// echo always returns one and only one text content object
Console.WriteLine(result.Content.First(c => c.Type == "text").Text);
@@ -83,13 +83,13 @@ using System.ComponentModel;
var serverOptions = new McpServerOptions();

// Add tools directly
-serverOptions.Capabilities.Tools = new()
+serverOptions.Capabilities.Tools = new()
{
ListChanged = true,
ToolCollection = [
McpServerTool.Create((string message) => $"hello {message}", new()
{
Name = "echo",
Name = "echo",
Description = "Echoes the message back to the client."
})
]
96 changes: 96 additions & 0 deletions src/ModelContextProtocol.Core/RequestOptions.cs
@@ -0,0 +1,96 @@
using System.Text.Json;
using System.Text.Json.Nodes;
using ModelContextProtocol.Protocol;

namespace ModelContextProtocol;

/// <summary>
/// Contains optional parameters for MCP requests.
/// </summary>
public sealed class RequestOptions
{
/// <summary>
/// Optional metadata to include in the request.
/// </summary>
private JsonObject? _meta;

/// <summary>
/// Initializes a new instance of the <see cref="RequestOptions"/> class.
/// </summary>
public RequestOptions()
{
}

/// <summary>
/// Optional metadata to include in the request.
/// When getting, automatically includes the progress token if set.
/// </summary>
public JsonObject? Meta
{
get => _meta ??= [];
set
{
// Capture the existing progressToken value if set.
var existingProgressToken = _meta?["progressToken"];

if (value is not null)
{
if (existingProgressToken is not null)
{
value["progressToken"] ??= existingProgressToken;
}

_meta = value;
}
else if (existingProgressToken is not null)
{
_meta = new()
{
["progressToken"] = existingProgressToken,
};
}
else
{
_meta = null;
}
}
}

/// <summary>
/// The serializer options governing tool parameter serialization. If null, the default options are used.
/// </summary>
public JsonSerializerOptions? JsonSerializerOptions { get; set; }

/// <summary>
/// The progress token for tracking long-running operations.
/// </summary>
public ProgressToken? ProgressToken
{
get
{
return _meta?["progressToken"] switch
{
JsonValue v when v.TryGetValue(out string? s) => new(s),
JsonValue v when v.TryGetValue(out long l) => new(l),
_ => null
};
}
set
{
if (value?.Token is { } token)
{
_meta ??= [];
_meta["progressToken"] = token switch
{
string s => s,
long l => l,
_ => throw new InvalidOperationException("ProgressToken must be a string or long"),
};
}
else
{
_meta?.Remove("progressToken");
}
}
}
}
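A brief sketch (not part of the PR) of how the Meta and ProgressToken properties defined above interact; the token and metadata values are arbitrary.

    var requestOptions = new RequestOptions
    {
        Meta = new JsonObject { ["traceId"] = "abc-123" },  // JsonObject from System.Text.Json.Nodes
        ProgressToken = new ProgressToken("op-42"),         // stored under Meta["progressToken"]
    };
    // requestOptions.Meta now carries both "traceId" and "progressToken".
    requestOptions.ProgressToken = null;                    // removes only the "progressToken" entry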
30 changes: 15 additions & 15 deletions src/ModelContextProtocol.Core/Server/McpServer.Methods.cs
@@ -68,19 +68,19 @@ public ValueTask<CreateMessageResult> SampleAsync(
/// Requests to sample an LLM via the client using the provided chat messages and options.
/// </summary>
/// <param name="messages">The messages to send as part of the request.</param>
/// <param name="options">The options to use for the request, including model parameters and constraints.</param>
/// <param name="chatOptions">The options to use for the request, including model parameters and constraints.</param>
/// <param name="cancellationToken">The <see cref="CancellationToken"/> to monitor for cancellation requests. The default is <see cref="CancellationToken.None"/>.</param>
/// <returns>A task containing the chat response from the model.</returns>
/// <exception cref="ArgumentNullException"><paramref name="messages"/> is <see langword="null"/>.</exception>
/// <exception cref="InvalidOperationException">The client does not support sampling.</exception>
public async Task<ChatResponse> SampleAsync(
-IEnumerable<ChatMessage> messages, ChatOptions? options = default, CancellationToken cancellationToken = default)
+IEnumerable<ChatMessage> messages, ChatOptions? chatOptions = default, CancellationToken cancellationToken = default)
{
Throw.IfNull(messages);

StringBuilder? systemPrompt = null;

-if (options?.Instructions is { } instructions)
+if (chatOptions?.Instructions is { } instructions)
{
(systemPrompt ??= new()).Append(instructions);
}
@@ -126,15 +126,15 @@ public async Task<ChatResponse> SampleAsync(
}

ModelPreferences? modelPreferences = null;
-if (options?.ModelId is { } modelId)
+if (chatOptions?.ModelId is { } modelId)
{
modelPreferences = new() { Hints = [new() { Name = modelId }] };
}

IList<Tool>? tools = null;
-if (options?.Tools is { Count: > 0 })
+if (chatOptions?.Tools is { Count: > 0 })
{
-foreach (var tool in options.Tools)
+foreach (var tool in chatOptions.Tools)
{
if (tool is AIFunctionDeclaration af)
{
@@ -149,7 +149,7 @@ public async Task<ChatResponse> SampleAsync(
}
}

-ToolChoice? toolChoice = options?.ToolMode switch
+ToolChoice? toolChoice = chatOptions?.ToolMode switch
{
NoneChatToolMode => new() { Mode = ToolChoice.ModeNone },
AutoChatToolMode => new() { Mode = ToolChoice.ModeAuto },
@@ -159,15 +159,15 @@ public async Task<ChatResponse> SampleAsync(

var result = await SampleAsync(new()
{
-MaxTokens = options?.MaxOutputTokens ?? ServerOptions.MaxSamplingOutputTokens,
+MaxTokens = chatOptions?.MaxOutputTokens ?? ServerOptions.MaxSamplingOutputTokens,
Messages = samplingMessages,
ModelPreferences = modelPreferences,
-StopSequences = options?.StopSequences?.ToArray(),
+StopSequences = chatOptions?.StopSequences?.ToArray(),
SystemPrompt = systemPrompt?.ToString(),
-Temperature = options?.Temperature,
+Temperature = chatOptions?.Temperature,
ToolChoice = toolChoice,
Tools = tools,
-Meta = options?.AdditionalProperties?.ToJsonObject(),
+Meta = chatOptions?.AdditionalProperties?.ToJsonObject(),
}, cancellationToken).ConfigureAwait(false);

List<AIContent> responseContents = [];
Expand Down Expand Up @@ -472,14 +472,14 @@ private sealed class SamplingChatClient(McpServer server) : IChatClient
private readonly McpServer _server = server;

/// <inheritdoc/>
-public Task<ChatResponse> GetResponseAsync(IEnumerable<ChatMessage> messages, ChatOptions? options = null, CancellationToken cancellationToken = default) =>
-    _server.SampleAsync(messages, options, cancellationToken);
+public Task<ChatResponse> GetResponseAsync(IEnumerable<ChatMessage> messages, ChatOptions? chatOptions = null, CancellationToken cancellationToken = default) =>
+    _server.SampleAsync(messages, chatOptions, cancellationToken);

/// <inheritdoc/>
async IAsyncEnumerable<ChatResponseUpdate> IChatClient.GetStreamingResponseAsync(
-IEnumerable<ChatMessage> messages, ChatOptions? options, [EnumeratorCancellation] CancellationToken cancellationToken)
+IEnumerable<ChatMessage> messages, ChatOptions? chatOptions, [EnumeratorCancellation] CancellationToken cancellationToken)
{
-var response = await GetResponseAsync(messages, options, cancellationToken).ConfigureAwait(false);
+var response = await GetResponseAsync(messages, chatOptions, cancellationToken).ConfigureAwait(false);
foreach (var update in response.ToChatResponseUpdates())
{
yield return update;
2 changes: 1 addition & 1 deletion src/ModelContextProtocol.Core/TokenProgress.cs
@@ -11,6 +11,6 @@ internal sealed class TokenProgress(McpSession session, ProgressToken progressTo
/// <inheritdoc />
public void Report(ProgressNotificationValue value)
{
-_ = session.NotifyProgressAsync(progressToken, value, CancellationToken.None);
+_ = session.NotifyProgressAsync(progressToken, value, cancellationToken: CancellationToken.None);
}
}