diff --git a/README.md b/README.md
index 550d47199a..e73ce0b4e2 100644
--- a/README.md
+++ b/README.md
@@ -167,7 +167,7 @@ Official integrations are maintained by companies building production ready MCP
- **[Comet Opik](https://github.com/comet-ml/opik-mcp)** - Query and analyze your [Opik](https://github.com/comet-ml/opik) logs, traces, prompts and all other telemetry data from your LLMs in natural language.
- **[Commerce Layer](https://github.com/commercelayer/mcp-server-metrics)** - Interact with Commerce Layer Metrics API.
- **[Composio](https://docs.composio.dev/docs/mcp-overview#-getting-started)** – Use [Composio](https://composio.dev) to connect 100+ tools. Zero setup. Auth built-in. Made for agents, works for humans.
-- **[Conductor](https://github.com/conductor-oss/conductor-mcp)** - Interact with Conductor (OSS and Orkes) REST APIs.
+- **[Conductor](https://github.com/conductor-oss/conductor-mcp)** - Interact with Conductor (OSS and Orkes) REST APIs.
- **[ConfigCat](https://github.com/configcat/mcp-server)** - Enables AI tools to interact with [ConfigCat](https://configcat.com), a feature flag service for teams. Supports managing ConfigCat feature flags, configs, environments, products and organizations. Helps to integrate ConfigCat SDK, implement feature flags and remove zombie (stale) flags.
- **[Confluent](https://github.com/confluentinc/mcp-confluent)** - Interact with Confluent Kafka and Confluent Cloud REST APIs.
- **[Construe](https://github.com/mattjoyce/mcp-construe)** - FastMCP server for intelligent Obsidian vault context management with frontmatter filtering, automatic chunking, and secure bidirectional knowledge operations.
@@ -274,7 +274,7 @@ Official integrations are maintained by companies building production ready MCP
- **[Improve Digital Publisher MCP](https://github.com/azerion/improvedigital-publisher-mcp-server)** - An MCP server that enables publishers to integrate [Improve Digital’s](https://improvedigital.com/) inventory management system with their AI tools or agents.
- **[Inbox Zero](https://github.com/elie222/inbox-zero/tree/main/apps/mcp-server)** - AI personal assistant for email [Inbox Zero](https://www.getinboxzero.com)
- **[Inflectra Spira](https://github.com/Inflectra/mcp-server-spira)** - Connect to your instance of the SpiraTest, SpiraTeam or SpiraPlan application lifecycle management platform by [Inflectra](https://www.inflectra.com)
-- **[Infobip](https://github.com/Inflectra/mcp-server-spira)** - MCP server for integrating [Infobip](https://www.infobip.com/) global cloud communication platform. It equips AI agents with communication superpowers, allowing them to send and receive SMS and RCS messages, interact with WhatsApp and Viber, automate communication workflows, and manage customer data, all in a production-ready environment.
+- **[Infobip](https://github.com/infobip/mcp)** - MCP server for integrating [Infobip](https://www.infobip.com/) global cloud communication platform. It equips AI agents with communication superpowers, allowing them to send and receive SMS and RCS messages, interact with WhatsApp and Viber, automate communication workflows, and manage customer data, all in a production-ready environment.
- **[Inkeep](https://github.com/inkeep/mcp-server-python)** - RAG Search over your content powered by [Inkeep](https://inkeep.com)
- **[Integration App](https://github.com/integration-app/mcp-server)** - Interact with any other SaaS applications on behalf of your customers.
- **[IP2Location.io](https://github.com/ip2location/mcp-ip2location-io)** - Interact with IP2Location.io API to retrieve the geolocation information for an IP address.
@@ -382,7 +382,6 @@ Official integrations are maintained by companies building production ready MCP
- **[OMOP MCP](https://github.com/OHNLP/omop_mcp)** - Map clinical terminology to OMOP concepts using LLMs for healthcare data standardization.
- **[ONLYOFFICE DocSpace](https://github.com/ONLYOFFICE/docspace-mcp)** - Interact with [ONLYOFFICE DocSpace](https://www.onlyoffice.com/docspace.aspx) API to create rooms, manage files and folders.
- **[OP.GG](https://github.com/opgginc/opgg-mcp)** - Access real-time gaming data across popular titles like League of Legends, TFT, and Valorant, offering champion analytics, esports schedules, meta compositions, and character statistics.
-- **[Openfort](https://github.com/openfort-xyz/mcp)** - Connect your AI to Openfort's smart wallet, auth, and project infrastructure.
- **[OpenMetadata](https://open-metadata.org/mcp)** - The first Enterprise-grade MCP server for metadata
- **[OpenSearch](https://github.com/opensearch-project/opensearch-mcp-server-py)** - MCP server that enables AI agents to perform search and analytics use cases on data stored in [OpenSearch](https://opensearch.org/).
- **[OpsLevel](https://github.com/opslevel/opslevel-mcp)** - Official MCP Server for [OpsLevel](https://www.opslevel.com).
@@ -425,7 +424,7 @@ Official integrations are maintained by companies building production ready MCP
- **[Powerdrill](https://github.com/powerdrillai/powerdrill-mcp)** - An MCP server that provides tools to interact with Powerdrill datasets, enabling smart AI data analysis and insights.
- **[pre.dev Architect](https://docs.pre.dev/mcp-server)** - 10x your coding agent by keeping it on track with pre.dev.
- **[PrestaShop.com](https://docs.mcp.prestashop.com/)** - Manage your PrestaShop store with AI Assistant by using the official PrestaShop MCP server.
-- **[Prisma](https://www.prisma.io/docs/postgres/mcp-server)** - Create and manage Prisma Postgres databases
+- **[Prisma](https://www.prisma.io/docs/postgres/integrations/mcp-server)** - Create and manage Prisma Postgres databases
- **[Probe.dev](https://docs.probe.dev/guides/mcp-integration)** - Comprehensive media analysis and validation powered by [Probe.dev](https://probe.dev). Hosted MCP server with FFprobe, MediaInfo, and Probe Report analysis capabilities.
- **[ProdE](https://github.com/CuriousBox-AI/ProdE-mcp)** - Your 24/7 production engineer that preserves context across multiple codebases.
- **[Program Integrity Alliance (PIA)](https://github.com/Program-Integrity-Alliance/pia-mcp-local)** - Local and Hosted MCP servers providing AI-friendly access to U.S. Government Open Datasets. Also available on [Docker MCP Catalog](https://hub.docker.com/mcp/explore?search=PIA). See [our website](https://programintegrity.org) for more details.
@@ -648,7 +647,7 @@ A growing set of community-developed and maintained servers demonstrates various
- **[BigQuery](https://github.com/ergut/mcp-bigquery-server)** (by ergut) - Server implementation for Google BigQuery integration that enables direct BigQuery database access and querying capabilities
- **[Bilibili](https://github.com/wangshunnn/bilibili-mcp-server)** - This MCP server provides tools to fetch Bilibili user profiles, video metadata, search videos, and more.
- **[Binance](https://github.com/ethancod1ng/binance-mcp-server)** - Cryptocurrency trading and market data access through Binance API integration.
-- **[Binance](https://github.com/AnalyticAce/BinanceMCPServer)** (by dosseh shalom) - Unofficial tools and server implementation for Binance's Model Context Protocol (MCP). Designed to support developers building crypto trading AI Agents.
+- **[Binance](https://github.com/AnalyticAce/binance-mcp-server)** (by dosseh shalom) - Unofficial tools and server implementation for Binance's Model Context Protocol (MCP). Designed to support developers building crypto trading AI Agents.
- **[Bing Web Search API](https://github.com/leehanchung/bing-search-mcp)** (by hanchunglee) - Server implementation for Microsoft Bing Web Search API.
- **[BioMCP](https://github.com/genomoncology/biomcp)** (by imaurer) - Biomedical research assistant server providing access to PubMed, ClinicalTrials.gov, and MyVariant.info.
- **[bioRxiv](https://github.com/JackKuo666/bioRxiv-MCP-Server)** - 🔍 Enable AI assistants to search and access bioRxiv papers through a simple MCP interface.
@@ -1654,4 +1653,3 @@ If you find MCP servers useful, please consider starring the repository and cont
---

Managed by Anthropic, but built together with the community. The Model Context Protocol is open source and we encourage everyone to contribute their own servers and improvements!
-
diff --git a/src/filesystem/__tests__/structured-content.test.ts b/src/filesystem/__tests__/structured-content.test.ts
new file mode 100644
index 0000000000..4b8f92b0a3
--- /dev/null
+++ b/src/filesystem/__tests__/structured-content.test.ts
@@ -0,0 +1,158 @@
+import { describe, it, expect, beforeEach, afterEach } from 'vitest';
+import * as fs from 'fs/promises';
+import * as path from 'path';
+import * as os from 'os';
+import { Client } from '@modelcontextprotocol/sdk/client/index.js';
+import { StdioClientTransport } from '@modelcontextprotocol/sdk/client/stdio.js';
+import { spawn } from 'child_process';
+
+/**
+ * Integration tests to verify that tool handlers return structuredContent
+ * that matches the declared outputSchema.
+ *
+ * These tests address issues #3110, #3106, #3093 where tools were returning
+ * structuredContent: { content: [contentBlock] } (array) instead of
+ * structuredContent: { content: string } as declared in outputSchema.
+ */
+describe('structuredContent schema compliance', () => {
+ let client: Client;
+ let transport: StdioClientTransport;
+ let testDir: string;
+
+ beforeEach(async () => {
+ // Create a temp directory for testing
+ testDir = await fs.mkdtemp(path.join(os.tmpdir(), 'mcp-fs-test-'));
+
+ // Create test files
+ await fs.writeFile(path.join(testDir, 'test.txt'), 'test content');
+ await fs.mkdir(path.join(testDir, 'subdir'));
+ await fs.writeFile(path.join(testDir, 'subdir', 'nested.txt'), 'nested content');
+
+ // Start the MCP server
+ const serverPath = path.resolve(__dirname, '../dist/index.js');
+ transport = new StdioClientTransport({
+ command: 'node',
+ args: [serverPath, testDir],
+ });
+
+ client = new Client({
+ name: 'test-client',
+ version: '1.0.0',
+ }, {
+ capabilities: {}
+ });
+
+ await client.connect(transport);
+ });
+
+ afterEach(async () => {
+ await client?.close();
+ await fs.rm(testDir, { recursive: true, force: true });
+ });
+
+ describe('directory_tree', () => {
+ it('should return structuredContent.content as a string, not an array', async () => {
+ const result = await client.callTool({
+ name: 'directory_tree',
+ arguments: { path: testDir }
+ });
+
+ // The result should have structuredContent
+ expect(result.structuredContent).toBeDefined();
+
+ // structuredContent.content should be a string (matching outputSchema: { content: z.string() })
+ const structuredContent = result.structuredContent as { content: unknown };
+ expect(typeof structuredContent.content).toBe('string');
+
+ // It should NOT be an array
+ expect(Array.isArray(structuredContent.content)).toBe(false);
+
+ // The content should be valid JSON representing the tree
+ const treeData = JSON.parse(structuredContent.content as string);
+ expect(Array.isArray(treeData)).toBe(true);
+ });
+ });
+
+ describe('list_directory_with_sizes', () => {
+ it('should return structuredContent.content as a string, not an array', async () => {
+ const result = await client.callTool({
+ name: 'list_directory_with_sizes',
+ arguments: { path: testDir }
+ });
+
+ // The result should have structuredContent
+ expect(result.structuredContent).toBeDefined();
+
+ // structuredContent.content should be a string (matching outputSchema: { content: z.string() })
+ const structuredContent = result.structuredContent as { content: unknown };
+ expect(typeof structuredContent.content).toBe('string');
+
+ // It should NOT be an array
+ expect(Array.isArray(structuredContent.content)).toBe(false);
+
+ // The content should contain directory listing info
+ expect(structuredContent.content).toContain('[FILE]');
+ });
+ });
+
+ describe('move_file', () => {
+ it('should return structuredContent.content as a string, not an array', async () => {
+ const sourcePath = path.join(testDir, 'test.txt');
+ const destPath = path.join(testDir, 'moved.txt');
+
+ const result = await client.callTool({
+ name: 'move_file',
+ arguments: {
+ source: sourcePath,
+ destination: destPath
+ }
+ });
+
+ // The result should have structuredContent
+ expect(result.structuredContent).toBeDefined();
+
+ // structuredContent.content should be a string (matching outputSchema: { content: z.string() })
+ const structuredContent = result.structuredContent as { content: unknown };
+ expect(typeof structuredContent.content).toBe('string');
+
+ // It should NOT be an array
+ expect(Array.isArray(structuredContent.content)).toBe(false);
+
+ // The content should contain success message
+ expect(structuredContent.content).toContain('Successfully moved');
+ });
+ });
+
+ describe('list_directory (control - already working)', () => {
+ it('should return structuredContent.content as a string', async () => {
+ const result = await client.callTool({
+ name: 'list_directory',
+ arguments: { path: testDir }
+ });
+
+ expect(result.structuredContent).toBeDefined();
+
+ const structuredContent = result.structuredContent as { content: unknown };
+ expect(typeof structuredContent.content).toBe('string');
+ expect(Array.isArray(structuredContent.content)).toBe(false);
+ });
+ });
+
+ describe('search_files (control - already working)', () => {
+ it('should return structuredContent.content as a string', async () => {
+ const result = await client.callTool({
+ name: 'search_files',
+ arguments: {
+ path: testDir,
+ pattern: '*.txt'
+ }
+ });
+
+ expect(result.structuredContent).toBeDefined();
+
+ const structuredContent = result.structuredContent as { content: unknown };
+ expect(typeof structuredContent.content).toBe('string');
+ expect(Array.isArray(structuredContent.content)).toBe(false);
+ });
+ });
+});
diff --git a/src/filesystem/index.ts b/src/filesystem/index.ts
index 79a7b2fe4c..48a599fae1 100644
--- a/src/filesystem/index.ts
+++ b/src/filesystem/index.ts
@@ -500,7 +500,7 @@ server.registerTool(
const contentBlock = { type: "text" as const, text };
return {
content: [contentBlock],
- structuredContent: { content: [contentBlock] }
+ structuredContent: { content: text }
};
}
);
@@ -570,7 +570,7 @@ server.registerTool(
const contentBlock = { type: "text" as const, text };
return {
content: [contentBlock],
- structuredContent: { content: [contentBlock] }
+ structuredContent: { content: text }
};
}
);
@@ -599,7 +599,7 @@ server.registerTool(
const contentBlock = { type: "text" as const, text };
return {
content: [contentBlock],
- structuredContent: { content: [contentBlock] }
+ structuredContent: { content: text }
};
}
);