From e431850766c4116ab5d42195f397fc9142ccf822 Mon Sep 17 00:00:00 2001 From: anandgupta42 Date: Thu, 26 Mar 2026 11:51:10 -0700 Subject: [PATCH 1/5] feat: add MongoDB driver support Add MongoDB as the 11th supported database driver, enabling document database operations via the existing `Connector` interface. - New driver at `packages/drivers/src/mongodb.ts` supporting: - Full MQL command set: find, aggregate, CRUD, indexes, collection mgmt - Cross-database queries via per-query `database` field - BSON type serialization (ObjectId, Decimal128, Long, UUID, Binary, Date) - Schema introspection via document sampling with field type inference - Connection string URI and host/port/user/password auth - `authorizedDatabases` fallback for restricted-privilege users - Registration in driver index, config normalizer (with `mongo`/`mongodb` aliases), and connection registry - CI: MongoDB 7.0 service in driver-e2e workflow with health checks - 90 E2E tests including: - CRUD operations, aggregation pipelines, schema introspection - Cross-database operations, index management, collection lifecycle - Adversarial tests: deeply nested docs, special characters, heterogeneous collections, concurrent operations, large documents, numeric edge cases Closes #480 Co-Authored-By: Claude Opus 4.6 (1M context) --- .github/workflows/ci.yml | 18 + bun.lock | 25 + packages/drivers/package.json | 3 +- packages/drivers/src/index.ts | 1 + packages/drivers/src/mongodb.ts | 586 +++++ packages/drivers/src/normalize.ts | 13 + .../altimate/native/connections/registry.ts | 22 +- .../test/altimate/drivers-mongodb-e2e.test.ts | 1915 +++++++++++++++++ 8 files changed, 2580 insertions(+), 3 deletions(-) create mode 100644 packages/drivers/src/mongodb.ts create mode 100644 packages/opencode/test/altimate/drivers-mongodb-e2e.test.ts diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index d50ab68092..d6ce2f843c 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -45,6 +45,7 
@@ jobs: - 'packages/opencode/src/altimate/native/connections/**' - 'packages/opencode/test/altimate/drivers-e2e.test.ts' - 'packages/opencode/test/altimate/drivers-docker-e2e.test.ts' + - 'packages/opencode/test/altimate/drivers-mongodb-e2e.test.ts' - 'packages/opencode/test/altimate/connections.test.ts' dbt-tools: - 'packages/dbt-tools/**' @@ -155,6 +156,16 @@ jobs: --health-timeout 5s --health-retries 10 + mongodb: + image: mongo:7.0 + ports: + - 27017:27017 + options: >- + --health-cmd "mongosh --eval 'db.runCommand({ping:1})' --quiet" + --health-interval 5s + --health-timeout 5s + --health-retries 10 + steps: - uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4 @@ -195,6 +206,13 @@ jobs: TEST_REDSHIFT_PORT: "15439" TEST_REDSHIFT_PASSWORD: testpass123 + - name: Run MongoDB driver E2E + run: bun test test/altimate/drivers-mongodb-e2e.test.ts + working-directory: packages/opencode + env: + TEST_MONGODB_HOST: 127.0.0.1 + TEST_MONGODB_PORT: "27017" + # Cloud tests NOT included — they require real credentials # Run locally with: # ALTIMATE_CODE_CONN_SNOWFLAKE_TEST='...' 
bun test test/altimate/drivers-snowflake-e2e.test.ts diff --git a/bun.lock b/bun.lock index 043147fea9..7654176c36 100644 --- a/bun.lock +++ b/bun.lock @@ -43,6 +43,7 @@ "@databricks/sql": "^1.0.0", "@google-cloud/bigquery": "^8.0.0", "duckdb": "^1.0.0", + "mongodb": "^6.0.0", "mssql": "^11.0.0", "mysql2": "^3.0.0", "oracledb": "^6.0.0", @@ -767,6 +768,8 @@ "@modelcontextprotocol/sdk": ["@modelcontextprotocol/sdk@1.26.0", "", { "dependencies": { "@hono/node-server": "^1.19.9", "ajv": "^8.17.1", "ajv-formats": "^3.0.1", "content-type": "^1.0.5", "cors": "^2.8.5", "cross-spawn": "^7.0.5", "eventsource": "^3.0.2", "eventsource-parser": "^3.0.0", "express": "^5.2.1", "express-rate-limit": "^8.2.1", "hono": "^4.11.4", "jose": "^6.1.3", "json-schema-typed": "^8.0.2", "pkce-challenge": "^5.0.0", "raw-body": "^3.0.0", "zod": "^3.25 || ^4.0", "zod-to-json-schema": "^3.25.1" }, "peerDependencies": { "@cfworker/json-schema": "^4.1.1" }, "optionalPeers": ["@cfworker/json-schema"] }, "sha512-Y5RmPncpiDtTXDbLKswIJzTqu2hyBKxTNsgKqKclDbhIgg1wgtf1fRuvxgTnRfcnxtvvgbIEcqUOzZrJ6iSReg=="], + "@mongodb-js/saslprep": ["@mongodb-js/saslprep@1.4.6", "", { "dependencies": { "sparse-bitfield": "^3.0.3" } }, "sha512-y+x3H1xBZd38n10NZF/rEBlvDOOMQ6LKUTHqr8R9VkJ+mmQOYtJFxIlkkK8fZrtOiL6VixbOBWMbZGBdal3Z1g=="], + "@msgpackr-extract/msgpackr-extract-darwin-arm64": ["@msgpackr-extract/msgpackr-extract-darwin-arm64@3.0.3", "", { "os": "darwin", "cpu": "arm64" }, "sha512-QZHtlVgbAdy2zAqNA9Gu1UpIuI8Xvsd1v8ic6B2pZmeFnFcMWiPLfWXh7TVw4eGEZ/C9TH281KwhVoeQUKbyjw=="], "@msgpackr-extract/msgpackr-extract-darwin-x64": ["@msgpackr-extract/msgpackr-extract-darwin-x64@3.0.3", "", { "os": "darwin", "cpu": "x64" }, "sha512-mdzd3AVzYKuUmiWOQ8GNhl64/IoFGol569zNRdkLReh6LRLHOXxU4U8eq0JwaD8iFHdVGqSy4IjFL4reoWCDFw=="], @@ -1083,6 +1086,10 @@ "@types/unist": ["@types/unist@3.0.3", "", {}, "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q=="], + "@types/webidl-conversions": 
["@types/webidl-conversions@7.0.3", "", {}, "sha512-CiJJvcRtIgzadHCYXw7dqEnMNRjhGZlYK05Mj9OyktqV8uVT8fD2BFOB7S1uwBE3Kj2Z+4UyPmFw/Ixgw/LAlA=="], + + "@types/whatwg-url": ["@types/whatwg-url@11.0.5", "", { "dependencies": { "@types/webidl-conversions": "*" } }, "sha512-coYR071JRaHa+xoEvvYqvnIHaVqaYrLPbsufM9BF63HkwI5Lgmy2QR8Q5K/lYDYo5AK82wOvSOS0UsLTpTG7uQ=="], + "@types/which": ["@types/which@3.0.4", "", {}, "sha512-liyfuo/106JdlgSchJzXEQCVArk0CvevqPote8F8HgWgJ3dRCcTHgJIsLDuee0kxk/mhbInzIZk3QWSZJ8R+2w=="], "@types/yargs": ["@types/yargs@17.0.33", "", { "dependencies": { "@types/yargs-parser": "*" } }, "sha512-WpxBCKWPLr4xSsHgz511rFJAM+wS28w2zEO1QDNY5zM/S8ok70NNfztH0xwhqKyaK0OHCbN98LDAZuy1ctxDkA=="], @@ -1239,6 +1246,8 @@ "browserslist": ["browserslist@4.28.1", "", { "dependencies": { "baseline-browser-mapping": "^2.9.0", "caniuse-lite": "^1.0.30001759", "electron-to-chromium": "^1.5.263", "node-releases": "^2.0.27", "update-browserslist-db": "^1.2.0" }, "bin": { "browserslist": "cli.js" } }, "sha512-ZC5Bd0LgJXgwGqUknZY/vkUQ04r8NXnJZ3yYi4vDmSiZmC/pdSN0NbNRPxZpbtO4uAfDUAFffO8IZoM3Gj8IkA=="], + "bson": ["bson@6.10.4", "", {}, "sha512-WIsKqkSC0ABoBJuT1LEX+2HEvNmNKKgnTAyd0fL8qzK4SH2i9NXg+t08YtdZp/V9IZ33cxe3iV4yM0qg8lMQng=="], + "buffer": ["buffer@5.7.1", "", { "dependencies": { "base64-js": "^1.3.1", "ieee754": "^1.1.13" } }, "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ=="], "buffer-equal-constant-time": ["buffer-equal-constant-time@1.0.1", "", {}, "sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA=="], @@ -1819,6 +1828,8 @@ "media-typer": ["media-typer@1.1.0", "", {}, "sha512-aisnrDP4GNe06UcKFnV5bfMNPBUw4jsLGaWwWfnH3v02GnBuXX2MCVn5RbrWo0j3pczUilYblq7fQ7Nw2t5XKw=="], + "memory-pager": ["memory-pager@1.5.0", "", {}, "sha512-ZS4Bp4r/Zoeq6+NLJpP+0Zzm0pR8whtGPf1XExKLJBAczGMnSi3It14OiNCStjQjM6NU1okjQGSxgEZN8eBYKg=="], + "merge-descriptors": ["merge-descriptors@2.0.0", "", {}, 
"sha512-Snk314V5ayFLhp3fkUREub6WtjBfPdCPY1Ln8/8munuLuiYhsABgBVWsozAG+MWMbVEvcdcpbi9R7ww22l9Q3g=="], "merge-stream": ["merge-stream@2.0.0", "", {}, "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w=="], @@ -1873,6 +1884,10 @@ "moment-timezone": ["moment-timezone@0.5.48", "", { "dependencies": { "moment": "^2.29.4" } }, "sha512-f22b8LV1gbTO2ms2j2z13MuPogNoh5UzxL3nzNAYKGraILnbGc9NEE6dyiiiLv46DGRb8A4kg8UKWLjPthxBHw=="], + "mongodb": ["mongodb@6.21.0", "", { "dependencies": { "@mongodb-js/saslprep": "^1.3.0", "bson": "^6.10.4", "mongodb-connection-string-url": "^3.0.2" }, "peerDependencies": { "@aws-sdk/credential-providers": "^3.188.0", "@mongodb-js/zstd": "^1.1.0 || ^2.0.0", "gcp-metadata": "^5.2.0", "kerberos": "^2.0.1", "mongodb-client-encryption": ">=6.0.0 <7", "snappy": "^7.3.2", "socks": "^2.7.1" }, "optionalPeers": ["@aws-sdk/credential-providers", "@mongodb-js/zstd", "gcp-metadata", "kerberos", "mongodb-client-encryption", "snappy", "socks"] }, "sha512-URyb/VXMjJ4da46OeSXg+puO39XH9DeQpWCslifrRn9JWugy0D+DvvBvkm2WxmHe61O/H19JM66p1z7RHVkZ6A=="], + + "mongodb-connection-string-url": ["mongodb-connection-string-url@3.0.2", "", { "dependencies": { "@types/whatwg-url": "^11.0.2", "whatwg-url": "^14.1.0 || ^13.0.0" } }, "sha512-rMO7CGo/9BFwyZABcKAWL8UJwH/Kc2x0g72uhDWzG48URRax5TCIcJ7Rc3RZqffZzO/Gwff/jyKwCU9TN8gehA=="], + "ms": ["ms@2.1.3", "", {}, "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="], "msgpackr": ["msgpackr@1.11.9", "", { "optionalDependencies": { "msgpackr-extract": "^3.0.2" } }, "sha512-FkoAAyyA6HM8wL882EcEyFZ9s7hVADSwG9xrVx3dxxNQAtgADTrJoEWivID82Iv1zWDsv/OtbrrcZAzGzOMdNw=="], @@ -2099,6 +2114,8 @@ "pump": ["pump@3.0.4", "", { "dependencies": { "end-of-stream": "^1.1.0", "once": "^1.3.1" } }, "sha512-VS7sjc6KR7e1ukRFhQSY5LM2uBWAUPiOPa/A3mkKmiMwSmRFUITt0xuj+/lesgnCv+dPIEYlkzrcyXgquIHMcA=="], + "punycode": ["punycode@2.3.1", "", {}, 
"sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg=="], + "pure-rand": ["pure-rand@8.1.0", "", {}, "sha512-53B3MB8wetRdD6JZ4W/0gDKaOvKwuXrEmV1auQc0hASWge8rieKV4PCCVNVbJ+i24miiubb4c/B+dg8Ho0ikYw=="], "python-bridge": ["python-bridge@1.1.0", "", { "dependencies": { "bluebird": "^3.5.0" } }, "sha512-qjQ0QB8p9cn/XDeILQH0aP307hV58lrmv0Opjyub68Um7FHdF+ZXlTqyxNkKaXOFk2QSkScoPWwn7U9GGnrkeQ=="], @@ -2249,6 +2266,8 @@ "space-separated-tokens": ["space-separated-tokens@2.0.2", "", {}, "sha512-PEGlAwrG8yXGXRjW32fGbg66JAlOAwbObuqVoJpv/mRgoWDQfgH1wDPvtzWyUSNAXBGSk8h755YDbbcEy3SH2Q=="], + "sparse-bitfield": ["sparse-bitfield@3.0.3", "", { "dependencies": { "memory-pager": "^1.0.2" } }, "sha512-kvzhi7vqKTfkh0PZU+2D2PIllw2ymqJKujUcyPMd9Y75Nv4nPbGJZXNhxsgdQab2BmlDct1YnfQCguEvHr7VsQ=="], + "split2": ["split2@4.2.0", "", {}, "sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg=="], "sprintf-js": ["sprintf-js@1.1.3", "", {}, "sha512-Oo+0REFV59/rz3gfJNKQiBlwfHaSESl1pcGyABQsnnIfWOFt6JNj5gCog2U6MLZ//IGYD+nA8nI+mTShREReaA=="], @@ -3063,6 +3082,8 @@ "minipass-sized/minipass": ["minipass@3.3.6", "", { "dependencies": { "yallist": "^4.0.0" } }, "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw=="], + "mongodb-connection-string-url/whatwg-url": ["whatwg-url@14.2.0", "", { "dependencies": { "tr46": "^5.1.0", "webidl-conversions": "^7.0.0" } }, "sha512-De72GdQZzNTUBBChsXueQUnPKDkg/5A5zp7pFDuQAj5UFoENpiACU0wlCvzpAGnTkj++ihpKwKyYewn/XNUbKw=="], + "mssql/commander": ["commander@11.1.0", "", {}, "sha512-yPVavfyCcRhmorC7rWlkHn15b4wDVgVmBA7kV4QVBsF7kv/9TKJAbAXVTxvTnwP8HHKjRCJDClKbciiYS7p0DQ=="], "node-gyp/glob": ["glob@7.2.3", "", { "dependencies": { "fs.realpath": "^1.0.0", "inflight": "^1.0.4", "inherits": "2", "minimatch": "^3.1.1", "once": "^1.3.0", "path-is-absolute": "^1.0.0" } }, 
"sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q=="], @@ -3553,6 +3574,10 @@ "minipass-sized/minipass/yallist": ["yallist@4.0.0", "", {}, "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="], + "mongodb-connection-string-url/whatwg-url/tr46": ["tr46@5.1.1", "", { "dependencies": { "punycode": "^2.3.1" } }, "sha512-hdF5ZgjTqgAntKkklYw0R03MG2x/bSzTtkxmIRw/sTNV8YXsCJ1tfLAX23lhxhHJlEf3CRCOCGGWw3vI3GaSPw=="], + + "mongodb-connection-string-url/whatwg-url/webidl-conversions": ["webidl-conversions@7.0.0", "", {}, "sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g=="], + "node-gyp/glob/minimatch": ["minimatch@3.1.5", "", { "dependencies": { "brace-expansion": "^1.1.7" } }, "sha512-VgjWUsnnT6n+NUk6eZq77zeFdpW2LWDzP6zFGrCbHXiYNul5Dzqk2HHQ5uFH2DNW5Xbp8+jVzaeNt94ssEEl4w=="], "node-gyp/nopt/abbrev": ["abbrev@1.1.1", "", {}, "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q=="], diff --git a/packages/drivers/package.json b/packages/drivers/package.json index cda007258a..72097c08cd 100644 --- a/packages/drivers/package.json +++ b/packages/drivers/package.json @@ -16,6 +16,7 @@ "mysql2": "^3.0.0", "mssql": "^11.0.0", "oracledb": "^6.0.0", - "duckdb": "^1.0.0" + "duckdb": "^1.0.0", + "mongodb": "^6.0.0" } } diff --git a/packages/drivers/src/index.ts b/packages/drivers/src/index.ts index f56ba02d6f..6533c2da8a 100644 --- a/packages/drivers/src/index.ts +++ b/packages/drivers/src/index.ts @@ -16,3 +16,4 @@ export { connect as connectSqlserver } from "./sqlserver" export { connect as connectOracle } from "./oracle" export { connect as connectDuckdb } from "./duckdb" export { connect as connectSqlite } from "./sqlite" +export { connect as connectMongodb } from "./mongodb" diff --git a/packages/drivers/src/mongodb.ts b/packages/drivers/src/mongodb.ts new file mode 100644 index 0000000000..3b20c5f965 --- 
/dev/null +++ b/packages/drivers/src/mongodb.ts @@ -0,0 +1,586 @@ +/** + * MongoDB driver using the `mongodb` package. + * + * Maps MongoDB concepts to the Connector interface: + * - listSchemas() → lists databases + * - listTables(schema) → lists collections in a database + * - describeTable(s, t) → samples documents to infer field types + * - execute(query) → parses and executes MQL commands + * + * Query format (JSON string): + * { "database": "mydb", "collection": "users", "command": "find", "filter": { "age": { "$gt": 25 } } } + * { "database": "mydb", "collection": "orders", "command": "aggregate", "pipeline": [...] } + * { "database": "mydb", "collection": "users", "command": "insertMany", "documents": [...] } + * { "database": "mydb", "collection": "users", "command": "countDocuments", "filter": {} } + */ + +import type { ConnectionConfig, Connector, ConnectorResult, SchemaColumn } from "./types" + +/** Supported MQL commands. */ +type MqlCommand = + | "find" + | "aggregate" + | "countDocuments" + | "distinct" + | "insertOne" + | "insertMany" + | "updateOne" + | "updateMany" + | "deleteOne" + | "deleteMany" + | "createCollection" + | "dropCollection" + | "createIndex" + | "listIndexes" + +interface MqlQuery { + database?: string + collection?: string + command: MqlCommand + // find + filter?: Record + projection?: Record + sort?: Record + limit?: number + skip?: number + // aggregate + pipeline?: Record[] + // insert + document?: Record + documents?: Record[] + // update + update?: Record + // distinct + field?: string + // createIndex + keys?: Record + options?: Record + // createCollection + name?: string +} + +/** + * Infer a human-readable type name from a JavaScript value. 
+ */ +function inferType(value: unknown): string { + if (value === null || value === undefined) return "null" + if (Array.isArray(value)) return "array" + if (value instanceof Date) return "date" + // mongodb BSON types + const ctor = (value as any)?._bsontype + if (ctor) { + switch (ctor) { + case "ObjectId": + case "ObjectID": + return "objectId" + case "Decimal128": + return "decimal128" + case "Long": + return "int64" + case "Int32": + return "int32" + case "Double": + return "double" + case "Binary": + return "binary" + case "Timestamp": + return "timestamp" + case "MinKey": + return "minKey" + case "MaxKey": + return "maxKey" + case "BSONRegExp": + return "regex" + case "Code": + return "javascript" + case "BSONSymbol": + return "symbol" + case "UUID": + return "uuid" + default: + return ctor.toLowerCase() + } + } + const t = typeof value + if (t === "number") return Number.isInteger(value as number) ? "int32" : "double" + if (t === "boolean") return "bool" + if (t === "string") return "string" + if (t === "object") return "object" + return "unknown" +} + +/** + * Flatten a document's fields into column entries, handling nested objects + * with dot notation (1 level deep only to keep it manageable). + */ +function extractFields( + docs: Record[], +): Map> { + const fieldTypes = new Map>() + + for (const doc of docs) { + for (const [key, value] of Object.entries(doc)) { + const types = fieldTypes.get(key) ?? new Set() + types.add(inferType(value)) + fieldTypes.set(key, types) + } + } + + return fieldTypes +} + +export async function connect(config: ConnectionConfig): Promise { + let mongoModule: any + try { + mongoModule = await import("mongodb") + mongoModule = mongoModule.default || mongoModule + } catch { + throw new Error( + "MongoDB driver not installed. 
Run: npm install mongodb", + ) + } + + const MongoClient = mongoModule.MongoClient + + let client: any + const explicitDb = config.database as string | undefined + + /** Resolve which database to use: query-specified, config-specified, or URI default. */ + function resolveDb(queryDb?: string): any { + if (queryDb) return client.db(queryDb) + if (explicitDb) return client.db(explicitDb) + // Fall back to the database embedded in the connection string URI, or MongoDB's default + return client.db() + } + + /** + * Serialize a value for tabular display. + * BSON types are converted to strings; nested objects are JSON-serialized. + */ + function serializeValue(val: unknown): unknown { + if (val === null || val === undefined) return val + if (typeof val !== "object") return val + + // BSON ObjectId + if ((val as any)._bsontype === "ObjectId" || (val as any)._bsontype === "ObjectID") { + return (val as any).toString() + } + // BSON Decimal128, Long, Int32, Double + if ((val as any)._bsontype === "Decimal128" || (val as any)._bsontype === "Long" || + (val as any)._bsontype === "Int32" || (val as any)._bsontype === "Double") { + return (val as any).toString() + } + // BSON UUID + if ((val as any)._bsontype === "UUID") { + return (val as any).toString() + } + // BSON Binary + if ((val as any)._bsontype === "Binary") { + return `Binary(${(val as any).length()})` + } + // BSON Timestamp + if ((val as any)._bsontype === "Timestamp") { + return (val as any).toString() + } + // Date + if (val instanceof Date) { + return val.toISOString() + } + // Arrays and plain objects — JSON-serialize for tabular display + if (Array.isArray(val) || typeof (val as any).toJSON !== "function") { + return JSON.stringify(val) + } + return JSON.stringify(val) + } + + return { + async connect() { + // Support connection_string or individual fields + let uri: string + if (config.connection_string) { + uri = config.connection_string as string + } else { + const host = (config.host as string) ?? 
"127.0.0.1" + const port = (config.port as number) ?? 27017 + const user = config.user as string | undefined + const password = config.password as string | undefined + + if (user && password) { + uri = `mongodb://${encodeURIComponent(user)}:${encodeURIComponent(password)}@${host}:${port}` + } else { + uri = `mongodb://${host}:${port}` + } + } + + const connectOptions: Record = { + connectTimeoutMS: (config.connect_timeout as number) ?? 10000, + serverSelectionTimeoutMS: (config.server_selection_timeout as number) ?? 10000, + } + + if (config.auth_source) { + connectOptions.authSource = config.auth_source + } + + if (config.replica_set) { + connectOptions.replicaSet = config.replica_set + } + + if (config.tls !== undefined) { + connectOptions.tls = config.tls + } + + if (config.direct_connection !== undefined) { + connectOptions.directConnection = config.direct_connection + } + + client = new MongoClient(uri, connectOptions) + await client.connect() + }, + + async execute( + query: string, + limit?: number, + _binds?: any[], + ): Promise { + let parsed: MqlQuery + try { + parsed = JSON.parse(query) as MqlQuery + } catch (e) { + throw new Error( + `Invalid MQL query — must be valid JSON. Error: ${(e as Error).message}`, + ) + } + + if (!parsed.command) { + throw new Error("MQL query must include a 'command' field") + } + + const db = resolveDb(parsed.database) + const effectiveLimit = limit ?? 1000 + const cmd = parsed.command + + // Commands that don't need a collection + if (cmd === "createCollection") { + const name = parsed.name ?? parsed.collection + if (!name) { + throw new Error("createCollection requires 'name' or 'collection'") + } + await db.createCollection(name, parsed.options ?? 
{}) + return { columns: ["result"], rows: [["ok"]], row_count: 1, truncated: false } + } + + if (cmd === "dropCollection") { + if (!parsed.collection) { + throw new Error("dropCollection requires 'collection'") + } + const dropped = await db.collection(parsed.collection).drop().catch((e: any) => { + if (e.codeName === "NamespaceNotFound") return false + throw e + }) + return { + columns: ["dropped"], + rows: [[dropped]], + row_count: 1, + truncated: false, + } + } + + if (!parsed.collection) { + throw new Error(`Command '${cmd}' requires a 'collection' field`) + } + + const coll = db.collection(parsed.collection) + + switch (cmd) { + case "find": { + let cursor = coll.find(parsed.filter ?? {}) + if (parsed.projection) cursor = cursor.project(parsed.projection) + if (parsed.sort) cursor = cursor.sort(parsed.sort) + if (parsed.skip) cursor = cursor.skip(parsed.skip) + // Fetch one extra to detect truncation + const queryLimit = parsed.limit ?? effectiveLimit + cursor = cursor.limit(queryLimit + 1) + const docs = await cursor.toArray() + + const truncated = docs.length > queryLimit + const limited = truncated ? 
docs.slice(0, queryLimit) : docs + + if (limited.length === 0) { + return { columns: [], rows: [], row_count: 0, truncated: false } + } + + // Build column list from all documents (documents may have different fields) + const colSet = new Set() + for (const doc of limited) { + for (const key of Object.keys(doc)) { + colSet.add(key) + } + } + const columns = Array.from(colSet) + + const rows = limited.map((doc: any) => + columns.map((col) => serializeValue(doc[col])), + ) + + return { columns, rows, row_count: limited.length, truncated } + } + + case "aggregate": { + if (!parsed.pipeline || !Array.isArray(parsed.pipeline)) { + throw new Error("aggregate requires a 'pipeline' array") + } + // Append $limit if the pipeline doesn't already end with one + const pipeline = [...parsed.pipeline] + const lastStage = pipeline[pipeline.length - 1] + const hasLimit = lastStage && "$limit" in lastStage + if (!hasLimit) { + pipeline.push({ $limit: effectiveLimit + 1 }) + } + + const docs = await coll.aggregate(pipeline).toArray() + + const truncated = !hasLimit && docs.length > effectiveLimit + const limited = truncated ? docs.slice(0, effectiveLimit) : docs + + if (limited.length === 0) { + return { columns: [], rows: [], row_count: 0, truncated: false } + } + + const colSet = new Set() + for (const doc of limited) { + for (const key of Object.keys(doc)) { + colSet.add(key) + } + } + const columns = Array.from(colSet) + + const rows = limited.map((doc: any) => + columns.map((col) => serializeValue(doc[col])), + ) + + return { columns, rows, row_count: limited.length, truncated } + } + + case "countDocuments": { + const count = await coll.countDocuments(parsed.filter ?? {}) + return { + columns: ["count"], + rows: [[count]], + row_count: 1, + truncated: false, + } + } + + case "distinct": { + if (!parsed.field) { + throw new Error("distinct requires a 'field' string") + } + const values = await coll.distinct(parsed.field, parsed.filter ?? 
{}) + const truncated = values.length > effectiveLimit + const limited = truncated ? values.slice(0, effectiveLimit) : values + return { + columns: [parsed.field], + rows: limited.map((v: unknown) => [v]), + row_count: limited.length, + truncated, + } + } + + case "insertOne": { + if (!parsed.document) { + throw new Error("insertOne requires a 'document' object") + } + const result = await coll.insertOne(parsed.document) + return { + columns: ["insertedId"], + rows: [[result.insertedId.toString()]], + row_count: 1, + truncated: false, + } + } + + case "insertMany": { + if (!parsed.documents || !Array.isArray(parsed.documents)) { + throw new Error("insertMany requires a 'documents' array") + } + const result = await coll.insertMany(parsed.documents) + return { + columns: ["insertedCount"], + rows: [[result.insertedCount]], + row_count: 1, + truncated: false, + } + } + + case "updateOne": { + if (!parsed.update) { + throw new Error("updateOne requires an 'update' object") + } + const result = await coll.updateOne(parsed.filter ?? {}, parsed.update) + return { + columns: ["matchedCount", "modifiedCount"], + rows: [[result.matchedCount, result.modifiedCount]], + row_count: 1, + truncated: false, + } + } + + case "updateMany": { + if (!parsed.update) { + throw new Error("updateMany requires an 'update' object") + } + const result = await coll.updateMany(parsed.filter ?? {}, parsed.update) + return { + columns: ["matchedCount", "modifiedCount"], + rows: [[result.matchedCount, result.modifiedCount]], + row_count: 1, + truncated: false, + } + } + + case "deleteOne": { + const result = await coll.deleteOne(parsed.filter ?? {}) + return { + columns: ["deletedCount"], + rows: [[result.deletedCount]], + row_count: 1, + truncated: false, + } + } + + case "deleteMany": { + const result = await coll.deleteMany(parsed.filter ?? 
{}) + return { + columns: ["deletedCount"], + rows: [[result.deletedCount]], + row_count: 1, + truncated: false, + } + } + + case "createIndex": { + if (!parsed.keys) { + throw new Error("createIndex requires a 'keys' object") + } + const indexName = await coll.createIndex(parsed.keys, parsed.options ?? {}) + return { + columns: ["indexName"], + rows: [[indexName]], + row_count: 1, + truncated: false, + } + } + + case "listIndexes": { + const indexes = await coll.listIndexes().toArray() + if (indexes.length === 0) { + return { columns: [], rows: [], row_count: 0, truncated: false } + } + const columns = ["name", "key", "unique"] + const rows = indexes.map((idx: any) => [ + idx.name, + JSON.stringify(idx.key), + idx.unique ?? false, + ]) + return { columns, rows, row_count: rows.length, truncated: false } + } + + default: + throw new Error(`Unsupported MQL command: ${cmd}`) + } + }, + + async listSchemas(): Promise { + try { + const admin = client.db().admin() + const result = await admin.listDatabases({ nameOnly: true, authorizedDatabases: true }) + return result.databases + .map((db: any) => db.name as string) + .filter((name: string) => name !== "local" && name !== "config") + .sort() + } catch { + // Fallback for users without listDatabases privilege: return the configured/default database + const db = resolveDb() + return [db.databaseName] + } + }, + + async listTables( + schema: string, + ): Promise> { + const db = client.db(schema) + const collections = await db.listCollections().toArray() + return collections + .map((c: any) => ({ + name: c.name as string, + type: c.type === "view" ? 
"view" : "collection", + })) + .sort((a: { name: string }, b: { name: string }) => + a.name.localeCompare(b.name), + ) + }, + + async describeTable( + schema: string, + table: string, + ): Promise { + const db = client.db(schema) + const coll = db.collection(table) + + // Sample up to 100 documents to infer schema + const docs = await coll.find({}).limit(100).toArray() + + if (docs.length === 0) { + return [] + } + + const fieldTypes = extractFields(docs) + // Track which fields are missing from some documents (nullable by absence) + const fieldPresence = new Map() + for (const doc of docs) { + for (const key of Object.keys(doc)) { + fieldPresence.set(key, (fieldPresence.get(key) ?? 0) + 1) + } + } + + const columns: SchemaColumn[] = [] + for (const [name, types] of fieldTypes) { + const typeArr = Array.from(types) + const hasNull = typeArr.includes("null") + const nonNullTypes = typeArr.filter((t) => t !== "null") + const dataType = + nonNullTypes.length === 0 + ? "null" + : nonNullTypes.length === 1 + ? nonNullTypes[0] + : nonNullTypes.join(" | ") + + // Field is nullable if it has null values OR is missing from some documents + const presentIn = fieldPresence.get(name) ?? 
0 + const missingFromSome = presentIn < docs.length + + columns.push({ + name, + data_type: dataType, + nullable: hasNull || missingFromSome, + }) + } + + // Sort: _id first, then alphabetical + columns.sort((a, b) => { + if (a.name === "_id") return -1 + if (b.name === "_id") return 1 + return a.name.localeCompare(b.name) + }) + + return columns + }, + + async close() { + if (client) { + await client.close() + client = null + } + }, + } +} diff --git a/packages/drivers/src/normalize.ts b/packages/drivers/src/normalize.ts index edcb85b8c2..e11d04044d 100644 --- a/packages/drivers/src/normalize.ts +++ b/packages/drivers/src/normalize.ts @@ -73,6 +73,17 @@ const ORACLE_ALIASES: AliasMap = { service_name: ["serviceName"], } +const MONGODB_ALIASES: AliasMap = { + user: ["username"], + database: ["dbname", "db"], + connection_string: ["connectionString", "uri", "url"], + auth_source: ["authSource"], + replica_set: ["replicaSet"], + direct_connection: ["directConnection"], + connect_timeout: ["connectTimeoutMS"], + server_selection_timeout: ["serverSelectionTimeoutMS"], +} + /** Map of warehouse type to its alias map. 
*/ const DRIVER_ALIASES: Record = { snowflake: SNOWFLAKE_ALIASES, @@ -86,6 +97,8 @@ const DRIVER_ALIASES: Record = { sqlserver: SQLSERVER_ALIASES, mssql: SQLSERVER_ALIASES, oracle: ORACLE_ALIASES, + mongodb: MONGODB_ALIASES, + mongo: MONGODB_ALIASES, // duckdb and sqlite have simple configs — no aliases needed } diff --git a/packages/opencode/src/altimate/native/connections/registry.ts b/packages/opencode/src/altimate/native/connections/registry.ts index 67fb35f271..eb2d015b03 100644 --- a/packages/opencode/src/altimate/native/connections/registry.ts +++ b/packages/opencode/src/altimate/native/connections/registry.ts @@ -126,6 +126,8 @@ const DRIVER_MAP: Record = { duckdb: "@altimateai/drivers/duckdb", oracle: "@altimateai/drivers/oracle", sqlite: "@altimateai/drivers/sqlite", + mongodb: "@altimateai/drivers/mongodb", + mongo: "@altimateai/drivers/mongodb", } async function createConnector( @@ -193,6 +195,9 @@ async function createConnector( case "@altimateai/drivers/sqlite": mod = await import("@altimateai/drivers/sqlite") break + case "@altimateai/drivers/mongodb": + mod = await import("@altimateai/drivers/mongodb") + break default: throw new Error(`No static import available for driver: ${driverPath}`) } @@ -222,6 +227,7 @@ export function detectAuthMethod(config: ConnectionConfig | null | undefined): s if (config.password) return "password" const t = typeof config.type === "string" ? config.type.toLowerCase() : "" if (t === "duckdb" || t === "sqlite") return "file" + if (t === "mongodb" || t === "mongo") return config.password ? "password" : "connection_string" return "unknown" } @@ -347,13 +353,25 @@ export function list(): { warehouses: WarehouseInfo[] } { return { warehouses } } -/** Test a connection by running SELECT 1. */ +/** Test a connection by running a simple query. 
*/ export async function test( name: string, ): Promise<{ connected: boolean; error?: string }> { try { const connector = await get(name) - await connector.execute("SELECT 1") + const config = configs.get(name) + const dbType = config?.type?.toLowerCase() + if (dbType === "mongodb" || dbType === "mongo") { + // MongoDB doesn't support SQL — use a ping-equivalent MQL command + await connector.execute(JSON.stringify({ + command: "find", + database: (config?.database as string) || "admin", + collection: "system.version", + limit: 1, + })) + } else { + await connector.execute("SELECT 1") + } return { connected: true } } catch (e) { return { connected: false, error: String(e) } diff --git a/packages/opencode/test/altimate/drivers-mongodb-e2e.test.ts b/packages/opencode/test/altimate/drivers-mongodb-e2e.test.ts new file mode 100644 index 0000000000..de00e7f674 --- /dev/null +++ b/packages/opencode/test/altimate/drivers-mongodb-e2e.test.ts @@ -0,0 +1,1915 @@ +import { describe, expect, test, beforeAll, afterAll } from "bun:test" +import { execSync } from "child_process" +import { createConnection } from "net" +import type { Connector, ConnectorResult, SchemaColumn } from "@altimateai/drivers/types" + +// --------------------------------------------------------------------------- +// Infrastructure helpers +// --------------------------------------------------------------------------- + +const HAS_CI_SERVICE = !!process.env.TEST_MONGODB_HOST +const DOCKER_OPT_IN = process.env.DRIVER_E2E_DOCKER === "1" + +function isDockerAvailable(): boolean { + if (HAS_CI_SERVICE) return true + if (!DOCKER_OPT_IN) return false + try { + execSync("docker info", { stdio: "ignore", timeout: 3000 }) + return true + } catch { + return false + } +} + +function waitForPort(port: number, timeout = 30000): Promise { + return new Promise((resolve, reject) => { + const start = Date.now() + const attempt = () => { + const sock = createConnection({ host: "127.0.0.1", port }) + sock.once("connect", () 
=> { + sock.destroy() + resolve() + }) + sock.once("error", () => { + sock.destroy() + if (Date.now() - start > timeout) { + reject(new Error(`Port ${port} not ready after ${timeout}ms`)) + } else { + setTimeout(attempt, 500) + } + }) + } + attempt() + }) +} + +async function waitForMongoReady( + connectFn: () => Promise<Connector>, + timeout = 60000, +): Promise<Connector> { + const start = Date.now() + let lastErr: any + while (Date.now() - start < timeout) { + try { + const connector = await connectFn() + await connector.connect() + // Verify the connection works with a simple command + await connector.execute(JSON.stringify({ + command: "find", + database: "admin", + collection: "system.version", + limit: 1, + })) + return connector + } catch (e: any) { + lastErr = e + await new Promise((r) => setTimeout(r, 2000)) + } + } + throw new Error(`MongoDB not ready after ${timeout}ms: ${lastErr?.message}`) +} + +function dockerRm(name: string) { + try { + execSync(`docker rm -f ${name}`, { stdio: "ignore", timeout: 10000 }) + } catch {} +} + +function dockerRun(args: string) { + execSync(`docker run ${args}`, { stdio: "ignore", timeout: 120000 }) +} + +function cmd(query: Record<string, unknown>): string { + return JSON.stringify(query) +} + +const DOCKER = isDockerAvailable() +const MONGODB_CONTAINER = "altimate-test-mongodb" +const MONGODB_HOST = process.env.TEST_MONGODB_HOST || "127.0.0.1" +const MONGODB_PORT = Number(process.env.TEST_MONGODB_PORT) || 27017 +const MONGODB_USE_CI = !!process.env.TEST_MONGODB_HOST + +// --------------------------------------------------------------------------- +// MongoDB E2E Tests +// --------------------------------------------------------------------------- + +describe.skipIf(!DOCKER && !MONGODB_USE_CI)("MongoDB Driver E2E", () => { + let connector: Connector + + beforeAll(async () => { + if (!MONGODB_USE_CI) { + dockerRm(MONGODB_CONTAINER) + dockerRun( + `-d --name ${MONGODB_CONTAINER} ` + + `-p ${MONGODB_PORT}:27017 ` + + `mongo:7.0`, + ) + } + await 
waitForPort(MONGODB_PORT, 30000) + const { connect } = await import("@altimateai/drivers/mongodb") + connector = await waitForMongoReady(async () => { + return await connect({ + type: "mongodb", + host: MONGODB_HOST, + port: MONGODB_PORT, + database: "testdb", + }) + }, 60000) + }, 150000) + + afterAll(async () => { + if (connector) { + // Clean up test databases + try { + await connector.execute(cmd({ + command: "dropCollection", + database: "testdb", + collection: "users", + })) + } catch {} + try { + await connector.execute(cmd({ + command: "dropCollection", + database: "testdb", + collection: "products", + })) + } catch {} + try { + await connector.execute(cmd({ + command: "dropCollection", + database: "testdb", + collection: "orders", + })) + } catch {} + try { + await connector.execute(cmd({ + command: "dropCollection", + database: "adversarial_db", + collection: "weird_names", + })) + } catch {} + try { + await connector.close() + } catch {} + } + dockerRm(MONGODB_CONTAINER) + }) + + // ========================================================================= + // Connection Tests + // ========================================================================= + + describe("Connection", () => { + test("connects with host/port", () => { + expect(connector).toBeDefined() + }) + + test("connects with connection_string", async () => { + const { connect } = await import("@altimateai/drivers/mongodb") + const conn = await connect({ + type: "mongodb", + connection_string: `mongodb://${MONGODB_HOST}:${MONGODB_PORT}`, + database: "testdb", + }) + await conn.connect() + const result = await conn.execute(cmd({ + command: "find", + database: "admin", + collection: "system.version", + limit: 1, + })) + expect(result).toBeDefined() + await conn.close() + }) + + test("close() cleans up and prevents further operations", async () => { + const { connect } = await import("@altimateai/drivers/mongodb") + const conn = await connect({ + type: "mongodb", + host: MONGODB_HOST, + 
port: MONGODB_PORT, + database: "testdb", + }) + await conn.connect() + await conn.close() + // After close, operations should fail + await expect( + conn.execute(cmd({ command: "find", database: "testdb", collection: "users" })), + ).rejects.toThrow() + }) + }) + + // ========================================================================= + // CRUD Operations + // ========================================================================= + + describe("CRUD Operations", () => { + beforeAll(async () => { + // Ensure clean state + try { + await connector.execute(cmd({ + command: "dropCollection", + database: "testdb", + collection: "users", + })) + } catch {} + }) + + test("insertOne — single document", async () => { + const result = await connector.execute(cmd({ + command: "insertOne", + database: "testdb", + collection: "users", + document: { name: "alice", age: 30, email: "alice@example.com", active: true }, + })) + expect(result.columns).toEqual(["insertedId"]) + expect(result.row_count).toBe(1) + expect(result.rows[0][0]).toBeDefined() // ObjectId string + }) + + test("insertMany — multiple documents", async () => { + const result = await connector.execute(cmd({ + command: "insertMany", + database: "testdb", + collection: "users", + documents: [ + { name: "bob", age: 25, email: "bob@example.com", active: false }, + { name: "charlie", age: 35, email: "charlie@example.com", active: true }, + { name: "diana", age: 28, email: "diana@example.com", active: true }, + { name: "eve", age: 40, email: "eve@example.com", active: false }, + ], + })) + expect(result.columns).toEqual(["insertedCount"]) + expect(result.rows[0][0]).toBe(4) + }) + + test("find — all documents", async () => { + const result = await connector.execute(cmd({ + command: "find", + database: "testdb", + collection: "users", + })) + expect(result.row_count).toBe(5) + expect(result.columns).toContain("name") + expect(result.columns).toContain("age") + expect(result.columns).toContain("email") + 
expect(result.columns).toContain("_id") + }) + + test("find — with filter", async () => { + const result = await connector.execute(cmd({ + command: "find", + database: "testdb", + collection: "users", + filter: { active: true }, + })) + expect(result.row_count).toBe(3) // alice, charlie, diana + const names = result.rows.map((r) => r[result.columns.indexOf("name")]) + expect(names).toContain("alice") + expect(names).toContain("charlie") + expect(names).toContain("diana") + }) + + test("find — with projection", async () => { + const result = await connector.execute(cmd({ + command: "find", + database: "testdb", + collection: "users", + filter: { name: "alice" }, + projection: { name: 1, age: 1, _id: 0 }, + })) + expect(result.row_count).toBe(1) + expect(result.columns).toEqual(["name", "age"]) + expect(result.rows[0]).toEqual(["alice", 30]) + }) + + test("find — with sort", async () => { + const result = await connector.execute(cmd({ + command: "find", + database: "testdb", + collection: "users", + projection: { name: 1, age: 1, _id: 0 }, + sort: { age: 1 }, + })) + const ages = result.rows.map((r) => r[result.columns.indexOf("age")]) + expect(ages).toEqual([25, 28, 30, 35, 40]) + }) + + test("find — with sort descending", async () => { + const result = await connector.execute(cmd({ + command: "find", + database: "testdb", + collection: "users", + projection: { name: 1, age: 1, _id: 0 }, + sort: { age: -1 }, + })) + const ages = result.rows.map((r) => r[result.columns.indexOf("age")]) + expect(ages).toEqual([40, 35, 30, 28, 25]) + }) + + test("find — with skip and limit", async () => { + const result = await connector.execute(cmd({ + command: "find", + database: "testdb", + collection: "users", + projection: { name: 1, _id: 0 }, + sort: { age: 1 }, + skip: 1, + limit: 2, + })) + expect(result.row_count).toBe(2) + const names = result.rows.map((r) => r[0]) + expect(names).toEqual(["diana", "alice"]) // age 28, 30 + }) + + test("find — comparison operators ($gt, $lte, 
$ne)", async () => { + const result = await connector.execute(cmd({ + command: "find", + database: "testdb", + collection: "users", + filter: { age: { $gt: 30, $lte: 40 } }, + projection: { name: 1, _id: 0 }, + sort: { age: 1 }, + })) + const names = result.rows.map((r) => r[0]) + expect(names).toEqual(["charlie", "eve"]) // 35, 40 + }) + + test("find — logical operators ($or, $and)", async () => { + const result = await connector.execute(cmd({ + command: "find", + database: "testdb", + collection: "users", + filter: { + $or: [ + { name: "alice" }, + { age: { $gte: 40 } }, + ], + }, + projection: { name: 1, _id: 0 }, + sort: { name: 1 }, + })) + const names = result.rows.map((r) => r[0]) + expect(names).toEqual(["alice", "eve"]) + }) + + test("find — regex filter", async () => { + const result = await connector.execute(cmd({ + command: "find", + database: "testdb", + collection: "users", + filter: { name: { $regex: "^[ab]", $options: "i" } }, + projection: { name: 1, _id: 0 }, + sort: { name: 1 }, + })) + const names = result.rows.map((r) => r[0]) + expect(names).toEqual(["alice", "bob"]) + }) + + test("find — $in operator", async () => { + const result = await connector.execute(cmd({ + command: "find", + database: "testdb", + collection: "users", + filter: { name: { $in: ["alice", "eve"] } }, + projection: { name: 1, _id: 0 }, + sort: { name: 1 }, + })) + expect(result.rows.map((r) => r[0])).toEqual(["alice", "eve"]) + }) + + test("find — $exists operator", async () => { + // All users have "email" field + const result = await connector.execute(cmd({ + command: "find", + database: "testdb", + collection: "users", + filter: { email: { $exists: true } }, + })) + expect(result.row_count).toBe(5) + }) + + test("updateOne — modifies a single document", async () => { + const result = await connector.execute(cmd({ + command: "updateOne", + database: "testdb", + collection: "users", + filter: { name: "alice" }, + update: { $set: { age: 31, role: "admin" } }, + })) + 
expect(result.columns).toEqual(["matchedCount", "modifiedCount"]) + expect(result.rows[0]).toEqual([1, 1]) + + // Verify the update + const verify = await connector.execute(cmd({ + command: "find", + database: "testdb", + collection: "users", + filter: { name: "alice" }, + projection: { age: 1, role: 1, _id: 0 }, + })) + expect(verify.rows[0]).toEqual([31, "admin"]) + }) + + test("updateMany — modifies multiple documents", async () => { + const result = await connector.execute(cmd({ + command: "updateMany", + database: "testdb", + collection: "users", + filter: { active: false }, + update: { $set: { active: true } }, + })) + expect(result.rows[0][0]).toBe(2) // bob, eve matched + expect(result.rows[0][1]).toBe(2) // both modified + }) + + test("countDocuments — counts with filter", async () => { + const result = await connector.execute(cmd({ + command: "countDocuments", + database: "testdb", + collection: "users", + filter: { active: true }, + })) + expect(result.columns).toEqual(["count"]) + expect(result.rows[0][0]).toBe(5) // all are now active after updateMany + }) + + test("countDocuments — counts all", async () => { + const result = await connector.execute(cmd({ + command: "countDocuments", + database: "testdb", + collection: "users", + })) + expect(result.rows[0][0]).toBe(5) + }) + + test("distinct — returns unique values", async () => { + // Reset some users to inactive for distinct test + await connector.execute(cmd({ + command: "updateMany", + database: "testdb", + collection: "users", + filter: { name: { $in: ["bob", "eve"] } }, + update: { $set: { active: false } }, + })) + const result = await connector.execute(cmd({ + command: "distinct", + database: "testdb", + collection: "users", + field: "active", + })) + expect(result.columns).toEqual(["active"]) + const values = result.rows.map((r) => r[0]).sort() + expect(values).toEqual([false, true]) + }) + + test("deleteOne — removes a single document", async () => { + const result = await 
connector.execute(cmd({ + command: "deleteOne", + database: "testdb", + collection: "users", + filter: { name: "eve" }, + })) + expect(result.columns).toEqual(["deletedCount"]) + expect(result.rows[0][0]).toBe(1) + + // Verify deletion + const count = await connector.execute(cmd({ + command: "countDocuments", + database: "testdb", + collection: "users", + })) + expect(count.rows[0][0]).toBe(4) + }) + + test("deleteMany — removes multiple documents", async () => { + // Insert some temp docs to delete + await connector.execute(cmd({ + command: "insertMany", + database: "testdb", + collection: "users", + documents: [ + { name: "temp1", age: 99, active: false }, + { name: "temp2", age: 99, active: false }, + ], + })) + const result = await connector.execute(cmd({ + command: "deleteMany", + database: "testdb", + collection: "users", + filter: { age: 99 }, + })) + expect(result.rows[0][0]).toBe(2) + }) + }) + + // ========================================================================= + // Aggregation Pipeline + // ========================================================================= + + describe("Aggregation Pipeline", () => { + beforeAll(async () => { + // Set up products collection for aggregation tests + try { + await connector.execute(cmd({ + command: "dropCollection", + database: "testdb", + collection: "products", + })) + } catch {} + await connector.execute(cmd({ + command: "insertMany", + database: "testdb", + collection: "products", + documents: [ + { name: "Widget A", category: "widgets", price: 10, quantity: 100 }, + { name: "Widget B", category: "widgets", price: 20, quantity: 50 }, + { name: "Gadget A", category: "gadgets", price: 50, quantity: 30 }, + { name: "Gadget B", category: "gadgets", price: 75, quantity: 15 }, + { name: "Gadget C", category: "gadgets", price: 100, quantity: 5 }, + { name: "Doohickey", category: "misc", price: 5, quantity: 200 }, + ], + })) + }) + + test("aggregate — $group with $sum and $avg", async () => { + const result = 
await connector.execute(cmd({ + command: "aggregate", + database: "testdb", + collection: "products", + pipeline: [ + { + $group: { + _id: "$category", + totalQuantity: { $sum: "$quantity" }, + avgPrice: { $avg: "$price" }, + count: { $sum: 1 }, + }, + }, + { $sort: { _id: 1 } }, + ], + })) + expect(result.row_count).toBe(3) + const categories = result.rows.map((r) => r[result.columns.indexOf("_id")]) + expect(categories).toEqual(["gadgets", "misc", "widgets"]) + + // Check gadgets: 30+15+5=50 quantity, (50+75+100)/3=75 avg price + const gadgetRow = result.rows[categories.indexOf("gadgets")] + expect(gadgetRow[result.columns.indexOf("totalQuantity")]).toBe(50) + expect(gadgetRow[result.columns.indexOf("avgPrice")]).toBe(75) + expect(gadgetRow[result.columns.indexOf("count")]).toBe(3) + }) + + test("aggregate — $match + $project", async () => { + const result = await connector.execute(cmd({ + command: "aggregate", + database: "testdb", + collection: "products", + pipeline: [ + { $match: { price: { $gte: 50 } } }, + { $project: { name: 1, price: 1, _id: 0 } }, + { $sort: { price: 1 } }, + ], + })) + expect(result.row_count).toBe(3) + const names = result.rows.map((r) => r[result.columns.indexOf("name")]) + expect(names).toEqual(["Gadget A", "Gadget B", "Gadget C"]) + }) + + test("aggregate — $addFields with computed values", async () => { + const result = await connector.execute(cmd({ + command: "aggregate", + database: "testdb", + collection: "products", + pipeline: [ + { + $addFields: { + totalValue: { $multiply: ["$price", "$quantity"] }, + }, + }, + { $project: { name: 1, totalValue: 1, _id: 0 } }, + { $sort: { totalValue: -1 } }, + ], + })) + expect(result.row_count).toBe(6) + // Gadget A: 50*30=1500, Widget B: 20*50=1000, Widget A: 10*100=1000, Doohickey: 5*200=1000 + const firstRow = result.rows[0] + expect(firstRow[result.columns.indexOf("name")]).toBe("Gadget A") + expect(firstRow[result.columns.indexOf("totalValue")]).toBe(1500) + }) + + test("aggregate — 
$unwind", async () => { + // Insert a document with an array field + await connector.execute(cmd({ + command: "insertOne", + database: "testdb", + collection: "products", + document: { name: "Multi-Tag", category: "tagged", price: 10, quantity: 1, tags: ["a", "b", "c"] }, + })) + const result = await connector.execute(cmd({ + command: "aggregate", + database: "testdb", + collection: "products", + pipeline: [ + { $match: { name: "Multi-Tag" } }, + { $unwind: "$tags" }, + { $project: { tags: 1, _id: 0 } }, + ], + })) + expect(result.row_count).toBe(3) + expect(result.rows.map((r) => r[0])).toEqual(["a", "b", "c"]) + + // Cleanup + await connector.execute(cmd({ + command: "deleteOne", + database: "testdb", + collection: "products", + filter: { name: "Multi-Tag" }, + })) + }) + + test("aggregate — $lookup (join between collections)", async () => { + // Set up orders collection + try { + await connector.execute(cmd({ + command: "dropCollection", + database: "testdb", + collection: "orders", + })) + } catch {} + await connector.execute(cmd({ + command: "insertMany", + database: "testdb", + collection: "orders", + documents: [ + { product_name: "Widget A", quantity: 3, customer: "cust1" }, + { product_name: "Gadget B", quantity: 1, customer: "cust2" }, + ], + })) + + const result = await connector.execute(cmd({ + command: "aggregate", + database: "testdb", + collection: "orders", + pipeline: [ + { + $lookup: { + from: "products", + localField: "product_name", + foreignField: "name", + as: "product_info", + }, + }, + { $project: { product_name: 1, customer: 1, product_info: 1, _id: 0 } }, + { $sort: { product_name: 1 } }, + ], + })) + expect(result.row_count).toBe(2) + // product_info will be JSON-serialized arrays + const firstInfo = JSON.parse(result.rows[0][result.columns.indexOf("product_info")]) + expect(firstInfo).toBeInstanceOf(Array) + expect(firstInfo.length).toBe(1) + expect(firstInfo[0].name).toBe("Gadget B") + }) + + test("aggregate — empty pipeline returns all 
docs", async () => { + const result = await connector.execute(cmd({ + command: "aggregate", + database: "testdb", + collection: "products", + pipeline: [], + })) + expect(result.row_count).toBe(6) + }) + + test("aggregate — $count stage", async () => { + const result = await connector.execute(cmd({ + command: "aggregate", + database: "testdb", + collection: "products", + pipeline: [ + { $match: { category: "gadgets" } }, + { $count: "total" }, + ], + })) + expect(result.row_count).toBe(1) + expect(result.rows[0][result.columns.indexOf("total")]).toBe(3) + }) + + test("aggregate — $bucket", async () => { + const result = await connector.execute(cmd({ + command: "aggregate", + database: "testdb", + collection: "products", + pipeline: [ + { + $bucket: { + groupBy: "$price", + boundaries: [0, 25, 50, 100, 200], + default: "other", + output: { count: { $sum: 1 } }, + }, + }, + ], + })) + expect(result.row_count).toBeGreaterThan(0) + expect(result.columns).toContain("_id") + expect(result.columns).toContain("count") + }) + }) + + // ========================================================================= + // Schema Introspection + // ========================================================================= + + describe("Schema Introspection", () => { + test("listSchemas — returns databases (excludes local/config)", async () => { + const schemas = await connector.listSchemas() + expect(Array.isArray(schemas)).toBe(true) + expect(schemas).toContain("testdb") + // System databases should be filtered + expect(schemas).not.toContain("local") + expect(schemas).not.toContain("config") + }) + + test("listTables — returns collections in a database", async () => { + const tables = await connector.listTables("testdb") + expect(Array.isArray(tables)).toBe(true) + const names = tables.map((t) => t.name) + expect(names).toContain("users") + expect(names).toContain("products") + // All should be collections + for (const t of tables) { + expect(t.type).toBe("collection") + } + }) + + 
test("listTables — sorted alphabetically", async () => { + const tables = await connector.listTables("testdb") + const names = tables.map((t) => t.name) + const sorted = [...names].sort() + expect(names).toEqual(sorted) + }) + + test("listTables — empty database returns empty array", async () => { + const tables = await connector.listTables("nonexistent_db_xyz") + expect(tables).toEqual([]) + }) + + test("describeTable — infers field types from sample", async () => { + const columns = await connector.describeTable("testdb", "users") + expect(columns.length).toBeGreaterThan(0) + + // _id should be first + expect(columns[0].name).toBe("_id") + expect(columns[0].data_type).toBe("objectId") + + // Find name column + const nameCol = columns.find((c) => c.name === "name") + expect(nameCol).toBeDefined() + expect(nameCol!.data_type).toBe("string") + + // Find age column + const ageCol = columns.find((c) => c.name === "age") + expect(ageCol).toBeDefined() + // age could be int32 or double depending on BSON handling + expect(["int32", "double"]).toContain(ageCol!.data_type) + + // Find active column + const activeCol = columns.find((c) => c.name === "active") + expect(activeCol).toBeDefined() + expect(activeCol!.data_type).toBe("bool") + }) + + test("describeTable — empty collection returns empty array", async () => { + await connector.execute(cmd({ + command: "createCollection", + database: "testdb", + name: "empty_coll", + })) + const columns = await connector.describeTable("testdb", "empty_coll") + expect(columns).toEqual([]) + await connector.execute(cmd({ + command: "dropCollection", + database: "testdb", + collection: "empty_coll", + })) + }) + + test("describeTable — mixed-type fields show union type", async () => { + await connector.execute(cmd({ + command: "createCollection", + database: "testdb", + name: "mixed_types", + })) + await connector.execute(cmd({ + command: "insertMany", + database: "testdb", + collection: "mixed_types", + documents: [ + { value: 42 }, + 
{ value: "hello" }, + { value: true }, + ], + })) + const columns = await connector.describeTable("testdb", "mixed_types") + const valueCol = columns.find((c) => c.name === "value") + expect(valueCol).toBeDefined() + // Should show union type since values are mixed + expect(valueCol!.data_type).toContain("|") + + await connector.execute(cmd({ + command: "dropCollection", + database: "testdb", + collection: "mixed_types", + })) + }) + }) + + // ========================================================================= + // Collection Management + // ========================================================================= + + describe("Collection Management", () => { + test("createCollection — creates a new collection", async () => { + const result = await connector.execute(cmd({ + command: "createCollection", + database: "testdb", + name: "temp_coll", + })) + expect(result.rows[0][0]).toBe("ok") + + // Verify it exists + const tables = await connector.listTables("testdb") + expect(tables.map((t) => t.name)).toContain("temp_coll") + }) + + test("dropCollection — drops an existing collection", async () => { + const result = await connector.execute(cmd({ + command: "dropCollection", + database: "testdb", + collection: "temp_coll", + })) + expect(result.rows[0][0]).toBe(true) + + // Verify it's gone + const tables = await connector.listTables("testdb") + expect(tables.map((t) => t.name)).not.toContain("temp_coll") + }) + + test("dropCollection — non-existent collection does not throw", async () => { + // MongoDB 7.0+ returns true even for non-existent collections (no NamespaceNotFound error) + const result = await connector.execute(cmd({ + command: "dropCollection", + database: "testdb", + collection: "does_not_exist_xyz", + })) + expect(result.columns).toEqual(["dropped"]) + expect(result.row_count).toBe(1) + }) + }) + + // ========================================================================= + // Index Operations + // 
========================================================================= + + describe("Index Operations", () => { + test("createIndex — creates an index on a field", async () => { + const result = await connector.execute(cmd({ + command: "createIndex", + database: "testdb", + collection: "users", + keys: { email: 1 }, + options: { unique: true }, + })) + expect(result.columns).toEqual(["indexName"]) + expect(result.rows[0][0]).toBe("email_1") + }) + + test("createIndex — compound index", async () => { + const result = await connector.execute(cmd({ + command: "createIndex", + database: "testdb", + collection: "users", + keys: { name: 1, age: -1 }, + })) + expect(result.rows[0][0]).toBe("name_1_age_-1") + }) + + test("listIndexes — returns all indexes", async () => { + const result = await connector.execute(cmd({ + command: "listIndexes", + database: "testdb", + collection: "users", + })) + expect(result.columns).toEqual(["name", "key", "unique"]) + expect(result.row_count).toBeGreaterThanOrEqual(3) // _id, email_1, name_1_age_-1 + const names = result.rows.map((r) => r[0]) + expect(names).toContain("_id_") + expect(names).toContain("email_1") + expect(names).toContain("name_1_age_-1") + }) + }) + + // ========================================================================= + // Truncation / LIMIT behavior + // ========================================================================= + + describe("LIMIT and Truncation", () => { + beforeAll(async () => { + try { + await connector.execute(cmd({ + command: "dropCollection", + database: "testdb", + collection: "big_coll", + })) + } catch {} + // Insert 50 documents + const docs = Array.from({ length: 50 }, (_, i) => ({ idx: i, data: `row_${i}` })) + await connector.execute(cmd({ + command: "insertMany", + database: "testdb", + collection: "big_coll", + documents: docs, + })) + }) + + afterAll(async () => { + try { + await connector.execute(cmd({ + command: "dropCollection", + database: "testdb", + collection: 
"big_coll", + })) + } catch {} + }) + + test("find — auto-limits to effectiveLimit (default 1000)", async () => { + const result = await connector.execute(cmd({ + command: "find", + database: "testdb", + collection: "big_coll", + })) + // All 50 docs returned (< 1000 default limit) + expect(result.row_count).toBe(50) + expect(result.truncated).toBe(false) + }) + + test("find — query-level limit takes precedence", async () => { + const result = await connector.execute(cmd({ + command: "find", + database: "testdb", + collection: "big_coll", + sort: { idx: 1 }, + limit: 10, + })) + expect(result.row_count).toBe(10) + expect(result.truncated).toBe(true) + }) + + test("find — driver limit parameter works", async () => { + const result = await connector.execute( + cmd({ + command: "find", + database: "testdb", + collection: "big_coll", + sort: { idx: 1 }, + }), + 5, // driver-level limit + ) + expect(result.row_count).toBe(5) + expect(result.truncated).toBe(true) + }) + + test("aggregate — auto-appends $limit when not present", async () => { + const result = await connector.execute( + cmd({ + command: "aggregate", + database: "testdb", + collection: "big_coll", + pipeline: [{ $sort: { idx: 1 } }], + }), + 10, + ) + expect(result.row_count).toBe(10) + expect(result.truncated).toBe(true) + }) + + test("aggregate — preserves explicit $limit in pipeline", async () => { + const result = await connector.execute(cmd({ + command: "aggregate", + database: "testdb", + collection: "big_coll", + pipeline: [ + { $sort: { idx: 1 } }, + { $limit: 3 }, + ], + })) + expect(result.row_count).toBe(3) + expect(result.truncated).toBe(false) // Pipeline has its own limit + }) + + test("distinct — truncates long value lists", async () => { + const result = await connector.execute( + cmd({ + command: "distinct", + database: "testdb", + collection: "big_coll", + field: "idx", + }), + 10, + ) + expect(result.row_count).toBe(10) + expect(result.truncated).toBe(true) + }) + }) + + // 
========================================================================= + // Empty results + // ========================================================================= + + describe("Empty Results", () => { + test("find — no matching documents returns empty", async () => { + const result = await connector.execute(cmd({ + command: "find", + database: "testdb", + collection: "users", + filter: { name: "nonexistent_user_xyz" }, + })) + expect(result.columns).toEqual([]) + expect(result.rows).toEqual([]) + expect(result.row_count).toBe(0) + expect(result.truncated).toBe(false) + }) + + test("aggregate — no results from pipeline", async () => { + const result = await connector.execute(cmd({ + command: "aggregate", + database: "testdb", + collection: "users", + pipeline: [ + { $match: { age: { $gt: 999 } } }, + ], + })) + expect(result.row_count).toBe(0) + }) + + test("deleteMany — filter matches nothing", async () => { + const result = await connector.execute(cmd({ + command: "deleteMany", + database: "testdb", + collection: "users", + filter: { name: "nobody_exists" }, + })) + expect(result.rows[0][0]).toBe(0) + }) + + test("updateOne — filter matches nothing", async () => { + const result = await connector.execute(cmd({ + command: "updateOne", + database: "testdb", + collection: "users", + filter: { name: "nobody_exists" }, + update: { $set: { age: 99 } }, + })) + expect(result.rows[0]).toEqual([0, 0]) + }) + }) + + // ========================================================================= + // Cross-Database Operations + // ========================================================================= + + describe("Cross-Database Operations", () => { + test("query different database than default", async () => { + // Insert into a different database + await connector.execute(cmd({ + command: "createCollection", + database: "otherdb", + name: "items", + })) + await connector.execute(cmd({ + command: "insertOne", + database: "otherdb", + collection: "items", + document: 
{ label: "cross-db-test" }, + })) + + // Query the other database + const result = await connector.execute(cmd({ + command: "find", + database: "otherdb", + collection: "items", + filter: { label: "cross-db-test" }, + projection: { label: 1, _id: 0 }, + })) + expect(result.rows[0][0]).toBe("cross-db-test") + + // Clean up + await connector.execute(cmd({ + command: "dropCollection", + database: "otherdb", + collection: "items", + })) + }) + + test("listSchemas — shows newly created database", async () => { + await connector.execute(cmd({ + command: "createCollection", + database: "brand_new_db", + name: "first_coll", + })) + const schemas = await connector.listSchemas() + expect(schemas).toContain("brand_new_db") + + // Cleanup + await connector.execute(cmd({ + command: "dropCollection", + database: "brand_new_db", + collection: "first_coll", + })) + }) + }) + + // ========================================================================= + // Error Handling + // ========================================================================= + + describe("Error Handling", () => { + test("rejects invalid JSON query", async () => { + await expect( + connector.execute("not valid json {{{"), + ).rejects.toThrow(/Invalid MQL query/) + }) + + test("rejects query without command field", async () => { + await expect( + connector.execute(JSON.stringify({ database: "testdb", collection: "users" })), + ).rejects.toThrow(/must include a 'command' field/) + }) + + test("rejects unsupported command", async () => { + await expect( + connector.execute(cmd({ + command: "fakeCommand" as any, + database: "testdb", + collection: "users", + })), + ).rejects.toThrow(/Unsupported MQL command/) + }) + + test("rejects find without collection", async () => { + await expect( + connector.execute(cmd({ + command: "find", + database: "testdb", + })), + ).rejects.toThrow(/requires a 'collection' field/) + }) + + test("rejects aggregate without pipeline", async () => { + await expect( + 
connector.execute(cmd({ + command: "aggregate", + database: "testdb", + collection: "users", + })), + ).rejects.toThrow(/requires a 'pipeline' array/) + }) + + test("rejects insertOne without document", async () => { + await expect( + connector.execute(cmd({ + command: "insertOne", + database: "testdb", + collection: "users", + })), + ).rejects.toThrow(/requires a 'document' object/) + }) + + test("rejects insertMany without documents", async () => { + await expect( + connector.execute(cmd({ + command: "insertMany", + database: "testdb", + collection: "users", + })), + ).rejects.toThrow(/requires a 'documents' array/) + }) + + test("rejects updateOne without update", async () => { + await expect( + connector.execute(cmd({ + command: "updateOne", + database: "testdb", + collection: "users", + filter: { name: "alice" }, + })), + ).rejects.toThrow(/requires an 'update' object/) + }) + + test("rejects updateMany without update", async () => { + await expect( + connector.execute(cmd({ + command: "updateMany", + database: "testdb", + collection: "users", + })), + ).rejects.toThrow(/requires an 'update' object/) + }) + + test("rejects distinct without field", async () => { + await expect( + connector.execute(cmd({ + command: "distinct", + database: "testdb", + collection: "users", + })), + ).rejects.toThrow(/requires a 'field' string/) + }) + + test("rejects createIndex without keys", async () => { + await expect( + connector.execute(cmd({ + command: "createIndex", + database: "testdb", + collection: "users", + })), + ).rejects.toThrow(/requires a 'keys' object/) + }) + + test("rejects createCollection without name", async () => { + await expect( + connector.execute(cmd({ + command: "createCollection", + database: "testdb", + })), + ).rejects.toThrow(/requires 'name' or 'collection'/) + }) + + test("rejects dropCollection without collection", async () => { + await expect( + connector.execute(cmd({ + command: "dropCollection", + database: "testdb", + })), + 
).rejects.toThrow(/requires 'collection'/) + }) + }) + + // ========================================================================= + // Adversarial Tests + // ========================================================================= + + describe("Adversarial Tests", () => { + test("handles empty document insertion", async () => { + const result = await connector.execute(cmd({ + command: "insertOne", + database: "testdb", + collection: "users", + document: {}, + })) + expect(result.rows[0][0]).toBeDefined() // Still gets an _id + + // Clean up empty doc + await connector.execute(cmd({ + command: "deleteMany", + database: "testdb", + collection: "users", + filter: { name: { $exists: false } }, + })) + }) + + test("handles deeply nested documents (10 levels)", async () => { + let nested: any = { value: "deep" } + for (let i = 0; i < 10; i++) { + nested = { [`level_${i}`]: nested } + } + + await connector.execute(cmd({ + command: "insertOne", + database: "testdb", + collection: "users", + document: { name: "nested_user", deep: nested }, + })) + + const result = await connector.execute(cmd({ + command: "find", + database: "testdb", + collection: "users", + filter: { name: "nested_user" }, + projection: { deep: 1, _id: 0 }, + })) + expect(result.row_count).toBe(1) + // Deep object should be JSON-serialized + const deepVal = result.rows[0][0] + expect(typeof deepVal).toBe("string") // JSON stringified + expect(deepVal).toContain("deep") + + // Clean up + await connector.execute(cmd({ + command: "deleteOne", + database: "testdb", + collection: "users", + filter: { name: "nested_user" }, + })) + }) + + test("handles documents with special characters in field names", async () => { + await connector.execute(cmd({ + command: "insertOne", + database: "testdb", + collection: "users", + document: { + name: "special_fields", + "field with spaces": "ok", + "field.with.dots": "ok", // MongoDB allows this on insert + "field-with-dashes": "ok", + "UPPERCASE_FIELD": "ok", + 
"unicode_フィールド": "ok", + }, + })) + + const result = await connector.execute(cmd({ + command: "find", + database: "testdb", + collection: "users", + filter: { name: "special_fields" }, + })) + expect(result.row_count).toBe(1) + expect(result.columns).toContain("field with spaces") + expect(result.columns).toContain("field-with-dashes") + expect(result.columns).toContain("UPPERCASE_FIELD") + expect(result.columns).toContain("unicode_フィールド") + + await connector.execute(cmd({ + command: "deleteOne", + database: "testdb", + collection: "users", + filter: { name: "special_fields" }, + })) + }) + + test("handles documents with special characters in values", async () => { + await connector.execute(cmd({ + command: "insertOne", + database: "testdb", + collection: "users", + document: { + name: "special_values", + quotes: 'She said "hello"', + backslashes: "path\\to\\file", + newlines: "line1\nline2\nline3", + tabs: "col1\tcol2", + unicode: "emoji 🚀 and CJK 中文", + null_char: "before\x00after", // null byte + html: "", + sql_injection: "'; DROP TABLE users; --", + }, + })) + + const result = await connector.execute(cmd({ + command: "find", + database: "testdb", + collection: "users", + filter: { name: "special_values" }, + projection: { quotes: 1, unicode: 1, html: 1, sql_injection: 1, _id: 0 }, + })) + expect(result.row_count).toBe(1) + expect(result.rows[0][result.columns.indexOf("quotes")]).toBe('She said "hello"') + expect(result.rows[0][result.columns.indexOf("unicode")]).toBe("emoji 🚀 and CJK 中文") + expect(result.rows[0][result.columns.indexOf("html")]).toBe("") + expect(result.rows[0][result.columns.indexOf("sql_injection")]).toBe("'; DROP TABLE users; --") + + await connector.execute(cmd({ + command: "deleteOne", + database: "testdb", + collection: "users", + filter: { name: "special_values" }, + })) + }) + + test("handles very large document (close to 16MB BSON limit)", async () => { + // Create a ~1MB string (well under 16MB limit but still large) + const 
largeString = "x".repeat(1_000_000) + await connector.execute(cmd({ + command: "insertOne", + database: "testdb", + collection: "users", + document: { name: "large_doc", payload: largeString }, + })) + + const result = await connector.execute(cmd({ + command: "find", + database: "testdb", + collection: "users", + filter: { name: "large_doc" }, + projection: { name: 1, _id: 0 }, + })) + expect(result.rows[0][0]).toBe("large_doc") + + await connector.execute(cmd({ + command: "deleteOne", + database: "testdb", + collection: "users", + filter: { name: "large_doc" }, + })) + }) + + test("handles insertMany with empty array", async () => { + // MongoDB driver throws on empty insertMany — driver should propagate the error + await expect( + connector.execute(cmd({ + command: "insertMany", + database: "testdb", + collection: "users", + documents: [], + })), + ).rejects.toThrow() + }) + + test("handles duplicate key error (unique index violation)", async () => { + // email_1 index is unique — inserting duplicate email should fail + const existingEmails = await connector.execute(cmd({ + command: "find", + database: "testdb", + collection: "users", + filter: { email: "alice@example.com" }, + projection: { email: 1, _id: 0 }, + })) + if (existingEmails.row_count > 0) { + await expect( + connector.execute(cmd({ + command: "insertOne", + database: "testdb", + collection: "users", + document: { name: "alice_dup", email: "alice@example.com" }, + })), + ).rejects.toThrow() + } + }) + + test("handles collection names with special characters", async () => { + const weirdName = "coll-with-dashes_and_underscores" + await connector.execute(cmd({ + command: "createCollection", + database: "testdb", + name: weirdName, + })) + await connector.execute(cmd({ + command: "insertOne", + database: "testdb", + collection: weirdName, + document: { test: true }, + })) + const result = await connector.execute(cmd({ + command: "find", + database: "testdb", + collection: weirdName, + })) + 
expect(result.row_count).toBe(1) + await connector.execute(cmd({ + command: "dropCollection", + database: "testdb", + collection: weirdName, + })) + }) + + test("handles heterogeneous documents in same collection", async () => { + await connector.execute(cmd({ + command: "createCollection", + database: "testdb", + name: "hetero", + })) + await connector.execute(cmd({ + command: "insertMany", + database: "testdb", + collection: "hetero", + documents: [ + { type: "person", name: "Alice", age: 30 }, + { type: "company", name: "Acme", employees: 500, founded: 1990 }, + { type: "product", sku: "ABC-123", price: 29.99 }, + ], + })) + + const result = await connector.execute(cmd({ + command: "find", + database: "testdb", + collection: "hetero", + })) + expect(result.row_count).toBe(3) + // Columns should be union of all fields across all documents + expect(result.columns).toContain("type") + expect(result.columns).toContain("name") + expect(result.columns).toContain("age") + expect(result.columns).toContain("employees") + expect(result.columns).toContain("sku") + expect(result.columns).toContain("price") + expect(result.columns).toContain("founded") + + await connector.execute(cmd({ + command: "dropCollection", + database: "testdb", + collection: "hetero", + })) + }) + + test("handles null and undefined values in documents", async () => { + await connector.execute(cmd({ + command: "createCollection", + database: "testdb", + name: "nulls", + })) + await connector.execute(cmd({ + command: "insertMany", + database: "testdb", + collection: "nulls", + documents: [ + { a: 1, b: null, c: "x" }, + { a: null, b: 2, c: null }, + { a: 3, c: "z" }, // b is missing entirely + ], + })) + + const result = await connector.execute(cmd({ + command: "find", + database: "testdb", + collection: "nulls", + sort: { a: 1 }, + })) + expect(result.row_count).toBe(3) + + // describeTable should detect nullable fields + const columns = await connector.describeTable("testdb", "nulls") + const bCol = 
columns.find((c) => c.name === "b") + expect(bCol).toBeDefined() + expect(bCol!.nullable).toBe(true) + + await connector.execute(cmd({ + command: "dropCollection", + database: "testdb", + collection: "nulls", + })) + }) + + test("handles array values in documents", async () => { + await connector.execute(cmd({ + command: "createCollection", + database: "testdb", + name: "arrays", + })) + await connector.execute(cmd({ + command: "insertMany", + database: "testdb", + collection: "arrays", + documents: [ + { name: "a", tags: ["x", "y"] }, + { name: "b", tags: ["y", "z"] }, + { name: "c", tags: [] }, + ], + })) + + const result = await connector.execute(cmd({ + command: "find", + database: "testdb", + collection: "arrays", + projection: { name: 1, tags: 1, _id: 0 }, + sort: { name: 1 }, + })) + expect(result.row_count).toBe(3) + // tags should be JSON-serialized as arrays + expect(result.rows[0][result.columns.indexOf("tags")]).toBe('["x","y"]') + expect(result.rows[2][result.columns.indexOf("tags")]).toBe("[]") + + // Querying into arrays with $elemMatch/$in + const filtered = await connector.execute(cmd({ + command: "find", + database: "testdb", + collection: "arrays", + filter: { tags: "y" }, + projection: { name: 1, _id: 0 }, + sort: { name: 1 }, + })) + expect(filtered.rows.map((r) => r[0])).toEqual(["a", "b"]) + + await connector.execute(cmd({ + command: "dropCollection", + database: "testdb", + collection: "arrays", + })) + }) + + test("handles concurrent operations", async () => { + await connector.execute(cmd({ + command: "createCollection", + database: "testdb", + name: "concurrent", + })) + + // Run 10 inserts concurrently + const inserts = Array.from({ length: 10 }, (_, i) => + connector.execute(cmd({ + command: "insertOne", + database: "testdb", + collection: "concurrent", + document: { idx: i }, + })), + ) + const results = await Promise.all(inserts) + expect(results).toHaveLength(10) + results.forEach((r) => { + expect(r.rows[0][0]).toBeDefined() + }) + 
+ // Verify all inserted + const count = await connector.execute(cmd({ + command: "countDocuments", + database: "testdb", + collection: "concurrent", + })) + expect(count.rows[0][0]).toBe(10) + + // Run concurrent reads + const reads = Array.from({ length: 5 }, () => + connector.execute(cmd({ + command: "find", + database: "testdb", + collection: "concurrent", + })), + ) + const readResults = await Promise.all(reads) + readResults.forEach((r) => { + expect(r.row_count).toBe(10) + }) + + await connector.execute(cmd({ + command: "dropCollection", + database: "testdb", + collection: "concurrent", + })) + }) + + test("handles very long collection and database names", async () => { + // MongoDB allows collection names up to ~120 bytes when combined with db name + const longName = "a".repeat(60) + await connector.execute(cmd({ + command: "createCollection", + database: "testdb", + name: longName, + })) + await connector.execute(cmd({ + command: "insertOne", + database: "testdb", + collection: longName, + document: { ok: true }, + })) + const result = await connector.execute(cmd({ + command: "find", + database: "testdb", + collection: longName, + })) + expect(result.row_count).toBe(1) + await connector.execute(cmd({ + command: "dropCollection", + database: "testdb", + collection: longName, + })) + }) + + test("handles update with $inc, $unset, $push operators", async () => { + await connector.execute(cmd({ + command: "insertOne", + database: "testdb", + collection: "users", + document: { name: "ops_test", count: 0, tags: ["initial"], temp: "will_remove" }, + })) + + // $inc + await connector.execute(cmd({ + command: "updateOne", + database: "testdb", + collection: "users", + filter: { name: "ops_test" }, + update: { $inc: { count: 5 } }, + })) + + // $push + await connector.execute(cmd({ + command: "updateOne", + database: "testdb", + collection: "users", + filter: { name: "ops_test" }, + update: { $push: { tags: "added" } }, + })) + + // $unset + await 
connector.execute(cmd({ + command: "updateOne", + database: "testdb", + collection: "users", + filter: { name: "ops_test" }, + update: { $unset: { temp: "" } }, + })) + + const result = await connector.execute(cmd({ + command: "find", + database: "testdb", + collection: "users", + filter: { name: "ops_test" }, + projection: { count: 1, tags: 1, temp: 1, _id: 0 }, + })) + expect(result.rows[0][result.columns.indexOf("count")]).toBe(5) + + // Clean up + await connector.execute(cmd({ + command: "deleteOne", + database: "testdb", + collection: "users", + filter: { name: "ops_test" }, + })) + }) + + test("handles aggregate with invalid pipeline stage", async () => { + await expect( + connector.execute(cmd({ + command: "aggregate", + database: "testdb", + collection: "users", + pipeline: [{ $invalidStage: {} }], + })), + ).rejects.toThrow() + }) + + test("handles queries on non-existent collection (find returns empty)", async () => { + const result = await connector.execute(cmd({ + command: "find", + database: "testdb", + collection: "totally_nonexistent_collection_xyz", + })) + expect(result.row_count).toBe(0) + }) + + test("handles numeric edge cases in documents", async () => { + await connector.execute(cmd({ + command: "createCollection", + database: "testdb", + name: "numbers", + })) + await connector.execute(cmd({ + command: "insertMany", + database: "testdb", + collection: "numbers", + documents: [ + { label: "zero", val: 0 }, + { label: "negative", val: -42 }, + { label: "float", val: 3.14159 }, + { label: "large", val: 9007199254740991 }, // Number.MAX_SAFE_INTEGER + { label: "tiny", val: 0.000001 }, + ], + })) + + const result = await connector.execute(cmd({ + command: "find", + database: "testdb", + collection: "numbers", + sort: { val: 1 }, + projection: { label: 1, val: 1, _id: 0 }, + })) + expect(result.row_count).toBe(5) + const labels = result.rows.map((r) => r[result.columns.indexOf("label")]) + expect(labels[0]).toBe("negative") // -42 + 
expect(labels[1]).toBe("zero") // 0 + expect(labels[2]).toBe("tiny") // 0.000001 + + await connector.execute(cmd({ + command: "dropCollection", + database: "testdb", + collection: "numbers", + })) + }) + + test("handles boolean edge cases", async () => { + await connector.execute(cmd({ + command: "createCollection", + database: "testdb", + name: "booleans", + })) + await connector.execute(cmd({ + command: "insertMany", + database: "testdb", + collection: "booleans", + documents: [ + { flag: true }, + { flag: false }, + { flag: null }, + { flag: 0 }, // falsy but not boolean + { flag: 1 }, // truthy but not boolean + { flag: "" }, // empty string + ], + })) + + // Filter for exactly boolean true + const result = await connector.execute(cmd({ + command: "find", + database: "testdb", + collection: "booleans", + filter: { flag: true }, + })) + expect(result.row_count).toBe(1) // Only the actual boolean true + + // Filter for exactly boolean false + const falseResult = await connector.execute(cmd({ + command: "find", + database: "testdb", + collection: "booleans", + filter: { flag: false }, + })) + expect(falseResult.row_count).toBe(1) // Only the actual boolean false + + await connector.execute(cmd({ + command: "dropCollection", + database: "testdb", + collection: "booleans", + })) + }) + }) + + // ========================================================================= + // BSON Type Handling + // ========================================================================= + + describe("BSON Type Handling", () => { + test("ObjectId is serialized to string in results", async () => { + const result = await connector.execute(cmd({ + command: "find", + database: "testdb", + collection: "users", + projection: { _id: 1 }, + limit: 1, + })) + expect(result.row_count).toBe(1) + const id = result.rows[0][0] + expect(typeof id).toBe("string") + expect(id).toMatch(/^[0-9a-f]{24}$/) // 24-char hex string + }) + + test("Date values are serialized to ISO strings", async () => { + 
await connector.execute(cmd({ + command: "insertOne", + database: "testdb", + collection: "users", + document: { name: "date_test", created_at: { $date: "2024-01-15T10:30:00Z" } }, + })) + + const result = await connector.execute(cmd({ + command: "find", + database: "testdb", + collection: "users", + filter: { name: "date_test" }, + projection: { created_at: 1, _id: 0 }, + })) + // Date should be an ISO string + const dateVal = result.rows[0][0] + expect(typeof dateVal).toBe("string") + + await connector.execute(cmd({ + command: "deleteOne", + database: "testdb", + collection: "users", + filter: { name: "date_test" }, + })) + }) + + test("nested objects are JSON-serialized", async () => { + await connector.execute(cmd({ + command: "insertOne", + database: "testdb", + collection: "users", + document: { + name: "nested_test", + address: { street: "123 Main St", city: "Springfield", zip: "12345" }, + }, + })) + + const result = await connector.execute(cmd({ + command: "find", + database: "testdb", + collection: "users", + filter: { name: "nested_test" }, + projection: { address: 1, _id: 0 }, + })) + const addr = result.rows[0][0] + expect(typeof addr).toBe("string") + const parsed = JSON.parse(addr) + expect(parsed.street).toBe("123 Main St") + expect(parsed.city).toBe("Springfield") + + await connector.execute(cmd({ + command: "deleteOne", + database: "testdb", + collection: "users", + filter: { name: "nested_test" }, + })) + }) + }) +}) From b5e060cd2c51d9d0cbc354646af4580910061996 Mon Sep 17 00:00:00 2001 From: anandgupta42 Date: Thu, 26 Mar 2026 11:53:43 -0700 Subject: [PATCH 2/5] fix: address multi-model code review findings for MongoDB driver Fixes from 6-model consensus review (Claude, GPT 5.2 Codex, Gemini 3.1 Pro, Kimi K2.5, MiniMax M2.5, GLM-5): - Cap user-specified `limit` against `effectiveLimit` to prevent OOM from unbounded queries (flagged by Gemini) - Add `ping` command for connection testing instead of querying `system.version` which may not be 
accessible (flagged by Gemini, GLM-5) - Add `e.code === 26` fallback for `dropCollection` error handling across MongoDB versions (flagged by GLM-5) - Fix misleading docstring on `extractFields` that claimed dot-notation expansion (flagged by Gemini) Co-Authored-By: Claude Opus 4.6 (1M context) --- packages/drivers/src/mongodb.ts | 18 +++++++++++++----- .../altimate/native/connections/registry.ts | 7 ++----- 2 files changed, 15 insertions(+), 10 deletions(-) diff --git a/packages/drivers/src/mongodb.ts b/packages/drivers/src/mongodb.ts index 3b20c5f965..35e6aba33e 100644 --- a/packages/drivers/src/mongodb.ts +++ b/packages/drivers/src/mongodb.ts @@ -32,6 +32,7 @@ type MqlCommand = | "dropCollection" | "createIndex" | "listIndexes" + | "ping" interface MqlQuery { database?: string @@ -110,8 +111,8 @@ function inferType(value: unknown): string { } /** - * Flatten a document's fields into column entries, handling nested objects - * with dot notation (1 level deep only to keep it manageable). + * Extract field names and their observed types from a set of documents. + * Only inspects top-level fields — nested objects are reported as type "object". */ function extractFields( docs: Record[], @@ -260,6 +261,11 @@ export async function connect(config: ConnectionConfig): Promise { const cmd = parsed.command // Commands that don't need a collection + if (cmd === "ping") { + const result = await db.command({ ping: 1 }) + return { columns: ["ok"], rows: [[result.ok]], row_count: 1, truncated: false } + } + if (cmd === "createCollection") { const name = parsed.name ?? 
parsed.collection if (!name) { @@ -274,7 +280,7 @@ export async function connect(config: ConnectionConfig): Promise { throw new Error("dropCollection requires 'collection'") } const dropped = await db.collection(parsed.collection).drop().catch((e: any) => { - if (e.codeName === "NamespaceNotFound") return false + if (e.codeName === "NamespaceNotFound" || e.code === 26) return false throw e }) return { @@ -297,8 +303,10 @@ export async function connect(config: ConnectionConfig): Promise { if (parsed.projection) cursor = cursor.project(parsed.projection) if (parsed.sort) cursor = cursor.sort(parsed.sort) if (parsed.skip) cursor = cursor.skip(parsed.skip) - // Fetch one extra to detect truncation - const queryLimit = parsed.limit ?? effectiveLimit + // Cap user-specified limit against effectiveLimit to prevent OOM + const queryLimit = parsed.limit + ? Math.min(parsed.limit, effectiveLimit) + : effectiveLimit cursor = cursor.limit(queryLimit + 1) const docs = await cursor.toArray() diff --git a/packages/opencode/src/altimate/native/connections/registry.ts b/packages/opencode/src/altimate/native/connections/registry.ts index eb2d015b03..6efd9410ef 100644 --- a/packages/opencode/src/altimate/native/connections/registry.ts +++ b/packages/opencode/src/altimate/native/connections/registry.ts @@ -362,12 +362,9 @@ export async function test( const config = configs.get(name) const dbType = config?.type?.toLowerCase() if (dbType === "mongodb" || dbType === "mongo") { - // MongoDB doesn't support SQL — use a ping-equivalent MQL command + // MongoDB doesn't support SQL — use the standard ping command await connector.execute(JSON.stringify({ - command: "find", - database: (config?.database as string) || "admin", - collection: "system.version", - limit: 1, + command: "ping", })) } else { await connector.execute("SELECT 1") From 56acab4306b56678000baa9c9ae788385d2ca23e Mon Sep 17 00:00:00 2001 From: anandgupta42 Date: Thu, 26 Mar 2026 12:02:52 -0700 Subject: [PATCH 3/5] fix: address 
CodeRabbit findings and update docs for MongoDB driver CodeRabbit review fixes: - Use `...COMMON_ALIASES` spread in `MONGODB_ALIASES` for consistency with other drivers (normalize.ts) - Scan entire aggregate pipeline for `$limit` instead of only last stage; also skip auto-limit for `$out`/`$merge` write pipelines - Keep `mongodb` at `^6.0.0` (NOT upgrading to v7): v7 drops Node 16/18 support and has many breaking changes; v6.21.0 is actively maintained and supports MongoDB server 3.6-8.0 Additional review fixes: - Include database name in URI when building from individual config fields for correct auth-source resolution (GLM-5) - Add security comment about credential exposure in URI (Kimi K2.5) Documentation updates: - `docs/docs/drivers.md`: add MongoDB to support matrix (11 databases), install section, auth methods, auto-discovery - `docs/docs/configure/warehouses.md`: add MongoDB configuration section with connection string and field-based examples, server compatibility - `docs/docs/data-engineering/tools/warehouse-tools.md`: add MongoDB to Docker discovery and env var detection Co-Authored-By: Claude Opus 4.6 (1M context) --- docs/docs/configure/warehouses.md | 47 ++++++++++++++++++- .../data-engineering/tools/warehouse-tools.md | 3 +- docs/docs/drivers.md | 16 ++++++- packages/drivers/src/mongodb.ts | 16 ++++--- packages/drivers/src/normalize.ts | 3 +- 5 files changed, 73 insertions(+), 12 deletions(-) diff --git a/docs/docs/configure/warehouses.md b/docs/docs/configure/warehouses.md index 22fdcb85aa..f665314488 100644 --- a/docs/docs/configure/warehouses.md +++ b/docs/docs/configure/warehouses.md @@ -1,6 +1,6 @@ # Warehouses -Altimate Code connects to 8 warehouse types. Configure them in `.altimate-code/connections.json` (project-local) or `~/.altimate-code/connections.json` (global). +Altimate Code connects to 9 warehouse types. Configure them in `.altimate-code/connections.json` (project-local) or `~/.altimate-code/connections.json` (global). 
## Configuration @@ -237,6 +237,51 @@ If you're already authenticated via `gcloud`, omit `credentials_path`: | `ssl_cert` | No | Path to client certificate file | | `ssl_key` | No | Path to client key file | +## MongoDB + +```json +{ + "my-mongodb": { + "type": "mongodb", + "host": "localhost", + "port": 27017, + "database": "analytics", + "user": "analyst", + "password": "{env:MONGO_PASSWORD}" + } +} +``` + +| Field | Required | Description | +|-------|----------|-------------| +| `connection_string` | No | Full connection string (alternative to individual fields) | +| `host` | No | Hostname (default: `127.0.0.1`) | +| `port` | No | Port (default: `27017`) | +| `database` | No | Database name | +| `user` | No | Username | +| `password` | No | Password | +| `auth_source` | No | Authentication database (default: `admin`) | +| `replica_set` | No | Replica set name | +| `tls` | No | Enable TLS (default: `false`) | +| `direct_connection` | No | Connect directly to a single host | + +### Using a connection string + +```json +{ + "my-mongodb": { + "type": "mongodb", + "connection_string": "mongodb://analyst:secret@localhost:27017/analytics" + } +} +``` + +!!! note + MongoDB uses MQL (MongoDB Query Language) instead of SQL. Queries are submitted as JSON objects via the `execute` method. Supported commands: `find`, `aggregate`, `countDocuments`, `distinct`, `insertOne`, `insertMany`, `updateOne`, `updateMany`, `deleteOne`, `deleteMany`, `createIndex`, `listIndexes`, `createCollection`, `dropCollection`, `ping`. + +!!! info "Server compatibility" + The MongoDB driver (v6.x) supports MongoDB server versions 3.6 through 8.0, covering all releases from the last 3+ years. 
+ ## SQL Server ```json diff --git a/docs/docs/data-engineering/tools/warehouse-tools.md b/docs/docs/data-engineering/tools/warehouse-tools.md index 2505f0f282..2318cc3e70 100644 --- a/docs/docs/data-engineering/tools/warehouse-tools.md +++ b/docs/docs/data-engineering/tools/warehouse-tools.md @@ -70,6 +70,7 @@ env_bigquery | bigquery | GOOGLE_APPLICATION_CREDENTIALS | Databricks | `DATABRICKS_HOST`, `DATABRICKS_SERVER_HOSTNAME` | | PostgreSQL | `PGHOST`, `PGDATABASE`, `DATABASE_URL` | | MySQL | `MYSQL_HOST`, `MYSQL_DATABASE` | +| MongoDB | `MONGODB_URI`, `MONGO_URL` | | Redshift | `REDSHIFT_HOST` | ### Parameters @@ -164,7 +165,7 @@ Remove an existing warehouse connection. ## warehouse_discover -Discover database containers running in Docker. Detects PostgreSQL, MySQL/MariaDB, and SQL Server containers with their connection details. +Discover database containers running in Docker. Detects PostgreSQL, MySQL/MariaDB, SQL Server, and MongoDB containers with their connection details. ``` > warehouse_discover diff --git a/docs/docs/drivers.md b/docs/docs/drivers.md index 124ee37f70..ae66164c67 100644 --- a/docs/docs/drivers.md +++ b/docs/docs/drivers.md @@ -2,7 +2,7 @@ ## Overview -Altimate Code connects to 10 databases natively via TypeScript drivers. No Python dependency required. Drivers are loaded lazily, so only the driver you need is imported at runtime. +Altimate Code connects to 11 databases natively via TypeScript drivers. No Python dependency required. Drivers are loaded lazily, so only the driver you need is imported at runtime. ## Support Matrix @@ -17,6 +17,7 @@ Altimate Code connects to 10 databases natively via TypeScript drivers. 
No Pytho | Snowflake | `snowflake-sdk` | Password, Key-Pair (unencrypted + encrypted), OAuth | ✅ Live account | 37 E2E tests, key-pair with passphrase support | | BigQuery | `@google-cloud/bigquery` | Service Account, ADC | ✅ Live account | 25 E2E tests, UNNEST/STRUCT/DATE types | | Databricks | `@databricks/sql` | PAT, OAuth | ✅ Live account | 24 E2E tests, Unity Catalog support | +| MongoDB | `mongodb` | Password, Connection String | ✅ Docker | 90 E2E tests, MQL queries, aggregation pipelines | | Oracle | `oracledb` (thin) | Password | ❌ Needs Oracle 12.1+ | Thin mode only, no Instant Client | ## Installation @@ -33,6 +34,9 @@ bun add pg # PostgreSQL + Redshift bun add mysql2 # MySQL bun add mssql # SQL Server +# Document databases +bun add mongodb # MongoDB + # Cloud warehouses bun add snowflake-sdk # Snowflake bun add @google-cloud/bigquery # BigQuery @@ -129,6 +133,14 @@ altimate-dbt init --project-root /path/to/dbt/project --python-path $(which pyth |--------|--------------| | Password | `host`, `port`, `service_name`, `user`, `password` | +### MongoDB +| Method | Config Fields | +|--------|--------------| +| Password | `host`, `port`, `database`, `user`, `password` | +| Connection String | `connection_string: "mongodb://user:pass@host:port/db"` | + +MongoDB supports server versions 3.6 through 8.0. Queries use MQL (MongoDB Query Language) via JSON, not SQL. The driver supports `find`, `aggregate`, CRUD operations, index management, and schema introspection via document sampling. + ### DuckDB | Method | Config Fields | |--------|--------------| @@ -167,7 +179,7 @@ SSH auth types: `"key"` (default) or `"password"` (set `ssh_password`). The CLI auto-discovers connections from: -1. **Docker containers**: detects running PostgreSQL, MySQL, MariaDB, SQL Server, Oracle containers +1. **Docker containers**: detects running PostgreSQL, MySQL, MariaDB, SQL Server, Oracle, MongoDB containers 2. 
**dbt profiles**: parses `~/.dbt/profiles.yml` for all supported adapters 3. **Environment variables**: detects `SNOWFLAKE_ACCOUNT`, `PGHOST`, `MYSQL_HOST`, `MSSQL_HOST`, `ORACLE_HOST`, `DUCKDB_PATH`, `SQLITE_PATH`, etc. diff --git a/packages/drivers/src/mongodb.ts b/packages/drivers/src/mongodb.ts index 35e6aba33e..79b4d257c5 100644 --- a/packages/drivers/src/mongodb.ts +++ b/packages/drivers/src/mongodb.ts @@ -197,6 +197,7 @@ export async function connect(config: ConnectionConfig): Promise { return { async connect() { // Support connection_string or individual fields + // SECURITY: URI may contain credentials — never log it let uri: string if (config.connection_string) { uri = config.connection_string as string @@ -205,11 +206,13 @@ export async function connect(config: ConnectionConfig): Promise { const port = (config.port as number) ?? 27017 const user = config.user as string | undefined const password = config.password as string | undefined + // Include database in URI for correct auth-source resolution + const dbPath = explicitDb ? `/${encodeURIComponent(explicitDb)}` : "" if (user && password) { - uri = `mongodb://${encodeURIComponent(user)}:${encodeURIComponent(password)}@${host}:${port}` + uri = `mongodb://${encodeURIComponent(user)}:${encodeURIComponent(password)}@${host}:${port}${dbPath}` } else { - uri = `mongodb://${host}:${port}` + uri = `mongodb://${host}:${port}${dbPath}` } } @@ -337,11 +340,12 @@ export async function connect(config: ConnectionConfig): Promise { if (!parsed.pipeline || !Array.isArray(parsed.pipeline)) { throw new Error("aggregate requires a 'pipeline' array") } - // Append $limit if the pipeline doesn't already end with one + // Append $limit if the pipeline doesn't already contain one anywhere. + // Also skip for $out/$merge pipelines which write results. 
const pipeline = [...parsed.pipeline] - const lastStage = pipeline[pipeline.length - 1] - const hasLimit = lastStage && "$limit" in lastStage - if (!hasLimit) { + const hasLimit = pipeline.some((stage) => "$limit" in stage) + const hasWrite = pipeline.some((stage) => "$out" in stage || "$merge" in stage) + if (!hasLimit && !hasWrite) { pipeline.push({ $limit: effectiveLimit + 1 }) } diff --git a/packages/drivers/src/normalize.ts b/packages/drivers/src/normalize.ts index e11d04044d..5935f1d5bb 100644 --- a/packages/drivers/src/normalize.ts +++ b/packages/drivers/src/normalize.ts @@ -74,8 +74,7 @@ const ORACLE_ALIASES: AliasMap = { } const MONGODB_ALIASES: AliasMap = { - user: ["username"], - database: ["dbname", "db"], + ...COMMON_ALIASES, connection_string: ["connectionString", "uri", "url"], auth_source: ["authSource"], replica_set: ["replicaSet"], From adaefa4f49165cfd7210bea64fc5f426a9527a57 Mon Sep 17 00:00:00 2001 From: anandgupta42 Date: Thu, 26 Mar 2026 12:06:39 -0700 Subject: [PATCH 4/5] style: format MongoDB driver files with Prettier Co-Authored-By: Claude Opus 4.6 (1M context) --- packages/drivers/src/index.ts | 1 - packages/drivers/src/mongodb.ts | 74 +- .../altimate/native/connections/registry.ts | 48 +- .../test/altimate/drivers-mongodb-e2e.test.ts | 2320 +++++++++-------- 4 files changed, 1343 insertions(+), 1100 deletions(-) diff --git a/packages/drivers/src/index.ts b/packages/drivers/src/index.ts index 6533c2da8a..73a8d7c2c1 100644 --- a/packages/drivers/src/index.ts +++ b/packages/drivers/src/index.ts @@ -1,7 +1,6 @@ // Re-export types export type { Connector, ConnectorResult, SchemaColumn, ConnectionConfig } from "./types" - // Re-export config normalization export { normalizeConfig } from "./normalize" diff --git a/packages/drivers/src/mongodb.ts b/packages/drivers/src/mongodb.ts index 79b4d257c5..4dcdec09cd 100644 --- a/packages/drivers/src/mongodb.ts +++ b/packages/drivers/src/mongodb.ts @@ -114,9 +114,7 @@ function inferType(value: unknown): 
string { * Extract field names and their observed types from a set of documents. * Only inspects top-level fields — nested objects are reported as type "object". */ -function extractFields( - docs: Record[], -): Map> { +function extractFields(docs: Record[]): Map> { const fieldTypes = new Map>() for (const doc of docs) { @@ -136,9 +134,7 @@ export async function connect(config: ConnectionConfig): Promise { mongoModule = await import("mongodb") mongoModule = mongoModule.default || mongoModule } catch { - throw new Error( - "MongoDB driver not installed. Run: npm install mongodb", - ) + throw new Error("MongoDB driver not installed. Run: npm install mongodb") } const MongoClient = mongoModule.MongoClient @@ -167,8 +163,12 @@ export async function connect(config: ConnectionConfig): Promise { return (val as any).toString() } // BSON Decimal128, Long, Int32, Double - if ((val as any)._bsontype === "Decimal128" || (val as any)._bsontype === "Long" || - (val as any)._bsontype === "Int32" || (val as any)._bsontype === "Double") { + if ( + (val as any)._bsontype === "Decimal128" || + (val as any)._bsontype === "Long" || + (val as any)._bsontype === "Int32" || + (val as any)._bsontype === "Double" + ) { return (val as any).toString() } // BSON UUID @@ -241,18 +241,12 @@ export async function connect(config: ConnectionConfig): Promise { await client.connect() }, - async execute( - query: string, - limit?: number, - _binds?: any[], - ): Promise { + async execute(query: string, limit?: number, _binds?: any[]): Promise { let parsed: MqlQuery try { parsed = JSON.parse(query) as MqlQuery } catch (e) { - throw new Error( - `Invalid MQL query — must be valid JSON. Error: ${(e as Error).message}`, - ) + throw new Error(`Invalid MQL query — must be valid JSON. 
Error: ${(e as Error).message}`) } if (!parsed.command) { @@ -282,10 +276,13 @@ export async function connect(config: ConnectionConfig): Promise { if (!parsed.collection) { throw new Error("dropCollection requires 'collection'") } - const dropped = await db.collection(parsed.collection).drop().catch((e: any) => { - if (e.codeName === "NamespaceNotFound" || e.code === 26) return false - throw e - }) + const dropped = await db + .collection(parsed.collection) + .drop() + .catch((e: any) => { + if (e.codeName === "NamespaceNotFound" || e.code === 26) return false + throw e + }) return { columns: ["dropped"], rows: [[dropped]], @@ -307,9 +304,7 @@ export async function connect(config: ConnectionConfig): Promise { if (parsed.sort) cursor = cursor.sort(parsed.sort) if (parsed.skip) cursor = cursor.skip(parsed.skip) // Cap user-specified limit against effectiveLimit to prevent OOM - const queryLimit = parsed.limit - ? Math.min(parsed.limit, effectiveLimit) - : effectiveLimit + const queryLimit = parsed.limit ? 
Math.min(parsed.limit, effectiveLimit) : effectiveLimit cursor = cursor.limit(queryLimit + 1) const docs = await cursor.toArray() @@ -329,9 +324,7 @@ export async function connect(config: ConnectionConfig): Promise { } const columns = Array.from(colSet) - const rows = limited.map((doc: any) => - columns.map((col) => serializeValue(doc[col])), - ) + const rows = limited.map((doc: any) => columns.map((col) => serializeValue(doc[col]))) return { columns, rows, row_count: limited.length, truncated } } @@ -366,9 +359,7 @@ export async function connect(config: ConnectionConfig): Promise { } const columns = Array.from(colSet) - const rows = limited.map((doc: any) => - columns.map((col) => serializeValue(doc[col])), - ) + const rows = limited.map((doc: any) => columns.map((col) => serializeValue(doc[col]))) return { columns, rows, row_count: limited.length, truncated } } @@ -489,11 +480,7 @@ export async function connect(config: ConnectionConfig): Promise { return { columns: [], rows: [], row_count: 0, truncated: false } } const columns = ["name", "key", "unique"] - const rows = indexes.map((idx: any) => [ - idx.name, - JSON.stringify(idx.key), - idx.unique ?? false, - ]) + const rows = indexes.map((idx: any) => [idx.name, JSON.stringify(idx.key), idx.unique ?? false]) return { columns, rows, row_count: rows.length, truncated: false } } @@ -517,9 +504,7 @@ export async function connect(config: ConnectionConfig): Promise { } }, - async listTables( - schema: string, - ): Promise> { + async listTables(schema: string): Promise> { const db = client.db(schema) const collections = await db.listCollections().toArray() return collections @@ -527,15 +512,10 @@ export async function connect(config: ConnectionConfig): Promise { name: c.name as string, type: c.type === "view" ? 
"view" : "collection", })) - .sort((a: { name: string }, b: { name: string }) => - a.name.localeCompare(b.name), - ) + .sort((a: { name: string }, b: { name: string }) => a.name.localeCompare(b.name)) }, - async describeTable( - schema: string, - table: string, - ): Promise { + async describeTable(schema: string, table: string): Promise { const db = client.db(schema) const coll = db.collection(table) @@ -561,11 +541,7 @@ export async function connect(config: ConnectionConfig): Promise { const hasNull = typeArr.includes("null") const nonNullTypes = typeArr.filter((t) => t !== "null") const dataType = - nonNullTypes.length === 0 - ? "null" - : nonNullTypes.length === 1 - ? nonNullTypes[0] - : nonNullTypes.join(" | ") + nonNullTypes.length === 0 ? "null" : nonNullTypes.length === 1 ? nonNullTypes[0] : nonNullTypes.join(" | ") // Field is nullable if it has null values OR is missing from some documents const presentIn = fieldPresence.get(name) ?? 0 diff --git a/packages/opencode/src/altimate/native/connections/registry.ts b/packages/opencode/src/altimate/native/connections/registry.ts index 6efd9410ef..5aaafdd640 100644 --- a/packages/opencode/src/altimate/native/connections/registry.ts +++ b/packages/opencode/src/altimate/native/connections/registry.ts @@ -130,15 +130,10 @@ const DRIVER_MAP: Record = { mongo: "@altimateai/drivers/mongodb", } -async function createConnector( - name: string, - config: ConnectionConfig, -): Promise { +async function createConnector(name: string, config: ConnectionConfig): Promise { const driverPath = DRIVER_MAP[config.type.toLowerCase()] if (!driverPath) { - throw new Error( - `Unsupported database type: ${config.type}. Supported: ${Object.keys(DRIVER_MAP).join(", ")}`, - ) + throw new Error(`Unsupported database type: ${config.type}. 
Supported: ${Object.keys(DRIVER_MAP).join(", ")}`) } // Normalize field names first (camelCase → snake_case, dbt → canonical) @@ -221,7 +216,11 @@ export function detectAuthMethod(config: ConnectionConfig | null | undefined): s if (config.connection_string) return "connection_string" if (config.private_key_path || config.privateKeyPath || config.private_key || config.privateKey) return "key_pair" const auth = typeof config.authenticator === "string" ? config.authenticator.toUpperCase() : "" - if (auth === "EXTERNALBROWSER" || (typeof config.authenticator === "string" && /^https?:\/\/.+\.okta\.com/i.test(config.authenticator))) return "sso" + if ( + auth === "EXTERNALBROWSER" || + (typeof config.authenticator === "string" && /^https?:\/\/.+\.okta\.com/i.test(config.authenticator)) + ) + return "sso" if (auth === "OAUTH") return "oauth" if (config.access_token || config.token) return "token" if (config.password) return "password" @@ -234,7 +233,8 @@ export function detectAuthMethod(config: ConnectionConfig | null | undefined): s export function categorizeConnectionError(e: unknown): string { const msg = String(e).toLowerCase() if (msg.includes("not installed") || msg.includes("cannot find module")) return "driver_missing" - if (msg.includes("password") || msg.includes("authentication") || msg.includes("unauthorized") || msg.includes("jwt")) return "auth_failed" + if (msg.includes("password") || msg.includes("authentication") || msg.includes("unauthorized") || msg.includes("jwt")) + return "auth_failed" if (msg.includes("timeout") || msg.includes("timed out")) return "timeout" if (msg.includes("econnrefused") || msg.includes("enotfound") || msg.includes("network")) return "network_error" if (msg.includes("config") || msg.includes("not found") || msg.includes("missing")) return "config_error" @@ -258,9 +258,7 @@ export async function get(name: string): Promise { const config = configs.get(name) if (!config) { - throw new Error( - `Connection "${name}" not found. 
Available: ${Array.from(configs.keys()).join(", ") || "(none)"}`, - ) + throw new Error(`Connection "${name}" not found. Available: ${Array.from(configs.keys()).join(", ") || "(none)"}`) } const startTime = Date.now() @@ -354,18 +352,18 @@ export function list(): { warehouses: WarehouseInfo[] } { } /** Test a connection by running a simple query. */ -export async function test( - name: string, -): Promise<{ connected: boolean; error?: string }> { +export async function test(name: string): Promise<{ connected: boolean; error?: string }> { try { const connector = await get(name) const config = configs.get(name) const dbType = config?.type?.toLowerCase() if (dbType === "mongodb" || dbType === "mongo") { // MongoDB doesn't support SQL — use the standard ping command - await connector.execute(JSON.stringify({ - command: "ping", - })) + await connector.execute( + JSON.stringify({ + command: "ping", + }), + ) } else { await connector.execute("SELECT 1") } @@ -417,7 +415,11 @@ export async function add( connectors.delete(name) } - const result: { success: boolean; name: string; type: string; warnings?: string[] } = { success: true, name, type: config.type } + const result: { success: boolean; name: string; type: string; warnings?: string[] } = { + success: true, + name, + type: config.type, + } if (warnings.length > 0) { result.warnings = warnings } @@ -428,9 +430,7 @@ export async function add( } /** Remove a connection from global config. */ -export async function remove( - name: string, -): Promise<{ success: boolean; error?: string }> { +export async function remove(name: string): Promise<{ success: boolean; error?: string }> { try { ensureLoaded() @@ -496,9 +496,7 @@ export function reset(): void { /** * Set configs directly (for testing without file system). 
*/ -export function setConfigs( - newConfigs: Record, -): void { +export function setConfigs(newConfigs: Record): void { configs.clear() for (const [name, config] of Object.entries(newConfigs)) { configs.set(name, config) diff --git a/packages/opencode/test/altimate/drivers-mongodb-e2e.test.ts b/packages/opencode/test/altimate/drivers-mongodb-e2e.test.ts index de00e7f674..d10e5966a7 100644 --- a/packages/opencode/test/altimate/drivers-mongodb-e2e.test.ts +++ b/packages/opencode/test/altimate/drivers-mongodb-e2e.test.ts @@ -43,10 +43,7 @@ function waitForPort(port: number, timeout = 30000): Promise { }) } -async function waitForMongoReady( - connectFn: () => Promise, - timeout = 60000, -): Promise { +async function waitForMongoReady(connectFn: () => Promise, timeout = 60000): Promise { const start = Date.now() let lastErr: any while (Date.now() - start < timeout) { @@ -54,12 +51,14 @@ async function waitForMongoReady( const connector = await connectFn() await connector.connect() // Verify the connection works with a simple command - await connector.execute(JSON.stringify({ - command: "find", - database: "admin", - collection: "system.version", - limit: 1, - })) + await connector.execute( + JSON.stringify({ + command: "find", + database: "admin", + collection: "system.version", + limit: 1, + }), + ) return connector } catch (e: any) { lastErr = e @@ -99,11 +98,7 @@ describe.skipIf(!DOCKER && !MONGODB_USE_CI)("MongoDB Driver E2E", () => { beforeAll(async () => { if (!MONGODB_USE_CI) { dockerRm(MONGODB_CONTAINER) - dockerRun( - `-d --name ${MONGODB_CONTAINER} ` + - `-p ${MONGODB_PORT}:27017 ` + - `mongo:7.0`, - ) + dockerRun(`-d --name ${MONGODB_CONTAINER} ` + `-p ${MONGODB_PORT}:27017 ` + `mongo:7.0`) } await waitForPort(MONGODB_PORT, 30000) const { connect } = await import("@altimateai/drivers/mongodb") @@ -121,32 +116,40 @@ describe.skipIf(!DOCKER && !MONGODB_USE_CI)("MongoDB Driver E2E", () => { if (connector) { // Clean up test databases try { - await 
connector.execute(cmd({ - command: "dropCollection", - database: "testdb", - collection: "users", - })) + await connector.execute( + cmd({ + command: "dropCollection", + database: "testdb", + collection: "users", + }), + ) } catch {} try { - await connector.execute(cmd({ - command: "dropCollection", - database: "testdb", - collection: "products", - })) + await connector.execute( + cmd({ + command: "dropCollection", + database: "testdb", + collection: "products", + }), + ) } catch {} try { - await connector.execute(cmd({ - command: "dropCollection", - database: "testdb", - collection: "orders", - })) + await connector.execute( + cmd({ + command: "dropCollection", + database: "testdb", + collection: "orders", + }), + ) } catch {} try { - await connector.execute(cmd({ - command: "dropCollection", - database: "adversarial_db", - collection: "weird_names", - })) + await connector.execute( + cmd({ + command: "dropCollection", + database: "adversarial_db", + collection: "weird_names", + }), + ) } catch {} try { await connector.close() @@ -172,12 +175,14 @@ describe.skipIf(!DOCKER && !MONGODB_USE_CI)("MongoDB Driver E2E", () => { database: "testdb", }) await conn.connect() - const result = await conn.execute(cmd({ - command: "find", - database: "admin", - collection: "system.version", - limit: 1, - })) + const result = await conn.execute( + cmd({ + command: "find", + database: "admin", + collection: "system.version", + limit: 1, + }), + ) expect(result).toBeDefined() await conn.close() }) @@ -193,9 +198,7 @@ describe.skipIf(!DOCKER && !MONGODB_USE_CI)("MongoDB Driver E2E", () => { await conn.connect() await conn.close() // After close, operations should fail - await expect( - conn.execute(cmd({ command: "find", database: "testdb", collection: "users" })), - ).rejects.toThrow() + await expect(conn.execute(cmd({ command: "find", database: "testdb", collection: "users" }))).rejects.toThrow() }) }) @@ -207,48 +210,56 @@ describe.skipIf(!DOCKER && !MONGODB_USE_CI)("MongoDB 
Driver E2E", () => { beforeAll(async () => { // Ensure clean state try { - await connector.execute(cmd({ - command: "dropCollection", - database: "testdb", - collection: "users", - })) + await connector.execute( + cmd({ + command: "dropCollection", + database: "testdb", + collection: "users", + }), + ) } catch {} }) test("insertOne — single document", async () => { - const result = await connector.execute(cmd({ - command: "insertOne", - database: "testdb", - collection: "users", - document: { name: "alice", age: 30, email: "alice@example.com", active: true }, - })) + const result = await connector.execute( + cmd({ + command: "insertOne", + database: "testdb", + collection: "users", + document: { name: "alice", age: 30, email: "alice@example.com", active: true }, + }), + ) expect(result.columns).toEqual(["insertedId"]) expect(result.row_count).toBe(1) expect(result.rows[0][0]).toBeDefined() // ObjectId string }) test("insertMany — multiple documents", async () => { - const result = await connector.execute(cmd({ - command: "insertMany", - database: "testdb", - collection: "users", - documents: [ - { name: "bob", age: 25, email: "bob@example.com", active: false }, - { name: "charlie", age: 35, email: "charlie@example.com", active: true }, - { name: "diana", age: 28, email: "diana@example.com", active: true }, - { name: "eve", age: 40, email: "eve@example.com", active: false }, - ], - })) + const result = await connector.execute( + cmd({ + command: "insertMany", + database: "testdb", + collection: "users", + documents: [ + { name: "bob", age: 25, email: "bob@example.com", active: false }, + { name: "charlie", age: 35, email: "charlie@example.com", active: true }, + { name: "diana", age: 28, email: "diana@example.com", active: true }, + { name: "eve", age: 40, email: "eve@example.com", active: false }, + ], + }), + ) expect(result.columns).toEqual(["insertedCount"]) expect(result.rows[0][0]).toBe(4) }) test("find — all documents", async () => { - const result = await 
connector.execute(cmd({ - command: "find", - database: "testdb", - collection: "users", - })) + const result = await connector.execute( + cmd({ + command: "find", + database: "testdb", + collection: "users", + }), + ) expect(result.row_count).toBe(5) expect(result.columns).toContain("name") expect(result.columns).toContain("age") @@ -257,12 +268,14 @@ describe.skipIf(!DOCKER && !MONGODB_USE_CI)("MongoDB Driver E2E", () => { }) test("find — with filter", async () => { - const result = await connector.execute(cmd({ - command: "find", - database: "testdb", - collection: "users", - filter: { active: true }, - })) + const result = await connector.execute( + cmd({ + command: "find", + database: "testdb", + collection: "users", + filter: { active: true }, + }), + ) expect(result.row_count).toBe(3) // alice, charlie, diana const names = result.rows.map((r) => r[result.columns.indexOf("name")]) expect(names).toContain("alice") @@ -271,234 +284,271 @@ describe.skipIf(!DOCKER && !MONGODB_USE_CI)("MongoDB Driver E2E", () => { }) test("find — with projection", async () => { - const result = await connector.execute(cmd({ - command: "find", - database: "testdb", - collection: "users", - filter: { name: "alice" }, - projection: { name: 1, age: 1, _id: 0 }, - })) + const result = await connector.execute( + cmd({ + command: "find", + database: "testdb", + collection: "users", + filter: { name: "alice" }, + projection: { name: 1, age: 1, _id: 0 }, + }), + ) expect(result.row_count).toBe(1) expect(result.columns).toEqual(["name", "age"]) expect(result.rows[0]).toEqual(["alice", 30]) }) test("find — with sort", async () => { - const result = await connector.execute(cmd({ - command: "find", - database: "testdb", - collection: "users", - projection: { name: 1, age: 1, _id: 0 }, - sort: { age: 1 }, - })) + const result = await connector.execute( + cmd({ + command: "find", + database: "testdb", + collection: "users", + projection: { name: 1, age: 1, _id: 0 }, + sort: { age: 1 }, + }), + ) 
const ages = result.rows.map((r) => r[result.columns.indexOf("age")]) expect(ages).toEqual([25, 28, 30, 35, 40]) }) test("find — with sort descending", async () => { - const result = await connector.execute(cmd({ - command: "find", - database: "testdb", - collection: "users", - projection: { name: 1, age: 1, _id: 0 }, - sort: { age: -1 }, - })) + const result = await connector.execute( + cmd({ + command: "find", + database: "testdb", + collection: "users", + projection: { name: 1, age: 1, _id: 0 }, + sort: { age: -1 }, + }), + ) const ages = result.rows.map((r) => r[result.columns.indexOf("age")]) expect(ages).toEqual([40, 35, 30, 28, 25]) }) test("find — with skip and limit", async () => { - const result = await connector.execute(cmd({ - command: "find", - database: "testdb", - collection: "users", - projection: { name: 1, _id: 0 }, - sort: { age: 1 }, - skip: 1, - limit: 2, - })) + const result = await connector.execute( + cmd({ + command: "find", + database: "testdb", + collection: "users", + projection: { name: 1, _id: 0 }, + sort: { age: 1 }, + skip: 1, + limit: 2, + }), + ) expect(result.row_count).toBe(2) const names = result.rows.map((r) => r[0]) expect(names).toEqual(["diana", "alice"]) // age 28, 30 }) test("find — comparison operators ($gt, $lte, $ne)", async () => { - const result = await connector.execute(cmd({ - command: "find", - database: "testdb", - collection: "users", - filter: { age: { $gt: 30, $lte: 40 } }, - projection: { name: 1, _id: 0 }, - sort: { age: 1 }, - })) + const result = await connector.execute( + cmd({ + command: "find", + database: "testdb", + collection: "users", + filter: { age: { $gt: 30, $lte: 40 } }, + projection: { name: 1, _id: 0 }, + sort: { age: 1 }, + }), + ) const names = result.rows.map((r) => r[0]) expect(names).toEqual(["charlie", "eve"]) // 35, 40 }) test("find — logical operators ($or, $and)", async () => { - const result = await connector.execute(cmd({ - command: "find", - database: "testdb", - collection: 
"users", - filter: { - $or: [ - { name: "alice" }, - { age: { $gte: 40 } }, - ], - }, - projection: { name: 1, _id: 0 }, - sort: { name: 1 }, - })) + const result = await connector.execute( + cmd({ + command: "find", + database: "testdb", + collection: "users", + filter: { + $or: [{ name: "alice" }, { age: { $gte: 40 } }], + }, + projection: { name: 1, _id: 0 }, + sort: { name: 1 }, + }), + ) const names = result.rows.map((r) => r[0]) expect(names).toEqual(["alice", "eve"]) }) test("find — regex filter", async () => { - const result = await connector.execute(cmd({ - command: "find", - database: "testdb", - collection: "users", - filter: { name: { $regex: "^[ab]", $options: "i" } }, - projection: { name: 1, _id: 0 }, - sort: { name: 1 }, - })) + const result = await connector.execute( + cmd({ + command: "find", + database: "testdb", + collection: "users", + filter: { name: { $regex: "^[ab]", $options: "i" } }, + projection: { name: 1, _id: 0 }, + sort: { name: 1 }, + }), + ) const names = result.rows.map((r) => r[0]) expect(names).toEqual(["alice", "bob"]) }) test("find — $in operator", async () => { - const result = await connector.execute(cmd({ - command: "find", - database: "testdb", - collection: "users", - filter: { name: { $in: ["alice", "eve"] } }, - projection: { name: 1, _id: 0 }, - sort: { name: 1 }, - })) + const result = await connector.execute( + cmd({ + command: "find", + database: "testdb", + collection: "users", + filter: { name: { $in: ["alice", "eve"] } }, + projection: { name: 1, _id: 0 }, + sort: { name: 1 }, + }), + ) expect(result.rows.map((r) => r[0])).toEqual(["alice", "eve"]) }) test("find — $exists operator", async () => { // All users have "email" field - const result = await connector.execute(cmd({ - command: "find", - database: "testdb", - collection: "users", - filter: { email: { $exists: true } }, - })) + const result = await connector.execute( + cmd({ + command: "find", + database: "testdb", + collection: "users", + filter: { email: { 
$exists: true } }, + }), + ) expect(result.row_count).toBe(5) }) test("updateOne — modifies a single document", async () => { - const result = await connector.execute(cmd({ - command: "updateOne", - database: "testdb", - collection: "users", - filter: { name: "alice" }, - update: { $set: { age: 31, role: "admin" } }, - })) + const result = await connector.execute( + cmd({ + command: "updateOne", + database: "testdb", + collection: "users", + filter: { name: "alice" }, + update: { $set: { age: 31, role: "admin" } }, + }), + ) expect(result.columns).toEqual(["matchedCount", "modifiedCount"]) expect(result.rows[0]).toEqual([1, 1]) // Verify the update - const verify = await connector.execute(cmd({ - command: "find", - database: "testdb", - collection: "users", - filter: { name: "alice" }, - projection: { age: 1, role: 1, _id: 0 }, - })) + const verify = await connector.execute( + cmd({ + command: "find", + database: "testdb", + collection: "users", + filter: { name: "alice" }, + projection: { age: 1, role: 1, _id: 0 }, + }), + ) expect(verify.rows[0]).toEqual([31, "admin"]) }) test("updateMany — modifies multiple documents", async () => { - const result = await connector.execute(cmd({ - command: "updateMany", - database: "testdb", - collection: "users", - filter: { active: false }, - update: { $set: { active: true } }, - })) + const result = await connector.execute( + cmd({ + command: "updateMany", + database: "testdb", + collection: "users", + filter: { active: false }, + update: { $set: { active: true } }, + }), + ) expect(result.rows[0][0]).toBe(2) // bob, eve matched expect(result.rows[0][1]).toBe(2) // both modified }) test("countDocuments — counts with filter", async () => { - const result = await connector.execute(cmd({ - command: "countDocuments", - database: "testdb", - collection: "users", - filter: { active: true }, - })) + const result = await connector.execute( + cmd({ + command: "countDocuments", + database: "testdb", + collection: "users", + filter: { 
active: true }, + }), + ) expect(result.columns).toEqual(["count"]) expect(result.rows[0][0]).toBe(5) // all are now active after updateMany }) test("countDocuments — counts all", async () => { - const result = await connector.execute(cmd({ - command: "countDocuments", - database: "testdb", - collection: "users", - })) + const result = await connector.execute( + cmd({ + command: "countDocuments", + database: "testdb", + collection: "users", + }), + ) expect(result.rows[0][0]).toBe(5) }) test("distinct — returns unique values", async () => { // Reset some users to inactive for distinct test - await connector.execute(cmd({ - command: "updateMany", - database: "testdb", - collection: "users", - filter: { name: { $in: ["bob", "eve"] } }, - update: { $set: { active: false } }, - })) - const result = await connector.execute(cmd({ - command: "distinct", - database: "testdb", - collection: "users", - field: "active", - })) + await connector.execute( + cmd({ + command: "updateMany", + database: "testdb", + collection: "users", + filter: { name: { $in: ["bob", "eve"] } }, + update: { $set: { active: false } }, + }), + ) + const result = await connector.execute( + cmd({ + command: "distinct", + database: "testdb", + collection: "users", + field: "active", + }), + ) expect(result.columns).toEqual(["active"]) const values = result.rows.map((r) => r[0]).sort() expect(values).toEqual([false, true]) }) test("deleteOne — removes a single document", async () => { - const result = await connector.execute(cmd({ - command: "deleteOne", - database: "testdb", - collection: "users", - filter: { name: "eve" }, - })) + const result = await connector.execute( + cmd({ + command: "deleteOne", + database: "testdb", + collection: "users", + filter: { name: "eve" }, + }), + ) expect(result.columns).toEqual(["deletedCount"]) expect(result.rows[0][0]).toBe(1) // Verify deletion - const count = await connector.execute(cmd({ - command: "countDocuments", - database: "testdb", - collection: "users", - 
})) + const count = await connector.execute( + cmd({ + command: "countDocuments", + database: "testdb", + collection: "users", + }), + ) expect(count.rows[0][0]).toBe(4) }) test("deleteMany — removes multiple documents", async () => { // Insert some temp docs to delete - await connector.execute(cmd({ - command: "insertMany", - database: "testdb", - collection: "users", - documents: [ - { name: "temp1", age: 99, active: false }, - { name: "temp2", age: 99, active: false }, - ], - })) - const result = await connector.execute(cmd({ - command: "deleteMany", - database: "testdb", - collection: "users", - filter: { age: 99 }, - })) + await connector.execute( + cmd({ + command: "insertMany", + database: "testdb", + collection: "users", + documents: [ + { name: "temp1", age: 99, active: false }, + { name: "temp2", age: 99, active: false }, + ], + }), + ) + const result = await connector.execute( + cmd({ + command: "deleteMany", + database: "testdb", + collection: "users", + filter: { age: 99 }, + }), + ) expect(result.rows[0][0]).toBe(2) }) }) @@ -511,44 +561,50 @@ describe.skipIf(!DOCKER && !MONGODB_USE_CI)("MongoDB Driver E2E", () => { beforeAll(async () => { // Set up products collection for aggregation tests try { - await connector.execute(cmd({ - command: "dropCollection", + await connector.execute( + cmd({ + command: "dropCollection", + database: "testdb", + collection: "products", + }), + ) + } catch {} + await connector.execute( + cmd({ + command: "insertMany", database: "testdb", collection: "products", - })) - } catch {} - await connector.execute(cmd({ - command: "insertMany", - database: "testdb", - collection: "products", - documents: [ - { name: "Widget A", category: "widgets", price: 10, quantity: 100 }, - { name: "Widget B", category: "widgets", price: 20, quantity: 50 }, - { name: "Gadget A", category: "gadgets", price: 50, quantity: 30 }, - { name: "Gadget B", category: "gadgets", price: 75, quantity: 15 }, - { name: "Gadget C", category: "gadgets", price: 
100, quantity: 5 }, - { name: "Doohickey", category: "misc", price: 5, quantity: 200 }, - ], - })) + documents: [ + { name: "Widget A", category: "widgets", price: 10, quantity: 100 }, + { name: "Widget B", category: "widgets", price: 20, quantity: 50 }, + { name: "Gadget A", category: "gadgets", price: 50, quantity: 30 }, + { name: "Gadget B", category: "gadgets", price: 75, quantity: 15 }, + { name: "Gadget C", category: "gadgets", price: 100, quantity: 5 }, + { name: "Doohickey", category: "misc", price: 5, quantity: 200 }, + ], + }), + ) }) test("aggregate — $group with $sum and $avg", async () => { - const result = await connector.execute(cmd({ - command: "aggregate", - database: "testdb", - collection: "products", - pipeline: [ - { - $group: { - _id: "$category", - totalQuantity: { $sum: "$quantity" }, - avgPrice: { $avg: "$price" }, - count: { $sum: 1 }, + const result = await connector.execute( + cmd({ + command: "aggregate", + database: "testdb", + collection: "products", + pipeline: [ + { + $group: { + _id: "$category", + totalQuantity: { $sum: "$quantity" }, + avgPrice: { $avg: "$price" }, + count: { $sum: 1 }, + }, }, - }, - { $sort: { _id: 1 } }, - ], - })) + { $sort: { _id: 1 } }, + ], + }), + ) expect(result.row_count).toBe(3) const categories = result.rows.map((r) => r[result.columns.indexOf("_id")]) expect(categories).toEqual(["gadgets", "misc", "widgets"]) @@ -561,37 +617,41 @@ describe.skipIf(!DOCKER && !MONGODB_USE_CI)("MongoDB Driver E2E", () => { }) test("aggregate — $match + $project", async () => { - const result = await connector.execute(cmd({ - command: "aggregate", - database: "testdb", - collection: "products", - pipeline: [ - { $match: { price: { $gte: 50 } } }, - { $project: { name: 1, price: 1, _id: 0 } }, - { $sort: { price: 1 } }, - ], - })) + const result = await connector.execute( + cmd({ + command: "aggregate", + database: "testdb", + collection: "products", + pipeline: [ + { $match: { price: { $gte: 50 } } }, + { $project: { 
name: 1, price: 1, _id: 0 } }, + { $sort: { price: 1 } }, + ], + }), + ) expect(result.row_count).toBe(3) const names = result.rows.map((r) => r[result.columns.indexOf("name")]) expect(names).toEqual(["Gadget A", "Gadget B", "Gadget C"]) }) test("aggregate — $addFields with computed values", async () => { - const result = await connector.execute(cmd({ - command: "aggregate", - database: "testdb", - collection: "products", - pipeline: [ - { - $addFields: { - totalValue: { $multiply: ["$price", "$quantity"] }, + const result = await connector.execute( + cmd({ + command: "aggregate", + database: "testdb", + collection: "products", + pipeline: [ + { + $addFields: { + totalValue: { $multiply: ["$price", "$quantity"] }, + }, }, - }, - { $project: { name: 1, totalValue: 1, _id: 0 } }, - { $sort: { totalValue: -1 } }, - ], - })) - expect(result.row_count).toBe(6) + { $project: { name: 1, totalValue: 1, _id: 0 } }, + { $sort: { totalValue: -1 } }, + ], + }), + ) + expect(result.row_count).toBe(6) // Gadget A: 50*30=1500, Widget B: 20*50=1000, Widget A: 10*100=1000, Doohickey: 5*200=1000 const firstRow = result.rows[0] expect(firstRow[result.columns.indexOf("name")]).toBe("Gadget A") @@ -600,70 +660,78 @@ describe.skipIf(!DOCKER && !MONGODB_USE_CI)("MongoDB Driver E2E", () => { test("aggregate — $unwind", async () => { // Insert a document with an array field - await connector.execute(cmd({ - command: "insertOne", - database: "testdb", - collection: "products", - document: { name: "Multi-Tag", category: "tagged", price: 10, quantity: 1, tags: ["a", "b", "c"] }, - })) - const result = await connector.execute(cmd({ - command: "aggregate", - database: "testdb", - collection: "products", - pipeline: [ - { $match: { name: "Multi-Tag" } }, - { $unwind: "$tags" }, - { $project: { tags: 1, _id: 0 } }, - ], - })) + await connector.execute( + cmd({ + command: "insertOne", + database: "testdb", + collection: "products", + document: { name: "Multi-Tag", category: "tagged", price: 10, 
quantity: 1, tags: ["a", "b", "c"] }, + }), + ) + const result = await connector.execute( + cmd({ + command: "aggregate", + database: "testdb", + collection: "products", + pipeline: [{ $match: { name: "Multi-Tag" } }, { $unwind: "$tags" }, { $project: { tags: 1, _id: 0 } }], + }), + ) expect(result.row_count).toBe(3) expect(result.rows.map((r) => r[0])).toEqual(["a", "b", "c"]) // Cleanup - await connector.execute(cmd({ - command: "deleteOne", - database: "testdb", - collection: "products", - filter: { name: "Multi-Tag" }, - })) + await connector.execute( + cmd({ + command: "deleteOne", + database: "testdb", + collection: "products", + filter: { name: "Multi-Tag" }, + }), + ) }) test("aggregate — $lookup (join between collections)", async () => { // Set up orders collection try { - await connector.execute(cmd({ - command: "dropCollection", + await connector.execute( + cmd({ + command: "dropCollection", + database: "testdb", + collection: "orders", + }), + ) + } catch {} + await connector.execute( + cmd({ + command: "insertMany", database: "testdb", collection: "orders", - })) - } catch {} - await connector.execute(cmd({ - command: "insertMany", - database: "testdb", - collection: "orders", - documents: [ - { product_name: "Widget A", quantity: 3, customer: "cust1" }, - { product_name: "Gadget B", quantity: 1, customer: "cust2" }, - ], - })) - - const result = await connector.execute(cmd({ - command: "aggregate", - database: "testdb", - collection: "orders", - pipeline: [ - { - $lookup: { - from: "products", - localField: "product_name", - foreignField: "name", - as: "product_info", + documents: [ + { product_name: "Widget A", quantity: 3, customer: "cust1" }, + { product_name: "Gadget B", quantity: 1, customer: "cust2" }, + ], + }), + ) + + const result = await connector.execute( + cmd({ + command: "aggregate", + database: "testdb", + collection: "orders", + pipeline: [ + { + $lookup: { + from: "products", + localField: "product_name", + foreignField: "name", + as: 
"product_info", + }, }, - }, - { $project: { product_name: 1, customer: 1, product_info: 1, _id: 0 } }, - { $sort: { product_name: 1 } }, - ], - })) + { $project: { product_name: 1, customer: 1, product_info: 1, _id: 0 } }, + { $sort: { product_name: 1 } }, + ], + }), + ) expect(result.row_count).toBe(2) // product_info will be JSON-serialized arrays const firstInfo = JSON.parse(result.rows[0][result.columns.indexOf("product_info")]) @@ -673,45 +741,48 @@ describe.skipIf(!DOCKER && !MONGODB_USE_CI)("MongoDB Driver E2E", () => { }) test("aggregate — empty pipeline returns all docs", async () => { - const result = await connector.execute(cmd({ - command: "aggregate", - database: "testdb", - collection: "products", - pipeline: [], - })) + const result = await connector.execute( + cmd({ + command: "aggregate", + database: "testdb", + collection: "products", + pipeline: [], + }), + ) expect(result.row_count).toBe(6) }) test("aggregate — $count stage", async () => { - const result = await connector.execute(cmd({ - command: "aggregate", - database: "testdb", - collection: "products", - pipeline: [ - { $match: { category: "gadgets" } }, - { $count: "total" }, - ], - })) + const result = await connector.execute( + cmd({ + command: "aggregate", + database: "testdb", + collection: "products", + pipeline: [{ $match: { category: "gadgets" } }, { $count: "total" }], + }), + ) expect(result.row_count).toBe(1) expect(result.rows[0][result.columns.indexOf("total")]).toBe(3) }) test("aggregate — $bucket", async () => { - const result = await connector.execute(cmd({ - command: "aggregate", - database: "testdb", - collection: "products", - pipeline: [ - { - $bucket: { - groupBy: "$price", - boundaries: [0, 25, 50, 100, 200], - default: "other", - output: { count: { $sum: 1 } }, + const result = await connector.execute( + cmd({ + command: "aggregate", + database: "testdb", + collection: "products", + pipeline: [ + { + $bucket: { + groupBy: "$price", + boundaries: [0, 25, 50, 100, 200], 
+ default: "other", + output: { count: { $sum: 1 } }, + }, }, - }, - ], - })) + ], + }), + ) expect(result.row_count).toBeGreaterThan(0) expect(result.columns).toContain("_id") expect(result.columns).toContain("count") @@ -782,47 +853,53 @@ describe.skipIf(!DOCKER && !MONGODB_USE_CI)("MongoDB Driver E2E", () => { }) test("describeTable — empty collection returns empty array", async () => { - await connector.execute(cmd({ - command: "createCollection", - database: "testdb", - name: "empty_coll", - })) + await connector.execute( + cmd({ + command: "createCollection", + database: "testdb", + name: "empty_coll", + }), + ) const columns = await connector.describeTable("testdb", "empty_coll") expect(columns).toEqual([]) - await connector.execute(cmd({ - command: "dropCollection", - database: "testdb", - collection: "empty_coll", - })) + await connector.execute( + cmd({ + command: "dropCollection", + database: "testdb", + collection: "empty_coll", + }), + ) }) test("describeTable — mixed-type fields show union type", async () => { - await connector.execute(cmd({ - command: "createCollection", - database: "testdb", - name: "mixed_types", - })) - await connector.execute(cmd({ - command: "insertMany", - database: "testdb", - collection: "mixed_types", - documents: [ - { value: 42 }, - { value: "hello" }, - { value: true }, - ], - })) + await connector.execute( + cmd({ + command: "createCollection", + database: "testdb", + name: "mixed_types", + }), + ) + await connector.execute( + cmd({ + command: "insertMany", + database: "testdb", + collection: "mixed_types", + documents: [{ value: 42 }, { value: "hello" }, { value: true }], + }), + ) const columns = await connector.describeTable("testdb", "mixed_types") const valueCol = columns.find((c) => c.name === "value") expect(valueCol).toBeDefined() // Should show union type since values are mixed expect(valueCol!.data_type).toContain("|") - await connector.execute(cmd({ - command: "dropCollection", - database: "testdb", - 
collection: "mixed_types", - })) + await connector.execute( + cmd({ + command: "dropCollection", + database: "testdb", + collection: "mixed_types", + }), + ) }) }) @@ -832,11 +909,13 @@ describe.skipIf(!DOCKER && !MONGODB_USE_CI)("MongoDB Driver E2E", () => { describe("Collection Management", () => { test("createCollection — creates a new collection", async () => { - const result = await connector.execute(cmd({ - command: "createCollection", - database: "testdb", - name: "temp_coll", - })) + const result = await connector.execute( + cmd({ + command: "createCollection", + database: "testdb", + name: "temp_coll", + }), + ) expect(result.rows[0][0]).toBe("ok") // Verify it exists @@ -845,11 +924,13 @@ describe.skipIf(!DOCKER && !MONGODB_USE_CI)("MongoDB Driver E2E", () => { }) test("dropCollection — drops an existing collection", async () => { - const result = await connector.execute(cmd({ - command: "dropCollection", - database: "testdb", - collection: "temp_coll", - })) + const result = await connector.execute( + cmd({ + command: "dropCollection", + database: "testdb", + collection: "temp_coll", + }), + ) expect(result.rows[0][0]).toBe(true) // Verify it's gone @@ -859,11 +940,13 @@ describe.skipIf(!DOCKER && !MONGODB_USE_CI)("MongoDB Driver E2E", () => { test("dropCollection — non-existent collection does not throw", async () => { // MongoDB 7.0+ returns true even for non-existent collections (no NamespaceNotFound error) - const result = await connector.execute(cmd({ - command: "dropCollection", - database: "testdb", - collection: "does_not_exist_xyz", - })) + const result = await connector.execute( + cmd({ + command: "dropCollection", + database: "testdb", + collection: "does_not_exist_xyz", + }), + ) expect(result.columns).toEqual(["dropped"]) expect(result.row_count).toBe(1) }) @@ -875,33 +958,39 @@ describe.skipIf(!DOCKER && !MONGODB_USE_CI)("MongoDB Driver E2E", () => { describe("Index Operations", () => { test("createIndex — creates an index on a field", 
async () => { - const result = await connector.execute(cmd({ - command: "createIndex", - database: "testdb", - collection: "users", - keys: { email: 1 }, - options: { unique: true }, - })) + const result = await connector.execute( + cmd({ + command: "createIndex", + database: "testdb", + collection: "users", + keys: { email: 1 }, + options: { unique: true }, + }), + ) expect(result.columns).toEqual(["indexName"]) expect(result.rows[0][0]).toBe("email_1") }) test("createIndex — compound index", async () => { - const result = await connector.execute(cmd({ - command: "createIndex", - database: "testdb", - collection: "users", - keys: { name: 1, age: -1 }, - })) + const result = await connector.execute( + cmd({ + command: "createIndex", + database: "testdb", + collection: "users", + keys: { name: 1, age: -1 }, + }), + ) expect(result.rows[0][0]).toBe("name_1_age_-1") }) test("listIndexes — returns all indexes", async () => { - const result = await connector.execute(cmd({ - command: "listIndexes", - database: "testdb", - collection: "users", - })) + const result = await connector.execute( + cmd({ + command: "listIndexes", + database: "testdb", + collection: "users", + }), + ) expect(result.columns).toEqual(["name", "key", "unique"]) expect(result.row_count).toBeGreaterThanOrEqual(3) // _id, email_1, name_1_age_-1 const names = result.rows.map((r) => r[0]) @@ -918,51 +1007,61 @@ describe.skipIf(!DOCKER && !MONGODB_USE_CI)("MongoDB Driver E2E", () => { describe("LIMIT and Truncation", () => { beforeAll(async () => { try { - await connector.execute(cmd({ - command: "dropCollection", - database: "testdb", - collection: "big_coll", - })) + await connector.execute( + cmd({ + command: "dropCollection", + database: "testdb", + collection: "big_coll", + }), + ) } catch {} // Insert 50 documents const docs = Array.from({ length: 50 }, (_, i) => ({ idx: i, data: `row_${i}` })) - await connector.execute(cmd({ - command: "insertMany", - database: "testdb", - collection: "big_coll", 
- documents: docs, - })) + await connector.execute( + cmd({ + command: "insertMany", + database: "testdb", + collection: "big_coll", + documents: docs, + }), + ) }) afterAll(async () => { try { - await connector.execute(cmd({ - command: "dropCollection", - database: "testdb", - collection: "big_coll", - })) + await connector.execute( + cmd({ + command: "dropCollection", + database: "testdb", + collection: "big_coll", + }), + ) } catch {} }) test("find — auto-limits to effectiveLimit (default 1000)", async () => { - const result = await connector.execute(cmd({ - command: "find", - database: "testdb", - collection: "big_coll", - })) + const result = await connector.execute( + cmd({ + command: "find", + database: "testdb", + collection: "big_coll", + }), + ) // All 50 docs returned (< 1000 default limit) expect(result.row_count).toBe(50) expect(result.truncated).toBe(false) }) test("find — query-level limit takes precedence", async () => { - const result = await connector.execute(cmd({ - command: "find", - database: "testdb", - collection: "big_coll", - sort: { idx: 1 }, - limit: 10, - })) + const result = await connector.execute( + cmd({ + command: "find", + database: "testdb", + collection: "big_coll", + sort: { idx: 1 }, + limit: 10, + }), + ) expect(result.row_count).toBe(10) expect(result.truncated).toBe(true) }) @@ -996,15 +1095,14 @@ describe.skipIf(!DOCKER && !MONGODB_USE_CI)("MongoDB Driver E2E", () => { }) test("aggregate — preserves explicit $limit in pipeline", async () => { - const result = await connector.execute(cmd({ - command: "aggregate", - database: "testdb", - collection: "big_coll", - pipeline: [ - { $sort: { idx: 1 } }, - { $limit: 3 }, - ], - })) + const result = await connector.execute( + cmd({ + command: "aggregate", + database: "testdb", + collection: "big_coll", + pipeline: [{ $sort: { idx: 1 } }, { $limit: 3 }], + }), + ) expect(result.row_count).toBe(3) expect(result.truncated).toBe(false) // Pipeline has its own limit }) @@ -1030,12 
+1128,14 @@ describe.skipIf(!DOCKER && !MONGODB_USE_CI)("MongoDB Driver E2E", () => { describe("Empty Results", () => { test("find — no matching documents returns empty", async () => { - const result = await connector.execute(cmd({ - command: "find", - database: "testdb", - collection: "users", - filter: { name: "nonexistent_user_xyz" }, - })) + const result = await connector.execute( + cmd({ + command: "find", + database: "testdb", + collection: "users", + filter: { name: "nonexistent_user_xyz" }, + }), + ) expect(result.columns).toEqual([]) expect(result.rows).toEqual([]) expect(result.row_count).toBe(0) @@ -1043,35 +1143,39 @@ describe.skipIf(!DOCKER && !MONGODB_USE_CI)("MongoDB Driver E2E", () => { }) test("aggregate — no results from pipeline", async () => { - const result = await connector.execute(cmd({ - command: "aggregate", - database: "testdb", - collection: "users", - pipeline: [ - { $match: { age: { $gt: 999 } } }, - ], - })) + const result = await connector.execute( + cmd({ + command: "aggregate", + database: "testdb", + collection: "users", + pipeline: [{ $match: { age: { $gt: 999 } } }], + }), + ) expect(result.row_count).toBe(0) }) test("deleteMany — filter matches nothing", async () => { - const result = await connector.execute(cmd({ - command: "deleteMany", - database: "testdb", - collection: "users", - filter: { name: "nobody_exists" }, - })) + const result = await connector.execute( + cmd({ + command: "deleteMany", + database: "testdb", + collection: "users", + filter: { name: "nobody_exists" }, + }), + ) expect(result.rows[0][0]).toBe(0) }) test("updateOne — filter matches nothing", async () => { - const result = await connector.execute(cmd({ - command: "updateOne", - database: "testdb", - collection: "users", - filter: { name: "nobody_exists" }, - update: { $set: { age: 99 } }, - })) + const result = await connector.execute( + cmd({ + command: "updateOne", + database: "testdb", + collection: "users", + filter: { name: "nobody_exists" }, + 
update: { $set: { age: 99 } }, + }), + ) expect(result.rows[0]).toEqual([0, 0]) }) }) @@ -1083,51 +1187,63 @@ describe.skipIf(!DOCKER && !MONGODB_USE_CI)("MongoDB Driver E2E", () => { describe("Cross-Database Operations", () => { test("query different database than default", async () => { // Insert into a different database - await connector.execute(cmd({ - command: "createCollection", - database: "otherdb", - name: "items", - })) - await connector.execute(cmd({ - command: "insertOne", - database: "otherdb", - collection: "items", - document: { label: "cross-db-test" }, - })) + await connector.execute( + cmd({ + command: "createCollection", + database: "otherdb", + name: "items", + }), + ) + await connector.execute( + cmd({ + command: "insertOne", + database: "otherdb", + collection: "items", + document: { label: "cross-db-test" }, + }), + ) // Query the other database - const result = await connector.execute(cmd({ - command: "find", - database: "otherdb", - collection: "items", - filter: { label: "cross-db-test" }, - projection: { label: 1, _id: 0 }, - })) + const result = await connector.execute( + cmd({ + command: "find", + database: "otherdb", + collection: "items", + filter: { label: "cross-db-test" }, + projection: { label: 1, _id: 0 }, + }), + ) expect(result.rows[0][0]).toBe("cross-db-test") // Clean up - await connector.execute(cmd({ - command: "dropCollection", - database: "otherdb", - collection: "items", - })) + await connector.execute( + cmd({ + command: "dropCollection", + database: "otherdb", + collection: "items", + }), + ) }) test("listSchemas — shows newly created database", async () => { - await connector.execute(cmd({ - command: "createCollection", - database: "brand_new_db", - name: "first_coll", - })) + await connector.execute( + cmd({ + command: "createCollection", + database: "brand_new_db", + name: "first_coll", + }), + ) const schemas = await connector.listSchemas() expect(schemas).toContain("brand_new_db") // Cleanup - await 
connector.execute(cmd({ - command: "dropCollection", - database: "brand_new_db", - collection: "first_coll", - })) + await connector.execute( + cmd({ + command: "dropCollection", + database: "brand_new_db", + collection: "first_coll", + }), + ) }) }) @@ -1137,122 +1253,142 @@ describe.skipIf(!DOCKER && !MONGODB_USE_CI)("MongoDB Driver E2E", () => { describe("Error Handling", () => { test("rejects invalid JSON query", async () => { - await expect( - connector.execute("not valid json {{{"), - ).rejects.toThrow(/Invalid MQL query/) + await expect(connector.execute("not valid json {{{")).rejects.toThrow(/Invalid MQL query/) }) test("rejects query without command field", async () => { - await expect( - connector.execute(JSON.stringify({ database: "testdb", collection: "users" })), - ).rejects.toThrow(/must include a 'command' field/) + await expect(connector.execute(JSON.stringify({ database: "testdb", collection: "users" }))).rejects.toThrow( + /must include a 'command' field/, + ) }) test("rejects unsupported command", async () => { await expect( - connector.execute(cmd({ - command: "fakeCommand" as any, - database: "testdb", - collection: "users", - })), + connector.execute( + cmd({ + command: "fakeCommand" as any, + database: "testdb", + collection: "users", + }), + ), ).rejects.toThrow(/Unsupported MQL command/) }) test("rejects find without collection", async () => { await expect( - connector.execute(cmd({ - command: "find", - database: "testdb", - })), + connector.execute( + cmd({ + command: "find", + database: "testdb", + }), + ), ).rejects.toThrow(/requires a 'collection' field/) }) test("rejects aggregate without pipeline", async () => { await expect( - connector.execute(cmd({ - command: "aggregate", - database: "testdb", - collection: "users", - })), - ).rejects.toThrow(/requires a 'pipeline' array/) + connector.execute( + cmd({ + command: "aggregate", + database: "testdb", + collection: "users", + }), + ), + ).rejects.toThrow(/requires a 'pipeline' array/) 
}) test("rejects insertOne without document", async () => { await expect( - connector.execute(cmd({ - command: "insertOne", - database: "testdb", - collection: "users", - })), + connector.execute( + cmd({ + command: "insertOne", + database: "testdb", + collection: "users", + }), + ), ).rejects.toThrow(/requires a 'document' object/) }) test("rejects insertMany without documents", async () => { await expect( - connector.execute(cmd({ - command: "insertMany", - database: "testdb", - collection: "users", - })), + connector.execute( + cmd({ + command: "insertMany", + database: "testdb", + collection: "users", + }), + ), ).rejects.toThrow(/requires a 'documents' array/) }) test("rejects updateOne without update", async () => { await expect( - connector.execute(cmd({ - command: "updateOne", - database: "testdb", - collection: "users", - filter: { name: "alice" }, - })), + connector.execute( + cmd({ + command: "updateOne", + database: "testdb", + collection: "users", + filter: { name: "alice" }, + }), + ), ).rejects.toThrow(/requires an 'update' object/) }) test("rejects updateMany without update", async () => { await expect( - connector.execute(cmd({ - command: "updateMany", - database: "testdb", - collection: "users", - })), + connector.execute( + cmd({ + command: "updateMany", + database: "testdb", + collection: "users", + }), + ), ).rejects.toThrow(/requires an 'update' object/) }) test("rejects distinct without field", async () => { await expect( - connector.execute(cmd({ - command: "distinct", - database: "testdb", - collection: "users", - })), + connector.execute( + cmd({ + command: "distinct", + database: "testdb", + collection: "users", + }), + ), ).rejects.toThrow(/requires a 'field' string/) }) test("rejects createIndex without keys", async () => { await expect( - connector.execute(cmd({ - command: "createIndex", - database: "testdb", - collection: "users", - })), + connector.execute( + cmd({ + command: "createIndex", + database: "testdb", + collection: 
"users", + }), + ), ).rejects.toThrow(/requires a 'keys' object/) }) test("rejects createCollection without name", async () => { await expect( - connector.execute(cmd({ - command: "createCollection", - database: "testdb", - })), + connector.execute( + cmd({ + command: "createCollection", + database: "testdb", + }), + ), ).rejects.toThrow(/requires 'name' or 'collection'/) }) test("rejects dropCollection without collection", async () => { await expect( - connector.execute(cmd({ - command: "dropCollection", - database: "testdb", - })), + connector.execute( + cmd({ + command: "dropCollection", + database: "testdb", + }), + ), ).rejects.toThrow(/requires 'collection'/) }) }) @@ -1263,21 +1399,25 @@ describe.skipIf(!DOCKER && !MONGODB_USE_CI)("MongoDB Driver E2E", () => { describe("Adversarial Tests", () => { test("handles empty document insertion", async () => { - const result = await connector.execute(cmd({ - command: "insertOne", - database: "testdb", - collection: "users", - document: {}, - })) + const result = await connector.execute( + cmd({ + command: "insertOne", + database: "testdb", + collection: "users", + document: {}, + }), + ) expect(result.rows[0][0]).toBeDefined() // Still gets an _id // Clean up empty doc - await connector.execute(cmd({ - command: "deleteMany", - database: "testdb", - collection: "users", - filter: { name: { $exists: false } }, - })) + await connector.execute( + cmd({ + command: "deleteMany", + database: "testdb", + collection: "users", + filter: { name: { $exists: false } }, + }), + ) }) test("handles deeply nested documents (10 levels)", async () => { @@ -1286,20 +1426,24 @@ describe.skipIf(!DOCKER && !MONGODB_USE_CI)("MongoDB Driver E2E", () => { nested = { [`level_${i}`]: nested } } - await connector.execute(cmd({ - command: "insertOne", - database: "testdb", - collection: "users", - document: { name: "nested_user", deep: nested }, - })) + await connector.execute( + cmd({ + command: "insertOne", + database: "testdb", + collection: 
"users", + document: { name: "nested_user", deep: nested }, + }), + ) - const result = await connector.execute(cmd({ - command: "find", - database: "testdb", - collection: "users", - filter: { name: "nested_user" }, - projection: { deep: 1, _id: 0 }, - })) + const result = await connector.execute( + cmd({ + command: "find", + database: "testdb", + collection: "users", + filter: { name: "nested_user" }, + projection: { deep: 1, _id: 0 }, + }), + ) expect(result.row_count).toBe(1) // Deep object should be JSON-serialized const deepVal = result.rows[0][0] @@ -1307,196 +1451,236 @@ describe.skipIf(!DOCKER && !MONGODB_USE_CI)("MongoDB Driver E2E", () => { expect(deepVal).toContain("deep") // Clean up - await connector.execute(cmd({ - command: "deleteOne", - database: "testdb", - collection: "users", - filter: { name: "nested_user" }, - })) + await connector.execute( + cmd({ + command: "deleteOne", + database: "testdb", + collection: "users", + filter: { name: "nested_user" }, + }), + ) }) test("handles documents with special characters in field names", async () => { - await connector.execute(cmd({ - command: "insertOne", - database: "testdb", - collection: "users", - document: { - name: "special_fields", - "field with spaces": "ok", - "field.with.dots": "ok", // MongoDB allows this on insert - "field-with-dashes": "ok", - "UPPERCASE_FIELD": "ok", - "unicode_フィールド": "ok", - }, - })) - - const result = await connector.execute(cmd({ - command: "find", - database: "testdb", - collection: "users", - filter: { name: "special_fields" }, - })) + await connector.execute( + cmd({ + command: "insertOne", + database: "testdb", + collection: "users", + document: { + name: "special_fields", + "field with spaces": "ok", + "field.with.dots": "ok", // MongoDB allows this on insert + "field-with-dashes": "ok", + UPPERCASE_FIELD: "ok", + unicode_フィールド: "ok", + }, + }), + ) + + const result = await connector.execute( + cmd({ + command: "find", + database: "testdb", + collection: "users", + 
filter: { name: "special_fields" }, + }), + ) expect(result.row_count).toBe(1) expect(result.columns).toContain("field with spaces") expect(result.columns).toContain("field-with-dashes") expect(result.columns).toContain("UPPERCASE_FIELD") expect(result.columns).toContain("unicode_フィールド") - await connector.execute(cmd({ - command: "deleteOne", - database: "testdb", - collection: "users", - filter: { name: "special_fields" }, - })) + await connector.execute( + cmd({ + command: "deleteOne", + database: "testdb", + collection: "users", + filter: { name: "special_fields" }, + }), + ) }) test("handles documents with special characters in values", async () => { - await connector.execute(cmd({ - command: "insertOne", - database: "testdb", - collection: "users", - document: { - name: "special_values", - quotes: 'She said "hello"', - backslashes: "path\\to\\file", - newlines: "line1\nline2\nline3", - tabs: "col1\tcol2", - unicode: "emoji 🚀 and CJK 中文", - null_char: "before\x00after", // null byte - html: "", - sql_injection: "'; DROP TABLE users; --", - }, - })) - - const result = await connector.execute(cmd({ - command: "find", - database: "testdb", - collection: "users", - filter: { name: "special_values" }, - projection: { quotes: 1, unicode: 1, html: 1, sql_injection: 1, _id: 0 }, - })) + await connector.execute( + cmd({ + command: "insertOne", + database: "testdb", + collection: "users", + document: { + name: "special_values", + quotes: 'She said "hello"', + backslashes: "path\\to\\file", + newlines: "line1\nline2\nline3", + tabs: "col1\tcol2", + unicode: "emoji 🚀 and CJK 中文", + null_char: "before\x00after", // null byte + html: "", + sql_injection: "'; DROP TABLE users; --", + }, + }), + ) + + const result = await connector.execute( + cmd({ + command: "find", + database: "testdb", + collection: "users", + filter: { name: "special_values" }, + projection: { quotes: 1, unicode: 1, html: 1, sql_injection: 1, _id: 0 }, + }), + ) expect(result.row_count).toBe(1) 
expect(result.rows[0][result.columns.indexOf("quotes")]).toBe('She said "hello"') expect(result.rows[0][result.columns.indexOf("unicode")]).toBe("emoji 🚀 and CJK 中文") expect(result.rows[0][result.columns.indexOf("html")]).toBe("") expect(result.rows[0][result.columns.indexOf("sql_injection")]).toBe("'; DROP TABLE users; --") - await connector.execute(cmd({ - command: "deleteOne", - database: "testdb", - collection: "users", - filter: { name: "special_values" }, - })) + await connector.execute( + cmd({ + command: "deleteOne", + database: "testdb", + collection: "users", + filter: { name: "special_values" }, + }), + ) }) test("handles very large document (close to 16MB BSON limit)", async () => { // Create a ~1MB string (well under 16MB limit but still large) const largeString = "x".repeat(1_000_000) - await connector.execute(cmd({ - command: "insertOne", - database: "testdb", - collection: "users", - document: { name: "large_doc", payload: largeString }, - })) + await connector.execute( + cmd({ + command: "insertOne", + database: "testdb", + collection: "users", + document: { name: "large_doc", payload: largeString }, + }), + ) - const result = await connector.execute(cmd({ - command: "find", - database: "testdb", - collection: "users", - filter: { name: "large_doc" }, - projection: { name: 1, _id: 0 }, - })) + const result = await connector.execute( + cmd({ + command: "find", + database: "testdb", + collection: "users", + filter: { name: "large_doc" }, + projection: { name: 1, _id: 0 }, + }), + ) expect(result.rows[0][0]).toBe("large_doc") - await connector.execute(cmd({ - command: "deleteOne", - database: "testdb", - collection: "users", - filter: { name: "large_doc" }, - })) + await connector.execute( + cmd({ + command: "deleteOne", + database: "testdb", + collection: "users", + filter: { name: "large_doc" }, + }), + ) }) test("handles insertMany with empty array", async () => { // MongoDB driver throws on empty insertMany — driver should propagate the error 
await expect( - connector.execute(cmd({ - command: "insertMany", - database: "testdb", - collection: "users", - documents: [], - })), + connector.execute( + cmd({ + command: "insertMany", + database: "testdb", + collection: "users", + documents: [], + }), + ), ).rejects.toThrow() }) test("handles duplicate key error (unique index violation)", async () => { // email_1 index is unique — inserting duplicate email should fail - const existingEmails = await connector.execute(cmd({ - command: "find", - database: "testdb", - collection: "users", - filter: { email: "alice@example.com" }, - projection: { email: 1, _id: 0 }, - })) + const existingEmails = await connector.execute( + cmd({ + command: "find", + database: "testdb", + collection: "users", + filter: { email: "alice@example.com" }, + projection: { email: 1, _id: 0 }, + }), + ) if (existingEmails.row_count > 0) { await expect( - connector.execute(cmd({ - command: "insertOne", - database: "testdb", - collection: "users", - document: { name: "alice_dup", email: "alice@example.com" }, - })), + connector.execute( + cmd({ + command: "insertOne", + database: "testdb", + collection: "users", + document: { name: "alice_dup", email: "alice@example.com" }, + }), + ), ).rejects.toThrow() } }) test("handles collection names with special characters", async () => { const weirdName = "coll-with-dashes_and_underscores" - await connector.execute(cmd({ - command: "createCollection", - database: "testdb", - name: weirdName, - })) - await connector.execute(cmd({ - command: "insertOne", - database: "testdb", - collection: weirdName, - document: { test: true }, - })) - const result = await connector.execute(cmd({ - command: "find", - database: "testdb", - collection: weirdName, - })) + await connector.execute( + cmd({ + command: "createCollection", + database: "testdb", + name: weirdName, + }), + ) + await connector.execute( + cmd({ + command: "insertOne", + database: "testdb", + collection: weirdName, + document: { test: true }, + }), + 
) + const result = await connector.execute( + cmd({ + command: "find", + database: "testdb", + collection: weirdName, + }), + ) expect(result.row_count).toBe(1) - await connector.execute(cmd({ - command: "dropCollection", - database: "testdb", - collection: weirdName, - })) + await connector.execute( + cmd({ + command: "dropCollection", + database: "testdb", + collection: weirdName, + }), + ) }) test("handles heterogeneous documents in same collection", async () => { - await connector.execute(cmd({ - command: "createCollection", - database: "testdb", - name: "hetero", - })) - await connector.execute(cmd({ - command: "insertMany", - database: "testdb", - collection: "hetero", - documents: [ - { type: "person", name: "Alice", age: 30 }, - { type: "company", name: "Acme", employees: 500, founded: 1990 }, - { type: "product", sku: "ABC-123", price: 29.99 }, - ], - })) - - const result = await connector.execute(cmd({ - command: "find", - database: "testdb", - collection: "hetero", - })) + await connector.execute( + cmd({ + command: "createCollection", + database: "testdb", + name: "hetero", + }), + ) + await connector.execute( + cmd({ + command: "insertMany", + database: "testdb", + collection: "hetero", + documents: [ + { type: "person", name: "Alice", age: 30 }, + { type: "company", name: "Acme", employees: 500, founded: 1990 }, + { type: "product", sku: "ABC-123", price: 29.99 }, + ], + }), + ) + + const result = await connector.execute( + cmd({ + command: "find", + database: "testdb", + collection: "hetero", + }), + ) expect(result.row_count).toBe(3) // Columns should be union of all fields across all documents expect(result.columns).toContain("type") @@ -1507,36 +1691,44 @@ describe.skipIf(!DOCKER && !MONGODB_USE_CI)("MongoDB Driver E2E", () => { expect(result.columns).toContain("price") expect(result.columns).toContain("founded") - await connector.execute(cmd({ - command: "dropCollection", - database: "testdb", - collection: "hetero", - })) + await 
connector.execute( + cmd({ + command: "dropCollection", + database: "testdb", + collection: "hetero", + }), + ) }) test("handles null and undefined values in documents", async () => { - await connector.execute(cmd({ - command: "createCollection", - database: "testdb", - name: "nulls", - })) - await connector.execute(cmd({ - command: "insertMany", - database: "testdb", - collection: "nulls", - documents: [ - { a: 1, b: null, c: "x" }, - { a: null, b: 2, c: null }, - { a: 3, c: "z" }, // b is missing entirely - ], - })) - - const result = await connector.execute(cmd({ - command: "find", - database: "testdb", - collection: "nulls", - sort: { a: 1 }, - })) + await connector.execute( + cmd({ + command: "createCollection", + database: "testdb", + name: "nulls", + }), + ) + await connector.execute( + cmd({ + command: "insertMany", + database: "testdb", + collection: "nulls", + documents: [ + { a: 1, b: null, c: "x" }, + { a: null, b: 2, c: null }, + { a: 3, c: "z" }, // b is missing entirely + ], + }), + ) + + const result = await connector.execute( + cmd({ + command: "find", + database: "testdb", + collection: "nulls", + sort: { a: 1 }, + }), + ) expect(result.row_count).toBe(3) // describeTable should detect nullable fields @@ -1545,75 +1737,91 @@ describe.skipIf(!DOCKER && !MONGODB_USE_CI)("MongoDB Driver E2E", () => { expect(bCol).toBeDefined() expect(bCol!.nullable).toBe(true) - await connector.execute(cmd({ - command: "dropCollection", - database: "testdb", - collection: "nulls", - })) + await connector.execute( + cmd({ + command: "dropCollection", + database: "testdb", + collection: "nulls", + }), + ) }) test("handles array values in documents", async () => { - await connector.execute(cmd({ - command: "createCollection", - database: "testdb", - name: "arrays", - })) - await connector.execute(cmd({ - command: "insertMany", - database: "testdb", - collection: "arrays", - documents: [ - { name: "a", tags: ["x", "y"] }, - { name: "b", tags: ["y", "z"] }, - { name: "c", 
tags: [] }, - ], - })) - - const result = await connector.execute(cmd({ - command: "find", - database: "testdb", - collection: "arrays", - projection: { name: 1, tags: 1, _id: 0 }, - sort: { name: 1 }, - })) + await connector.execute( + cmd({ + command: "createCollection", + database: "testdb", + name: "arrays", + }), + ) + await connector.execute( + cmd({ + command: "insertMany", + database: "testdb", + collection: "arrays", + documents: [ + { name: "a", tags: ["x", "y"] }, + { name: "b", tags: ["y", "z"] }, + { name: "c", tags: [] }, + ], + }), + ) + + const result = await connector.execute( + cmd({ + command: "find", + database: "testdb", + collection: "arrays", + projection: { name: 1, tags: 1, _id: 0 }, + sort: { name: 1 }, + }), + ) expect(result.row_count).toBe(3) // tags should be JSON-serialized as arrays expect(result.rows[0][result.columns.indexOf("tags")]).toBe('["x","y"]') expect(result.rows[2][result.columns.indexOf("tags")]).toBe("[]") // Querying into arrays with $elemMatch/$in - const filtered = await connector.execute(cmd({ - command: "find", - database: "testdb", - collection: "arrays", - filter: { tags: "y" }, - projection: { name: 1, _id: 0 }, - sort: { name: 1 }, - })) + const filtered = await connector.execute( + cmd({ + command: "find", + database: "testdb", + collection: "arrays", + filter: { tags: "y" }, + projection: { name: 1, _id: 0 }, + sort: { name: 1 }, + }), + ) expect(filtered.rows.map((r) => r[0])).toEqual(["a", "b"]) - await connector.execute(cmd({ - command: "dropCollection", - database: "testdb", - collection: "arrays", - })) + await connector.execute( + cmd({ + command: "dropCollection", + database: "testdb", + collection: "arrays", + }), + ) }) test("handles concurrent operations", async () => { - await connector.execute(cmd({ - command: "createCollection", - database: "testdb", - name: "concurrent", - })) + await connector.execute( + cmd({ + command: "createCollection", + database: "testdb", + name: "concurrent", + }), + ) 
// Run 10 inserts concurrently const inserts = Array.from({ length: 10 }, (_, i) => - connector.execute(cmd({ - command: "insertOne", - database: "testdb", - collection: "concurrent", - document: { idx: i }, - })), + connector.execute( + cmd({ + command: "insertOne", + database: "testdb", + collection: "concurrent", + document: { idx: i }, + }), + ), ) const results = await Promise.all(inserts) expect(results).toHaveLength(10) @@ -1622,215 +1830,263 @@ describe.skipIf(!DOCKER && !MONGODB_USE_CI)("MongoDB Driver E2E", () => { }) // Verify all inserted - const count = await connector.execute(cmd({ - command: "countDocuments", - database: "testdb", - collection: "concurrent", - })) + const count = await connector.execute( + cmd({ + command: "countDocuments", + database: "testdb", + collection: "concurrent", + }), + ) expect(count.rows[0][0]).toBe(10) // Run concurrent reads const reads = Array.from({ length: 5 }, () => - connector.execute(cmd({ - command: "find", - database: "testdb", - collection: "concurrent", - })), + connector.execute( + cmd({ + command: "find", + database: "testdb", + collection: "concurrent", + }), + ), ) const readResults = await Promise.all(reads) readResults.forEach((r) => { expect(r.row_count).toBe(10) }) - await connector.execute(cmd({ - command: "dropCollection", - database: "testdb", - collection: "concurrent", - })) + await connector.execute( + cmd({ + command: "dropCollection", + database: "testdb", + collection: "concurrent", + }), + ) }) test("handles very long collection and database names", async () => { // MongoDB allows collection names up to ~120 bytes when combined with db name const longName = "a".repeat(60) - await connector.execute(cmd({ - command: "createCollection", - database: "testdb", - name: longName, - })) - await connector.execute(cmd({ - command: "insertOne", - database: "testdb", - collection: longName, - document: { ok: true }, - })) - const result = await connector.execute(cmd({ - command: "find", - database: 
"testdb", - collection: longName, - })) + await connector.execute( + cmd({ + command: "createCollection", + database: "testdb", + name: longName, + }), + ) + await connector.execute( + cmd({ + command: "insertOne", + database: "testdb", + collection: longName, + document: { ok: true }, + }), + ) + const result = await connector.execute( + cmd({ + command: "find", + database: "testdb", + collection: longName, + }), + ) expect(result.row_count).toBe(1) - await connector.execute(cmd({ - command: "dropCollection", - database: "testdb", - collection: longName, - })) + await connector.execute( + cmd({ + command: "dropCollection", + database: "testdb", + collection: longName, + }), + ) }) test("handles update with $inc, $unset, $push operators", async () => { - await connector.execute(cmd({ - command: "insertOne", - database: "testdb", - collection: "users", - document: { name: "ops_test", count: 0, tags: ["initial"], temp: "will_remove" }, - })) + await connector.execute( + cmd({ + command: "insertOne", + database: "testdb", + collection: "users", + document: { name: "ops_test", count: 0, tags: ["initial"], temp: "will_remove" }, + }), + ) // $inc - await connector.execute(cmd({ - command: "updateOne", - database: "testdb", - collection: "users", - filter: { name: "ops_test" }, - update: { $inc: { count: 5 } }, - })) + await connector.execute( + cmd({ + command: "updateOne", + database: "testdb", + collection: "users", + filter: { name: "ops_test" }, + update: { $inc: { count: 5 } }, + }), + ) // $push - await connector.execute(cmd({ - command: "updateOne", - database: "testdb", - collection: "users", - filter: { name: "ops_test" }, - update: { $push: { tags: "added" } }, - })) + await connector.execute( + cmd({ + command: "updateOne", + database: "testdb", + collection: "users", + filter: { name: "ops_test" }, + update: { $push: { tags: "added" } }, + }), + ) // $unset - await connector.execute(cmd({ - command: "updateOne", - database: "testdb", - collection: "users", - 
filter: { name: "ops_test" }, - update: { $unset: { temp: "" } }, - })) + await connector.execute( + cmd({ + command: "updateOne", + database: "testdb", + collection: "users", + filter: { name: "ops_test" }, + update: { $unset: { temp: "" } }, + }), + ) - const result = await connector.execute(cmd({ - command: "find", - database: "testdb", - collection: "users", - filter: { name: "ops_test" }, - projection: { count: 1, tags: 1, temp: 1, _id: 0 }, - })) + const result = await connector.execute( + cmd({ + command: "find", + database: "testdb", + collection: "users", + filter: { name: "ops_test" }, + projection: { count: 1, tags: 1, temp: 1, _id: 0 }, + }), + ) expect(result.rows[0][result.columns.indexOf("count")]).toBe(5) // Clean up - await connector.execute(cmd({ - command: "deleteOne", - database: "testdb", - collection: "users", - filter: { name: "ops_test" }, - })) + await connector.execute( + cmd({ + command: "deleteOne", + database: "testdb", + collection: "users", + filter: { name: "ops_test" }, + }), + ) }) test("handles aggregate with invalid pipeline stage", async () => { await expect( - connector.execute(cmd({ - command: "aggregate", - database: "testdb", - collection: "users", - pipeline: [{ $invalidStage: {} }], - })), + connector.execute( + cmd({ + command: "aggregate", + database: "testdb", + collection: "users", + pipeline: [{ $invalidStage: {} }], + }), + ), ).rejects.toThrow() }) test("handles queries on non-existent collection (find returns empty)", async () => { - const result = await connector.execute(cmd({ - command: "find", - database: "testdb", - collection: "totally_nonexistent_collection_xyz", - })) + const result = await connector.execute( + cmd({ + command: "find", + database: "testdb", + collection: "totally_nonexistent_collection_xyz", + }), + ) expect(result.row_count).toBe(0) }) test("handles numeric edge cases in documents", async () => { - await connector.execute(cmd({ - command: "createCollection", - database: "testdb", - name: 
"numbers", - })) - await connector.execute(cmd({ - command: "insertMany", - database: "testdb", - collection: "numbers", - documents: [ - { label: "zero", val: 0 }, - { label: "negative", val: -42 }, - { label: "float", val: 3.14159 }, - { label: "large", val: 9007199254740991 }, // Number.MAX_SAFE_INTEGER - { label: "tiny", val: 0.000001 }, - ], - })) - - const result = await connector.execute(cmd({ - command: "find", - database: "testdb", - collection: "numbers", - sort: { val: 1 }, - projection: { label: 1, val: 1, _id: 0 }, - })) + await connector.execute( + cmd({ + command: "createCollection", + database: "testdb", + name: "numbers", + }), + ) + await connector.execute( + cmd({ + command: "insertMany", + database: "testdb", + collection: "numbers", + documents: [ + { label: "zero", val: 0 }, + { label: "negative", val: -42 }, + { label: "float", val: 3.14159 }, + { label: "large", val: 9007199254740991 }, // Number.MAX_SAFE_INTEGER + { label: "tiny", val: 0.000001 }, + ], + }), + ) + + const result = await connector.execute( + cmd({ + command: "find", + database: "testdb", + collection: "numbers", + sort: { val: 1 }, + projection: { label: 1, val: 1, _id: 0 }, + }), + ) expect(result.row_count).toBe(5) const labels = result.rows.map((r) => r[result.columns.indexOf("label")]) expect(labels[0]).toBe("negative") // -42 expect(labels[1]).toBe("zero") // 0 expect(labels[2]).toBe("tiny") // 0.000001 - await connector.execute(cmd({ - command: "dropCollection", - database: "testdb", - collection: "numbers", - })) + await connector.execute( + cmd({ + command: "dropCollection", + database: "testdb", + collection: "numbers", + }), + ) }) test("handles boolean edge cases", async () => { - await connector.execute(cmd({ - command: "createCollection", - database: "testdb", - name: "booleans", - })) - await connector.execute(cmd({ - command: "insertMany", - database: "testdb", - collection: "booleans", - documents: [ - { flag: true }, - { flag: false }, - { flag: null }, - { 
flag: 0 }, // falsy but not boolean - { flag: 1 }, // truthy but not boolean - { flag: "" }, // empty string - ], - })) + await connector.execute( + cmd({ + command: "createCollection", + database: "testdb", + name: "booleans", + }), + ) + await connector.execute( + cmd({ + command: "insertMany", + database: "testdb", + collection: "booleans", + documents: [ + { flag: true }, + { flag: false }, + { flag: null }, + { flag: 0 }, // falsy but not boolean + { flag: 1 }, // truthy but not boolean + { flag: "" }, // empty string + ], + }), + ) // Filter for exactly boolean true - const result = await connector.execute(cmd({ - command: "find", - database: "testdb", - collection: "booleans", - filter: { flag: true }, - })) + const result = await connector.execute( + cmd({ + command: "find", + database: "testdb", + collection: "booleans", + filter: { flag: true }, + }), + ) expect(result.row_count).toBe(1) // Only the actual boolean true // Filter for exactly boolean false - const falseResult = await connector.execute(cmd({ - command: "find", - database: "testdb", - collection: "booleans", - filter: { flag: false }, - })) + const falseResult = await connector.execute( + cmd({ + command: "find", + database: "testdb", + collection: "booleans", + filter: { flag: false }, + }), + ) expect(falseResult.row_count).toBe(1) // Only the actual boolean false - await connector.execute(cmd({ - command: "dropCollection", - database: "testdb", - collection: "booleans", - })) + await connector.execute( + cmd({ + command: "dropCollection", + database: "testdb", + collection: "booleans", + }), + ) }) }) @@ -1840,13 +2096,15 @@ describe.skipIf(!DOCKER && !MONGODB_USE_CI)("MongoDB Driver E2E", () => { describe("BSON Type Handling", () => { test("ObjectId is serialized to string in results", async () => { - const result = await connector.execute(cmd({ - command: "find", - database: "testdb", - collection: "users", - projection: { _id: 1 }, - limit: 1, - })) + const result = await 
connector.execute( + cmd({ + command: "find", + database: "testdb", + collection: "users", + projection: { _id: 1 }, + limit: 1, + }), + ) expect(result.row_count).toBe(1) const id = result.rows[0][0] expect(typeof id).toBe("string") @@ -1854,62 +2112,74 @@ describe.skipIf(!DOCKER && !MONGODB_USE_CI)("MongoDB Driver E2E", () => { }) test("Date values are serialized to ISO strings", async () => { - await connector.execute(cmd({ - command: "insertOne", - database: "testdb", - collection: "users", - document: { name: "date_test", created_at: { $date: "2024-01-15T10:30:00Z" } }, - })) + await connector.execute( + cmd({ + command: "insertOne", + database: "testdb", + collection: "users", + document: { name: "date_test", created_at: { $date: "2024-01-15T10:30:00Z" } }, + }), + ) - const result = await connector.execute(cmd({ - command: "find", - database: "testdb", - collection: "users", - filter: { name: "date_test" }, - projection: { created_at: 1, _id: 0 }, - })) + const result = await connector.execute( + cmd({ + command: "find", + database: "testdb", + collection: "users", + filter: { name: "date_test" }, + projection: { created_at: 1, _id: 0 }, + }), + ) // Date should be an ISO string const dateVal = result.rows[0][0] expect(typeof dateVal).toBe("string") - await connector.execute(cmd({ - command: "deleteOne", - database: "testdb", - collection: "users", - filter: { name: "date_test" }, - })) + await connector.execute( + cmd({ + command: "deleteOne", + database: "testdb", + collection: "users", + filter: { name: "date_test" }, + }), + ) }) test("nested objects are JSON-serialized", async () => { - await connector.execute(cmd({ - command: "insertOne", - database: "testdb", - collection: "users", - document: { - name: "nested_test", - address: { street: "123 Main St", city: "Springfield", zip: "12345" }, - }, - })) - - const result = await connector.execute(cmd({ - command: "find", - database: "testdb", - collection: "users", - filter: { name: "nested_test" }, - 
projection: { address: 1, _id: 0 }, - })) + await connector.execute( + cmd({ + command: "insertOne", + database: "testdb", + collection: "users", + document: { + name: "nested_test", + address: { street: "123 Main St", city: "Springfield", zip: "12345" }, + }, + }), + ) + + const result = await connector.execute( + cmd({ + command: "find", + database: "testdb", + collection: "users", + filter: { name: "nested_test" }, + projection: { address: 1, _id: 0 }, + }), + ) const addr = result.rows[0][0] expect(typeof addr).toBe("string") const parsed = JSON.parse(addr) expect(parsed.street).toBe("123 Main St") expect(parsed.city).toBe("Springfield") - await connector.execute(cmd({ - command: "deleteOne", - database: "testdb", - collection: "users", - filter: { name: "nested_test" }, - })) + await connector.execute( + cmd({ + command: "deleteOne", + database: "testdb", + collection: "users", + filter: { name: "nested_test" }, + }), + ) }) }) }) From 57ed5760a54877858f367fa90c3014828dba9d0b Mon Sep 17 00:00:00 2001 From: anandgupta42 Date: Thu, 26 Mar 2026 12:36:19 -0700 Subject: [PATCH 5/5] fix: cap aggregate `$limit` and serialize `distinct` values (CodeRabbit) - Cap user-specified `$limit` in aggregate pipelines against `effectiveLimit` to prevent OOM, matching the `find` command behavior - Serialize `distinct` return values through `serializeValue()` for consistent BSON type handling across find/aggregate/distinct Co-Authored-By: Claude Opus 4.6 (1M context) --- packages/drivers/src/mongodb.ts | 21 ++++++++++++++------- 1 file changed, 14 insertions(+), 7 deletions(-) diff --git a/packages/drivers/src/mongodb.ts b/packages/drivers/src/mongodb.ts index 4dcdec09cd..5757cf89f8 100644 --- a/packages/drivers/src/mongodb.ts +++ b/packages/drivers/src/mongodb.ts @@ -333,18 +333,25 @@ export async function connect(config: ConnectionConfig): Promise { if (!parsed.pipeline || !Array.isArray(parsed.pipeline)) { throw new Error("aggregate requires a 'pipeline' array") } - // Append 
$limit if the pipeline doesn't already contain one anywhere. - // Also skip for $out/$merge pipelines which write results. + // Cap or append $limit to prevent OOM. Skip for $out/$merge write pipelines. const pipeline = [...parsed.pipeline] - const hasLimit = pipeline.some((stage) => "$limit" in stage) const hasWrite = pipeline.some((stage) => "$out" in stage || "$merge" in stage) - if (!hasLimit && !hasWrite) { - pipeline.push({ $limit: effectiveLimit + 1 }) + if (!hasWrite) { + const limitIdx = pipeline.findIndex((stage) => "$limit" in stage) + if (limitIdx >= 0) { + // Cap user-specified $limit against effectiveLimit + const userLimit = (pipeline[limitIdx] as any).$limit + if (typeof userLimit === "number" && userLimit > effectiveLimit) { + pipeline[limitIdx] = { $limit: effectiveLimit + 1 } + } + } else { + pipeline.push({ $limit: effectiveLimit + 1 }) + } } const docs = await coll.aggregate(pipeline).toArray() - const truncated = !hasLimit && docs.length > effectiveLimit + const truncated = docs.length > effectiveLimit const limited = truncated ? docs.slice(0, effectiveLimit) : docs if (limited.length === 0) { @@ -383,7 +390,7 @@ export async function connect(config: ConnectionConfig): Promise { const limited = truncated ? values.slice(0, effectiveLimit) : values return { columns: [parsed.field], - rows: limited.map((v: unknown) => [v]), + rows: limited.map((v: unknown) => [serializeValue(v)]), row_count: limited.length, truncated, }