diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index bbdaa840..8aca113c 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -8,6 +8,7 @@ on: permissions: contents: write packages: write + id-token: write jobs: goreleaser: @@ -52,3 +53,31 @@ jobs: dist/ !dist/*.txt retention-days: 30 + + npm-publish: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v6 + + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: 24 + registry-url: https://registry.npmjs.org + + - name: Install dependencies + working-directory: openapi/linter/customrules/types + run: npm ci + + - name: Set version from tag + working-directory: openapi/linter/customrules/types + run: npm version "${GITHUB_REF_NAME#v}" --no-git-tag-version + + - name: Build + working-directory: openapi/linter/customrules/types + run: npm run build + + - name: Publish + working-directory: openapi/linter/customrules/types + run: npm publish --provenance --access public diff --git a/.github/workflows/update-cmd-dependency.yaml b/.github/workflows/update-cmd-dependency.yaml deleted file mode 100644 index dd3b06f7..00000000 --- a/.github/workflows/update-cmd-dependency.yaml +++ /dev/null @@ -1,89 +0,0 @@ -name: Update CMD OpenAPI Dependency - -on: - push: - branches: [main] - # Only run if changes affect the root module (not cmd/openapi itself) - paths-ignore: - - "cmd/openapi/**" - - ".github/workflows/update-cmd-dependency.yaml" - -permissions: - contents: write - pull-requests: write - -jobs: - update-dependency: - name: Update cmd/openapi dependency - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v6 - - - name: Setup Go - uses: actions/setup-go@v6 - with: - go-version-file: "go.mod" - cache: false # Disable caching to ensure fresh dependency resolution - - - name: Update cmd/openapi go.mod - run: | - cd cmd/openapi - - # Update to latest main commit - go get github.com/speakeasy-api/openapi@main - go mod tidy - - - name: Check for changes - id: changes - run: | - if git diff --quiet cmd/openapi/go.mod cmd/openapi/go.sum; then - echo "changed=false" >> $GITHUB_OUTPUT - echo "No changes detected in cmd/openapi/go.mod or go.sum" - else - echo "changed=true" >> $GITHUB_OUTPUT - echo "Changes detected in cmd/openapi/go.mod or go.sum" - - # Get the new version for the PR description - NEW_VERSION=$(grep 'github.com/speakeasy-api/openapi v' cmd/openapi/go.mod | head -1 | awk '{print $2}') - echo "version=${NEW_VERSION}" >> $GITHUB_OUTPUT - echo "Updated to version: ${NEW_VERSION}" - fi - - - name: Create Pull Request - if: steps.changes.outputs.changed == 'true' - uses: peter-evans/create-pull-request@v8 - with: - token: ${{ secrets.GITHUB_TOKEN }} - commit-message: | - chore(cmd): update openapi dependency to latest main - - Updates cmd/openapi/go.mod to use the latest commit from main. - Version: ${{ steps.changes.outputs.version }} - branch: bot/update-cmd-openapi-dependency - delete-branch: true - title: "chore(cmd): update openapi dependency to latest main" - body: | - ## Updates cmd/openapi dependency - - This PR updates the `cmd/openapi/go.mod` file to reference the latest commit from main. 
- - **Updated to:** `${{ steps.changes.outputs.version }}` - - **Changes:** - - Updated `github.com/speakeasy-api/openapi` dependency in `cmd/openapi/go.mod` - - Ran `go mod tidy` to update dependencies - - --- - *This PR was automatically created by the [update-cmd-dependency workflow](.github/workflows/update-cmd-dependency.yaml)* - labels: | - dependencies - automated - - - name: Summary - run: | - if [ "${{ steps.changes.outputs.changed }}" == "true" ]; then - echo "✅ Pull request created to update cmd/openapi dependency" - echo "Version: ${{ steps.changes.outputs.version }}" - else - echo "ℹ️ No changes needed - cmd/openapi dependency already up to date" - fi diff --git a/.github/workflows/update-submodule-dependencies.yaml b/.github/workflows/update-submodule-dependencies.yaml new file mode 100644 index 00000000..775bfbd1 --- /dev/null +++ b/.github/workflows/update-submodule-dependencies.yaml @@ -0,0 +1,115 @@ +name: Update Submodule Dependencies + +on: + push: + branches: [main] + # Only run if changes affect the root module (not submodules themselves) + paths-ignore: + - "cmd/openapi/**" + - "openapi/linter/customrules/**" + - ".github/workflows/update-submodule-dependencies.yaml" + +permissions: + contents: write + pull-requests: write + +jobs: + update-dependencies: + name: Update submodule dependencies + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v6 + + - name: Setup Go + uses: actions/setup-go@v6 + with: + go-version-file: "go.mod" + cache: false # Disable caching to ensure fresh dependency resolution + + - name: Update openapi/linter/customrules go.mod + run: | + cd openapi/linter/customrules + + # Update to latest main commit + go get github.com/speakeasy-api/openapi@main + go mod tidy + + - name: Update cmd/openapi go.mod + run: | + cd cmd/openapi + + # Update to latest main commit (both main module and customrules) + go get github.com/speakeasy-api/openapi@main + go get github.com/speakeasy-api/openapi/openapi/linter/customrules@main + go mod tidy + + - name: Check for changes + id: changes + run: | + CHANGED_FILES="" + + # Check customrules module + if ! git diff --quiet openapi/linter/customrules/go.mod openapi/linter/customrules/go.sum 2>/dev/null; then + CHANGED_FILES="${CHANGED_FILES}customrules " + fi + + # Check cmd/openapi module + if ! git diff --quiet cmd/openapi/go.mod cmd/openapi/go.sum 2>/dev/null; then + CHANGED_FILES="${CHANGED_FILES}cmd " + fi + + if [ -z "$CHANGED_FILES" ]; then + echo "changed=false" >> $GITHUB_OUTPUT + echo "No changes detected" + else + echo "changed=true" >> $GITHUB_OUTPUT + echo "modules=${CHANGED_FILES}" >> $GITHUB_OUTPUT + echo "Changes detected in: ${CHANGED_FILES}" + + # Get the new version for the PR description + NEW_VERSION=$(grep 'github.com/speakeasy-api/openapi v' cmd/openapi/go.mod | head -1 | awk '{print $2}') + echo "version=${NEW_VERSION}" >> $GITHUB_OUTPUT + echo "Updated to version: ${NEW_VERSION}" + fi + + - name: Create Pull Request + if: steps.changes.outputs.changed == 'true' + uses: peter-evans/create-pull-request@v8 + with: + token: ${{ secrets.GITHUB_TOKEN }} + commit-message: | + chore: update submodule dependencies to latest main + + Updates go.mod files in submodules to use the latest commit from main. 
+ Version: ${{ steps.changes.outputs.version }} + Updated modules: ${{ steps.changes.outputs.modules }} + branch: bot/update-submodule-dependencies + delete-branch: true + title: "chore: update submodule dependencies to latest main" + body: | + ## Updates submodule dependencies + + This PR updates the `go.mod` files in submodules to reference the latest commit from main. + + **Updated to:** `${{ steps.changes.outputs.version }}` + **Updated modules:** ${{ steps.changes.outputs.modules }} + + **Changes:** + - Updated `github.com/speakeasy-api/openapi` dependency in submodule go.mod files + - Ran `go mod tidy` to update dependencies + + --- + *This PR was automatically created by the [update-submodule-dependencies workflow](.github/workflows/update-submodule-dependencies.yaml)* + labels: | + dependencies + automated + + - name: Summary + run: | + if [ "${{ steps.changes.outputs.changed }}" == "true" ]; then + echo "✅ Pull request created to update submodule dependencies" + echo "Version: ${{ steps.changes.outputs.version }}" + echo "Modules: ${{ steps.changes.outputs.modules }}" + else + echo "ℹ️ No changes needed - submodule dependencies already up to date" + fi diff --git a/AGENTS.md b/AGENTS.md index 3e961abc..8a40e09c 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -106,6 +106,66 @@ git commit -m "feat: implement prefixEncoding and itemEncoding for OpenAPI 3.2 3. **Searchability**: Easier to search and filter commits 4. **Tool Compatibility**: Works better with automated tools and scripts +## Linter Rules + +This project uses `golangci-lint` with strict rules. Run `mise lint` to check. The most common violations are listed below. **When you encounter a new common lint pattern not documented here, add it to this section so future sessions avoid the same mistakes.** + +### perfsprint — Avoid `fmt.Sprintf` for Simple String Operations + +The `perfsprint` linter flags unnecessary `fmt.Sprintf` calls. Use string concatenation or `strconv` instead. + +#### ❌ Bad + +```go +// Single %s — just use concatenation +msg := fmt.Sprintf("prefix: %s", value) + +// Single %d — use strconv +msg := fmt.Sprintf("%d", count) + +// Writing formatted string to a writer +b.WriteString(fmt.Sprintf("hello %s world %d", name, n)) +``` + +#### ✅ Good + +```go +// String concatenation +msg := "prefix: " + value + +// strconv for numbers +msg := strconv.Itoa(count) + +// fmt.Fprintf writes directly to the writer +fmt.Fprintf(b, "hello %s world %d", name, n) + +// For string-only format with multiple args, concatenation is fine +b.WriteString(indent + "const x = " + varName + ";\n") +``` + +**Rule of thumb:** If `fmt.Sprintf` has a single `%s` or `%d` verb and nothing else complex, replace it with concatenation or `strconv`. If writing to an `io.Writer`/`strings.Builder`, use `fmt.Fprintf` directly instead of `WriteString(fmt.Sprintf(...))`. + +### staticcheck — Common Issues + +- **QF1012**: Use `fmt.Fprintf(w, ...)` instead of `w.WriteString(fmt.Sprintf(...))` — writes directly to the writer without an intermediate string allocation. +- **QF1003**: Use tagged `switch` instead of `if-else` chains on the same variable. +- **S1016**: Use type conversion `TargetType(value)` instead of struct literal when types have identical fields. + +### predeclared — Don't Shadow Built-in Identifiers + +Avoid using `min`, `max`, `new`, `len`, `cap`, `copy`, `delete`, `error`, `any` as variable names. Use descriptive alternatives like `minVal`, `maxVal`. 
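+
+For example, a minimal sketch of the rename (the variable names here are illustrative, not from the codebase):
+
+```go
+// ❌ Bad — `min` and `max` shadow the predeclared identifiers
+min, max := 0, 100
+
+// ✅ Good — descriptive names, no shadowing
+minVal, maxVal := 0, 100
+```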
+ +### testifylint — Test Assertion Best Practices + +- Use `assert.Empty(t, val)` instead of `assert.Equal(t, "", val)` +- Use `assert.True(t, val)` / `assert.False(t, val)` instead of `assert.Equal(t, true/false, val)` +- Use `require.Error(t, err)` instead of `assert.Error(t, err)` for error checks +- Use `assert.Len(t, slice, n)` instead of `assert.Equal(t, n, len(slice))` + +### gocritic — Code Style + +- Convert `if-else if` chains to `switch` statements when comparing the same variable. + ## Testing Follow these testing conventions when writing Go tests in this project. Run newly added or modified test immediately after changes to make sure they work as expected before continuing with more work. diff --git a/README.md b/README.md index b9014161..7ab05223 100644 --- a/README.md +++ b/README.md @@ -72,7 +72,9 @@ The `arazzo` package provides an API for working with Arazzo documents including ### [openapi](./openapi) -The `openapi` package provides an API for working with OpenAPI documents including reading, creating, mutating, walking, validating and upgrading them. Supports OpenAPI 3.0.x, 3.1.x, and 3.2.x specifications. +The `openapi` package provides an API for working with OpenAPI documents including reading, creating, mutating, walking, validating, upgrading, and linting them. Supports OpenAPI 3.0.x, 3.1.x, and 3.2.x specifications. + +The [`openapi/linter`](./openapi/linter) subpackage provides a configurable linter with 60+ built-in rules covering style, security (OWASP), and semantic validation. Custom rules can be written in TypeScript/JavaScript using the [`@speakeasy-api/openapi-linter-types`](https://www.npmjs.com/package/@speakeasy-api/openapi-linter-types) package. ### [swagger](./swagger) @@ -125,6 +127,7 @@ The CLI provides four main command groups: - `explore` - Interactively explore an OpenAPI specification in the terminal - `inline` - Inline all references in an OpenAPI specification - `join` - Join multiple OpenAPI documents into a single document + - `lint` - Lint an OpenAPI specification for style, security, and best practices - `localize` - Localize an OpenAPI specification by copying external references to a target directory - `optimize` - Optimize an OpenAPI specification by deduplicating inline schemas - `sanitize` - Remove unwanted elements from an OpenAPI specification @@ -150,6 +153,12 @@ The CLI provides four main command groups: # Validate an OpenAPI specification openapi spec validate ./spec.yaml +# Lint for style, security, and best practices +openapi spec lint ./spec.yaml + +# Lint with custom configuration +openapi spec lint --config lint.yaml ./spec.yaml + # Bundle external references into components section openapi spec bundle ./spec.yaml ./bundled-spec.yaml diff --git a/arazzo/arazzo.go b/arazzo/arazzo.go index 4e6d9ec4..907f25f0 100644 --- a/arazzo/arazzo.go +++ b/arazzo/arazzo.go @@ -109,11 +109,11 @@ func (a *Arazzo) Validate(ctx context.Context, opts ...validation.Option) []erro arazzoVersion, err := version.Parse(a.Arazzo) if err != nil { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("arazzo.version is invalid %s: %s", a.Arazzo, err.Error()), core, core.Arazzo)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("arazzo.version is invalid `%s`: %w", a.Arazzo, err), core, core.Arazzo)) } if arazzoVersion != nil { if arazzoVersion.GreaterThan(*MaximumSupportedVersion) { - errs = append(errs, 
validation.NewValueError(validation.NewValueValidationError("arazzo.version only Arazzo versions between %s and %s are supported", MinimumSupportedVersion, MaximumSupportedVersion), core, core.Arazzo)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationSupportedVersion, fmt.Errorf("arazzo.version only Arazzo versions between `%s` and `%s` are supported", MinimumSupportedVersion, MaximumSupportedVersion), core, core.Arazzo)) } } @@ -125,7 +125,7 @@ func (a *Arazzo) Validate(ctx context.Context, opts ...validation.Option) []erro errs = append(errs, sourceDescription.Validate(ctx, opts...)...) if _, ok := sourceDescriptionNames[sourceDescription.Name]; ok { - errs = append(errs, validation.NewSliceError(validation.NewValueValidationError("sourceDescription.name %s is not unique", sourceDescription.Name), core, core.SourceDescriptions, i)) + errs = append(errs, validation.NewSliceError(validation.SeverityError, validation.RuleValidationDuplicateKey, fmt.Errorf("sourceDescription.name `%s` is not unique", sourceDescription.Name), core, core.SourceDescriptions, i)) } sourceDescriptionNames[sourceDescription.Name] = true @@ -137,7 +137,7 @@ func (a *Arazzo) Validate(ctx context.Context, opts ...validation.Option) []erro errs = append(errs, workflow.Validate(ctx, opts...)...) if _, ok := workflowIds[workflow.WorkflowID]; ok { - errs = append(errs, validation.NewSliceError(validation.NewValueValidationError("workflow.workflowId %s is not unique", workflow.WorkflowID), core, core.Workflows, i)) + errs = append(errs, validation.NewSliceError(validation.SeverityError, validation.RuleValidationDuplicateKey, fmt.Errorf("workflow.workflowId `%s` is not unique", workflow.WorkflowID), core, core.Workflows, i)) } workflowIds[workflow.WorkflowID] = true diff --git a/arazzo/arazzo_examples_test.go b/arazzo/arazzo_examples_test.go index e2f54e38..254a7c47 100644 --- a/arazzo/arazzo_examples_test.go +++ b/arazzo/arazzo_examples_test.go @@ -190,6 +190,6 @@ func Example_validating() { fmt.Printf("%s\n", err.Error()) } // Output: - // [3:3] info.version is missing - // [13:9] step at least one of operationId, operationPath or workflowId fields must be set + // [3:3] error validation-required-field `info.version` is required + // [13:9] error validation-required-field step at least one of operationId, operationPath or workflowId fields must be set } diff --git a/arazzo/arazzo_test.go b/arazzo/arazzo_test.go index 8c567aaf..506c27fe 100644 --- a/arazzo/arazzo_test.go +++ b/arazzo/arazzo_test.go @@ -300,11 +300,11 @@ sourceDescriptions: column int underlyingError error }{ - {line: 1, column: 1, underlyingError: validation.NewMissingFieldError("arazzo.workflows is missing")}, - {line: 1, column: 9, underlyingError: validation.NewValueValidationError("arazzo.version only Arazzo versions between 1.0.0 and 1.0.1 are supported")}, - {line: 4, column: 3, underlyingError: validation.NewMissingFieldError("info.version is missing")}, - {line: 6, column: 5, underlyingError: validation.NewMissingFieldError("sourceDescription.url is missing")}, - {line: 7, column: 11, underlyingError: validation.NewValueValidationError("sourceDescription.type must be one of [openapi, arazzo]")}, + {line: 1, column: 1, underlyingError: errors.New("`arazzo.workflows` is required")}, + {line: 1, column: 9, underlyingError: errors.New("arazzo.version only Arazzo versions between `1.0.0` and `1.0.1` are supported")}, + {line: 4, column: 3, underlyingError: errors.New("`info.version` is required")}, + 
{line: 6, column: 5, underlyingError: errors.New("`sourceDescription.url` is required")}, + {line: 7, column: 11, underlyingError: errors.New("sourceDescription.type must be one of [`openapi, arazzo`]")}, } require.Len(t, validationErrs, len(expectedErrors), "number of validation errors should match") @@ -546,8 +546,8 @@ var stressTests = []struct { args: args{ location: "https://raw.githubusercontent.com/Redocly/museum-openapi-example/2770b2b2e59832d245c7b0eb0badf6568d7efb53/arazzo/museum-api.arazzo.yaml", validationIgnores: []string{ - "[71:24] invalid jsonpath expression: Error at line 1, column 7: unexpected token when parsing segment", // legit invalid RFC 9535 syntax - "[107:24] invalid jsonpath expression: Error at line 1, column 7: unexpected token when parsing segment", // legit invalid RFC 9535 syntax + "[71:24] error validation-invalid-syntax invalid jsonpath expression: Error at line 1, column 7: unexpected token when parsing segment", // legit invalid RFC 9535 syntax + "[107:24] error validation-invalid-syntax invalid jsonpath expression: Error at line 1, column 7: unexpected token when parsing segment", // legit invalid RFC 9535 syntax }, }, wantTitle: "Redocly Museum API Test Workflow", @@ -564,7 +564,7 @@ var stressTests = []struct { args: args{ location: "https://raw.githubusercontent.com/Redocly/warp-single-sidebar/b78fc09da52d7755e92e1bc8f990edd37421cbde/apis/arazzo.yaml", validationIgnores: []string{ - "[63:24] invalid jsonpath expression: Error at line 1, column 12: unexpected token when parsing segment", // legit invalid RFC 9535 syntax + "[63:24] error validation-invalid-syntax invalid jsonpath expression: Error at line 1, column 12: unexpected token when parsing segment", // legit invalid RFC 9535 syntax }, }, wantTitle: "Warp API", @@ -605,10 +605,10 @@ var stressTests = []struct { args: args{ location: "https://raw.githubusercontent.com/OAI/Arazzo-Specification/23852b8b0d13ab1e3288a57a990611ffed45ab5d/examples/1.0.0/oauth.arazzo.yaml", validationIgnores: []string{ - "[65:24] invalid jsonpath expression: Error at line 1, column 15: unexpected token when parsing segment", // legit invalid RFC 9535 syntax - "[105:24] invalid jsonpath expression: Error at line 1, column 15: unexpected token when parsing segment", // legit invalid RFC 9535 syntax - "[155:24] invalid jsonpath expression: Error at line 1, column 15: unexpected token when parsing segment", // legit invalid RFC 9535 syntax - "[175:24] invalid jsonpath expression: Error at line 1, column 15: unexpected token when parsing segment", // legit invalid RFC 9535 syntax + "[65:24] error validation-invalid-syntax invalid jsonpath expression: Error at line 1, column 15: unexpected token when parsing segment", // legit invalid RFC 9535 syntax + "[105:24] error validation-invalid-syntax invalid jsonpath expression: Error at line 1, column 15: unexpected token when parsing segment", // legit invalid RFC 9535 syntax + "[155:24] error validation-invalid-syntax invalid jsonpath expression: Error at line 1, column 15: unexpected token when parsing segment", // legit invalid RFC 9535 syntax + "[175:24] error validation-invalid-syntax invalid jsonpath expression: Error at line 1, column 15: unexpected token when parsing segment", // legit invalid RFC 9535 syntax }, }, wantTitle: "Example OAuth service", @@ -632,7 +632,7 @@ var stressTests = []struct { args: args{ location: "https://raw.githubusercontent.com/frankkilcommins/simple-spectral-arazzo-GA/4ec8856f1cf21c0f77597c715c150ef3e2772a89/apis/OnlineStore.arazzo.yaml", 
validationIgnores: []string{ - "info.title is missing", // legit issue + "`info.title` is required", // legit issue "operationId must be a valid expression if there are multiple OpenAPI source descriptions", // legit issue "$responses.body.menuItems[0].subcategories[0].id", // legit issue }, @@ -645,9 +645,9 @@ var stressTests = []struct { args: args{ location: "https://raw.githubusercontent.com/leidenheit/itarazzo-library/3b335e1c4293444add52b5f2476420e2d871b1a5/src/test/resources/test.arazzo.yaml", validationIgnores: []string{ - "expression is not valid, must begin with $: 4711Chocolate", // legit issue - "[32:24] invalid jsonpath expression: Error at line 1, column 0: unexpected token", // unsupported version: draft-goessner-dispatch-jsonpath-00 - "[36:24] invalid jsonpath expression: Error at line 1, column 5: unexpected token when parsing segment", // unsupported version: draft-goessner-dispatch-jsonpath-00 + "expression is not valid, must begin with $: 4711Chocolate", // legit issue + "[32:24] error validation-invalid-syntax invalid jsonpath expression: Error at line 1, column 0: unexpected token", // unsupported version: draft-goessner-dispatch-jsonpath-00 + "[36:24] error validation-invalid-syntax invalid jsonpath expression: Error at line 1, column 5: unexpected token when parsing segment", // unsupported version: draft-goessner-dispatch-jsonpath-00 }, }, wantTitle: "A cookie eating workflow", @@ -659,9 +659,9 @@ var stressTests = []struct { validationIgnores: []string{ "jsonpointer must start with /: $.status", // legit issues TODO: improve the error returned as it is wrong "jsonpointer must start with /: $.id", // legit issues TODO: improve the error returned as it is wrong - "[81:24] invalid jsonpath expression: Error at line 1, column 7: unexpected token when parsing segment", // unsupported version: draft-goessner-dispatch-jsonpath-00 - "[110:24] invalid jsonpath expression: Error at line 1, column 5: unexpected token when parsing segment", // unsupported version: draft-goessner-dispatch-jsonpath-00 - "[114:24] invalid jsonpath expression: Error at line 1, column 9: unexpected token when parsing segment", // unsupported version: draft-goessner-dispatch-jsonpath-00 + "[81:24] error validation-invalid-syntax invalid jsonpath expression: Error at line 1, column 7: unexpected token when parsing segment", // unsupported version: draft-goessner-dispatch-jsonpath-00 + "[110:24] error validation-invalid-syntax invalid jsonpath expression: Error at line 1, column 5: unexpected token when parsing segment", // unsupported version: draft-goessner-dispatch-jsonpath-00 + "[114:24] error validation-invalid-syntax invalid jsonpath expression: Error at line 1, column 9: unexpected token when parsing segment", // unsupported version: draft-goessner-dispatch-jsonpath-00 }, }, wantTitle: "PetStore - Example of Workflows", @@ -671,7 +671,7 @@ var stressTests = []struct { args: args{ location: "https://raw.githubusercontent.com/ritza-co/e2e-testing-arazzo/c0615c3708a1e4c0fcaeb79edae78ddc4eb5ba82/arazzo.yaml", validationIgnores: []string{ - "[42:24] invalid jsonpath expression: Error at line 1, column 8: unexpected token", // legit invalid RFC 9535 syntax + "[42:24] error validation-invalid-syntax invalid jsonpath expression: Error at line 1, column 8: unexpected token", // legit invalid RFC 9535 syntax }, }, wantTitle: "Build-a-Bot Workflow", @@ -681,7 +681,7 @@ var stressTests = []struct { args: args{ location: 
"https://raw.githubusercontent.com/API-Flows/openapi-workflow-registry/75c237ce1b155ba9f8dc7f065759df7ae1cbbbe5/root/adyen/adyen-giving.yaml", validationIgnores: []string{ - "in must be one of [path, query, header, cookie] but was body", + "in must be one of [`path, query, header, cookie`] but was `body`", }, }, wantTitle: "Adyen Giving", diff --git a/arazzo/components.go b/arazzo/components.go index 515a75ef..76a07d90 100644 --- a/arazzo/components.go +++ b/arazzo/components.go @@ -2,6 +2,7 @@ package arazzo import ( "context" + "fmt" "regexp" "github.com/speakeasy-api/openapi/arazzo/core" @@ -44,7 +45,7 @@ func (c *Components) Validate(ctx context.Context, opts ...validation.Option) [] for key, input := range c.Inputs.All() { if !componentNameRegex.MatchString(key) { - errs = append(errs, validation.NewMapKeyError(validation.NewValueValidationError("components.inputs key must be a valid key [%s]: %s", componentNameRegex.String(), key), core, core.Inputs, key)) + errs = append(errs, validation.NewMapKeyError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("components.inputs key must be a valid key [`%s`]: `%s`", componentNameRegex.String(), key), core, core.Inputs, key)) } errs = append(errs, input.Validate(ctx, opts...)...) @@ -52,7 +53,7 @@ func (c *Components) Validate(ctx context.Context, opts ...validation.Option) [] for key, parameter := range c.Parameters.All() { if !componentNameRegex.MatchString(key) { - errs = append(errs, validation.NewMapKeyError(validation.NewValueValidationError("components.parameters key must be a valid key [%s]: %s", componentNameRegex.String(), key), core, core.Parameters, key)) + errs = append(errs, validation.NewMapKeyError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("components.parameters key must be a valid key [`%s`]: `%s`", componentNameRegex.String(), key), core, core.Parameters, key)) } paramOps := opts @@ -63,7 +64,7 @@ func (c *Components) Validate(ctx context.Context, opts ...validation.Option) [] for key, successAction := range c.SuccessActions.All() { if !componentNameRegex.MatchString(key) { - errs = append(errs, validation.NewMapKeyError(validation.NewValueValidationError("components.successActions key must be a valid key [%s]: %s", componentNameRegex.String(), key), core, core.SuccessActions, key)) + errs = append(errs, validation.NewMapKeyError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("components.successActions key must be a valid key [`%s`]: `%s`", componentNameRegex.String(), key), core, core.SuccessActions, key)) } successActionOps := opts @@ -74,7 +75,7 @@ func (c *Components) Validate(ctx context.Context, opts ...validation.Option) [] for key, failureAction := range c.FailureActions.All() { if !componentNameRegex.MatchString(key) { - errs = append(errs, validation.NewMapKeyError(validation.NewValueValidationError("components.failureActions key must be a valid key [%s]: %s", componentNameRegex.String(), key), core, core.FailureActions, key)) + errs = append(errs, validation.NewMapKeyError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("components.failureActions key must be a valid key [`%s`]: `%s`", componentNameRegex.String(), key), core, core.FailureActions, key)) } failureActionOps := opts diff --git a/arazzo/core/criterion.go b/arazzo/core/criterion.go index d5849e08..50b3c9ef 100644 --- a/arazzo/core/criterion.go +++ b/arazzo/core/criterion.go @@ -63,7 +63,7 @@ func (c *CriterionTypeUnion) Unmarshal(ctx 
context.Context, parentName string, n c.DetermineValidity(validationErrs) default: return []error{ - validation.NewValidationError(validation.NewTypeMismatchError(parentName, "criterionTypeUnion expected string or object, got %s", yml.NodeKindToString(resolvedNode.Kind)), resolvedNode), + validation.NewValidationError(validation.SeverityError, validation.RuleValidationTypeMismatch, validation.NewTypeMismatchError(parentName, "criterionTypeUnion expected string or object, got %s", yml.NodeKindToString(resolvedNode.Kind)), resolvedNode), }, nil } @@ -78,7 +78,7 @@ func (c *CriterionTypeUnion) SyncChanges(ctx context.Context, model any, valueNo } if mv.Kind() != reflect.Struct { - return nil, fmt.Errorf("CriterionTypeUnion.SyncChanges expected a struct, got %s", mv.Type()) + return nil, fmt.Errorf("CriterionTypeUnion.SyncChanges expected a struct, got `%s`", mv.Type()) } tf := mv.FieldByName("Type") diff --git a/arazzo/core/criterion_syncchanges_test.go b/arazzo/core/criterion_syncchanges_test.go index 7a78888c..7c02ef96 100644 --- a/arazzo/core/criterion_syncchanges_test.go +++ b/arazzo/core/criterion_syncchanges_test.go @@ -41,5 +41,5 @@ func TestCriterionTypeUnion_SyncChanges_NonStruct_Error(t *testing.T) { ctu := CriterionTypeUnion{} _, err = ctu.SyncChanges(t.Context(), "not a struct", node.Content[0]) require.Error(t, err, "SyncChanges should fail") - assert.Contains(t, err.Error(), "CriterionTypeUnion.SyncChanges expected a struct, got string", "error message should match") + assert.Contains(t, err.Error(), "CriterionTypeUnion.SyncChanges expected a struct, got `string`", "error message should match") } diff --git a/arazzo/core/criterion_test.go b/arazzo/core/criterion_test.go index 30db14c2..fb35a1d5 100644 --- a/arazzo/core/criterion_test.go +++ b/arazzo/core/criterion_test.go @@ -250,5 +250,5 @@ func TestCriterionTypeUnion_SyncChanges_Int_Error(t *testing.T) { _, err := union.SyncChanges(t.Context(), 42, nil) require.Error(t, err, "should return error for int model") - require.Contains(t, err.Error(), "expected a struct", "error should mention struct expectation") + require.Contains(t, err.Error(), "expected a struct, got `int`", "error should mention struct expectation") } diff --git a/arazzo/core/reusable.go b/arazzo/core/reusable.go index 5d32fafa..c9022a16 100644 --- a/arazzo/core/reusable.go +++ b/arazzo/core/reusable.go @@ -34,10 +34,8 @@ func (r *Reusable[T]) Unmarshal(ctx context.Context, parentName string, node *ya if resolvedNode.Kind != yaml.MappingNode { r.SetValid(false, false) - r.SetValid(false, false) - return []error{ - validation.NewValidationError(validation.NewTypeMismatchError(parentName, "reusable expected object, got %s", yml.NodeKindToString(resolvedNode.Kind)), resolvedNode), + validation.NewValidationError(validation.SeverityError, validation.RuleValidationTypeMismatch, validation.NewTypeMismatchError(parentName, "reusable expected `object`, got %s", yml.NodeKindToString(resolvedNode.Kind)), resolvedNode), }, nil } @@ -65,7 +63,7 @@ func (r *Reusable[T]) SyncChanges(ctx context.Context, model any, valueNode *yam } if mv.Kind() != reflect.Struct { - return nil, fmt.Errorf("Reusable.SyncChanges expected a struct, got %s", mv.Kind()) + return nil, fmt.Errorf("Reusable.SyncChanges expected a struct, got `%s`", mv.Kind()) } of := mv.FieldByName("Object") diff --git a/arazzo/core/reusable_test.go b/arazzo/core/reusable_test.go index dc5c6d6f..96aeea43 100644 --- a/arazzo/core/reusable_test.go +++ b/arazzo/core/reusable_test.go @@ -39,7 +39,7 @@ func 
TestReusable_Unmarshal_NonMappingNode_Error(t *testing.T) { validationErrs, err := reusable.Unmarshal(t.Context(), "test", node.Content[0]) require.NoError(t, err, "unmarshal error should be nil") require.NotEmpty(t, validationErrs, "validation errors should not be empty") - assert.Contains(t, validationErrs[0].Error(), "reusable expected object", "error message should match") + assert.Contains(t, validationErrs[0].Error(), "reusable expected `object`", "error message should match") assert.False(t, reusable.GetValid(), "reusable should not be valid") } @@ -53,7 +53,7 @@ func TestReusable_SyncChanges_NonStruct_Error(t *testing.T) { reusable := Reusable[*Parameter]{} _, err = reusable.SyncChanges(t.Context(), "not a struct", node.Content[0]) require.Error(t, err, "SyncChanges should fail") - assert.Contains(t, err.Error(), "Reusable.SyncChanges expected a struct, got string", "error message should match") + assert.Contains(t, err.Error(), "Reusable.SyncChanges expected a struct, got `string`", "error message should match") } func TestReusable_Unmarshal_NilNode_Error(t *testing.T) { diff --git a/arazzo/criterion/condition.go b/arazzo/criterion/condition.go index 34e10c10..1289a91c 100644 --- a/arazzo/criterion/condition.go +++ b/arazzo/criterion/condition.go @@ -2,6 +2,7 @@ package criterion import ( "errors" + "fmt" "strings" "github.com/speakeasy-api/openapi/expression" @@ -81,21 +82,21 @@ func (s *Condition) Validate(valueNode *yaml.Node, opts ...validation.Option) [] errs := []error{} if s.Expression == "" { - errs = append(errs, validation.NewValidationError(validation.NewMissingValueError("expression is required"), valueNode)) + errs = append(errs, validation.NewValidationError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("expression is required"), valueNode)) } if err := s.Expression.Validate(); err != nil { - errs = append(errs, validation.NewValidationError(validation.NewValueValidationError(err.Error()), valueNode)) + errs = append(errs, validation.NewValidationError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("%s", err.Error()), valueNode)) } switch s.Operator { case OperatorLT, OperatorLTE, OperatorGT, OperatorGTE, OperatorEQ, OperatorNE, OperatorNot, OperatorAnd, OperatorOr: default: - errs = append(errs, validation.NewValidationError(validation.NewValueValidationError("operator must be one of [%s]", strings.Join([]string{string(OperatorLT), string(OperatorLTE), string(OperatorGT), string(OperatorGTE), string(OperatorEQ), string(OperatorNE), string(OperatorNot), string(OperatorAnd), string(OperatorOr)}, ", ")), valueNode)) + errs = append(errs, validation.NewValidationError(validation.SeverityError, validation.RuleValidationAllowedValues, fmt.Errorf("operator must be one of [`%s`]", strings.Join([]string{string(OperatorLT), string(OperatorLTE), string(OperatorGT), string(OperatorGTE), string(OperatorEQ), string(OperatorNE), string(OperatorNot), string(OperatorAnd), string(OperatorOr)}, ", ")), valueNode)) } if s.Value == "" { - errs = append(errs, validation.NewValidationError(validation.NewMissingValueError("value is required"), valueNode)) + errs = append(errs, validation.NewValidationError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("value is required"), valueNode)) } return errs diff --git a/arazzo/criterion/criterion.go b/arazzo/criterion/criterion.go index 14bdbffa..dcaf3cd8 100644 --- a/arazzo/criterion/criterion.go +++ b/arazzo/criterion/criterion.go @@ -2,6 +2,7 @@ package 
criterion import ( "context" + "errors" "fmt" "regexp" "strings" @@ -59,7 +60,7 @@ func (c *CriterionExpressionType) Validate(opts ...validation.Option) []error { switch c.Version { case CriterionTypeVersionDraftGoessnerDispatchJsonPath00: default: - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("version must be one of [%s]", strings.Join([]string{string(CriterionTypeVersionDraftGoessnerDispatchJsonPath00)}, ", ")), core, core.Version)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationAllowedValues, fmt.Errorf("version must be one of [`%s`]", strings.Join([]string{string(CriterionTypeVersionDraftGoessnerDispatchJsonPath00)}, ", ")), core, core.Version)) } case CriterionTypeXPath: switch c.Version { @@ -67,10 +68,10 @@ func (c *CriterionExpressionType) Validate(opts ...validation.Option) []error { case CriterionTypeVersionXPath20: case CriterionTypeVersionXPath10: default: - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("version must be one of [%s]", strings.Join([]string{string(CriterionTypeVersionXPath30), string(CriterionTypeVersionXPath20), string(CriterionTypeVersionXPath10)}, ", ")), core, core.Version)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationAllowedValues, fmt.Errorf("version must be one of [`%s`]", strings.Join([]string{string(CriterionTypeVersionXPath30), string(CriterionTypeVersionXPath20), string(CriterionTypeVersionXPath10)}, ", ")), core, core.Version)) } default: - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("type must be one of [%s]", strings.Join([]string{string(CriterionTypeJsonPath), string(CriterionTypeXPath)}, ", ")), core, core.Type)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationAllowedValues, fmt.Errorf("type must be one of [`%s`]", strings.Join([]string{string(CriterionTypeJsonPath), string(CriterionTypeXPath)}, ", ")), core, core.Type)) } if len(errs) == 0 { @@ -190,7 +191,7 @@ func (c *Criterion) Validate(opts ...validation.Option) []error { errs := []error{} if c.Condition == "" { - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("condition is required"), core, core.Condition)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("condition is required"), core, core.Condition)) } if c.Type.Type != nil { @@ -200,19 +201,19 @@ func (c *Criterion) Validate(opts ...validation.Option) []error { case CriterionTypeJsonPath: case CriterionTypeXPath: default: - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("type must be one of [%s]", strings.Join([]string{string(CriterionTypeSimple), string(CriterionTypeRegex), string(CriterionTypeJsonPath), string(CriterionTypeXPath)}, ", ")), core, core.Type)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationAllowedValues, fmt.Errorf("type must be one of [`%s`]", strings.Join([]string{string(CriterionTypeSimple), string(CriterionTypeRegex), string(CriterionTypeJsonPath), string(CriterionTypeXPath)}, ", ")), core, core.Type)) } } else if c.Type.ExpressionType != nil { errs = append(errs, c.Type.ExpressionType.Validate(opts...)...) 
} if c.Type.IsTypeProvided() && c.Context == nil { - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("context is required, if type is set"), core, core.Context)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("context is required, if type is set"), core, core.Context)) } if c.Context != nil { if err := c.Context.Validate(); err != nil { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError(err.Error()), core, core.Context)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("%s", err.Error()), core, core.Context)) } } @@ -235,18 +236,18 @@ func (c *Criterion) validateCondition(opts ...validation.Option) []error { case CriterionTypeSimple: cond, err := newCondition(c.Condition) if err != nil && c.Context == nil { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError(err.Error()), core, core.Condition)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("%s", err.Error()), core, core.Condition)) } else if cond != nil { errs = append(errs, cond.Validate(valueNode, opts...)...) } case CriterionTypeRegex: _, err := regexp.Compile(c.Condition) if err != nil { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("invalid regex expression: %s", err.Error()), core, core.Condition)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("invalid regex expression: %w", err), core, core.Condition)) } case CriterionTypeJsonPath: if _, err := jsonpath.NewPath(c.Condition); err != nil { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("invalid jsonpath expression: %s", err), core, core.Condition)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("invalid jsonpath expression: %w", err), core, core.Condition)) } case CriterionTypeXPath: // TODO validate xpath diff --git a/arazzo/criterion/criterion_validate_test.go b/arazzo/criterion/criterion_validate_test.go index a34e28f3..0b328871 100644 --- a/arazzo/criterion/criterion_validate_test.go +++ b/arazzo/criterion/criterion_validate_test.go @@ -72,7 +72,7 @@ func TestCriterionExpressionType_Validate_Error(t *testing.T) { Type: criterion.CriterionTypeJsonPath, Version: "invalid-version", }, - expectedError: "version must be one of [draft-goessner-dispatch-jsonpath-00]", + expectedError: "version must be one of [`draft-goessner-dispatch-jsonpath-00`]", }, { name: "invalid xpath version", @@ -80,7 +80,7 @@ func TestCriterionExpressionType_Validate_Error(t *testing.T) { Type: criterion.CriterionTypeXPath, Version: "invalid-version", }, - expectedError: "version must be one of [xpath-30, xpath-20, xpath-10]", + expectedError: "version must be one of [`xpath-30, xpath-20, xpath-10`]", }, { name: "invalid type", @@ -88,7 +88,7 @@ func TestCriterionExpressionType_Validate_Error(t *testing.T) { Type: "invalid-type", Version: criterion.CriterionTypeVersionNone, }, - expectedError: "type must be one of [jsonpath, xpath]", + expectedError: "type must be one of [`jsonpath, xpath`]", }, } diff --git a/arazzo/failureaction.go b/arazzo/failureaction.go index fb36fd8a..4a019efd 100644 --- a/arazzo/failureaction.go +++ b/arazzo/failureaction.go @@ -3,6 +3,7 @@ package 
arazzo import ( "context" "errors" + "fmt" "strings" "github.com/speakeasy-api/openapi/arazzo/core" @@ -69,22 +70,22 @@ func (f *FailureAction) Validate(ctx context.Context, opts ...validation.Option) errs := []error{} if core.Name.Present && f.Name == "" { - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("failureAction.name is required"), core, core.Name)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("failureAction.name is required"), core, core.Name)) } switch f.Type { case FailureActionTypeEnd: if f.WorkflowID != nil { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("failureAction.workflowId is not allowed when type: end is specified"), core, core.WorkflowID)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationMutuallyExclusiveFields, errors.New("failureAction.workflowId is not allowed when type: end is specified"), core, core.WorkflowID)) } if f.StepID != nil { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("failureAction.stepId is not allowed when type: end is specified"), core, core.StepID)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationMutuallyExclusiveFields, errors.New("failureAction.stepId is not allowed when type: end is specified"), core, core.StepID)) } if f.RetryAfter != nil { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("failureAction.retryAfter is not allowed when type: end is specified"), core, core.RetryAfter)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationMutuallyExclusiveFields, errors.New("failureAction.retryAfter is not allowed when type: end is specified"), core, core.RetryAfter)) } if f.RetryLimit != nil { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("failureAction.retryLimit is not allowed when type: end is specified"), core, core.RetryLimit)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationMutuallyExclusiveFields, errors.New("failureAction.retryLimit is not allowed when type: end is specified"), core, core.RetryLimit)) } case FailureActionTypeGoto: workflowIDNode := core.WorkflowID.GetKeyNodeOrRoot(core.RootNode) @@ -100,10 +101,10 @@ func (f *FailureAction) Validate(ctx context.Context, opts ...validation.Option) required: true, }, opts...)...) 
if f.RetryAfter != nil { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("failureAction.retryAfter is not allowed when type: goto is specified"), core, core.RetryAfter)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationMutuallyExclusiveFields, errors.New("failureAction.retryAfter is not allowed when type: goto is specified"), core, core.RetryAfter)) } if f.RetryLimit != nil { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("failureAction.retryLimit is not allowed when type: goto is specified"), core, core.RetryLimit)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationMutuallyExclusiveFields, errors.New("failureAction.retryLimit is not allowed when type: goto is specified"), core, core.RetryLimit)) } case FailureActionTypeRetry: workflowIDNode := core.WorkflowID.GetKeyNodeOrRoot(core.RootNode) @@ -120,16 +121,16 @@ func (f *FailureAction) Validate(ctx context.Context, opts ...validation.Option) }, opts...)...) if f.RetryAfter != nil { if *f.RetryAfter < 0 { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("failureAction.retryAfter must be greater than or equal to 0"), core, core.RetryAfter)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationAllowedValues, errors.New("failureAction.retryAfter must be greater than or equal to 0"), core, core.RetryAfter)) } } if f.RetryLimit != nil { if *f.RetryLimit < 0 { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("failureAction.retryLimit must be greater than or equal to 0"), core, core.RetryLimit)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationAllowedValues, errors.New("failureAction.retryLimit must be greater than or equal to 0"), core, core.RetryLimit)) } } default: - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("failureAction.type must be one of [%s]", strings.Join([]string{string(FailureActionTypeEnd), string(FailureActionTypeGoto), string(FailureActionTypeRetry)}, ", ")), core, core.Type)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationAllowedValues, fmt.Errorf("failureAction.type must be one of [`%s`]", strings.Join([]string{string(FailureActionTypeEnd), string(FailureActionTypeGoto), string(FailureActionTypeRetry)}, ", ")), core, core.Type)) } for i := range f.Criteria { diff --git a/arazzo/info.go b/arazzo/info.go index 48cb5b4b..3136658c 100644 --- a/arazzo/info.go +++ b/arazzo/info.go @@ -2,6 +2,7 @@ package arazzo import ( "context" + "errors" "github.com/speakeasy-api/openapi/arazzo/core" "github.com/speakeasy-api/openapi/extensions" @@ -34,11 +35,11 @@ func (i *Info) Validate(ctx context.Context, opts ...validation.Option) []error errs := []error{} if core.Title.Present && i.Title == "" { - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("info.title is required"), core, core.Title)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("`info.title` is required"), core, core.Title)) } if core.Version.Present && i.Version == "" { - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("info.version is required"), core, core.Version)) + errs = append(errs, validation.NewValueError(validation.SeverityError, 
validation.RuleValidationRequiredField, errors.New("`info.version` is required"), core, core.Version)) } i.Valid = len(errs) == 0 && core.GetValid() diff --git a/arazzo/parameter.go b/arazzo/parameter.go index e97e5c9f..f56d3966 100644 --- a/arazzo/parameter.go +++ b/arazzo/parameter.go @@ -2,6 +2,8 @@ package arazzo import ( "context" + "errors" + "fmt" "strings" "github.com/speakeasy-api/openapi/arazzo/core" @@ -55,7 +57,7 @@ func (p *Parameter) Validate(ctx context.Context, opts ...validation.Option) []e s := validation.GetContextObject[Step](o) if core.Name.Present && p.Name == "" { - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("parameter fieldname is required"), core, core.Name)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("parameter fieldname is required"), core, core.Name)) } in := In("") @@ -71,25 +73,25 @@ func (p *Parameter) Validate(ctx context.Context, opts ...validation.Option) []e default: if p.In == nil || in == "" { if w == nil && s != nil && s.WorkflowID == nil { - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("parameter.in is required within a step when workflowId is not set"), core, core.In)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("parameter.in is required within a step when workflowId is not set"), core, core.In)) } } if in != "" { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("parameter.in must be one of [%s] but was %s", strings.Join([]string{string(InPath), string(InQuery), string(InHeader), string(InCookie)}, ", "), in), core, core.In)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationAllowedValues, fmt.Errorf("parameter.in must be one of [`%s`] but was `%s`", strings.Join([]string{string(InPath), string(InQuery), string(InHeader), string(InCookie)}, ", "), in), core, core.In)) } } if core.Value.Present && p.Value == nil { - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("parameter.value is required"), core, core.Value)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("`parameter.value` is required"), core, core.Value)) } else if p.Value != nil { _, expression, err := expression.GetValueOrExpressionValue(p.Value) if err != nil { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError(err.Error()), core, core.Value)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("%s", err.Error()), core, core.Value)) } if expression != nil { if err := expression.Validate(); err != nil { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError(err.Error()), core, core.Value)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("%s", err.Error()), core, core.Value)) } } } diff --git a/arazzo/payloadreplacement.go b/arazzo/payloadreplacement.go index a8793963..ad0e850b 100644 --- a/arazzo/payloadreplacement.go +++ b/arazzo/payloadreplacement.go @@ -2,6 +2,8 @@ package arazzo import ( "context" + "errors" + "fmt" "github.com/speakeasy-api/openapi/arazzo/core" "github.com/speakeasy-api/openapi/expression" @@ -32,23 +34,23 @@ func (p *PayloadReplacement) Validate(ctx 
context.Context, opts ...validation.Op errs := []error{} if core.Target.Present && p.Target == "" { - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("payloadReplacement.target is required"), core, core.Target)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("payloadReplacement.target is required"), core, core.Target)) } if err := p.Target.Validate(); err != nil { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("payloadReplacement.target is invalid: "+err.Error()), core, core.Target)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("payloadReplacement.target is invalid: %w", err), core, core.Target)) } if core.Value.Present && p.Value == nil { - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("payloadReplacement.value is required"), core, core.Value)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("payloadReplacement.value is required"), core, core.Value)) } else if p.Value != nil { _, expression, err := expression.GetValueOrExpressionValue(p.Value) if err != nil { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("payloadReplacement.value is invalid: "+err.Error()), core, core.Value)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("payloadReplacement.value is invalid: %w", err), core, core.Value)) } if expression != nil { if err := expression.Validate(); err != nil { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("payloadReplacement.value is invalid: "+err.Error()), core, core.Value)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("payloadReplacement.value is invalid: %w", err), core, core.Value)) } } } diff --git a/arazzo/requestbody.go b/arazzo/requestbody.go index c7f2ac64..89c17533 100644 --- a/arazzo/requestbody.go +++ b/arazzo/requestbody.go @@ -2,6 +2,7 @@ package arazzo import ( "context" + "fmt" "mime" "github.com/speakeasy-api/openapi/arazzo/core" @@ -36,7 +37,7 @@ func (r *RequestBody) Validate(ctx context.Context, opts ...validation.Option) [ if r.ContentType != nil { _, _, err := mime.ParseMediaType(*r.ContentType) if err != nil { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("requestBody.contentType is not valid: %s", err.Error()), core, core.ContentType)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("requestBody.contentType is not valid: %w", err), core, core.ContentType)) } } @@ -47,7 +48,7 @@ func (r *RequestBody) Validate(ctx context.Context, opts ...validation.Option) [ if err == nil && exp != nil { // Only validate if the entire payload IS an expression (not just contains expressions) if err := exp.Validate(); err != nil { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("requestBody.payload expression is not valid: %s", err.Error()), core, core.Payload)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("requestBody.payload expression is not valid: %w", err), core, core.Payload)) } } // If exp is nil, the payload is a 
value (not an expression) - no validation needed diff --git a/arazzo/reusable.go b/arazzo/reusable.go index 686f5bd9..d7a2c0a0 100644 --- a/arazzo/reusable.go +++ b/arazzo/reusable.go @@ -3,6 +3,7 @@ package arazzo import ( "context" "errors" + "fmt" "reflect" "unicode" "unicode/utf8" @@ -117,7 +118,7 @@ func (r *Reusable[T, V, C]) Validate(ctx context.Context, opts ...validation.Opt case "parameters": default: if r.Value != nil { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("reusableParameter.value is not allowed when object is not a parameter"), core, core.Value)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationMutuallyExclusiveFields, errors.New("reusableParameter.value is not allowed when object is not a parameter"), core, core.Value)) } } @@ -136,7 +137,7 @@ func (r *Reusable[T, V, C]) validateReference(ctx context.Context, a *Arazzo, ob core := r.GetCore() if err := r.Reference.Validate(); err != nil { return []error{ - validation.NewValueError(validation.NewValueValidationError("%s.reference is invalid: %s", componentTypeToReusableType(objComponentType), err.Error()), core, core.Reference), + validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("`%s`.reference is invalid: %w", componentTypeToReusableType(objComponentType), err), core, core.Reference), } } @@ -144,13 +145,13 @@ func (r *Reusable[T, V, C]) validateReference(ctx context.Context, a *Arazzo, ob if typ != expression.ExpressionTypeComponents { return []error{ - validation.NewValueError(validation.NewValueValidationError("%s.reference must be a components expression, got %s", componentTypeToReusableType(objComponentType), r.Reference.GetType()), core, core.Reference), + validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("`%s`.reference must be a components expression, got `%s`", componentTypeToReusableType(objComponentType), r.Reference.GetType()), core, core.Reference), } } if componentType == "" || len(references) != 1 { return []error{ - validation.NewValueError(validation.NewValueValidationError("%s.reference must be a components expression with 3 parts, got %s", componentTypeToReusableType(objComponentType), *r.Reference), core, core.Reference), + validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("`%s`.reference must be a components expression with 3 parts, got `%s`", componentTypeToReusableType(objComponentType), *r.Reference), core, core.Reference), } } @@ -186,7 +187,7 @@ func (r *Reusable[T, V, C]) validateReference(ctx context.Context, a *Arazzo, ob }, opts...) 
default: return []error{ - validation.NewValueError(validation.NewValueValidationError("reference to %s is not valid, valid components are [parameters, successActions, failureActions]", componentType), core, core.Reference), + validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidReference, fmt.Errorf("reference to `%s` is not valid, valid components are [parameters, successActions, failureActions]", componentType), core, core.Reference), } } } @@ -203,20 +204,20 @@ type validateComponentReferenceArgs[T any] struct { func validateComponentReference[T any, V interfaces.Validator[T]](ctx context.Context, args validateComponentReferenceArgs[V], opts ...validation.Option) []error { if args.componentType != args.objComponentType { return []error{ - validation.NewValidationError(validation.NewValueValidationError("%s.reference expected a %s reference got %s", componentTypeToReusableType(args.objComponentType), args.objComponentType, args.componentType), args.referenceValueNode), + validation.NewValidationError(validation.SeverityError, validation.RuleValidationTypeMismatch, fmt.Errorf("`%s`.reference expected a `%s` reference got `%s`", componentTypeToReusableType(args.objComponentType), args.objComponentType, args.componentType), args.referenceValueNode), } } if args.components == nil { return []error{ - validation.NewValidationError(validation.NewValueValidationError("%s.reference to missing component %s, components.%s not present", componentTypeToReusableType(args.objComponentType), *args.reference, args.componentType), args.referenceValueNode), + validation.NewValidationError(validation.SeverityError, validation.RuleValidationInvalidReference, fmt.Errorf("`%s`.reference to missing component `%s`, components.`%s` not present", componentTypeToReusableType(args.objComponentType), *args.reference, args.componentType), args.referenceValueNode), } } component, ok := args.components.Get(args.componentName) if !ok { return []error{ - validation.NewValidationError(validation.NewValueValidationError("%s.reference to missing component %s, components.%s.%s not present", componentTypeToReusableType(args.objComponentType), *args.reference, args.componentType, args.componentName), args.referenceValueNode), + validation.NewValidationError(validation.SeverityError, validation.RuleValidationInvalidReference, fmt.Errorf("`%s`.reference to missing component `%s`, components.`%s`.`%s` not present", componentTypeToReusableType(args.objComponentType), *args.reference, args.componentType, args.componentName), args.referenceValueNode), } } diff --git a/arazzo/sourcedescription.go b/arazzo/sourcedescription.go index a9a6b61a..d4f94ab9 100644 --- a/arazzo/sourcedescription.go +++ b/arazzo/sourcedescription.go @@ -2,6 +2,8 @@ package arazzo import ( "context" + "errors" + "fmt" "net/url" "strings" @@ -57,14 +59,14 @@ func (s *SourceDescription) Validate(ctx context.Context, opts ...validation.Opt errs := []error{} if core.Name.Present && s.Name == "" { - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("sourceDescription.name is required"), core, core.Name)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("sourceDescription.name is required"), core, core.Name)) } if core.URL.Present && s.URL == "" { - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("sourceDescription.url is required"), core, core.URL)) + errs = append(errs, 
validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("sourceDescription.url is required"), core, core.URL)) } else if core.URL.Present { if _, err := url.Parse(s.URL); err != nil { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("sourceDescription.url is not a valid url/uri according to RFC 3986: %s", err), core, core.URL)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("sourceDescription.url is not a valid url/uri according to RFC 3986: %w", err), core, core.URL)) } } @@ -72,7 +74,7 @@ func (s *SourceDescription) Validate(ctx context.Context, opts ...validation.Opt case SourceDescriptionTypeOpenAPI: case SourceDescriptionTypeArazzo: default: - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("sourceDescription.type must be one of [%s]", strings.Join([]string{SourceDescriptionTypeOpenAPI, SourceDescriptionTypeArazzo}, ", ")), core, core.Type)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationAllowedValues, fmt.Errorf("sourceDescription.type must be one of [`%s`]", strings.Join([]string{SourceDescriptionTypeOpenAPI, SourceDescriptionTypeArazzo}, ", ")), core, core.Type)) } s.Valid = len(errs) == 0 && core.GetValid() diff --git a/arazzo/step.go b/arazzo/step.go index 40605f41..b0fe508f 100644 --- a/arazzo/step.go +++ b/arazzo/step.go @@ -90,10 +90,10 @@ func (s *Step) Validate(ctx context.Context, opts ...validation.Option) []error errs := []error{} if core.StepID.Present && s.StepID == "" { - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("step.stepId is required"), core, core.StepID)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("step.stepId is required"), core, core.StepID)) } else if s.StepID != "" { if !stepIDRegex.MatchString(s.StepID) { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("step.stepId must be a valid name [%s]: %s", stepIDRegex.String(), s.StepID), core, core.StepID)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("step.stepId must be a valid name [`%s`]: `%s`", stepIDRegex.String(), s.StepID), core, core.StepID)) } numStepsWithID := 0 @@ -103,7 +103,7 @@ func (s *Step) Validate(ctx context.Context, opts ...validation.Option) []error } } if numStepsWithID > 1 { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("step.stepId must be unique within the workflow, found %d steps with the same stepId", numStepsWithID), core, core.StepID)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationDuplicateKey, fmt.Errorf("step.stepId must be unique within the workflow, found `%d` steps with the same stepId", numStepsWithID), core, core.StepID)) } } @@ -121,10 +121,10 @@ func (s *Step) Validate(ctx context.Context, opts ...validation.Option) []error } switch numSet { case 0: - errs = append(errs, validation.NewValidationError(validation.NewMissingValueError("step at least one of operationId, operationPath or workflowId fields must be set"), core.RootNode)) + errs = append(errs, validation.NewValidationError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("step at least one of operationId, operationPath or workflowId fields must be 
set"), core.RootNode)) case 1: default: - errs = append(errs, validation.NewValidationError(validation.NewValueValidationError("step only one of operationId, operationPath or workflowId.can be set"), core.RootNode)) + errs = append(errs, validation.NewValidationError(validation.SeverityError, validation.RuleValidationMutuallyExclusiveFields, errors.New("step only one of operationId, operationPath or workflowId can be set"), core.RootNode)) } if s.OperationID != nil { @@ -135,65 +135,65 @@ func (s *Step) Validate(ctx context.Context, opts ...validation.Option) []error } } if numOpenAPISourceDescriptions > 1 && !s.OperationID.IsExpression() { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("step.operationId must be a valid expression if there are multiple OpenAPI source descriptions"), core, core.OperationID)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, errors.New("step.operationId must be a valid expression if there are multiple OpenAPI source descriptions"), core, core.OperationID)) } if s.OperationID.IsExpression() { if err := s.OperationID.Validate(); err != nil { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("step.operationId expression is invalid: %s", err.Error()), core, core.OperationID)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("step.operationId expression is invalid: %w", err), core, core.OperationID)) } typ, sourceDescriptionName, _, _ := s.OperationID.GetParts() if typ != expression.ExpressionTypeSourceDescriptions { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("step.operationId must be a sourceDescriptions expression, got %s", typ), core, core.OperationID)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("step.operationId must be a sourceDescriptions expression, got `%s`", typ), core, core.OperationID)) } if a.SourceDescriptions.Find(sourceDescriptionName) == nil { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("step.operationId referencing sourceDescription %s not found", sourceDescriptionName), core, core.OperationID)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidReference, fmt.Errorf("step.operationId referencing sourceDescription `%s` not found", sourceDescriptionName), core, core.OperationID)) } } } if s.OperationPath != nil { if err := s.OperationPath.Validate(); err != nil { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("step.operationPath expression is invalid: %s", err.Error()), core, core.OperationPath)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("step.operationPath expression is invalid: %w", err), core, core.OperationPath)) } typ, sourceDescriptionName, expressionParts, jp := s.OperationPath.GetParts() if typ != expression.ExpressionTypeSourceDescriptions { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("step.operationPath must be a sourceDescriptions expression, got %s", typ), core, core.OperationPath)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("step.operationPath must be a sourceDescriptions expression, got `%s`", 
typ), core, core.OperationPath)) } if a.SourceDescriptions.Find(sourceDescriptionName) == nil { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("step.operationPath referencing sourceDescription %s not found", sourceDescriptionName), core, core.OperationPath)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidReference, fmt.Errorf("step.operationPath referencing sourceDescription `%s` not found", sourceDescriptionName), core, core.OperationPath)) } if len(expressionParts) != 1 || expressionParts[0] != "url" { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("step.operationPath must reference the url of a sourceDescription"), core, core.OperationPath)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, errors.New("step.operationPath must reference the url of a sourceDescription"), core, core.OperationPath)) } if jp == "" { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("step.operationPath must contain a json pointer to the operation path within the sourceDescription"), core, core.OperationPath)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("step.operationPath must contain a json pointer to the operation path within the sourceDescription"), core, core.OperationPath)) } } if s.WorkflowID != nil { if s.WorkflowID.IsExpression() { if err := s.WorkflowID.Validate(); err != nil { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("step.workflowId expression is invalid: %s", err.Error()), core, core.WorkflowID)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("step.workflowId expression is invalid: %w", err), core, core.WorkflowID)) } typ, sourceDescriptionName, _, _ := s.WorkflowID.GetParts() if typ != expression.ExpressionTypeSourceDescriptions { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("step.workflowId must be a sourceDescriptions expression, got %s", typ), core, core.WorkflowID)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("step.workflowId must be a sourceDescriptions expression, got `%s`", typ), core, core.WorkflowID)) } if a.SourceDescriptions.Find((sourceDescriptionName)) == nil { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("step.workflowId referencing sourceDescription %s not found", sourceDescriptionName), core, core.WorkflowID)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidReference, fmt.Errorf("step.workflowId referencing sourceDescription `%s` not found", sourceDescriptionName), core, core.WorkflowID)) } } else if a.Workflows.Find(pointer.Value(s.WorkflowID).String()) == nil { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("step.workflowId referencing workflow %s not found", *s.WorkflowID), core, core.WorkflowID)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidReference, fmt.Errorf("step.workflowId referencing workflow `%s` not found", *s.WorkflowID), core, core.WorkflowID)) } } @@ -206,14 +206,14 @@ func (s *Step) Validate(ctx context.Context, opts ...validation.Option) []error if 
parameter.Reference != nil { _, ok := parameterRefs[string(*parameter.Reference)] if ok { - errs = append(errs, validation.NewSliceError(validation.NewValueValidationError("step.parameters duplicate parameter found with reference %s", *parameter.Reference), core, core.Parameters, i)) + errs = append(errs, validation.NewSliceError(validation.SeverityError, validation.RuleValidationDuplicateKey, fmt.Errorf("step.parameters duplicate parameter found with reference `%s`", *parameter.Reference), core, core.Parameters, i)) } parameterRefs[string(*parameter.Reference)] = true } else if parameter.Object != nil { id := fmt.Sprintf("%s.%v", parameter.Object.Name, parameter.Object.In) _, ok := parameters[id] if ok { - errs = append(errs, validation.NewSliceError(validation.NewValueValidationError("step.parameters duplicate parameter found with name %s and in %v", parameter.Object.Name, parameter.Object.In), core, core.Parameters, i)) + errs = append(errs, validation.NewSliceError(validation.SeverityError, validation.RuleValidationDuplicateKey, fmt.Errorf("step.parameters duplicate parameter found with name `%s` and in `%v`", parameter.Object.Name, parameter.Object.In), core, core.Parameters, i)) } parameters[id] = true } @@ -221,7 +221,7 @@ func (s *Step) Validate(ctx context.Context, opts ...validation.Option) []error if s.RequestBody != nil { if s.WorkflowID != nil { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("step.requestBody should not be set when workflowId is set"), core, core.RequestBody)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationMutuallyExclusiveFields, errors.New("step.requestBody should not be set when workflowId is set"), core, core.RequestBody)) } errs = append(errs, s.RequestBody.Validate(ctx, opts...)...) 
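Across these arazzo hunks the change is mechanical: the old NewMissingValueError/NewValueValidationError wrappers are replaced by passing an explicit severity, a rule identifier, and a plain wrapped error straight into NewValueError/NewValidationError/NewSliceError/NewMapKeyError/NewMapValueError. A minimal standalone sketch of the new call shape follows; it is hedged, not part of the patch — nil stands in for the node argument that the real call sites fill with core.RootNode or a field node.

```go
package main

import (
	"errors"
	"fmt"

	"github.com/speakeasy-api/openapi/validation"
)

func main() {
	errs := []error{}

	// New constructor shape: severity, rule identifier, wrapped error, then
	// the node that locates the problem. nil is used here only because this
	// sketch has no parsed document to point into.
	errs = append(errs, validation.NewValidationError(
		validation.SeverityError,
		validation.RuleValidationRequiredField,
		errors.New("step.stepId is required"),
		nil,
	))

	fmt.Printf("collected %d validation error(s)\n", len(errs))
}
```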
@@ -240,14 +240,14 @@ func (s *Step) Validate(ctx context.Context, opts ...validation.Option) []error if onSuccess.Reference != nil { _, ok := successActionRefs[string(*onSuccess.Reference)] if ok { - errs = append(errs, validation.NewSliceError(validation.NewValueValidationError("step.onSuccess duplicate successAction found with reference %s", *onSuccess.Reference), core, core.OnSuccess, i)) + errs = append(errs, validation.NewSliceError(validation.SeverityError, validation.RuleValidationDuplicateKey, fmt.Errorf("step.onSuccess duplicate successAction found with reference `%s`", *onSuccess.Reference), core, core.OnSuccess, i)) } successActionRefs[string(*onSuccess.Reference)] = true } else if onSuccess.Object != nil { id := fmt.Sprintf("%s.%v", onSuccess.Object.Name, onSuccess.Object.Type) _, ok := successActions[id] if ok { - errs = append(errs, validation.NewSliceError(validation.NewValueValidationError("step.onSuccess duplicate successAction found with name %s and type %v", onSuccess.Object.Name, onSuccess.Object.Type), core, core.OnSuccess, i)) + errs = append(errs, validation.NewSliceError(validation.SeverityError, validation.RuleValidationDuplicateKey, fmt.Errorf("step.onSuccess duplicate successAction found with name `%s` and type `%v`", onSuccess.Object.Name, onSuccess.Object.Type), core, core.OnSuccess, i)) } successActions[id] = true } @@ -262,14 +262,14 @@ func (s *Step) Validate(ctx context.Context, opts ...validation.Option) []error if onFailure.Reference != nil { _, ok := failureActionRefs[string(*onFailure.Reference)] if ok { - errs = append(errs, validation.NewSliceError(validation.NewValueValidationError("step.onFailure duplicate failureAction found with reference %s", *onFailure.Reference), core, core.OnFailure, i)) + errs = append(errs, validation.NewSliceError(validation.SeverityError, validation.RuleValidationDuplicateKey, fmt.Errorf("step.onFailure duplicate failureAction found with reference `%s`", *onFailure.Reference), core, core.OnFailure, i)) } failureActionRefs[string(*onFailure.Reference)] = true } else if onFailure.Object != nil { id := fmt.Sprintf("%s.%v", onFailure.Object.Name, onFailure.Object.Type) _, ok := failureActions[id] if ok { - errs = append(errs, validation.NewSliceError(validation.NewValueValidationError("step.onFailure duplicate failureAction found with name %s and type %v", onFailure.Object.Name, onFailure.Object.Type), core, core.OnFailure, i)) + errs = append(errs, validation.NewSliceError(validation.SeverityError, validation.RuleValidationDuplicateKey, fmt.Errorf("step.onFailure duplicate failureAction found with name `%s` and type `%v`", onFailure.Object.Name, onFailure.Object.Type), core, core.OnFailure, i)) } failureActions[id] = true } @@ -277,11 +277,11 @@ func (s *Step) Validate(ctx context.Context, opts ...validation.Option) []error for name, output := range s.Outputs.All() { if !outputNameRegex.MatchString(name) { - errs = append(errs, validation.NewMapKeyError(validation.NewValueValidationError("step.outputs name must be a valid name [%s]: %s", outputNameRegex.String(), name), core, core.Outputs, name)) + errs = append(errs, validation.NewMapKeyError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("step.outputs name must be a valid name [`%s`]: `%s`", outputNameRegex.String(), name), core, core.Outputs, name)) } if err := output.Validate(); err != nil { - errs = append(errs, validation.NewMapValueError(validation.NewValueValidationError("step.outputs expression is invalid: %s", err.Error()), core, 
core.Outputs, name)) + errs = append(errs, validation.NewMapValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("step.outputs expression is invalid: %w", err), core, core.Outputs, name)) } } diff --git a/arazzo/successaction.go b/arazzo/successaction.go index e73d4feb..8fc3e5e0 100644 --- a/arazzo/successaction.go +++ b/arazzo/successaction.go @@ -3,6 +3,7 @@ package arazzo import ( "context" "errors" + "fmt" "strings" "github.com/speakeasy-api/openapi/arazzo/core" @@ -64,16 +65,16 @@ func (s *SuccessAction) Validate(ctx context.Context, opts ...validation.Option) errs := []error{} if core.Name.Present && s.Name == "" { - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("successAction.name is required"), core, core.Name)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("successAction.name is required"), core, core.Name)) } switch s.Type { case SuccessActionTypeEnd: if s.WorkflowID != nil { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("successAction.workflowId is not allowed when type: end is specified"), core, core.WorkflowID)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationMutuallyExclusiveFields, errors.New("successAction.workflowId is not allowed when type: end is specified"), core, core.WorkflowID)) } if s.StepID != nil { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("successAction.stepId is not allowed when type: end is specified"), core, core.StepID)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationMutuallyExclusiveFields, errors.New("successAction.stepId is not allowed when type: end is specified"), core, core.StepID)) } case SuccessActionTypeGoto: workflowIDNode := core.WorkflowID.GetKeyNodeOrRoot(core.RootNode) @@ -90,7 +91,7 @@ func (s *SuccessAction) Validate(ctx context.Context, opts ...validation.Option) required: true, }, opts...)...) 
default: - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("successAction.type must be one of [%s]", strings.Join([]string{string(SuccessActionTypeEnd), string(SuccessActionTypeGoto)}, ", ")), core, core.Type)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationAllowedValues, fmt.Errorf("successAction.type must be one of [`%s`]", strings.Join([]string{string(SuccessActionTypeEnd), string(SuccessActionTypeGoto)}, ", ")), core, core.Type)) } for i := range s.Criteria { @@ -120,28 +121,28 @@ func validationActionWorkflowIDAndStepID(ctx context.Context, parentName string, errs := []error{} if params.required && params.workflowID == nil && params.stepID == nil { - errs = append(errs, validation.NewValidationError(validation.NewMissingValueError("%s.workflowId or stepId is required", parentName), params.workflowIDNode)) + errs = append(errs, validation.NewValidationError(validation.SeverityError, validation.RuleValidationRequiredField, fmt.Errorf("`%s`.workflowId or stepId is required", parentName), params.workflowIDNode)) } if params.workflowID != nil && params.stepID != nil { - errs = append(errs, validation.NewValidationError(validation.NewValueValidationError("%s.workflowId and stepId are mutually exclusive, only one can be specified", parentName), params.workflowIDNode)) + errs = append(errs, validation.NewValidationError(validation.SeverityError, validation.RuleValidationMutuallyExclusiveFields, fmt.Errorf("`%s`.workflowId and stepId are mutually exclusive, only one can be specified", parentName), params.workflowIDNode)) } if params.workflowID != nil { if params.workflowID.IsExpression() { if err := params.workflowID.Validate(); err != nil { - errs = append(errs, validation.NewValidationError(validation.NewValueValidationError("%s.workflowId expression is invalid: %s", parentName, err.Error()), params.workflowIDNode)) + errs = append(errs, validation.NewValidationError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("`%s`.workflowId expression is invalid: %w", parentName, err), params.workflowIDNode)) } typ, sourceDescriptionName, _, _ := params.workflowID.GetParts() if typ != expression.ExpressionTypeSourceDescriptions { - errs = append(errs, validation.NewValidationError(validation.NewValueValidationError("%s.workflowId must be a sourceDescriptions expression, got %s", parentName, typ), params.workflowIDNode)) + errs = append(errs, validation.NewValidationError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("`%s`.workflowId must be a sourceDescriptions expression, got `%s`", parentName, typ), params.workflowIDNode)) } if params.arazzo.SourceDescriptions.Find(sourceDescriptionName) == nil { - errs = append(errs, validation.NewValidationError(validation.NewValueValidationError("%s.sourceDescription value %s not found", parentName, sourceDescriptionName), params.workflowIDNode)) + errs = append(errs, validation.NewValidationError(validation.SeverityError, validation.RuleValidationInvalidReference, fmt.Errorf("`%s`.sourceDescription value `%s` not found", parentName, sourceDescriptionName), params.workflowIDNode)) } } else if params.arazzo.Workflows.Find(pointer.Value(params.workflowID).String()) == nil { - errs = append(errs, validation.NewValidationError(validation.NewValueValidationError("%s.workflowId value %s does not exist", parentName, *params.workflowID), params.workflowIDNode)) + errs = append(errs, 
validation.NewValidationError(validation.SeverityError, validation.RuleValidationInvalidReference, fmt.Errorf("`%s`.workflowId value `%s` does not exist", parentName, *params.workflowID), params.workflowIDNode)) } } if params.stepID != nil { @@ -206,11 +207,11 @@ func validationActionWorkflowIDAndStepID(ctx context.Context, parentName string, } if !foundStepId { - errs = append(errs, validation.NewValidationError(validation.NewValueValidationError("%s.stepId value %s does not exist in any parent workflows", parentName, pointer.Value(params.stepID)), params.workflowIDNode)) + errs = append(errs, validation.NewValidationError(validation.SeverityError, validation.RuleValidationInvalidReference, fmt.Errorf("`%s`.stepId value `%s` does not exist in any parent workflows", parentName, pointer.Value(params.stepID)), params.workflowIDNode)) } } } else if w.Steps.Find(pointer.Value(params.stepID)) == nil { - errs = append(errs, validation.NewValidationError(validation.NewValueValidationError("%s.stepId value %s does not exist in workflow %s", parentName, pointer.Value(params.stepID), w.WorkflowID), params.workflowIDNode)) + errs = append(errs, validation.NewValidationError(validation.SeverityError, validation.RuleValidationInvalidReference, fmt.Errorf("`%s`.stepId value `%s` does not exist in workflow `%s`", parentName, pointer.Value(params.stepID), w.WorkflowID), params.workflowIDNode)) } } diff --git a/arazzo/workflow.go b/arazzo/workflow.go index 411685c7..1e2ddbd6 100644 --- a/arazzo/workflow.go +++ b/arazzo/workflow.go @@ -3,6 +3,7 @@ package arazzo import ( "context" "errors" + "fmt" "regexp" "github.com/speakeasy-api/openapi/arazzo/core" @@ -78,7 +79,7 @@ func (w *Workflow) Validate(ctx context.Context, opts ...validation.Option) []er errs := []error{} if core.WorkflowID.Present && w.WorkflowID == "" { - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("workflow.workflowId is required"), core, core.WorkflowID)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("workflow.workflowId is required"), core, core.WorkflowID)) } if w.Inputs != nil { @@ -88,20 +89,20 @@ func (w *Workflow) Validate(ctx context.Context, opts ...validation.Option) []er for i, dependsOn := range w.DependsOn { if dependsOn.IsExpression() { if err := dependsOn.Validate(); err != nil { - errs = append(errs, validation.NewSliceError(validation.NewValueValidationError("workflow.dependsOn expression is invalid: %s", err.Error()), core, core.DependsOn, i)) + errs = append(errs, validation.NewSliceError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("workflow.dependsOn expression is invalid: %w", err), core, core.DependsOn, i)) } typ, sourceDescriptionName, _, _ := dependsOn.GetParts() if typ != expression.ExpressionTypeSourceDescriptions { - errs = append(errs, validation.NewSliceError(validation.NewValueValidationError("workflow.dependsOn must be a sourceDescriptions expression if not a workflowId, got %s", typ), core, core.DependsOn, i)) + errs = append(errs, validation.NewSliceError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("workflow.dependsOn must be a sourceDescriptions expression if not a workflowId, got `%s`", typ), core, core.DependsOn, i)) } if a.SourceDescriptions.Find(sourceDescriptionName) == nil { - errs = append(errs, validation.NewSliceError(validation.NewValueValidationError("workflow.dependsOn sourceDescription %s not found", 
sourceDescriptionName), core, core.DependsOn, i)) + errs = append(errs, validation.NewSliceError(validation.SeverityError, validation.RuleValidationInvalidReference, fmt.Errorf("workflow.dependsOn sourceDescription `%s` not found", sourceDescriptionName), core, core.DependsOn, i)) } } else if a.Workflows.Find(string(dependsOn)) == nil { - errs = append(errs, validation.NewSliceError(validation.NewValueValidationError("workflow.dependsOn workflowId %s not found", dependsOn), core, core.DependsOn, i)) + errs = append(errs, validation.NewSliceError(validation.SeverityError, validation.RuleValidationInvalidReference, fmt.Errorf("workflow.dependsOn workflowId `%s` not found", dependsOn), core, core.DependsOn, i)) } } @@ -119,11 +120,11 @@ func (w *Workflow) Validate(ctx context.Context, opts ...validation.Option) []er for name, output := range w.Outputs.All() { if !outputNameRegex.MatchString(name) { - errs = append(errs, validation.NewMapKeyError(validation.NewValueValidationError("workflow.outputs name must be a valid name [%s]: %s", outputNameRegex.String(), name), core, core.Outputs, name)) + errs = append(errs, validation.NewMapKeyError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("workflow.outputs name must be a valid name [`%s`]: `%s`", outputNameRegex.String(), name), core, core.Outputs, name)) } if err := output.Validate(); err != nil { - errs = append(errs, validation.NewMapValueError(validation.NewValueValidationError("workflow.outputs expression is invalid: %s", err.Error()), core, core.Outputs, name)) + errs = append(errs, validation.NewMapValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("workflow.outputs expression is invalid: %w", err), core, core.Outputs, name)) } } diff --git a/cmd/openapi/commands/openapi/README.md b/cmd/openapi/commands/openapi/README.md index ad443f93..41853780 100644 --- a/cmd/openapi/commands/openapi/README.md +++ b/cmd/openapi/commands/openapi/README.md @@ -9,6 +9,9 @@ OpenAPI specifications define REST APIs in a standard format. These commands hel - [Table of Contents](#table-of-contents) - [Available Commands](#available-commands) - [`validate`](#validate) + - [`lint`](#lint) + - [Configuration File](#configuration-file) + - [Custom Rules](#custom-rules) - [`upgrade`](#upgrade) - [`inline`](#inline) - [`clean`](#clean) @@ -48,6 +51,282 @@ This command checks for: - Reference resolution and validity - Best practice recommendations +### `lint` + +Lint an OpenAPI specification document for style, consistency, and best practices. 
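The bash examples below cover the CLI; for readers embedding the linter as a library, the new cmd/openapi/commands/openapi/lint.go later in this diff reduces to a handful of calls. A condensed sketch using only those calls (the file path and error handling here are illustrative, not taken from the command):

```go
package main

import (
	"context"
	"fmt"
	"log"
	"os"

	"github.com/speakeasy-api/openapi/linter"
	"github.com/speakeasy-api/openapi/openapi"
	openapiLinter "github.com/speakeasy-api/openapi/openapi/linter"
)

func main() {
	ctx := context.Background()

	f, err := os.Open("./spec.yaml") // illustrative path
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()

	// Unmarshal returns the parsed document plus validation errors, which
	// lint.go feeds into the linter alongside the lint rules.
	doc, validationErrs, err := openapi.Unmarshal(ctx, f)
	if err != nil {
		log.Fatal(err)
	}

	lint, err := openapiLinter.NewLinter(linter.NewConfig())
	if err != nil {
		log.Fatal(err)
	}

	output, err := lint.Lint(ctx, linter.NewDocumentInfo(doc, "./spec.yaml"), validationErrs, nil)
	if err != nil {
		log.Fatal(err)
	}

	fmt.Println(output.FormatText())
	if output.HasErrors() {
		os.Exit(1)
	}
}
```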
+ +```bash +# Lint a specification file +openapi spec lint ./spec.yaml + +# Lint with JSON output +openapi spec lint -f json ./spec.yaml + +# Lint with a custom configuration file +openapi spec lint -c ./lint.yaml ./spec.yaml + +# Lint with specific rules disabled +openapi spec lint -d rule-id-1 -d rule-id-2 ./spec.yaml +``` + +**Flags:** + +| Flag | Short | Description | +|------|-------|-------------| +| `--format` | `-f` | Output format: `text` (default) or `json` | +| `--config` | `-c` | Path to lint configuration file | +| `--ruleset` | `-r` | Ruleset to use (default: `all`) | +| `--disable` | `-d` | Rules to disable (can be specified multiple times) | + +**What lint checks:** + +- All validation errors (structural validity, schema compliance, references) +- Path parameter validation +- Operation ID requirements +- Consistent naming conventions +- Security best practices +- Additional style and consistency rules + +**Default Configuration Path:** + +If no `--config` flag is provided, the linter looks for a configuration file at `~/.openapi/lint.yaml`. + +#### Configuration File + +Create a YAML configuration file to customize linting behavior: + +```yaml +# lint.yaml + +# Extend from a base ruleset (optional) +extends: + - recommended + +# Configure individual rules +rules: + # Disable a rule entirely + - id: operation-operationId + disabled: true + + # Change the severity of a rule + - id: path-params + severity: error # error, warning, or hint + + # Use match patterns for bulk configuration + - match: "^oas3-.*" + severity: warning + + # Disable rules matching a pattern + - match: "^oas2-.*" + disabled: true + +# Configure rules by category +categories: + validation: + severity: error + style: + severity: warning + disabled: false + +# Custom rules configuration (requires TypeScript/JavaScript rules) +custom_rules: + paths: + - ./rules/*.ts + - ./rules/**/*.ts + +# Output format (text or json) +output_format: text +``` + +**Configuration Options:** + +| Option | Type | Description | +|--------|------|-------------| +| `extends` | `string[]` | Rulesets to extend from (`all`, `recommended`, `security`) | +| `rules` | `RuleEntry[]` | Individual rule configurations | +| `categories` | `map[string]CategoryConfig` | Category-level configurations | +| `custom_rules` | `CustomRulesConfig` | Custom TypeScript/JavaScript rules | +| `output_format` | `string` | Output format (`text` or `json`) | + +**Available Rulesets:** + +| Ruleset | Description | +|---------|-------------| +| `all` | All available rules (default) | +| `recommended` | Balanced ruleset - semantic rules, essential style, basic security | +| `security` | Comprehensive OWASP security rules | + +**Rule Entry Options:** + +| Option | Type | Description | +|--------|------|-------------| +| `id` | `string` | Exact rule ID to configure | +| `match` | `string` | Regex pattern to match rule IDs | +| `severity` | `string` | `error`, `warning`, or `hint` | +| `disabled` | `bool` | Set to `true` to disable the rule | + +#### Custom Rules + +Custom rules allow you to write linting rules in TypeScript or JavaScript. Rules are loaded when you specify paths in the configuration file. 
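If you load TypeScript/JavaScript rules from an embedding Go program rather than the CLI, note that cmd/openapi's lint.go in this diff enables them with a blank import of the customrules module. A minimal sketch of that wiring (the empty main is only a placeholder):

```go
package main

import (
	// The blank import registers TypeScript/JavaScript custom-rule support,
	// mirroring the "Enable custom rules support" import in lint.go.
	_ "github.com/speakeasy-api/openapi/openapi/linter/customrules"
)

func main() {
	// With this module linked in, rules listed under custom_rules.paths in
	// lint.yaml are loaded when the linter runs.
}
```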
+ +**Setup:** + +**Step 1:** Install the types package in your rules directory: + +```bash +cd ./rules +npm init -y +npm install @speakeasy-api/openapi-linter-types +``` + +**Step 2:** Create a TypeScript rule file: + +```typescript +// rules/require-operation-summary.ts +import { + Rule, + createError, + type Context, + type DocumentInfo, + type RuleConfig, + type Severity, + type ValidationError, +} from '@speakeasy-api/openapi-linter-types'; + +class RequireOperationSummary extends Rule { + id(): string { + return 'custom-require-operation-summary'; + } + + category(): string { + return 'style'; + } + + description(): string { + return 'All operations must have a summary for documentation.'; + } + + summary(): string { + return 'Operations must have summary'; + } + + defaultSeverity(): Severity { + return 'warning'; + } + + run(ctx: Context, docInfo: DocumentInfo, config: RuleConfig): ValidationError[] { + const errors: ValidationError[] = []; + + // Access all operations via the index + for (const opNode of docInfo.getIndex().getOperations()) { + const op = opNode.getNode(); + if (!op.getSummary()) { + errors.push( + createError( + config.getSeverity(this.defaultSeverity()), + this.id(), + `Operation "${op.getOperationID() || 'unnamed'}" is missing a summary`, + op.getRootNode() + ) + ); + } + } + + return errors; + } +} + +// Register the rule with the linter +registerRule(new RequireOperationSummary()); +``` + +**Step 3:** Configure the linter to load your rules: + +```yaml +# lint.yaml +custom_rules: + paths: + - ./rules/*.ts + +rules: + # Optionally configure your custom rules + - id: custom-require-operation-summary + severity: error +``` + +**Step 4:** Run the linter: + +```bash +openapi spec lint -c ./lint.yaml ./spec.yaml +``` + +**Custom Rule API:** + +Your rule class must implement the `RuleRunner` interface: + +| Method | Required | Description | +|--------|----------|-------------| +| `id()` | Yes | Unique rule identifier | +| `category()` | Yes | Rule category (e.g., `style`, `security`) | +| `description()` | Yes | Full description of the rule | +| `summary()` | Yes | Short summary for output | +| `link()` | No | URL to documentation | +| `defaultSeverity()` | No | Default severity (`error`, `warning`, `hint`) | +| `versions()` | No | OpenAPI versions this rule applies to | +| `run()` | Yes | Execute the rule and return validation errors | + +**Accessing Document Data:** + +The `DocumentInfo` object provides access to the parsed OpenAPI document: + +```typescript +// Get the OpenAPI document +const doc = docInfo.getDocument(); + +// Get the pre-built index for efficient traversal +const index = docInfo.getIndex(); + +// Access indexed collections +index.getOperations(); // All operations +index.getComponentSchemas(); // Component schemas +index.getInlineRequestBodies(); // Inline request bodies +index.getInlineResponses(); // Inline responses +index.getInlineParameters(); // Inline parameters +index.getInlineHeaders(); // Inline headers +index.getInlineSchemas(); // Inline schemas +index.getPathItems(); // Path items +index.getSecurityRequirements(); // Security requirements +index.getCallbacks(); // Callbacks +``` + +**Creating Validation Errors:** + +Use the `createError` helper to create validation errors with proper source location: + +```typescript +import { createError, Severity } from '@speakeasy-api/openapi-linter-types'; + +// Create an error at a specific node location +const error = createError( + 'warning', // severity + 'my-rule-id', // rule ID + 'Description of the 
issue', // message + node.getRootNode() // YAML node for location +); +``` + +**Console Logging:** + +Use `console.log`, `console.warn`, and `console.error` for debugging: + +```typescript +run(ctx: Context, docInfo: DocumentInfo, config: RuleConfig): ValidationError[] { + console.log('Running custom rule...'); + console.log('Operations count:', docInfo.getIndex().getOperations().length); + // ... +} +``` + ### `upgrade` Upgrade an OpenAPI specification to the latest supported version (3.2.0). diff --git a/cmd/openapi/commands/openapi/convert_rules.go b/cmd/openapi/commands/openapi/convert_rules.go new file mode 100644 index 00000000..261f05d2 --- /dev/null +++ b/cmd/openapi/commands/openapi/convert_rules.go @@ -0,0 +1,166 @@ +package openapi + +import ( + "fmt" + "os" + "path/filepath" + "sort" + + "github.com/speakeasy-api/openapi/openapi/linter/converter" + "github.com/spf13/cobra" +) + +var convertRulesCmd = &cobra.Command{ + Use: "convert-rules ", + Short: "Convert Spectral/Vacuum/legacy configs to native linter format", + Long: `Convert a Spectral, Vacuum, or legacy Speakeasy lint config into the native +linter format. This generates: + + - A lint.yaml config file with mapped rule overrides + - TypeScript rule files for custom rules that don't have native equivalents + +Supported input formats: + - Spectral configs (.spectral.yml / .spectral.yaml) + - Vacuum configs (Spectral-compatible format) + - Legacy Speakeasy lint.yaml (with lintVersion/defaultRuleset/rulesets) + +Examples: + openapi spec lint convert-rules .spectral.yml + openapi spec lint convert-rules .spectral.yml --output ./converted + openapi spec lint convert-rules lint.yaml --dry-run + openapi spec lint convert-rules .spectral.yml --force`, + Args: cobra.ExactArgs(1), + Run: runConvertRules, +} + +var ( + convertOutput string + convertRulesDir string + convertForce bool + convertDryRun bool +) + +func init() { + convertRulesCmd.Flags().StringVarP(&convertOutput, "output", "o", ".", "Output directory for generated files") + convertRulesCmd.Flags().StringVar(&convertRulesDir, "rules-dir", "./rules", "Subdirectory for generated .ts rule files") + convertRulesCmd.Flags().BoolVarP(&convertForce, "force", "f", false, "Overwrite existing files") + convertRulesCmd.Flags().BoolVar(&convertDryRun, "dry-run", false, "Print summary without writing files") + + lintCmd.AddCommand(convertRulesCmd) +} + +func runConvertRules(cmd *cobra.Command, args []string) { + configFile := args[0] + + // Parse the input config + ir, err := converter.ParseFile(configFile) + if err != nil { + fmt.Fprintf(os.Stderr, "Error parsing config: %v\n", err) + os.Exit(1) + } + + // Generate native output + result, err := converter.Generate(ir, + converter.WithRulesDir(convertRulesDir), + ) + if err != nil { + fmt.Fprintf(os.Stderr, "Error generating output: %v\n", err) + os.Exit(1) + } + + // Print summary + printConvertSummary(result, configFile) + + // Print warnings + if len(result.Warnings) > 0 { + fmt.Println("\nWarnings:") + for _, w := range result.Warnings { + prefix := "" + if w.RuleID != "" { + prefix = fmt.Sprintf("[%s] ", w.RuleID) + } + fmt.Printf(" %s(%s) %s\n", prefix, w.Phase, w.Message) + } + } + + if convertDryRun { + fmt.Println("\n--dry-run: no files written") + return + } + + // Check for existing files unless --force + if !convertForce { + configPath := filepath.Join(convertOutput, "lint.yaml") + if _, err := os.Stat(configPath); err == nil { + fmt.Fprintf(os.Stderr, "Error: %s already exists (use --force to overwrite)\n", configPath) + 
os.Exit(1) + } + rulesPath := filepath.Join(convertOutput, convertRulesDir) + if _, err := os.Stat(rulesPath); err == nil { + fmt.Fprintf(os.Stderr, "Error: %s already exists (use --force to overwrite)\n", rulesPath) + os.Exit(1) + } + } + + // Ensure output directory exists + if err := os.MkdirAll(convertOutput, 0o755); err != nil { //nolint:gosec + fmt.Fprintf(os.Stderr, "Error creating output directory: %v\n", err) + os.Exit(1) + } + + // Write files + if err := result.WriteFiles(convertOutput); err != nil { + fmt.Fprintf(os.Stderr, "Error writing files: %v\n", err) + os.Exit(1) + } + + fmt.Printf("\nFiles written to %s\n", convertOutput) +} + +func printConvertSummary(result *converter.GenerateResult, inputFile string) { + fmt.Printf("Converting: %s\n\n", inputFile) + + // Extends + if len(result.Config.Extends) > 0 { + fmt.Printf("Extends: %v\n", result.Config.Extends) + } + + // Rule overrides + overrideCount := 0 + for _, entry := range result.Config.Rules { + if entry.Disabled != nil || entry.Severity != nil { + overrideCount++ + } + } + if overrideCount > 0 { + fmt.Printf("Rule overrides: %d\n", overrideCount) + } + + // Generated rules + if len(result.GeneratedRules) > 0 { + ruleIDs := sortedKeys(result.GeneratedRules) + fmt.Printf("Generated rules: %d\n", len(result.GeneratedRules)) + for _, ruleID := range ruleIDs { + fmt.Printf(" - %s.ts\n", ruleID) + } + + // Files to be written + fmt.Println("\nFiles:") + fmt.Println(" - lint.yaml") + for _, ruleID := range ruleIDs { + fmt.Printf(" - %s/%s.ts\n", convertRulesDir, ruleID) + } + } else { + fmt.Println("\nFiles:") + fmt.Println(" - lint.yaml") + } +} + +func sortedKeys(m map[string]string) []string { + keys := make([]string, 0, len(m)) + for k := range m { + keys = append(keys, k) + } + sort.Strings(keys) + return keys +} diff --git a/cmd/openapi/commands/openapi/explore.go b/cmd/openapi/commands/openapi/explore.go index 9694135e..152f2023 100644 --- a/cmd/openapi/commands/openapi/explore.go +++ b/cmd/openapi/commands/openapi/explore.go @@ -2,6 +2,7 @@ package openapi import ( "context" + "errors" "fmt" "os" "path/filepath" @@ -56,7 +57,7 @@ func runExplore(cmd *cobra.Command, args []string) error { } if len(operations) == 0 { - return fmt.Errorf("no operations found in the OpenAPI document") + return errors.New("no operations found in the OpenAPI document") } // Get document info for display @@ -95,7 +96,7 @@ func loadOpenAPIDocument(ctx context.Context, file string) (*openapi.OpenAPI, er return nil, fmt.Errorf("failed to unmarshal OpenAPI document: %w", err) } if doc == nil { - return nil, fmt.Errorf("failed to parse OpenAPI document: document is nil") + return nil, errors.New("failed to parse OpenAPI document: document is nil") } // Report validation errors as warnings but continue diff --git a/cmd/openapi/commands/openapi/lint.go b/cmd/openapi/commands/openapi/lint.go new file mode 100644 index 00000000..d3011696 --- /dev/null +++ b/cmd/openapi/commands/openapi/lint.go @@ -0,0 +1,192 @@ +package openapi + +import ( + "context" + "fmt" + "os" + "path/filepath" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + openapiLinter "github.com/speakeasy-api/openapi/openapi/linter" + "github.com/spf13/cobra" + + // Enable custom rules support + _ "github.com/speakeasy-api/openapi/openapi/linter/customrules" +) + +var lintCmd = &cobra.Command{ + Use: "lint ", + Short: "Lint an OpenAPI specification document", + Long: `Lint an OpenAPI specification document for style, consistency, and best 
practices. + +This command runs both spec validation and additional lint rules including: +- Path parameter validation +- Operation ID requirements +- Consistent naming conventions +- Security best practices (OWASP) + +CONFIGURATION: + +By default, the linter looks for a configuration file at ~/.openapi/lint.yaml. +Use --config to specify a custom configuration file. + +Available rulesets: all (default), recommended, security + +Example configuration (lint.yaml): + + extends: recommended + + rules: + - id: operation-operationId + severity: error + - id: some-rule + disabled: true + + custom_rules: + paths: + - ./rules/*.ts + +CUSTOM RULES: + +Write custom linting rules in TypeScript or JavaScript. Install the types package +in your rules directory: + + npm install @speakeasy-api/openapi-linter-types + +Then configure the paths in your lint.yaml under custom_rules.paths. + +See the full documentation at: +https://github.com/speakeasy-api/openapi/blob/main/cmd/openapi/commands/openapi/README.md#lint`, + Args: cobra.ExactArgs(1), + Run: runLint, +} + +var ( + lintOutputFormat string + lintRuleset string + lintConfigFile string + lintDisableRules []string +) + +func init() { + lintCmd.Flags().StringVarP(&lintOutputFormat, "format", "f", "text", "Output format: text or json") + lintCmd.Flags().StringVarP(&lintRuleset, "ruleset", "r", "all", "Ruleset to use (default loads from config)") + lintCmd.Flags().StringVarP(&lintConfigFile, "config", "c", "", "Path to lint config file (default: ~/.openapi/lint.yaml)") + lintCmd.Flags().StringSliceVarP(&lintDisableRules, "disable", "d", nil, "Rule IDs to disable (can be repeated)") +} + +func runLint(cmd *cobra.Command, args []string) { + ctx := cmd.Context() + file := args[0] + + if err := lintOpenAPI(ctx, file); err != nil { + fmt.Fprintf(os.Stderr, "Error: %v\n", err) + os.Exit(1) + } +} + +func lintOpenAPI(ctx context.Context, file string) error { + cleanFile := filepath.Clean(file) + + // Get absolute path for document location + absPath, err := filepath.Abs(cleanFile) + if err != nil { + return fmt.Errorf("failed to get absolute path: %w", err) + } + + // Load the OpenAPI document + f, err := os.Open(cleanFile) + if err != nil { + return fmt.Errorf("failed to open file: %w", err) + } + defer f.Close() + + // Unmarshal with validation to get validation errors + doc, validationErrors, err := openapi.Unmarshal(ctx, f) + if err != nil { + return fmt.Errorf("failed to unmarshal file: %w", err) + } + + // Build linter configuration + config := buildLintConfig() + + // Create the OpenAPI linter with default rules + lint, err := openapiLinter.NewLinter(config) + if err != nil { + return fmt.Errorf("failed to create linter: %w", err) + } + + // Create document info with location + docInfo := linter.NewDocumentInfo(doc, absPath) + + // Run linting with validation errors passed in + output, err := lint.Lint(ctx, docInfo, validationErrors, nil) + if err != nil { + return fmt.Errorf("linting failed: %w", err) + } + + // Format and print output + switch lintOutputFormat { + case "json": + fmt.Println(output.FormatJSON()) + default: + fmt.Printf("%s\n", cleanFile) + fmt.Println(output.FormatText()) + } + + // Exit with error code if there are errors + if output.HasErrors() { + return fmt.Errorf("linting found %d errors", output.ErrorCount()) + } + + return nil +} + +func buildLintConfig() *linter.Config { + config := linter.NewConfig() + + // Load from config file if specified + if lintConfigFile != "" { + loaded, err := linter.LoadConfigFromFile(lintConfigFile) + 
if err != nil { + fmt.Fprintf(os.Stderr, "Error: %v\n", err) + os.Exit(1) + } + config = loaded + } else { + homeDir, err := os.UserHomeDir() + if err != nil { + fmt.Fprintf(os.Stderr, "Error: %v\n", err) + os.Exit(1) + } + defaultPath := filepath.Join(homeDir, ".openapi", "lint.yaml") + loaded, err := linter.LoadConfigFromFile(defaultPath) + if err == nil { + config = loaded + } + } + + // Disable specified rules + for _, rule := range lintDisableRules { + disabled := true + config.Rules = append(config.Rules, linter.RuleEntry{ + ID: rule, + Disabled: &disabled, + }) + } + + // Set output format + switch lintOutputFormat { + case "json": + config.OutputFormat = linter.OutputFormatJSON + default: + config.OutputFormat = linter.OutputFormatText + } + + return config +} + +func ptr[T any](v T) *T { + return &v +} diff --git a/cmd/openapi/commands/openapi/root.go b/cmd/openapi/commands/openapi/root.go index e492e60f..f2619205 100644 --- a/cmd/openapi/commands/openapi/root.go +++ b/cmd/openapi/commands/openapi/root.go @@ -5,6 +5,7 @@ import "github.com/spf13/cobra" // Apply adds OpenAPI commands to the provided root command func Apply(rootCmd *cobra.Command) { rootCmd.AddCommand(validateCmd) + rootCmd.AddCommand(lintCmd) rootCmd.AddCommand(upgradeCmd) rootCmd.AddCommand(inlineCmd) rootCmd.AddCommand(cleanCmd) diff --git a/cmd/openapi/commands/openapi/snip.go b/cmd/openapi/commands/openapi/snip.go index 8ccf1a53..5bc265b2 100644 --- a/cmd/openapi/commands/openapi/snip.go +++ b/cmd/openapi/commands/openapi/snip.go @@ -2,6 +2,7 @@ package openapi import ( "context" + "errors" "fmt" "strings" @@ -95,7 +96,7 @@ func runSnip(cmd *cobra.Command, args []string) error { // If -w is specified without any operation selection flags, error if snipWriteInPlace && !(hasRemoveFlags || hasKeepFlags) { - return fmt.Errorf("--write flag requires specifying operations via --operationId/--operation or --keepOperationId/--keepOperation") + return errors.New("--write flag requires specifying operations via --operationId/--operation or --keepOperationId/--keepOperation") } // Interactive mode when no flags provided @@ -105,7 +106,7 @@ func runSnip(cmd *cobra.Command, args []string) error { // Disallow mixing keep + remove flags; ambiguous intent if hasRemoveFlags && hasKeepFlags { - return fmt.Errorf("cannot combine keep and remove flags; use either --operationId/--operation or --keepOperationId/--keepOperation") + return errors.New("cannot combine keep and remove flags; use either --operationId/--operation or --keepOperationId/--keepOperation") } // CLI mode @@ -138,7 +139,7 @@ func runSnipCLI(ctx context.Context, inputFile, outputFile string) error { } if len(operationsToRemove) == 0 { - return fmt.Errorf("no operations specified for removal") + return errors.New("no operations specified for removal") } // Perform the snip @@ -175,7 +176,7 @@ func runSnipCLIKeep(ctx context.Context, inputFile, outputFile string) error { return err } if len(keepOps) == 0 { - return fmt.Errorf("no operations specified to keep") + return errors.New("no operations specified to keep") } // Collect all operations from the document @@ -184,7 +185,7 @@ func runSnipCLIKeep(ctx context.Context, inputFile, outputFile string) error { return fmt.Errorf("failed to collect operations: %w", err) } if len(allOps) == 0 { - return fmt.Errorf("no operations found in the OpenAPI document") + return errors.New("no operations found in the OpenAPI document") } // Build lookup sets for keep filters @@ -248,7 +249,7 @@ func runSnipInteractive(ctx 
context.Context, inputFile, outputFile string) error } if len(operations) == 0 { - return fmt.Errorf("no operations found in the OpenAPI document") + return errors.New("no operations found in the OpenAPI document") } // Get document info @@ -296,7 +297,7 @@ func runSnipInteractive(ctx context.Context, inputFile, outputFile string) error // Get the final model state tuiModel, ok := finalModel.(tui.Model) if !ok { - return fmt.Errorf("unexpected model type") + return errors.New("unexpected model type") } // Check if user performed an action or just quit diff --git a/cmd/openapi/commands/overlay/README.md b/cmd/openapi/commands/overlay/README.md index ded98676..19adc340 100644 --- a/cmd/openapi/commands/overlay/README.md +++ b/cmd/openapi/commands/overlay/README.md @@ -11,7 +11,6 @@ OpenAPI Overlays provide a way to modify OpenAPI and Arazzo specifications witho - [`apply`](#apply) - [`validate`](#validate) - [`compare`](#compare) - - [`upgrade`](#upgrade) - [What are OpenAPI Overlays?](#what-are-openapi-overlays) - [Example Overlay](#example-overlay) - [Common Use Cases](#common-use-cases) @@ -100,40 +99,6 @@ Features: - Creates overlay files that can recreate the transformation - Supports both positional arguments and explicit flags -### `upgrade` - -Upgrade an Overlay document to the latest supported version (1.1.0). - -```bash -# Preview upgrade (output to stdout) -openapi overlay upgrade my-overlay.yaml - -# Upgrade and save to new file -openapi overlay upgrade my-overlay.yaml upgraded-overlay.yaml - -# Upgrade in-place -openapi overlay upgrade -w my-overlay.yaml -``` - -Features: - -- Updates the Overlay version field from 1.0.0 to 1.1.0 -- Enables RFC 9535 JSONPath as the default implementation -- Clears redundant `x-speakeasy-jsonpath: rfc9535` (now default in 1.1.0) -- All existing actions remain valid and functional -- Validates overlay before and after upgrade - -Version Differences: - -| Version | Default JSONPath | Setting | -| ------- | ---------------- | -------------------------------------------- | -| 1.0.0 | Legacy yamlpath | `x-speakeasy-jsonpath: rfc9535` for RFC 9535 | -| 1.1.0+ | RFC 9535 | `x-speakeasy-jsonpath: legacy` for legacy | - -Options: - -- `-w, --write`: Write result in-place to input file - ## What are OpenAPI Overlays? OpenAPI Overlays are documents that describe modifications to be applied to OpenAPI specifications. They allow you to: diff --git a/cmd/openapi/commands/overlay/root.go b/cmd/openapi/commands/overlay/root.go index b949db06..1aecb92c 100644 --- a/cmd/openapi/commands/overlay/root.go +++ b/cmd/openapi/commands/overlay/root.go @@ -5,6 +5,5 @@ import "github.com/spf13/cobra" func Apply(rootCmd *cobra.Command) { rootCmd.AddCommand(applyCmd) rootCmd.AddCommand(compareCmd) - rootCmd.AddCommand(upgradeCmd) rootCmd.AddCommand(validateCmd) } diff --git a/cmd/openapi/commands/overlay/upgrade.go b/cmd/openapi/commands/overlay/upgrade.go deleted file mode 100644 index b04f8358..00000000 --- a/cmd/openapi/commands/overlay/upgrade.go +++ /dev/null @@ -1,122 +0,0 @@ -package overlay - -import ( - "fmt" - "os" - - "github.com/speakeasy-api/openapi/overlay" - "github.com/speakeasy-api/openapi/overlay/loader" - "github.com/spf13/cobra" - "gopkg.in/yaml.v3" -) - -var upgradeCmd = &cobra.Command{ - Use: "upgrade [output-file]", - Short: "Upgrade an Overlay document to the latest supported version (1.1.0)", - Long: `Upgrade an Overlay specification document to the latest supported version (1.1.0). 
- -The upgrade process includes: -- Updating the Overlay version field from 1.0.0 to 1.1.0 -- Enabling RFC 9535 JSONPath as the default implementation -- Clearing redundant x-speakeasy-jsonpath: rfc9535 (now default in 1.1.0) -- All existing actions remain valid and functional -- Support for new 1.1.0 features like copy actions and info description - -Version Differences: - 1.0.0: Legacy JSONPath by default, RFC 9535 opt-in with x-speakeasy-jsonpath: rfc9535 - 1.1.0: RFC 9535 JSONPath by default, legacy opt-out with x-speakeasy-jsonpath: legacy - -Output options: - - No output file specified: writes to stdout (pipe-friendly) - - Output file specified: writes to the specified file - - --write flag: writes in-place to the input file`, - Example: ` # Preview upgrade (output to stdout) - openapi overlay upgrade my-overlay.yaml - - # Upgrade and save to new file - openapi overlay upgrade my-overlay.yaml upgraded-overlay.yaml - - # Upgrade in-place - openapi overlay upgrade -w my-overlay.yaml`, - Args: cobra.RangeArgs(1, 2), - Run: runOverlayUpgrade, -} - -var overlayWriteInPlace bool - -func init() { - upgradeCmd.Flags().BoolVarP(&overlayWriteInPlace, "write", "w", false, - "write result in-place to input file") -} - -func runOverlayUpgrade(cmd *cobra.Command, args []string) { - ctx := cmd.Context() - inputFile := args[0] - - var outputFile string - if len(args) > 1 { - outputFile = args[1] - } - - // Load the overlay - o, err := loader.LoadOverlay(inputFile) - if err != nil { - Dief("Failed to load overlay: %v", err) - } - - // Validate the overlay before upgrade - if err := o.Validate(); err != nil { - Dief("Overlay validation failed: %v", err) - } - - originalVersion := o.Version - - // Perform the upgrade - upgraded, err := overlay.Upgrade(ctx, o) - if err != nil { - Dief("Failed to upgrade overlay: %v", err) - } - - // Print status - if !upgraded { - fmt.Fprintf(os.Stderr, "No upgrade needed - overlay is already at version %s\n", originalVersion) - } else { - fmt.Fprintf(os.Stderr, "Successfully upgraded overlay from %s to %s\n", originalVersion, o.Version) - } - - // Validate the upgraded overlay - if err := o.Validate(); err != nil { - Dief("Upgraded overlay failed validation: %v", err) - } - - // Serialize output - output, err := o.ToString() - if err != nil { - Dief("Failed to serialize overlay: %v", err) - } - - // Determine output destination - switch { - case overlayWriteInPlace: - if err := os.WriteFile(inputFile, []byte(output), 0644); err != nil { - Dief("Failed to write to input file: %v", err) - } - fmt.Fprintf(os.Stderr, "Wrote upgraded overlay to %s\n", inputFile) - case outputFile != "": - if err := os.WriteFile(outputFile, []byte(output), 0644); err != nil { - Dief("Failed to write to output file: %v", err) - } - fmt.Fprintf(os.Stderr, "Wrote upgraded overlay to %s\n", outputFile) - default: - // Write to stdout - var node yaml.Node - if err := yaml.Unmarshal([]byte(output), &node); err != nil { - Dief("Failed to parse output: %v", err) - } - encoder := yaml.NewEncoder(os.Stdout) - encoder.SetIndent(2) - if err := encoder.Encode(&node); err != nil { - Dief("Failed to write to stdout: %v", err) - } - } -} diff --git a/cmd/openapi/go.mod b/cmd/openapi/go.mod index d0e1e36e..a5b36ff9 100644 --- a/cmd/openapi/go.mod +++ b/cmd/openapi/go.mod @@ -6,7 +6,8 @@ require ( github.com/charmbracelet/bubbles v0.21.0 github.com/charmbracelet/bubbletea v1.3.10 github.com/charmbracelet/lipgloss v1.1.0 - github.com/speakeasy-api/openapi v1.15.1-0.20260123232020-443f8a84b64c + 
github.com/speakeasy-api/openapi v1.15.2-0.20260205050808-54a315b347f2 + github.com/speakeasy-api/openapi/openapi/linter/customrules v0.0.0-20260205050808-54a315b347f2 github.com/spf13/cobra v1.10.1 github.com/stretchr/testify v1.11.1 gopkg.in/yaml.v3 v3.0.1 @@ -20,10 +21,14 @@ require ( github.com/charmbracelet/x/cellbuf v0.0.13-0.20250311204145-2c3ea96c31dd // indirect github.com/charmbracelet/x/term v0.2.1 // indirect github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect + github.com/dlclark/regexp2 v1.11.4 // indirect + github.com/dop251/goja v0.0.0-20260106131823-651366fbe6e3 // indirect github.com/dprotaso/go-yit v0.0.0-20191028211022-135eb7262960 // indirect github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f // indirect + github.com/evanw/esbuild v0.27.2 // indirect + github.com/go-sourcemap/sourcemap v2.1.4+incompatible // indirect + github.com/google/pprof v0.0.0-20230207041349-798e818bf904 // indirect github.com/inconshreveable/mousetrap v1.1.0 // indirect - github.com/kr/text v0.2.0 // indirect github.com/lucasb-eyer/go-colorful v1.2.0 // indirect github.com/mattn/go-isatty v0.0.20 // indirect github.com/mattn/go-localereader v0.0.1 // indirect diff --git a/cmd/openapi/go.sum b/cmd/openapi/go.sum index 9bde1a69..6f06a7d2 100644 --- a/cmd/openapi/go.sum +++ b/cmd/openapi/go.sum @@ -1,3 +1,5 @@ +github.com/Masterminds/semver/v3 v3.2.1 h1:RN9w6+7QoMeJVGyfmbcgs28Br8cvmnucEXnY0rYXWg0= +github.com/Masterminds/semver/v3 v3.2.1/go.mod h1:qvl/7zhW3nngYb5+80sSMF+FG2BjYrf8m9wsX0PNOMQ= github.com/atotto/clipboard v0.1.4 h1:EH0zSVneZPSuFR11BlR9YppQTVDbh5+16AmcJi4g1z4= github.com/atotto/clipboard v0.1.4/go.mod h1:ZY9tmq7sm5xIbd9bOK4onWV4S6X0u6GY7Vn0Yu86PYI= github.com/aymanbagabas/go-osc52/v2 v2.0.1 h1:HwpRHbFMcZLEVr42D4p7XBqjyuxQH5SMiErDT4WkJ2k= @@ -17,19 +19,26 @@ github.com/charmbracelet/x/cellbuf v0.0.13-0.20250311204145-2c3ea96c31dd/go.mod github.com/charmbracelet/x/term v0.2.1 h1:AQeHeLZ1OqSXhrAWpYUtZyX1T3zVxfpZuEQMIQaGIAQ= github.com/charmbracelet/x/term v0.2.1/go.mod h1:oQ4enTYFV7QN4m0i9mzHrViD7TQKvNEEkHUMCmsxdUg= github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g= -github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM= github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/dlclark/regexp2 v1.11.0 h1:G/nrcoOa7ZXlpoa/91N3X7mM3r8eIlMBBJZvsz/mxKI= -github.com/dlclark/regexp2 v1.11.0/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8= +github.com/dlclark/regexp2 v1.11.4 h1:rPYF9/LECdNymJufQKmri9gV604RvvABwgOA8un7yAo= +github.com/dlclark/regexp2 v1.11.4/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8= +github.com/dop251/goja v0.0.0-20260106131823-651366fbe6e3 h1:bVp3yUzvSAJzu9GqID+Z96P+eu5TKnIMJSV4QaZMauM= +github.com/dop251/goja v0.0.0-20260106131823-651366fbe6e3/go.mod h1:MxLav0peU43GgvwVgNbLAj1s/bSGboKkhuULvq/7hx4= github.com/dprotaso/go-yit v0.0.0-20191028211022-135eb7262960 h1:aRd8M7HJVZOqn/vhOzrGcQH0lNAMkqMn+pXUYkatmcA= github.com/dprotaso/go-yit v0.0.0-20191028211022-135eb7262960/go.mod h1:9HQzr9D/0PGwMEbC3d5AB7oi67+h4TsQqItC1GVYG58= github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f 
h1:Y/CXytFA4m6baUTXGLOoWe4PQhGxaX0KpnayAqC48p4= github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f/go.mod h1:vw97MGsxSvLiUE2X8qFplwetxpGLQrlU1Q9AUEIzCaM= +github.com/evanw/esbuild v0.27.2 h1:3xBEws9y/JosfewXMM2qIyHAi+xRo8hVx475hVkJfNg= +github.com/evanw/esbuild v0.27.2/go.mod h1:D2vIQZqV/vIf/VRHtViaUtViZmG7o+kKmlBfVQuRi48= github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= +github.com/go-sourcemap/sourcemap v2.1.4+incompatible h1:a+iTbH5auLKxaNwQFg0B+TCYl6lbukKPc7b5x0n1s6Q= +github.com/go-sourcemap/sourcemap v2.1.4+incompatible/go.mod h1:F8jJfvm2KbVjc5NqelyYJmf/v5J0dwNLS2mL4sNA1Jg= github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/google/pprof v0.0.0-20230207041349-798e818bf904 h1:4/hN5RUoecvl+RmJRE2YxKWtnnQls6rQjjW5oV7qg2U= +github.com/google/pprof v0.0.0-20230207041349-798e818bf904/go.mod h1:uglQLonpP8qtYCYyzA+8c/9qtqgA3qsXGYqCPKARAFg= github.com/hpcloud/tail v1.0.0 h1:nfCOvKYfkgYP8hkirhJocXT2+zOD8yUNjXaWfTlyFKI= github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= @@ -75,8 +84,10 @@ github.com/sergi/go-diff v1.1.0 h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0= github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= github.com/speakeasy-api/jsonpath v0.6.2 h1:Mys71yd6u8kuowNCR0gCVPlVAHCmKtoGXYoAtcEbqXQ= github.com/speakeasy-api/jsonpath v0.6.2/go.mod h1:ymb2iSkyOycmzKwbEAYPJV/yi2rSmvBCLZJcyD+VVWw= -github.com/speakeasy-api/openapi v1.15.1-0.20260123232020-443f8a84b64c h1:nmJ3K4QQO1fNkIKM0GKfEGs/Cav7udtn9LijHF8ZaFw= -github.com/speakeasy-api/openapi v1.15.1-0.20260123232020-443f8a84b64c/go.mod h1:aiVj+JnirrwZDtKegt0hQrj/ixl3v17EkN2YGnTuSro= +github.com/speakeasy-api/openapi v1.15.2-0.20260205050808-54a315b347f2 h1:HAVe+/IBKXdUv/Qq1UzXIWV4RDHA8JQA0OGpgd/a0Zs= +github.com/speakeasy-api/openapi v1.15.2-0.20260205050808-54a315b347f2/go.mod h1:aiVj+JnirrwZDtKegt0hQrj/ixl3v17EkN2YGnTuSro= +github.com/speakeasy-api/openapi/openapi/linter/customrules v0.0.0-20260205050808-54a315b347f2 h1:pQsTBWKRf27uGkT1vbosf/GFiUfNr0qSxkd+FfdZSwE= +github.com/speakeasy-api/openapi/openapi/linter/customrules v0.0.0-20260205050808-54a315b347f2/go.mod h1:Z2pg+iCf6izq0dVz3Ow/jHrHSlT220y899gwNHz1ZzE= github.com/spf13/cobra v1.10.1 h1:lJeBwCfmrnXthfAupyUTzJ/J4Nc1RsHC/mSRU2dll/s= github.com/spf13/cobra v1.10.1/go.mod h1:7SmJGaTHFVBY0jW4NXGluQoLvhqFQM+6XSKD+P4XaB0= github.com/spf13/pflag v1.0.9 h1:9exaQaMOCwffKiiiYk6/BndUBv+iRViNW+4lEMi0PvY= @@ -100,6 +111,7 @@ golang.org/x/sync v0.19.0 h1:vV+1eWNmZ5geRlYjzm2adRgW2/mcpevXNg50YZtPCE4= golang.org/x/sync v0.19.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI= golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.36.0 h1:KVRy2GtZBrk1cBYA7MKu5bEZFxQk4NIDV6RLVcC8o0k= golang.org/x/sys v0.36.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= @@ -116,8 +128,9 @@ gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkep gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= 
gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.4 h1:/eiJrUcujPVeJ3xlSWaiNi3uSVmDGBK1pDHUHAnao1I= gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.0-20191026110619-0b21df46bc1d/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= diff --git a/cmd/openapi/internal/explore/tui/input.go b/cmd/openapi/internal/explore/tui/input.go index 990af4de..a8a07a7b 100644 --- a/cmd/openapi/internal/explore/tui/input.go +++ b/cmd/openapi/internal/explore/tui/input.go @@ -1,6 +1,7 @@ package tui import ( + "errors" "fmt" "strings" @@ -111,7 +112,7 @@ func PromptForFilePath(prompt, defaultValue string) (string, error) { inputModel, ok := finalModel.(InputModel) if !ok { - return "", fmt.Errorf("unexpected model type") + return "", errors.New("unexpected model type") } if inputModel.IsCancelled() { diff --git a/cmd/update-lint-docs/main.go b/cmd/update-lint-docs/main.go new file mode 100644 index 00000000..b8dfdda8 --- /dev/null +++ b/cmd/update-lint-docs/main.go @@ -0,0 +1,183 @@ +package main + +import ( + "fmt" + "os" + "path/filepath" + "regexp" + "sort" + "strings" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + openapiLinter "github.com/speakeasy-api/openapi/openapi/linter" +) + +func main() { + if err := updateLintDocs(); err != nil { + fmt.Fprintf(os.Stderr, "Error: %v\n", err) + os.Exit(1) + } +} + +func updateLintDocs() error { + fmt.Println("🔄 Updating lint rules in README files...") + + if err := updateOpenAPILintDocs(); err != nil { + return fmt.Errorf("failed to update OpenAPI lint docs: %w", err) + } + + if err := updateRuleLinks(); err != nil { + return fmt.Errorf("failed to update rule links: %w", err) + } + + fmt.Println("🎉 Lint docs updated successfully!") + return nil +} + +func updateOpenAPILintDocs() error { + readmeFile := "openapi/linter/README.md" + + // Check if README exists + if _, err := os.Stat(readmeFile); os.IsNotExist(err) { + fmt.Printf("⚠️ No README file found: %s\n", readmeFile) + return nil + } + + // Create linter to get the registry + config := linter.NewConfig() + lint, err := openapiLinter.NewLinter(config) + if err != nil { + return fmt.Errorf("failed to create linter: %w", err) + } + docGen := linter.NewDocGenerator(lint.Registry()) + + // Generate rules table + content := generateRulesTable(docGen) + + // Update README file + if err := updateReadmeFile(readmeFile, content); err != nil { + return fmt.Errorf("failed to update README: %w", err) + } + + fmt.Printf("✅ Updated %s\n", readmeFile) + return nil +} + +func generateRulesTable(docGen *linter.DocGenerator[*openapi.OpenAPI]) string { + docs := docGen.GenerateAllRuleDocs() + + // Sort rules alphabetically by ID + sort.Slice(docs, func(i, j int) bool { + return docs[i].ID < docs[j].ID + }) + + var content strings.Builder + content.WriteString("| Rule | Severity | Description |\n") + content.WriteString("|------|----------|-------------|\n") + + for _, doc := range docs { + // Escape pipe characters in description + desc := strings.ReplaceAll(doc.Description, 
"|", "\\|") + // Replace newlines with spaces + desc = strings.ReplaceAll(desc, "\n", " ") + content.WriteString(fmt.Sprintf("| `%s` | %s | %s |\n", doc.ID, doc.ID, doc.DefaultSeverity, desc)) + } + + return content.String() +} + +func updateReadmeFile(filename, newContent string) error { + // Read the current README + data, err := os.ReadFile(filename) //nolint:gosec + if err != nil { + return err + } + + content := string(data) + + // Find the start and end markers + startMarker := "" + endMarker := "" + + startIdx := strings.Index(content, startMarker) + endIdx := strings.Index(content, endMarker) + + if startIdx == -1 || endIdx == -1 { + return fmt.Errorf("could not find lint rules markers in %s", filename) + } + + // Replace the content between markers + before := content[:startIdx+len(startMarker)] + after := content[endIdx:] + + newFileContent := before + "\n\n" + newContent + "\n" + after + + // Write the updated content + return os.WriteFile(filename, []byte(newFileContent), 0600) +} + +func updateRuleLinks() error { + const baseURL = "https://github.com/speakeasy-api/openapi/blob/main/openapi/linter/README.md" + rulesDir := "openapi/linter/rules" + + // Get all rule files + entries, err := os.ReadDir(rulesDir) + if err != nil { + return fmt.Errorf("failed to read rules directory: %w", err) + } + + // Pattern to match Link() method - captures receiver and return value + linkPattern := regexp.MustCompile(`func (\([^)]+\)) Link\(\) string \{\s*return "[^"]*"\s*\}`) + + updatedCount := 0 + for _, entry := range entries { + if entry.IsDir() || !strings.HasSuffix(entry.Name(), ".go") || strings.HasSuffix(entry.Name(), "_test.go") { + continue + } + + filePath := filepath.Join(rulesDir, entry.Name()) + + // Read the file + data, err := os.ReadFile(filePath) //nolint:gosec + if err != nil { + return fmt.Errorf("failed to read %s: %w", filePath, err) + } + + content := string(data) + + // Find the rule ID constant + ruleIDPattern := regexp.MustCompile(`const (Rule\w+) = "([^"]+)"`) + matches := ruleIDPattern.FindStringSubmatch(content) + if len(matches) < 3 { + continue // Skip if no rule ID found + } + ruleID := matches[2] + + // Create the new link + newLink := fmt.Sprintf("%s#%s", baseURL, ruleID) + + // Replace the Link() method, preserving the receiver + newContent := linkPattern.ReplaceAllStringFunc(content, func(match string) string { + receiverMatch := regexp.MustCompile(`func (\([^)]+\))`).FindStringSubmatch(match) + if len(receiverMatch) > 1 { + return fmt.Sprintf(`func %s Link() string { + return "%s" +}`, receiverMatch[1], newLink) + } + return match + }) + + // Only write if content changed + if newContent != content { + if err := os.WriteFile(filePath, []byte(newContent), 0600); err != nil { + return fmt.Errorf("failed to write %s: %w", filePath, err) + } + updatedCount++ + fmt.Printf("✅ Updated link in %s\n", filePath) + } + } + + fmt.Printf("✅ Updated links in %d rule files\n", updatedCount) + return nil +} diff --git a/go.work b/go.work index b115ffd4..1e38c8f4 100644 --- a/go.work +++ b/go.work @@ -4,4 +4,6 @@ use ( . 
./cmd/openapi ./jsonschema/oas3/tests + ./openapi/linter/converter/tests + ./openapi/linter/customrules ) diff --git a/jsonpointer/jsonpointer.go b/jsonpointer/jsonpointer.go index 6d66e904..042ee0bb 100644 --- a/jsonpointer/jsonpointer.go +++ b/jsonpointer/jsonpointer.go @@ -221,6 +221,10 @@ type NavigableNoder interface { } func getStructTarget(sourceVal reflect.Value, currentPart navigationPart, stack []navigationPart, currentPath string, o *options) (any, []navigationPart, error) { + if sourceVal.Kind() == reflect.Ptr && sourceVal.IsNil() { + return nil, nil, ErrNotFound.Wrap(fmt.Errorf("struct is nil at %s", currentPath)) + } + if interfaces.ImplementsInterface[NavigableNoder](sourceVal.Type()) { val, stack, err := getNavigableNoderTarget(sourceVal, currentPart, stack, currentPath, o) if err != nil { diff --git a/jsonpointer/models_test.go b/jsonpointer/models_test.go index 272725a5..4d2ef7bd 100644 --- a/jsonpointer/models_test.go +++ b/jsonpointer/models_test.go @@ -240,6 +240,37 @@ func TestNavigateModel_EmbeddedMap(t *testing.T) { }) } +func TestNavigateModel_NilModelPointer(t *testing.T) { + t.Parallel() + + t.Run("nil model pointer returns error instead of panic", func(t *testing.T) { + t.Parallel() + + // A nil pointer to a model type should return an error, not panic. + // This reproduces a crash when resolving a broken $ref like + // "#/components/schemas/DoesNotExist" where the Components pointer is nil. + var model *tests.TestPrimitiveHighModel + _, err := GetTarget(model, "/stringField") + require.Error(t, err) + assert.Contains(t, err.Error(), "not found") + assert.Contains(t, err.Error(), "nil") + }) + + t.Run("nil nested model pointer returns error instead of panic", func(t *testing.T) { + t.Parallel() + + // A model with a nil nested model pointer should return an error + // when trying to navigate through the nil pointer. 
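+		// The guard added to getStructTarget in jsonpointer.go above wraps ErrNotFound
+		// with a "struct is nil at <path>" message, which is what the "not found" and
+		// "nil" substring assertions below rely on.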
+ model := &tests.TestComplexHighModel{ + NestedModel: nil, + } + _, err := GetTarget(model, "/nestedModel/stringField") + require.Error(t, err) + assert.Contains(t, err.Error(), "not found") + assert.Contains(t, err.Error(), "nil") + }) +} + func TestNavigateModel_EmbeddedMapEscapedKeys(t *testing.T) { t.Parallel() diff --git a/jsonschema/oas3/core/discriminator_test.go b/jsonschema/oas3/core/discriminator_test.go new file mode 100644 index 00000000..322032f6 --- /dev/null +++ b/jsonschema/oas3/core/discriminator_test.go @@ -0,0 +1,325 @@ +package core + +import ( + "testing" + + "github.com/speakeasy-api/openapi/marshaller" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestDiscriminator_Unmarshal_AllFields_Success(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + }{ + { + name: "all fields populated", + yaml: ` +propertyName: petType +mapping: + dog: "#/components/schemas/Dog" + cat: "#/components/schemas/Cat" +defaultMapping: "#/components/schemas/Pet" +x-custom: value +`, + }, + { + name: "only required propertyName field", + yaml: ` +propertyName: type +`, + }, + { + name: "propertyName with mapping", + yaml: ` +propertyName: objectType +mapping: + typeA: "#/components/schemas/TypeA" + typeB: "#/components/schemas/TypeB" +`, + }, + { + name: "propertyName with defaultMapping", + yaml: ` +propertyName: kind +defaultMapping: "#/components/schemas/DefaultType" +`, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + var target Discriminator + validationErrs, err := marshaller.UnmarshalCore(ctx, "", parseYAML(t, tt.yaml), &target) + + require.NoError(t, err, "unmarshal should succeed") + require.Empty(t, validationErrs, "should not have validation errors") + assert.NotNil(t, target, "Discriminator should not be nil") + }) + } +} + +func TestDiscriminator_Unmarshal_PropertyNameField_Success(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + expectedPropertyName string + }{ + { + name: "simple property name", + yaml: `propertyName: type`, + expectedPropertyName: "type", + }, + { + name: "camelCase property name", + yaml: `propertyName: petType`, + expectedPropertyName: "petType", + }, + { + name: "snake_case property name", + yaml: `propertyName: pet_type`, + expectedPropertyName: "pet_type", + }, + { + name: "kebab-case property name", + yaml: `propertyName: pet-type`, + expectedPropertyName: "pet-type", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + var target Discriminator + validationErrs, err := marshaller.UnmarshalCore(ctx, "", parseYAML(t, tt.yaml), &target) + + require.NoError(t, err, "unmarshal should succeed") + require.Empty(t, validationErrs, "should not have validation errors") + assert.Equal(t, tt.expectedPropertyName, target.PropertyName.Value, "should parse propertyName correctly") + }) + } +} + +func TestDiscriminator_Unmarshal_MappingField_Success(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + key string + expectedRef string + expectedSize int + }{ + { + name: "single mapping entry", + yaml: ` +propertyName: type +mapping: + dog: "#/components/schemas/Dog" +`, + key: "dog", + expectedRef: "#/components/schemas/Dog", + expectedSize: 1, + }, + { + name: "multiple mapping entries", + yaml: ` +propertyName: type +mapping: + dog: "#/components/schemas/Dog" + cat: "#/components/schemas/Cat" + 
bird: "#/components/schemas/Bird" +`, + key: "cat", + expectedRef: "#/components/schemas/Cat", + expectedSize: 3, + }, + { + name: "mapping with external refs", + yaml: ` +propertyName: type +mapping: + local: "#/components/schemas/Local" + external: "https://example.com/schemas/External" +`, + key: "external", + expectedRef: "https://example.com/schemas/External", + expectedSize: 2, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + var target Discriminator + validationErrs, err := marshaller.UnmarshalCore(ctx, "", parseYAML(t, tt.yaml), &target) + + require.NoError(t, err, "unmarshal should succeed") + require.Empty(t, validationErrs, "should not have validation errors") + require.NotNil(t, target.Mapping.Value, "mapping should be set") + assert.Equal(t, tt.expectedSize, target.Mapping.Value.Len(), "should have correct number of mappings") + + value, found := target.Mapping.Value.Get(tt.key) + require.True(t, found, "should find mapping key") + assert.Equal(t, tt.expectedRef, value.Value, "should parse mapping value correctly") + }) + } +} + +func TestDiscriminator_Unmarshal_DefaultMappingField_Success(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + expectedDefaultMapping string + }{ + { + name: "defaultMapping with component ref", + yaml: ` +propertyName: type +defaultMapping: "#/components/schemas/Default" +`, + expectedDefaultMapping: "#/components/schemas/Default", + }, + { + name: "defaultMapping with external ref", + yaml: ` +propertyName: type +defaultMapping: "https://example.com/schemas/Default" +`, + expectedDefaultMapping: "https://example.com/schemas/Default", + }, + { + name: "defaultMapping with path ref", + yaml: ` +propertyName: type +defaultMapping: "#/definitions/Default" +`, + expectedDefaultMapping: "#/definitions/Default", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + var target Discriminator + validationErrs, err := marshaller.UnmarshalCore(ctx, "", parseYAML(t, tt.yaml), &target) + + require.NoError(t, err, "unmarshal should succeed") + require.Empty(t, validationErrs, "should not have validation errors") + require.NotNil(t, target.DefaultMapping.Value, "defaultMapping should be set") + assert.Equal(t, tt.expectedDefaultMapping, *target.DefaultMapping.Value, "should parse defaultMapping correctly") + }) + } +} + +func TestDiscriminator_Unmarshal_Extensions_Success(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + extensionKey string + expectedValue string + }{ + { + name: "single extension", + yaml: ` +propertyName: type +x-custom: value +`, + extensionKey: "x-custom", + expectedValue: "value", + }, + { + name: "multiple extensions", + yaml: ` +propertyName: type +x-first: value1 +x-second: value2 +`, + extensionKey: "x-first", + expectedValue: "value1", + }, + { + name: "extension with all fields", + yaml: ` +propertyName: type +mapping: + dog: "#/components/schemas/Dog" +defaultMapping: "#/components/schemas/Pet" +x-vendor: custom-value +`, + extensionKey: "x-vendor", + expectedValue: "custom-value", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + var target Discriminator + validationErrs, err := marshaller.UnmarshalCore(ctx, "", parseYAML(t, tt.yaml), &target) + + require.NoError(t, err, "unmarshal should succeed") + require.Empty(t, validationErrs, "should not have validation errors") 
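+			// Extension entries are node-wrapped, hence the ext.Value.Value access below,
+			// and are looked up by their full "x-..." key from the test table.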
+ require.NotNil(t, target.Extensions, "extensions should be set") + + ext, found := target.Extensions.Get(tt.extensionKey) + require.True(t, found, "should find extension") + assert.Equal(t, tt.expectedValue, ext.Value.Value, "should parse extension value correctly") + }) + } +} + +func TestDiscriminator_Unmarshal_MinimalObject_Success(t *testing.T) { + t.Parallel() + + ctx := t.Context() + yaml := `propertyName: type` + + var target Discriminator + validationErrs, err := marshaller.UnmarshalCore(ctx, "", parseYAML(t, yaml), &target) + + require.NoError(t, err, "unmarshal should succeed") + require.Empty(t, validationErrs, "should not have validation errors") + assert.Equal(t, "type", target.PropertyName.Value, "should parse propertyName") + assert.Nil(t, target.Mapping.Value, "mapping should be nil") + assert.Nil(t, target.DefaultMapping.Value, "defaultMapping should be nil") +} + +func TestDiscriminator_Unmarshal_EmptyMapping_Success(t *testing.T) { + t.Parallel() + + ctx := t.Context() + yaml := ` +propertyName: type +mapping: {} +` + + var target Discriminator + validationErrs, err := marshaller.UnmarshalCore(ctx, "", parseYAML(t, yaml), &target) + + require.NoError(t, err, "unmarshal should succeed") + require.Empty(t, validationErrs, "should not have validation errors") + assert.Equal(t, "type", target.PropertyName.Value, "should parse propertyName") + require.NotNil(t, target.Mapping.Value, "mapping should not be nil") + assert.Equal(t, 0, target.Mapping.Value.Len(), "mapping should be empty") +} diff --git a/jsonschema/oas3/core/externaldoc_test.go b/jsonschema/oas3/core/externaldoc_test.go new file mode 100644 index 00000000..ea4afea6 --- /dev/null +++ b/jsonschema/oas3/core/externaldoc_test.go @@ -0,0 +1,228 @@ +package core + +import ( + "testing" + + "github.com/speakeasy-api/openapi/marshaller" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestExternalDocumentation_Unmarshal_AllFields_Success(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + }{ + { + name: "all fields populated", + yaml: ` +url: https://example.com/docs +description: Additional documentation +x-custom: value +`, + }, + { + name: "only required url field", + yaml: ` +url: https://example.com +`, + }, + { + name: "url with description", + yaml: ` +url: https://api.example.com/reference +description: API Reference Documentation +`, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + var target ExternalDocumentation + validationErrs, err := marshaller.UnmarshalCore(ctx, "", parseYAML(t, tt.yaml), &target) + + require.NoError(t, err, "unmarshal should succeed") + require.Empty(t, validationErrs, "should not have validation errors") + assert.NotNil(t, target, "ExternalDocumentation should not be nil") + }) + } +} + +func TestExternalDocumentation_Unmarshal_URLField_Success(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + expectedURL string + }{ + { + name: "https url", + yaml: `url: https://example.com/docs`, + expectedURL: "https://example.com/docs", + }, + { + name: "http url", + yaml: `url: http://example.com/docs`, + expectedURL: "http://example.com/docs", + }, + { + name: "url with path", + yaml: `url: https://api.example.com/v1/reference`, + expectedURL: "https://api.example.com/v1/reference", + }, + { + name: "url with query params", + yaml: `url: https://example.com/docs?version=2.0`, + expectedURL: 
"https://example.com/docs?version=2.0", + }, + { + name: "url with fragment", + yaml: `url: https://example.com/docs#section`, + expectedURL: "https://example.com/docs#section", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + var target ExternalDocumentation + validationErrs, err := marshaller.UnmarshalCore(ctx, "", parseYAML(t, tt.yaml), &target) + + require.NoError(t, err, "unmarshal should succeed") + require.Empty(t, validationErrs, "should not have validation errors") + assert.Equal(t, tt.expectedURL, target.URL.Value, "should parse url correctly") + }) + } +} + +func TestExternalDocumentation_Unmarshal_DescriptionField_Success(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + expectedDescription string + }{ + { + name: "simple description", + yaml: ` +url: https://example.com +description: Documentation +`, + expectedDescription: "Documentation", + }, + { + name: "multi-word description", + yaml: ` +url: https://example.com +description: Complete API documentation and reference guide +`, + expectedDescription: "Complete API documentation and reference guide", + }, + { + name: "description with special chars", + yaml: ` +url: https://example.com +description: "Documentation: API & SDK Guide" +`, + expectedDescription: "Documentation: API & SDK Guide", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + var target ExternalDocumentation + validationErrs, err := marshaller.UnmarshalCore(ctx, "", parseYAML(t, tt.yaml), &target) + + require.NoError(t, err, "unmarshal should succeed") + require.Empty(t, validationErrs, "should not have validation errors") + require.NotNil(t, target.Description.Value, "description should be set") + assert.Equal(t, tt.expectedDescription, *target.Description.Value, "should parse description correctly") + }) + } +} + +func TestExternalDocumentation_Unmarshal_Extensions_Success(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + extensionKey string + expectedValue string + }{ + { + name: "single extension", + yaml: ` +url: https://example.com +x-custom: value +`, + extensionKey: "x-custom", + expectedValue: "value", + }, + { + name: "multiple extensions", + yaml: ` +url: https://example.com +x-first: value1 +x-second: value2 +`, + extensionKey: "x-first", + expectedValue: "value1", + }, + { + name: "extension with url and description", + yaml: ` +url: https://example.com/docs +description: API docs +x-vendor: custom-value +`, + extensionKey: "x-vendor", + expectedValue: "custom-value", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + var target ExternalDocumentation + validationErrs, err := marshaller.UnmarshalCore(ctx, "", parseYAML(t, tt.yaml), &target) + + require.NoError(t, err, "unmarshal should succeed") + require.Empty(t, validationErrs, "should not have validation errors") + require.NotNil(t, target.Extensions, "extensions should be set") + + ext, found := target.Extensions.Get(tt.extensionKey) + require.True(t, found, "should find extension") + assert.Equal(t, tt.expectedValue, ext.Value.Value, "should parse extension value correctly") + }) + } +} + +func TestExternalDocumentation_Unmarshal_MinimalObject_Success(t *testing.T) { + t.Parallel() + + ctx := t.Context() + yaml := `url: https://example.com` + + var target ExternalDocumentation + validationErrs, err := 
marshaller.UnmarshalCore(ctx, "", parseYAML(t, yaml), &target) + + require.NoError(t, err, "unmarshal should succeed") + require.Empty(t, validationErrs, "should not have validation errors") + assert.Equal(t, "https://example.com", target.URL.Value, "should parse url") + assert.Nil(t, target.Description.Value, "description should be nil") +} diff --git a/jsonschema/oas3/core/factory_registration.go b/jsonschema/oas3/core/factory_registration.go index 6f710f6c..8fcb91ae 100644 --- a/jsonschema/oas3/core/factory_registration.go +++ b/jsonschema/oas3/core/factory_registration.go @@ -20,13 +20,8 @@ func init() { return &core.EitherValue[[]marshaller.Node[string], string]{} }) - // Register Node-wrapped EitherValue for additionalProperties - marshaller.RegisterType(func() *marshaller.Node[*core.EitherValue[Schema, bool]] { - return &marshaller.Node[*core.EitherValue[Schema, bool]]{} - }) - - // Register sequencedmap for additionalProperties (used in properties field) - marshaller.RegisterType(func() *sequencedmap.Map[string, marshaller.Node[*core.EitherValue[Schema, bool]]] { - return &sequencedmap.Map[string, marshaller.Node[*core.EitherValue[Schema, bool]]]{} + // Register sequencedmap for properties and similar fields + marshaller.RegisterType(func() *sequencedmap.Map[string, *core.EitherValue[Schema, bool]] { + return &sequencedmap.Map[string, *core.EitherValue[Schema, bool]]{} }) } diff --git a/jsonschema/oas3/core/jsonschema_test.go b/jsonschema/oas3/core/jsonschema_test.go index d5a944bb..297fe11f 100644 --- a/jsonschema/oas3/core/jsonschema_test.go +++ b/jsonschema/oas3/core/jsonschema_test.go @@ -70,3 +70,222 @@ minLength: 1 assert.True(t, target.Left.Value.Type.Value.IsRight, "Type should be Right type (string)") assert.Equal(t, "string", target.Left.Value.Type.Value.Right.Value, "Type should be 'string'") } + +func TestJSONSchema_Unmarshal_TypeArray_Success(t *testing.T) { + t.Parallel() + + ctx := t.Context() + + // YAML with type as array (tests EitherValue[[]marshaller.Node[string], string]) + testYaml := ` +type: [string, number] +` + var node yaml.Node + err := yaml.Unmarshal([]byte(testYaml), &node) + require.NoError(t, err) + + var target JSONSchema + validationErrs, err := marshaller.UnmarshalCore(ctx, "", node.Content[0], &target) + + require.NoError(t, err, "Should not have syntax errors") + require.Empty(t, validationErrs, "Should not have validation errors") + require.NotNil(t, target, "JSONSchema should not be nil") + assert.True(t, target.IsLeft, "JSONSchema should be Left type (Schema)") + + // Verify type array was unmarshaled + require.NotNil(t, target.Left.Value.Type.Value, "Type should be set") + assert.True(t, target.Left.Value.Type.Value.IsLeft, "Type should be Left type (array)") + assert.Len(t, target.Left.Value.Type.Value.Left.Value, 2, "Should have 2 types") +} + +func TestJSONSchema_Unmarshal_PropertiesWithAdditionalProperties_Success(t *testing.T) { + t.Parallel() + + ctx := t.Context() + + // YAML with properties and additionalProperties (tests sequencedmap and nested schemas) + testYaml := ` +type: object +properties: + name: + type: string + age: + type: integer +additionalProperties: + type: string +` + var node yaml.Node + err := yaml.Unmarshal([]byte(testYaml), &node) + require.NoError(t, err) + + var target JSONSchema + validationErrs, err := marshaller.UnmarshalCore(ctx, "", node.Content[0], &target) + + require.NoError(t, err, "Should not have syntax errors") + require.Empty(t, validationErrs, "Should not have validation errors") + 
require.NotNil(t, target, "JSONSchema should not be nil") + + // Verify properties map + require.NotNil(t, target.Left.Value.Properties.Value, "Properties should be set") + assert.Equal(t, 2, target.Left.Value.Properties.Value.Len(), "Should have 2 properties") + + // Verify additionalProperties schema + require.NotNil(t, target.Left.Value.AdditionalProperties.Value, "AdditionalProperties should be set") + assert.True(t, target.Left.Value.AdditionalProperties.Value.IsLeft, "AdditionalProperties should be schema") +} + +func TestJSONSchema_Unmarshal_WithDiscriminator_Success(t *testing.T) { + t.Parallel() + + ctx := t.Context() + + // YAML with discriminator (tests Discriminator type registration) + testYaml := ` +type: object +discriminator: + propertyName: petType + mapping: + dog: "#/components/schemas/Dog" + cat: "#/components/schemas/Cat" +` + var node yaml.Node + err := yaml.Unmarshal([]byte(testYaml), &node) + require.NoError(t, err) + + var target JSONSchema + validationErrs, err := marshaller.UnmarshalCore(ctx, "", node.Content[0], &target) + + require.NoError(t, err, "Should not have syntax errors") + require.Empty(t, validationErrs, "Should not have validation errors") + require.NotNil(t, target, "JSONSchema should not be nil") + assert.True(t, target.IsLeft, "JSONSchema should be Left type (Schema)") + + // Verify discriminator was unmarshaled + require.NotNil(t, target.Left.Value.Discriminator.Value, "Discriminator should be set") + assert.Equal(t, "petType", target.Left.Value.Discriminator.Value.PropertyName.Value, "Should parse propertyName") + require.NotNil(t, target.Left.Value.Discriminator.Value.Mapping.Value, "Mapping should be set") + assert.Equal(t, 2, target.Left.Value.Discriminator.Value.Mapping.Value.Len(), "Should have 2 mappings") +} + +func TestJSONSchema_Unmarshal_WithExternalDocs_Success(t *testing.T) { + t.Parallel() + + ctx := t.Context() + + // YAML with externalDocs (tests ExternalDocumentation type registration) + testYaml := ` +type: string +description: A user identifier +externalDocs: + url: https://example.com/docs/user-id + description: User ID documentation +` + var node yaml.Node + err := yaml.Unmarshal([]byte(testYaml), &node) + require.NoError(t, err) + + var target JSONSchema + validationErrs, err := marshaller.UnmarshalCore(ctx, "", node.Content[0], &target) + + require.NoError(t, err, "Should not have syntax errors") + require.Empty(t, validationErrs, "Should not have validation errors") + require.NotNil(t, target, "JSONSchema should not be nil") + assert.True(t, target.IsLeft, "JSONSchema should be Left type (Schema)") + + // Verify externalDocs was unmarshaled + require.NotNil(t, target.Left.Value.ExternalDocs.Value, "ExternalDocs should be set") + assert.Equal(t, "https://example.com/docs/user-id", target.Left.Value.ExternalDocs.Value.URL.Value, "Should parse URL") + require.NotNil(t, target.Left.Value.ExternalDocs.Value.Description.Value, "Description should be set") + assert.Equal(t, "User ID documentation", *target.Left.Value.ExternalDocs.Value.Description.Value, "Should parse description") +} + +func TestJSONSchema_Unmarshal_WithXML_Success(t *testing.T) { + t.Parallel() + + ctx := t.Context() + + // YAML with xml (tests XML type registration) + testYaml := ` +type: object +xml: + name: Person + namespace: http://example.com/schema + prefix: per + wrapped: true +` + var node yaml.Node + err := yaml.Unmarshal([]byte(testYaml), &node) + require.NoError(t, err) + + var target JSONSchema + validationErrs, err := marshaller.UnmarshalCore(ctx, 
"", node.Content[0], &target) + + require.NoError(t, err, "Should not have syntax errors") + require.Empty(t, validationErrs, "Should not have validation errors") + require.NotNil(t, target, "JSONSchema should not be nil") + assert.True(t, target.IsLeft, "JSONSchema should be Left type (Schema)") + + // Verify xml was unmarshaled + require.NotNil(t, target.Left.Value.XML.Value, "XML should be set") + require.NotNil(t, target.Left.Value.XML.Value.Name.Value, "Name should be set") + assert.Equal(t, "Person", *target.Left.Value.XML.Value.Name.Value, "Should parse name") + require.NotNil(t, target.Left.Value.XML.Value.Namespace.Value, "Namespace should be set") + assert.Equal(t, "http://example.com/schema", *target.Left.Value.XML.Value.Namespace.Value, "Should parse namespace") + require.NotNil(t, target.Left.Value.XML.Value.Prefix.Value, "Prefix should be set") + assert.Equal(t, "per", *target.Left.Value.XML.Value.Prefix.Value, "Should parse prefix") + require.NotNil(t, target.Left.Value.XML.Value.Wrapped.Value, "Wrapped should be set") + assert.True(t, *target.Left.Value.XML.Value.Wrapped.Value, "Should parse wrapped as true") +} + +func TestJSONSchema_Unmarshal_ComplexSchema_Success(t *testing.T) { + t.Parallel() + + ctx := t.Context() + + // YAML with multiple nested features to test all registrations together + testYaml := ` +type: object +properties: + id: + type: string + xml: + attribute: true + name: + type: string +discriminator: + propertyName: type +externalDocs: + url: https://example.com/docs +` + var node yaml.Node + err := yaml.Unmarshal([]byte(testYaml), &node) + require.NoError(t, err) + + var target JSONSchema + validationErrs, err := marshaller.UnmarshalCore(ctx, "", node.Content[0], &target) + + require.NoError(t, err, "Should not have syntax errors") + require.Empty(t, validationErrs, "Should not have validation errors") + require.NotNil(t, target, "JSONSchema should not be nil") + assert.True(t, target.IsLeft, "JSONSchema should be Left type (Schema)") + + // Verify properties + require.NotNil(t, target.Left.Value.Properties.Value, "Properties should be set") + assert.Equal(t, 2, target.Left.Value.Properties.Value.Len(), "Should have 2 properties") + + // Verify id property has xml + idProp, found := target.Left.Value.Properties.Value.Get("id") + require.True(t, found, "Should find id property") + require.NotNil(t, idProp, "id property should not be nil") + require.NotNil(t, idProp.Left.Value.XML.Value, "id should have XML") + require.NotNil(t, idProp.Left.Value.XML.Value.Attribute.Value, "XML attribute should be set") + assert.True(t, *idProp.Left.Value.XML.Value.Attribute.Value, "XML attribute should be true") + + // Verify discriminator + require.NotNil(t, target.Left.Value.Discriminator.Value, "Discriminator should be set") + assert.Equal(t, "type", target.Left.Value.Discriminator.Value.PropertyName.Value, "Should parse discriminator propertyName") + + // Verify externalDocs + require.NotNil(t, target.Left.Value.ExternalDocs.Value, "ExternalDocs should be set") + assert.Equal(t, "https://example.com/docs", target.Left.Value.ExternalDocs.Value.URL.Value, "Should parse externalDocs URL") +} diff --git a/jsonschema/oas3/core/xml_test.go b/jsonschema/oas3/core/xml_test.go new file mode 100644 index 00000000..493a8b86 --- /dev/null +++ b/jsonschema/oas3/core/xml_test.go @@ -0,0 +1,333 @@ +package core + +import ( + "testing" + + "github.com/speakeasy-api/openapi/marshaller" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "gopkg.in/yaml.v3" 
+) + +func parseYAML(t *testing.T, yml string) *yaml.Node { + t.Helper() + var node yaml.Node + err := yaml.Unmarshal([]byte(yml), &node) + require.NoError(t, err) + return node.Content[0] +} + +func TestXML_Unmarshal_AllFields_Success(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + }{ + { + name: "all fields populated", + yaml: ` +name: Person +namespace: http://example.com/schema/Person +prefix: per +attribute: true +wrapped: false +x-custom: value +`, + }, + { + name: "only required fields", + yaml: ` +name: Item +`, + }, + { + name: "namespace and prefix", + yaml: ` +namespace: http://example.com/ns +prefix: ex +`, + }, + { + name: "boolean flags", + yaml: ` +attribute: true +wrapped: true +`, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + var target XML + validationErrs, err := marshaller.UnmarshalCore(ctx, "", parseYAML(t, tt.yaml), &target) + + require.NoError(t, err, "unmarshal should succeed") + require.Empty(t, validationErrs, "should not have validation errors") + assert.NotNil(t, target, "XML should not be nil") + }) + } +} + +func TestXML_Unmarshal_NameField_Success(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + expectedName string + }{ + { + name: "simple name", + yaml: `name: Person`, + expectedName: "Person", + }, + { + name: "camelCase name", + yaml: `name: personDetails`, + expectedName: "personDetails", + }, + { + name: "PascalCase name", + yaml: `name: PersonDetails`, + expectedName: "PersonDetails", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + var target XML + validationErrs, err := marshaller.UnmarshalCore(ctx, "", parseYAML(t, tt.yaml), &target) + + require.NoError(t, err, "unmarshal should succeed") + require.Empty(t, validationErrs, "should not have validation errors") + require.NotNil(t, target.Name.Value, "name should be set") + assert.Equal(t, tt.expectedName, *target.Name.Value, "should parse name correctly") + }) + } +} + +func TestXML_Unmarshal_NamespaceField_Success(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + expectedNamespace string + }{ + { + name: "http namespace", + yaml: `namespace: http://example.com/schema`, + expectedNamespace: "http://example.com/schema", + }, + { + name: "https namespace", + yaml: `namespace: https://example.com/api/v1`, + expectedNamespace: "https://example.com/api/v1", + }, + { + name: "urn namespace", + yaml: `namespace: urn:example:schema`, + expectedNamespace: "urn:example:schema", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + var target XML + validationErrs, err := marshaller.UnmarshalCore(ctx, "", parseYAML(t, tt.yaml), &target) + + require.NoError(t, err, "unmarshal should succeed") + require.Empty(t, validationErrs, "should not have validation errors") + require.NotNil(t, target.Namespace.Value, "namespace should be set") + assert.Equal(t, tt.expectedNamespace, *target.Namespace.Value, "should parse namespace correctly") + }) + } +} + +func TestXML_Unmarshal_PrefixField_Success(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + expectedPrefix string + }{ + { + name: "short prefix", + yaml: `prefix: ex`, + expectedPrefix: "ex", + }, + { + name: "longer prefix", + yaml: `prefix: example`, + expectedPrefix: "example", + }, + { + name: "single char prefix", + yaml: 
`prefix: x`, + expectedPrefix: "x", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + var target XML + validationErrs, err := marshaller.UnmarshalCore(ctx, "", parseYAML(t, tt.yaml), &target) + + require.NoError(t, err, "unmarshal should succeed") + require.Empty(t, validationErrs, "should not have validation errors") + require.NotNil(t, target.Prefix.Value, "prefix should be set") + assert.Equal(t, tt.expectedPrefix, *target.Prefix.Value, "should parse prefix correctly") + }) + } +} + +func TestXML_Unmarshal_AttributeField_Success(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + expectedAttribute bool + }{ + { + name: "attribute true", + yaml: `attribute: true`, + expectedAttribute: true, + }, + { + name: "attribute false", + yaml: `attribute: false`, + expectedAttribute: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + var target XML + validationErrs, err := marshaller.UnmarshalCore(ctx, "", parseYAML(t, tt.yaml), &target) + + require.NoError(t, err, "unmarshal should succeed") + require.Empty(t, validationErrs, "should not have validation errors") + require.NotNil(t, target.Attribute.Value, "attribute should be set") + assert.Equal(t, tt.expectedAttribute, *target.Attribute.Value, "should parse attribute correctly") + }) + } +} + +func TestXML_Unmarshal_WrappedField_Success(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + expectedWrapped bool + }{ + { + name: "wrapped true", + yaml: `wrapped: true`, + expectedWrapped: true, + }, + { + name: "wrapped false", + yaml: `wrapped: false`, + expectedWrapped: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + var target XML + validationErrs, err := marshaller.UnmarshalCore(ctx, "", parseYAML(t, tt.yaml), &target) + + require.NoError(t, err, "unmarshal should succeed") + require.Empty(t, validationErrs, "should not have validation errors") + require.NotNil(t, target.Wrapped.Value, "wrapped should be set") + assert.Equal(t, tt.expectedWrapped, *target.Wrapped.Value, "should parse wrapped correctly") + }) + } +} + +func TestXML_Unmarshal_Extensions_Success(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + extensionKey string + expectedValue string + }{ + { + name: "single extension", + yaml: ` +x-custom: value +`, + extensionKey: "x-custom", + expectedValue: "value", + }, + { + name: "multiple extensions", + yaml: ` +x-first: value1 +x-second: value2 +`, + extensionKey: "x-first", + expectedValue: "value1", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + var target XML + validationErrs, err := marshaller.UnmarshalCore(ctx, "", parseYAML(t, tt.yaml), &target) + + require.NoError(t, err, "unmarshal should succeed") + require.Empty(t, validationErrs, "should not have validation errors") + require.NotNil(t, target.Extensions, "extensions should be set") + + ext, found := target.Extensions.Get(tt.extensionKey) + require.True(t, found, "should find extension") + assert.Equal(t, tt.expectedValue, ext.Value.Value, "should parse extension value correctly") + }) + } +} + +func TestXML_Unmarshal_EmptyObject_Success(t *testing.T) { + t.Parallel() + + ctx := t.Context() + yaml := `{}` + + var target XML + validationErrs, err := marshaller.UnmarshalCore(ctx, "", 
parseYAML(t, yaml), &target) + + require.NoError(t, err, "unmarshal should succeed") + require.Empty(t, validationErrs, "should not have validation errors") + assert.Nil(t, target.Name.Value, "name should be nil") + assert.Nil(t, target.Namespace.Value, "namespace should be nil") + assert.Nil(t, target.Prefix.Value, "prefix should be nil") + assert.Nil(t, target.Attribute.Value, "attribute should be nil") + assert.Nil(t, target.Wrapped.Value, "wrapped should be nil") +} diff --git a/jsonschema/oas3/discriminator.go b/jsonschema/oas3/discriminator.go index f10f00b2..4510cd23 100644 --- a/jsonschema/oas3/discriminator.go +++ b/jsonschema/oas3/discriminator.go @@ -2,6 +2,7 @@ package oas3 import ( "context" + "errors" "github.com/speakeasy-api/openapi/extensions" "github.com/speakeasy-api/openapi/internal/interfaces" @@ -72,17 +73,13 @@ func (d *Discriminator) Validate(ctx context.Context, opts ...validation.Option) errs := []error{} // propertyName is REQUIRED in all OpenAPI versions - if core.PropertyName.Present { - if core.PropertyName.Value == "" { - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("discriminator.propertyName is required"), core, core.PropertyName)) - } - } else { - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("discriminator.propertyName is required"), core, core.PropertyName)) + if core.PropertyName.Present && d.PropertyName == "" { + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("`discriminator.propertyName` is required"), core, core.PropertyName)) } // defaultMapping validation - must not be empty if present - if core.DefaultMapping.Present && (core.DefaultMapping.Value == nil || *core.DefaultMapping.Value == "") { - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("discriminator.defaultMapping cannot be empty"), core, core.DefaultMapping)) + if core.DefaultMapping.Present && d.GetDefaultMapping() == "" { + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationEmptyValue, errors.New("discriminator.defaultMapping cannot be empty"), core, core.DefaultMapping)) } d.Valid = len(errs) == 0 && core.GetValid() diff --git a/jsonschema/oas3/discriminator_validate_test.go b/jsonschema/oas3/discriminator_validate_test.go index e8dbf2c8..69e7d3c6 100644 --- a/jsonschema/oas3/discriminator_validate_test.go +++ b/jsonschema/oas3/discriminator_validate_test.go @@ -94,8 +94,7 @@ func TestDiscriminator_Validate_Error(t *testing.T) { dog: "#/components/schemas/Dog" `, wantErrs: []string{ - "[1:1] discriminator.propertyName is missing", - "[1:1] discriminator.propertyName is required", + "[1:1] error validation-required-field `discriminator.propertyName` is required", }, }, { @@ -105,7 +104,7 @@ propertyName: "" mapping: dog: "#/components/schemas/Dog" `, - wantErrs: []string{"[2:15] discriminator.propertyName is required"}, + wantErrs: []string{"[2:15] error validation-required-field `discriminator.propertyName` is required"}, }, } diff --git a/jsonschema/oas3/externaldoc.go b/jsonschema/oas3/externaldoc.go index 9063d746..507efada 100644 --- a/jsonschema/oas3/externaldoc.go +++ b/jsonschema/oas3/externaldoc.go @@ -2,6 +2,8 @@ package oas3 import ( "context" + "errors" + "fmt" "net/url" "reflect" @@ -86,10 +88,10 @@ func (e *ExternalDocumentation) Validate(ctx context.Context, opts ...validation if core.URL.Present { if core.URL.Value == "" { - errs = append(errs, 
validation.NewValueError(validation.NewMissingValueError("externalDocumentation.url is required"), core, core.URL)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("`externalDocumentation.url` is required"), core, core.URL)) } else { if _, err := url.Parse(core.URL.Value); err != nil { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("externalDocumentation.url is not a valid uri: %s", err), core, core.URL)) + errs = append(errs, validation.NewValueError(validation.SeverityWarning, validation.RuleValidationInvalidFormat, fmt.Errorf("externalDocumentation.url is not a valid uri: %w", err), core, core.URL)) } } } diff --git a/jsonschema/oas3/externaldoc_validate_test.go b/jsonschema/oas3/externaldoc_validate_test.go index bdecc955..e17ce717 100644 --- a/jsonschema/oas3/externaldoc_validate_test.go +++ b/jsonschema/oas3/externaldoc_validate_test.go @@ -86,7 +86,7 @@ func TestExternalDoc_Validate_Error(t *testing.T) { yml: ` description: Some documentation `, - wantErrs: []string{"[2:1] externalDocumentation.url is missing"}, + wantErrs: []string{"[2:1] error validation-required-field `externalDocumentation.url` is required"}, }, { name: "empty URL", @@ -94,7 +94,7 @@ description: Some documentation description: Some documentation url: "" `, - wantErrs: []string{"[3:6] externalDocumentation.url is required"}, + wantErrs: []string{"[3:6] error validation-required-field `externalDocumentation.url` is required"}, }, { name: "invalid URL format", diff --git a/jsonschema/oas3/jsonschema_validate_test.go b/jsonschema/oas3/jsonschema_validate_test.go index 577ee2a4..29c5f230 100644 --- a/jsonschema/oas3/jsonschema_validate_test.go +++ b/jsonschema/oas3/jsonschema_validate_test.go @@ -227,7 +227,7 @@ func TestJSONSchema_Validate_Error(t *testing.T) { name: "schema fails direct validation", yml: ` "test"`, - wantErrs: []string{"[2:1] failed to validate either Schema [expected object, got `te...`] or bool [line 2: cannot unmarshal !!str `test` into bool]"}, + wantErrs: []string{"[2:1] error validation-type-mismatch failed to validate either Schema [expected `object`, got `te...`] or bool [line 2: cannot unmarshal !!str `test` into bool]"}, }, { name: "child schema fails validation", @@ -243,8 +243,8 @@ description: $ref: "#/components/schemas/stream/properties/profiles/description" `, wantErrs: []string{ - "[2:1] schema.description expected string, got object", - "[10:5] schema.description expected string, got object", + "[2:1] error validation-type-mismatch schema.description expected `string`, got `object`", + "[10:5] error validation-type-mismatch schema.description expected `string`, got `object`", }, }, { @@ -253,8 +253,8 @@ description: type: invalid_type `, wantErrs: []string{ - "[2:7] schema.type expected array, got string", - "[2:7] schema.type value must be one of 'array', 'boolean', 'integer', 'null', 'number', 'object', 'string'", + "[2:7] error validation-invalid-schema schema.type value must be one of 'array', 'boolean', 'integer', 'null', 'number', 'object', 'string'", + "[2:7] error validation-type-mismatch schema.type expected `array`, got `string`", }, }, } diff --git a/jsonschema/oas3/resolution.go b/jsonschema/oas3/resolution.go index 22c615f6..16bff2ab 100644 --- a/jsonschema/oas3/resolution.go +++ b/jsonschema/oas3/resolution.go @@ -64,7 +64,7 @@ func (j *JSONSchema[Referenceable]) GetAbsRef() references.Reference { if j.referenceResolutionCache == nil { return ref } - return 
references.Reference(j.referenceResolutionCache.AbsoluteReference + "#" + ref.GetJSONPointer().String()) + return references.Reference(j.referenceResolutionCache.AbsoluteDocumentPath + "#" + ref.GetJSONPointer().String()) } // Resolve will fully resolve the reference and return the JSONSchema referenced. This will recursively resolve any intermediate references as well. @@ -180,7 +180,7 @@ func (s *JSONSchema[Referenceable]) resolve(ctx context.Context, opts references // The ResolveResult.ResolvedDocument should be used as the new TargetDocument if s.referenceResolutionCache.ResolvedDocument != nil { opts.TargetDocument = s.referenceResolutionCache.ResolvedDocument - opts.TargetLocation = s.referenceResolutionCache.AbsoluteReference + opts.TargetLocation = s.referenceResolutionCache.AbsoluteDocumentPath } } @@ -195,7 +195,7 @@ func (s *JSONSchema[Referenceable]) resolve(ctx context.Context, opts references if result := s.tryResolveViaRegistry(ctx, ref, opts); result != nil { // Compute absolute reference for circular detection // Use the result's AbsoluteReference combined with any anchor/fragment - absRef := result.AbsoluteReference + absRef := result.AbsoluteDocumentPath if anchor := ExtractAnchor(string(ref)); anchor != "" { absRef = absRef + "#" + anchor } else if jp := ref.GetJSONPointer(); jp != "" { @@ -279,7 +279,7 @@ func (s *JSONSchema[Referenceable]) resolve(ctx context.Context, opts references // Use $id as base URI if present in the resolved schema (JSON Schema spec) // The $id keyword identifies a schema resource with its canonical URI // and serves as the base URI for relative references within that schema - baseURI := result.AbsoluteReference + baseURI := result.AbsoluteDocumentPath if !schema.IsBool() && schema.GetSchema() != nil { if schemaID := schema.GetSchema().GetID(); schemaID != "" { baseURI = schemaID @@ -290,6 +290,9 @@ func (s *JSONSchema[Referenceable]) resolve(ctx context.Context, opts references // This enables $id and $anchor resolution within the fetched document setupRemoteSchemaRegistry(ctx, schema, baseURI) + // Collect nested reference schemas that need parent links set + var nestedRefs []*JSONSchemaReferenceable + for item := range Walk(ctx, schema) { _ = item.Match(SchemaMatcher{ Schema: func(js *JSONSchemaReferenceable) error { @@ -301,16 +304,36 @@ func (s *JSONSchema[Referenceable]) resolve(ctx context.Context, opts references localBaseURI = jsID } } + // Get the ref to build absolute reference with fragment + jsRef := js.GetRef() + absRef := utils.BuildAbsoluteReference(localBaseURI, string(jsRef.GetJSONPointer())) js.referenceResolutionCache = &references.ResolveResult[JSONSchemaReferenceable]{ - AbsoluteReference: localBaseURI, - ResolvedDocument: result.ResolvedDocument, + AbsoluteDocumentPath: localBaseURI, + AbsoluteReference: references.Reference(absRef), + ResolvedDocument: result.ResolvedDocument, } + + // Collect this reference for setting parent links after the walk + nestedRefs = append(nestedRefs, js) } return nil }, }) } + // Set parent links for all nested references found during the walk + // This maintains reference chain tracking when accessing properties of resolved schemas + var topLevel *JSONSchemaReferenceable + if s.topLevelParent != nil { + topLevel = s.topLevelParent + } else { + topLevel = (*JSONSchemaReferenceable)(s) + } + for _, js := range nestedRefs { + js.SetParent((*JSONSchemaReferenceable)(s)) + js.SetTopLevelParent(topLevel) + } + s.referenceResolutionCache = result s.validationErrsCache = validationErrs diff 
--git a/jsonschema/oas3/resolution_defs.go b/jsonschema/oas3/resolution_defs.go index 9d12dd86..97b09daf 100644 --- a/jsonschema/oas3/resolution_defs.go +++ b/jsonschema/oas3/resolution_defs.go @@ -5,6 +5,7 @@ import ( "fmt" "strings" + "github.com/speakeasy-api/openapi/internal/utils" "github.com/speakeasy-api/openapi/jsonpointer" "github.com/speakeasy-api/openapi/references" "gopkg.in/yaml.v3" @@ -144,9 +145,11 @@ func (s *JSONSchema[Referenceable]) tryResolveLocalDefs(_ context.Context, ref r absRef = schemaID } + absRefWithFragment := utils.BuildAbsoluteReference(absRef, string(ref.GetJSONPointer())) return &references.ResolveResult[JSONSchemaReferenceable]{ - Object: defSchema, - AbsoluteReference: absRef, + Object: defSchema, + AbsoluteDocumentPath: absRef, + AbsoluteReference: references.Reference(absRefWithFragment), } } diff --git a/jsonschema/oas3/resolution_external.go b/jsonschema/oas3/resolution_external.go index a1860902..a97d1896 100644 --- a/jsonschema/oas3/resolution_external.go +++ b/jsonschema/oas3/resolution_external.go @@ -5,6 +5,7 @@ import ( "errors" "fmt" + "github.com/speakeasy-api/openapi/internal/utils" "github.com/speakeasy-api/openapi/jsonpointer" "github.com/speakeasy-api/openapi/marshaller" "github.com/speakeasy-api/openapi/references" @@ -33,7 +34,7 @@ func (s *JSONSchema[Referenceable]) resolveExternalAnchorReference(ctx context.C // Use $id as base URI if present in the resolved schema (JSON Schema spec) // The $id keyword identifies a schema resource with its canonical URI // and serves as the base URI for anchor lookups within that schema - baseURI := docResult.AbsoluteReference + baseURI := docResult.AbsoluteDocumentPath if !externalDoc.IsBool() && externalDoc.GetSchema() != nil { if schemaID := externalDoc.GetSchema().GetID(); schemaID != "" { baseURI = schemaID @@ -60,8 +61,8 @@ func (s *JSONSchema[Referenceable]) resolveExternalAnchorReference(ctx context.C // This handles the case where the reference uses the retrieval URL instead of the canonical $id // Example: fetch https://example.com/a.json, but $id is https://cdn.example.com/canonical.json // A reference to "https://example.com/a.json#foo" should still resolve - if resolved == nil && docResult.AbsoluteReference != "" && docResult.AbsoluteReference != baseURI { - resolved = registry.LookupByAnchor(docResult.AbsoluteReference, anchor) + if resolved == nil && docResult.AbsoluteDocumentPath != "" && docResult.AbsoluteDocumentPath != baseURI { + resolved = registry.LookupByAnchor(docResult.AbsoluteDocumentPath, anchor) } // Fallback: try with empty base URI @@ -73,10 +74,12 @@ func (s *JSONSchema[Referenceable]) resolveExternalAnchorReference(ctx context.C return nil, validationErrs, fmt.Errorf("anchor not found in external document: %s#%s", ref.GetURI(), anchor) } + absRef := utils.BuildAbsoluteReference(baseURI, "#"+anchor) return &references.ResolveResult[JSONSchemaReferenceable]{ - Object: resolved, - AbsoluteReference: baseURI, - ResolvedDocument: docResult.ResolvedDocument, + Object: resolved, + AbsoluteDocumentPath: baseURI, + AbsoluteReference: references.Reference(absRef), + ResolvedDocument: docResult.ResolvedDocument, }, validationErrs, nil } @@ -105,7 +108,7 @@ func (s *JSONSchema[Referenceable]) resolveExternalRefWithFragment(ctx context.C // Use $id as base URI if present in the resolved schema (JSON Schema spec) // The $id keyword identifies a schema resource with its canonical URI // and serves as the base URI for relative references within that schema - baseURI := 
docResult.AbsoluteReference + baseURI := docResult.AbsoluteDocumentPath if !externalDoc.IsBool() && externalDoc.GetSchema() != nil { if schemaID := externalDoc.GetSchema().GetID(); schemaID != "" { baseURI = schemaID @@ -119,9 +122,10 @@ func (s *JSONSchema[Referenceable]) resolveExternalRefWithFragment(ctx context.C if jp == "" { // No fragment, return the whole document with canonical base URI return &references.ResolveResult[JSONSchemaReferenceable]{ - Object: externalDoc, - AbsoluteReference: baseURI, - ResolvedDocument: docResult.ResolvedDocument, + Object: externalDoc, + AbsoluteDocumentPath: baseURI, + AbsoluteReference: references.Reference(baseURI), + ResolvedDocument: docResult.ResolvedDocument, }, validationErrs, nil } @@ -150,10 +154,12 @@ func (s *JSONSchema[Referenceable]) resolveExternalRefWithFragment(ctx context.C target.GetSchema().SetEffectiveBaseURI(baseURI) } + absRef := utils.BuildAbsoluteReference(baseURI, string(jp)) return &references.ResolveResult[JSONSchemaReferenceable]{ - Object: target, - AbsoluteReference: baseURI, - ResolvedDocument: docResult.ResolvedDocument, + Object: target, + AbsoluteDocumentPath: baseURI, + AbsoluteReference: references.Reference(absRef), + ResolvedDocument: docResult.ResolvedDocument, }, validationErrs, nil } diff --git a/jsonschema/oas3/resolution_registry.go b/jsonschema/oas3/resolution_registry.go index 89b7c14f..2bd0e3eb 100644 --- a/jsonschema/oas3/resolution_registry.go +++ b/jsonschema/oas3/resolution_registry.go @@ -3,6 +3,7 @@ package oas3 import ( "context" + "github.com/speakeasy-api/openapi/internal/utils" "github.com/speakeasy-api/openapi/references" ) @@ -34,10 +35,12 @@ func (s *JSONSchema[Referenceable]) tryResolveViaRegistry(ctx context.Context, r } if resolved := registry.LookupByAnchor(anchorBase, anchor); resolved != nil { + absRef := utils.BuildAbsoluteReference(anchorBase, "#"+anchor) return &references.ResolveResult[JSONSchemaReferenceable]{ - Object: resolved, - AbsoluteReference: anchorBase, - ResolvedDocument: opts.TargetDocument, + Object: resolved, + AbsoluteDocumentPath: anchorBase, + AbsoluteReference: references.Reference(absRef), + ResolvedDocument: opts.TargetDocument, } } @@ -45,10 +48,12 @@ func (s *JSONSchema[Referenceable]) tryResolveViaRegistry(ctx context.Context, r // This handles the case where anchors were registered without a document base URI if ref.GetURI() == "" && anchorBase != "" { if resolved := registry.LookupByAnchor("", anchor); resolved != nil { + absRef := "#" + anchor return &references.ResolveResult[JSONSchemaReferenceable]{ - Object: resolved, - AbsoluteReference: "", - ResolvedDocument: opts.TargetDocument, + Object: resolved, + AbsoluteDocumentPath: "", + AbsoluteReference: references.Reference(absRef), + ResolvedDocument: opts.TargetDocument, } } } @@ -57,10 +62,12 @@ func (s *JSONSchema[Referenceable]) tryResolveViaRegistry(ctx context.Context, r docBase := registry.GetDocumentBaseURI() if docBase != "" && docBase != anchorBase { if resolved := registry.LookupByAnchor(docBase, anchor); resolved != nil { + absRef := utils.BuildAbsoluteReference(docBase, "#"+anchor) return &references.ResolveResult[JSONSchemaReferenceable]{ - Object: resolved, - AbsoluteReference: docBase, - ResolvedDocument: opts.TargetDocument, + Object: resolved, + AbsoluteDocumentPath: docBase, + AbsoluteReference: references.Reference(absRef), + ResolvedDocument: opts.TargetDocument, } } } @@ -108,19 +115,22 @@ func (s *JSONSchema[Referenceable]) tryResolveViaRegistry(ctx context.Context, r // If there's 
no JSON pointer, return the schema directly if jp == "" { return &references.ResolveResult[JSONSchemaReferenceable]{ - Object: resolvedSchema, - AbsoluteReference: absoluteReference, - ResolvedDocument: opts.TargetDocument, + Object: resolvedSchema, + AbsoluteDocumentPath: absoluteReference, + AbsoluteReference: references.Reference(absoluteReference), + ResolvedDocument: opts.TargetDocument, } } // There's a JSON pointer - navigate within the found schema target, err := navigateJSONPointer(ctx, resolvedSchema, jp) if err == nil && target != nil { + absRef := utils.BuildAbsoluteReference(absoluteReference, string(jp)) return &references.ResolveResult[JSONSchemaReferenceable]{ - Object: target, - AbsoluteReference: absoluteReference, - ResolvedDocument: opts.TargetDocument, + Object: target, + AbsoluteDocumentPath: absoluteReference, + AbsoluteReference: references.Reference(absRef), + ResolvedDocument: opts.TargetDocument, } } // If navigation failed, fall through to external resolution @@ -171,8 +181,8 @@ func (s *JSONSchema[Referenceable]) getEffectiveBaseURI(opts references.ResolveO } // Check if we have a cached absolute reference - if s.referenceResolutionCache != nil && s.referenceResolutionCache.AbsoluteReference != "" { - return s.referenceResolutionCache.AbsoluteReference + if s.referenceResolutionCache != nil && s.referenceResolutionCache.AbsoluteDocumentPath != "" { + return s.referenceResolutionCache.AbsoluteDocumentPath } // Fall back to target location diff --git a/jsonschema/oas3/resolution_test.go b/jsonschema/oas3/resolution_test.go index 2a5ba3e4..5100bb26 100644 --- a/jsonschema/oas3/resolution_test.go +++ b/jsonschema/oas3/resolution_test.go @@ -23,14 +23,16 @@ import ( // MockResolutionTarget implements references.ResolutionTarget for testing type MockResolutionTarget struct { - objCache map[string]any - docCache map[string][]byte + objCache map[string]any + docCache map[string][]byte + extDocCache map[string]any } func NewMockResolutionTarget() *MockResolutionTarget { return &MockResolutionTarget{ - objCache: make(map[string]any), - docCache: make(map[string][]byte), + objCache: make(map[string]any), + docCache: make(map[string][]byte), + extDocCache: make(map[string]any), } } @@ -59,6 +61,18 @@ func (m *MockResolutionTarget) InitCache() { if m.docCache == nil { m.docCache = make(map[string][]byte) } + if m.extDocCache == nil { + m.extDocCache = make(map[string]any) + } +} + +func (m *MockResolutionTarget) GetCachedExternalDocument(key string) (any, bool) { + data, exists := m.extDocCache[key] + return data, exists +} + +func (m *MockResolutionTarget) StoreExternalDocumentInCache(key string, doc any) { + m.extDocCache[key] = doc } // MockVirtualFS implements system.VirtualFS for testing @@ -485,9 +499,9 @@ func TestJSONSchema_Resolve_Caching(t *testing.T) { // Set up cached resolved schema using the actual cache field schema.referenceResolutionCache = &references.ResolveResult[JSONSchema[Referenceable]]{ - Object: resolved, - AbsoluteReference: "testdata/simple_schema.yaml#/components/schemas/User", - ResolvedDocument: resolved, + Object: resolved, + AbsoluteDocumentPath: "testdata/simple_schema.yaml#/components/schemas/User", + ResolvedDocument: resolved, } root, err := LoadTestSchemaFromFile(t.Context(), "testdata/simple_schema.yaml") @@ -1928,7 +1942,7 @@ func TestGetEffectiveBaseURI_Success(t *testing.T) { schema := createSchemaWithRef("#foo") schema.referenceResolutionCache = &references.ResolveResult[JSONSchema[Referenceable]]{ - AbsoluteReference: 
"https://example.com/cached.json", + AbsoluteDocumentPath: "https://example.com/cached.json", } opts := ResolveOptions{ diff --git a/jsonschema/oas3/schema.go b/jsonschema/oas3/schema.go index 88b14bfe..495f2964 100644 --- a/jsonschema/oas3/schema.go +++ b/jsonschema/oas3/schema.go @@ -494,6 +494,68 @@ func (s *Schema) GetFormat() string { return *s.Format } +// IsReferenceOnly returns true if this schema only contains a $ref and no other properties. +// This is used for the no-ref-siblings linter rule in OAS 3.0.x (in OAS 3.1+, $ref can have siblings). +func (s *Schema) IsReferenceOnly() bool { + if !s.IsReference() { + return false + } + + // Check all schema fields - if any are set, it's not reference-only + return s.Type == nil && + len(s.AllOf) == 0 && + len(s.OneOf) == 0 && + len(s.AnyOf) == 0 && + s.Discriminator == nil && + len(s.Examples) == 0 && + len(s.PrefixItems) == 0 && + s.Contains == nil && + s.MinContains == nil && + s.MaxContains == nil && + s.If == nil && + s.Else == nil && + s.Then == nil && + (s.DependentSchemas == nil || s.DependentSchemas.Len() == 0) && + (s.PatternProperties == nil || s.PatternProperties.Len() == 0) && + s.PropertyNames == nil && + s.UnevaluatedItems == nil && + s.UnevaluatedProperties == nil && + s.Items == nil && + s.Anchor == nil && + s.ID == nil && + s.Not == nil && + (s.Properties == nil || s.Properties.Len() == 0) && + (s.Defs == nil || s.Defs.Len() == 0) && + s.Title == nil && + s.MultipleOf == nil && + s.Maximum == nil && + s.Minimum == nil && + s.MaxLength == nil && + s.MinLength == nil && + s.Pattern == nil && + s.Format == nil && + s.MaxItems == nil && + s.MinItems == nil && + s.UniqueItems == nil && + s.MaxProperties == nil && + s.MinProperties == nil && + len(s.Required) == 0 && + len(s.Enum) == 0 && + s.AdditionalProperties == nil && + s.Description == nil && + s.Default == nil && + s.Const == nil && + s.Nullable == nil && + s.ReadOnly == nil && + s.WriteOnly == nil && + s.ExternalDocs == nil && + s.Example == nil && + s.Deprecated == nil && + s.Schema == nil && + s.XML == nil && + (s.Extensions == nil || s.Extensions.Len() == 0) +} + // GetMaxItems returns the value of the MaxItems field. Returns nil if not set. 
func (s *Schema) GetMaxItems() *int64 { if s == nil { diff --git a/jsonschema/oas3/schema_exclusive_validation_test.go b/jsonschema/oas3/schema_exclusive_validation_test.go index b2b16c93..dd5a98e6 100644 --- a/jsonschema/oas3/schema_exclusive_validation_test.go +++ b/jsonschema/oas3/schema_exclusive_validation_test.go @@ -227,7 +227,7 @@ exclusiveMinimum: true exclusiveMaximum: false `, openAPIVersion: pointer.From("3.1.0"), - wantErrs: []string{"[5:19] schema.exclusiveMinimum expected number, got boolean", "[6:19] schema.exclusiveMaximum expected number, got boolean"}, + wantErrs: []string{"[5:19] error validation-type-mismatch schema.exclusiveMinimum expected `number`, got `boolean`", "[6:19] error validation-type-mismatch schema.exclusiveMaximum expected `number`, got `boolean`"}, }, { name: "boolean exclusiveMinimum with 3.1 $schema should fail", @@ -239,7 +239,7 @@ maximum: 100 exclusiveMinimum: true exclusiveMaximum: false `, - wantErrs: []string{"[6:19] schema.exclusiveMinimum expected number, got boolean", "[7:19] schema.exclusiveMaximum expected number, got boolean"}, + wantErrs: []string{"[6:19] error validation-type-mismatch schema.exclusiveMinimum expected `number`, got `boolean`", "[7:19] error validation-type-mismatch schema.exclusiveMaximum expected `number`, got `boolean`"}, }, // Invalid types should always fail { @@ -248,7 +248,7 @@ exclusiveMaximum: false type: number exclusiveMinimum: "invalid" `, - wantErrs: []string{"[2:1] schema.exclusiveMinimum expected number, got string", "[3:19] schema.exclusiveMinimum failed to validate either bool [schema.exclusiveMinimum line 3: cannot unmarshal !!str `invalid` into bool] or float64 [schema.exclusiveMinimum line 3: cannot unmarshal !!str `invalid` into float64]"}, + wantErrs: []string{"[2:1] error validation-type-mismatch schema.exclusiveMinimum expected `number`, got `string`", "[3:19] error validation-type-mismatch schema.exclusiveMinimum failed to validate either bool [schema.exclusiveMinimum line 3: cannot unmarshal !!str `invalid` into bool] or float64 [schema.exclusiveMinimum line 3: cannot unmarshal !!str `invalid` into float64]"}, }, { name: "invalid string type for exclusiveMaximum", @@ -256,7 +256,7 @@ exclusiveMinimum: "invalid" type: number exclusiveMaximum: "invalid" `, - wantErrs: []string{"[2:1] schema.exclusiveMaximum expected number, got string", "[3:19] schema.exclusiveMaximum failed to validate either bool [schema.exclusiveMaximum line 3: cannot unmarshal !!str `invalid` into bool] or float64 [schema.exclusiveMaximum line 3: cannot unmarshal !!str `invalid` into float64]"}, + wantErrs: []string{"[2:1] error validation-type-mismatch schema.exclusiveMaximum expected `number`, got `string`", "[3:19] error validation-type-mismatch schema.exclusiveMaximum failed to validate either bool [schema.exclusiveMaximum line 3: cannot unmarshal !!str `invalid` into bool] or float64 [schema.exclusiveMaximum line 3: cannot unmarshal !!str `invalid` into float64]"}, }, { name: "invalid array type for exclusiveMinimum", @@ -264,7 +264,7 @@ exclusiveMaximum: "invalid" type: number exclusiveMinimum: [1, 2, 3] `, - wantErrs: []string{"[2:1] schema.exclusiveMinimum expected number, got array", "[3:19] schema.exclusiveMinimum failed to validate either bool [schema.exclusiveMinimum expected bool, got sequence] or float64 [schema.exclusiveMinimum expected float64, got sequence]"}, + wantErrs: []string{"[2:1] error validation-type-mismatch schema.exclusiveMinimum expected `number`, got `array`", "[3:19] error validation-type-mismatch 
schema.exclusiveMinimum failed to validate either bool [schema.exclusiveMinimum expected `bool`, got `sequence`] or float64 [schema.exclusiveMinimum expected `float64`, got `sequence`]"}, }, // Mixed boolean and numeric should fail with OpenAPI 3.0 (only supports boolean) { @@ -276,7 +276,7 @@ exclusiveMinimum: true exclusiveMaximum: 50.5 `, openAPIVersion: pointer.From("3.0.3"), - wantErrs: []string{"[5:19] schema.exclusiveMaximum expected boolean, got number"}, + wantErrs: []string{"[5:19] error validation-type-mismatch schema.exclusiveMaximum expected `boolean`, got `number`"}, }, { name: "mixed numeric exclusiveMinimum and boolean exclusiveMaximum with OpenAPI 3.0 should fail", @@ -287,7 +287,7 @@ exclusiveMinimum: 0.5 exclusiveMaximum: true `, openAPIVersion: pointer.From("3.0.3"), - wantErrs: []string{"[4:19] schema.exclusiveMinimum expected boolean, got number"}, + wantErrs: []string{"[4:19] error validation-type-mismatch schema.exclusiveMinimum expected `boolean`, got `number`"}, }, } diff --git a/jsonschema/oas3/schema_validate_test.go b/jsonschema/oas3/schema_validate_test.go index 644c6c63..8dafa271 100644 --- a/jsonschema/oas3/schema_validate_test.go +++ b/jsonschema/oas3/schema_validate_test.go @@ -379,8 +379,8 @@ externalDocs: description: More information `, wantErrs: []string{ - "[2:1] schema.externalDocs missing property 'url'", - "[5:3] externalDocumentation.url is missing", + "[2:1] error validation-required-field `schema.externalDocs` missing property `url`", + "[5:3] error validation-required-field `externalDocumentation.url` is required", }, }, { @@ -390,8 +390,8 @@ type: invalid_type title: Invalid Type `, wantErrs: []string{ - "[2:7] schema.type expected array, got string", - "[2:7] schema.type value must be one of 'array', 'boolean', 'integer', 'null', 'number', 'object', 'string'", + "[2:7] error validation-invalid-schema schema.type value must be one of 'array', 'boolean', 'integer', 'null', 'number', 'object', 'string'", + "[2:7] error validation-type-mismatch schema.type expected `array`, got `string`", }, }, { @@ -400,7 +400,7 @@ title: Invalid Type type: string minLength: -1 `, - wantErrs: []string{"[3:12] schema.minLength minimum: got -1, want 0"}, + wantErrs: []string{"[3:12] error validation-invalid-schema schema.minLength minimum: got -1, want 0"}, }, { name: "negative multipleOf", @@ -408,7 +408,7 @@ minLength: -1 type: number multipleOf: -1 `, - wantErrs: []string{"[3:13] schema.multipleOf exclusiveMinimum: got -1, want 0"}, + wantErrs: []string{"[3:13] error validation-invalid-schema schema.multipleOf exclusiveMinimum: got -1, want 0"}, }, { name: "zero multipleOf", @@ -416,7 +416,7 @@ multipleOf: -1 type: number multipleOf: 0 `, - wantErrs: []string{"[3:13] schema.multipleOf exclusiveMinimum: got 0, want 0"}, + wantErrs: []string{"[3:13] error validation-invalid-schema schema.multipleOf exclusiveMinimum: got 0, want 0"}, }, { name: "invalid additionalProperties type", @@ -425,9 +425,9 @@ type: object additionalProperties: "invalid" `, wantErrs: []string{ - "[2:1] schema.additionalProperties expected one of [boolean, object], got string", - "[2:1] schema.additionalProperties expected one of [boolean, object], got string", - "[3:23] schema.additionalProperties failed to validate either Schema [schema.additionalProperties expected object, got `invalid`] or bool [schema.additionalProperties line 3: cannot unmarshal !!str `invalid` into bool]", + "[2:1] error validation-type-mismatch schema.additionalProperties expected one of [`boolean`, `object`], got 
`string`", + "[2:1] error validation-type-mismatch schema.additionalProperties expected one of [`boolean`, `object`], got `string`", + "[3:23] error validation-type-mismatch schema.additionalProperties failed to validate either Schema [schema.additionalProperties expected `object`, got `invalid`] or bool [schema.additionalProperties line 3: cannot unmarshal !!str `invalid` into bool]", }, }, { @@ -436,7 +436,7 @@ additionalProperties: "invalid" type: array minItems: -1 `, - wantErrs: []string{"[3:11] schema.minItems minimum: got -1, want 0"}, + wantErrs: []string{"[3:11] error validation-invalid-schema schema.minItems minimum: got -1, want 0"}, }, { name: "negative minProperties", @@ -444,7 +444,7 @@ minItems: -1 type: object minProperties: -1 `, - wantErrs: []string{"[3:16] schema.minProperties minimum: got -1, want 0"}, + wantErrs: []string{"[3:16] error validation-invalid-schema schema.minProperties minimum: got -1, want 0"}, }, { name: "invalid items type", @@ -453,9 +453,9 @@ type: array items: "invalid" `, wantErrs: []string{ - "[2:1] schema.items expected one of [boolean, object], got string", - "[2:1] schema.items expected one of [boolean, object], got string", - "[3:8] schema.items failed to validate either Schema [schema.items expected object, got `invalid`] or bool [schema.items line 3: cannot unmarshal !!str `invalid` into bool]", + "[2:1] error validation-type-mismatch schema.items expected one of [`boolean`, `object`], got `string`", + "[2:1] error validation-type-mismatch schema.items expected one of [`boolean`, `object`], got `string`", + "[3:8] error validation-type-mismatch schema.items failed to validate either Schema [schema.items expected `object`, got `invalid`] or bool [schema.items line 3: cannot unmarshal !!str `invalid` into bool]", }, }, { @@ -465,8 +465,8 @@ type: object required: "invalid" `, wantErrs: []string{ - "[2:1] schema.required expected array, got string", - "[3:11] schema.required expected sequence, got `invalid`", + "[2:1] error validation-type-mismatch schema.required expected `array`, got `string`", + "[3:11] error validation-type-mismatch schema.required expected `sequence`, got `invalid`", }, }, { @@ -475,8 +475,8 @@ required: "invalid" allOf: "invalid" `, wantErrs: []string{ - "[2:1] schema.allOf expected array, got string", - "[2:8] schema.allOf expected sequence, got `invalid`", + "[2:1] error validation-type-mismatch schema.allOf expected `array`, got `string`", + "[2:8] error validation-type-mismatch schema.allOf expected `sequence`, got `invalid`", }, }, { @@ -485,8 +485,8 @@ allOf: "invalid" anyOf: "invalid" `, wantErrs: []string{ - "[2:1] schema.anyOf expected array, got string", - "[2:8] schema.anyOf expected sequence, got `invalid`", + "[2:1] error validation-type-mismatch schema.anyOf expected `array`, got `string`", + "[2:8] error validation-type-mismatch schema.anyOf expected `sequence`, got `invalid`", }, }, { @@ -495,8 +495,8 @@ anyOf: "invalid" oneOf: "invalid" `, wantErrs: []string{ - "[2:1] schema.oneOf expected array, got string", - "[2:8] schema.oneOf expected sequence, got `invalid`", + "[2:1] error validation-type-mismatch schema.oneOf expected `array`, got `string`", + "[2:8] error validation-type-mismatch schema.oneOf expected `sequence`, got `invalid`", }, }, { @@ -506,49 +506,49 @@ $schema: "https://spec.openapis.org/oas/3.0/dialect/2024-10-18" $ref: "#/components/schemas/User" required: ["name", "email"] `, - wantErrs: []string{"[2:1] schema. 
additional properties '$ref' not allowed"}, + wantErrs: []string{"[2:1] error validation-invalid-schema schema. additional properties '$ref' not allowed"}, }, { name: "empty component name in $ref", yml: ` $ref: "#/components/schemas/" `, - wantErrs: []string{"[2:1] invalid reference: component name cannot be empty"}, + wantErrs: []string{"[2:1] error validation-invalid-reference invalid reference: component name cannot be empty"}, }, { name: "missing component name in $ref", yml: ` $ref: "#/components/schemas" `, - wantErrs: []string{"[2:1] invalid reference: component name cannot be empty"}, + wantErrs: []string{"[2:1] error validation-invalid-reference invalid reference: component name cannot be empty"}, }, { name: "component name with invalid characters in $ref", yml: ` $ref: "#/components/schemas/User@Schema" `, - wantErrs: []string{`[2:1] invalid reference: component name "User@Schema" must match pattern ^[a-zA-Z0-9.\-_]+$`}, + wantErrs: []string{`[2:1] error validation-invalid-reference invalid reference: component name "User@Schema" must match pattern ^[a-zA-Z0-9.\-_]+$`}, }, { name: "component name with space in $ref", yml: ` $ref: "#/components/schemas/User Schema" `, - wantErrs: []string{`[2:1] invalid reference: component name "User Schema" must match pattern ^[a-zA-Z0-9.\-_]+$`}, + wantErrs: []string{`[2:1] error validation-invalid-reference invalid reference: component name "User Schema" must match pattern ^[a-zA-Z0-9.\-_]+$`}, }, { name: "invalid JSON pointer - missing leading slash in $ref", yml: ` $ref: "#components/schemas/User" `, - wantErrs: []string{"[2:1] invalid reference JSON pointer: validation error -- jsonpointer must start with /: components/schemas/User"}, + wantErrs: []string{"[2:1] error validation-invalid-reference invalid reference JSON pointer: validation error -- jsonpointer must start with /: components/schemas/User"}, }, { name: "empty JSON pointer in $ref", yml: ` $ref: "#" `, - wantErrs: []string{"[2:1] invalid reference JSON pointer: empty"}, + wantErrs: []string{"[2:1] error validation-invalid-reference invalid reference JSON pointer: empty"}, }, } diff --git a/jsonschema/oas3/validation.go b/jsonschema/oas3/validation.go index b27ac26f..07716155 100644 --- a/jsonschema/oas3/validation.go +++ b/jsonschema/oas3/validation.go @@ -85,7 +85,7 @@ func (js *Schema) Validate(ctx context.Context, opts ...validation.Option) []err // Validate reference string if present if js.IsReference() { if err := js.GetRef().Validate(); err != nil { - errs = append(errs, validation.NewValidationError(err, js.GetCore().Ref.GetKeyNodeOrRoot(js.GetRootNode()))) + errs = append(errs, validation.NewValidationError(validation.SeverityError, validation.RuleValidationInvalidReference, err, js.GetCore().Ref.GetKeyNodeOrRoot(js.GetRootNode()))) } } @@ -129,14 +129,14 @@ func (js *Schema) Validate(ctx context.Context, opts ...validation.Option) []err if err := json.YAMLToJSON(core.RootNode, 0, buf); err != nil { return []error{ - validation.NewValidationError(fmt.Errorf("schema is not valid json: %w", err), core.RootNode), + validation.NewValidationError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("schema is not valid json: %w", err), core.RootNode), } } jsAny, err := jsValidator.UnmarshalJSON(buf) if err != nil { return []error{ - validation.NewValidationError(fmt.Errorf("schema is not valid json: %w", err), core.RootNode), + validation.NewValidationError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("schema is not 
valid json: %w", err), core.RootNode), } } @@ -146,7 +146,7 @@ func (js *Schema) Validate(ctx context.Context, opts ...validation.Option) []err if errors.As(err, &validationErr) { errs = append(errs, getRootCauses(validationErr, *core)...) } else { - errs = append(errs, validation.NewValidationError(validation.NewValueValidationError("schema invalid: %s", err.Error()), core.RootNode)) + errs = append(errs, validation.NewValidationError(validation.SeverityError, validation.RuleValidationInvalidSchema, fmt.Errorf("schema invalid: %s", err.Error()), core.RootNode)) } } @@ -172,7 +172,7 @@ func getRootCauses(err *jsValidator.ValidationError, js core.Schema) []error { t, err := jsonpointer.GetTarget(js, errJP, jsonpointer.WithStructTags("key")) if err != nil { - errs = append(errs, validation.NewValidationError(err, js.GetRootNode())) + errs = append(errs, validation.NewValidationError(validation.SeverityError, validation.RuleValidationInvalidTarget, err, js.GetRootNode())) continue } @@ -199,18 +199,25 @@ func getRootCauses(err *jsValidator.ValidationError, js core.Schema) []error { case *kind.Type: var want string if len(t.Want) == 1 { - want = t.Want[0] + want = "`" + t.Want[0] + "`" } else { - want = fmt.Sprintf("one of [%s]", strings.Join(t.Want, ", ")) + // Wrap each type in backticks + wrappedTypes := make([]string, len(t.Want)) + for i, typ := range t.Want { + wrappedTypes[i] = "`" + typ + "`" + } + want = fmt.Sprintf("one of [%s]", strings.Join(wrappedTypes, ", ")) } - msg = fmt.Sprintf("expected %s, got %s", want, t.Got) + msg = fmt.Sprintf("expected %s, got `%s`", want, t.Got) - newErr = validation.NewValidationError(validation.NewTypeMismatchError(parentName, msg), valueNode) + newErr = validation.NewValidationError(validation.SeverityError, validation.RuleValidationTypeMismatch, validation.NewTypeMismatchError(parentName, msg), valueNode) case *kind.Required: - newErr = validation.NewValidationError(validation.NewMissingFieldError("%s %s", parentName, msg), valueNode) + // Replace single quotes with backticks in the message + msg = strings.ReplaceAll(msg, "'", "`") + newErr = validation.NewValidationError(validation.SeverityError, validation.RuleValidationRequiredField, fmt.Errorf("`%s` %s", parentName, msg), valueNode) default: - newErr = validation.NewValidationError(validation.NewValueValidationError("%s %s", parentName, msg), valueNode) + newErr = validation.NewValidationError(validation.SeverityError, validation.RuleValidationInvalidSchema, fmt.Errorf("%s %s", parentName, msg), valueNode) } if newErr != nil { errs = append(errs, newErr) diff --git a/jsonschema/oas3/walk.go b/jsonschema/oas3/walk.go index f2a9440b..34c3f438 100644 --- a/jsonschema/oas3/walk.go +++ b/jsonschema/oas3/walk.go @@ -65,7 +65,7 @@ func walkSchema(ctx context.Context, schema *JSONSchema[Referenceable], loc walk } if schema.IsSchema() { - js := schema.Left + js := schema.GetSchema() // Walk through allOf schemas for i, schema := range js.AllOf { diff --git a/jsonschema/oas3/xml.go b/jsonschema/oas3/xml.go index e8c16ac0..052dc46b 100644 --- a/jsonschema/oas3/xml.go +++ b/jsonschema/oas3/xml.go @@ -2,6 +2,7 @@ package oas3 import ( "context" + "fmt" "net/url" "reflect" @@ -124,9 +125,9 @@ func (x *XML) Validate(ctx context.Context, opts ...validation.Option) []error { if x.Namespace != nil { u, err := url.Parse(*x.Namespace) if err != nil { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("xml.namespace is not a valid uri: %s", err), core, core.Namespace)) + errs = 
append(errs, validation.NewValueError(validation.SeverityWarning, validation.RuleValidationInvalidFormat, fmt.Errorf("xml.namespace is not a valid uri: %w", err), core, core.Namespace))
 		} else if !u.IsAbs() {
-			errs = append(errs, validation.NewValueError(validation.NewValueValidationError("xml.namespace must be an absolute uri: %s", *x.Namespace), core, core.Namespace))
+			errs = append(errs, validation.NewValueError(validation.SeverityWarning, validation.RuleValidationInvalidFormat, fmt.Errorf("xml.namespace must be an absolute uri: %s", *x.Namespace), core, core.Namespace))
 		}
 	}
diff --git a/linter/README.md b/linter/README.md
new file mode 100644
index 00000000..81b98137
--- /dev/null
+++ b/linter/README.md
@@ -0,0 +1,260 @@
+# Linter Engine
+
+This document provides an overview of the linter engine implementation.
+
+## Architecture Overview
+
+The linter engine is a generic, spec-agnostic framework for implementing configurable linting rules across different API specifications (OpenAPI, Arazzo, Swagger).
+
+### Core Components
+
+1. **Generic Linter Engine** ([`linter/`](linter/))
+   - [`Linter[T]`](linter/linter.go) - Main linting engine with configuration support
+   - [`Registry[T]`](linter/registry.go) - Rule registry with category management
+   - [`Rule`](linter/rule.go) - Base rule interface and specialized interfaces
+   - [`RuleConfig`](linter/config.go) - Per-rule configuration with severity overrides
+   - [`DocumentInfo[T]`](linter/document.go) - Document + location for reference resolution
+   - Format types for text and JSON output
+   - Parallel rule execution for improved performance
+
+2. **OpenAPI Linter** ([`openapi/linter/`](openapi/linter/))
+   - OpenAPI-specific linter implementation
+   - Rule registry with built-in rules
+   - Integration with OpenAPI parser and validator
+
+3. **Rules** ([`openapi/linter/rules/`](openapi/linter/rules/))
+   - Individual linting rules (e.g., [`style-path-params`](openapi/linter/rules/path_params.go))
+   - Each rule implements the [`RuleRunner[*openapi.OpenAPI]`](linter/rule.go) interface
+
+4. **CLI Integration** ([`cmd/openapi/commands/openapi/lint.go`](cmd/openapi/commands/openapi/lint.go))
+   - `openapi spec lint` command
+   - Configuration file support (`lint.yaml`)
+   - Rule documentation generation (`--list-rules`)
+
+## Key Features
+
+### 1. Rule Configuration
+
+Rules can be configured via a YAML configuration file:
+
+```yaml
+extends:
+  - all # or specific rulesets like "recommended", "strict"
+
+categories:
+  style:
+    enabled: true
+    severity: warning
+
+rules:
+  - id: style-path-params
+    severity: error
+
+  - id: validation-required-field
+    match: ".*info\\.title is required.*"
+    disabled: true
+```
+
+### 2. Severity Overrides
+
+Rules have default severities that can be overridden:
+- Fatal errors (terminate execution)
+- Error severity (build failures)
+- Warning severity (informational)
+
+### 3. External Reference Resolution
+
+Rules automatically resolve external references (HTTP URLs, file paths):
+
+```yaml
+paths:
+  /users/{userId}:
+    get:
+      parameters:
+        - $ref: "https://example.com/params/user-id.yaml"
+      responses:
+        '200':
+          description: ok
+```
+
+The linter:
+- Uses [`DocumentInfo.Location`](linter/document.go) as the base for resolving relative references
+- Supports custom HTTP clients and virtual filesystems via [`LintOptions.ResolveOptions`](linter/document.go)
+- Reports resolution errors as validation errors with proper severity and location
+
+### 4.
Quick Fix Suggestions + +Rules can suggest fixes using [`validation.Error`](validation/validation.go) with quick fix support: + +```go +validation.NewValidationErrorWithQuickFix( + severity, + rule, + fmt.Errorf("path parameter {%s} is not defined", param), + node, + &validation.QuickFix{ + Description: "Add missing path parameter", + Replacement: "...", + }, +) +``` + +## Implemented Rules + +### style-path-params + +Ensures path template variables (e.g., `{userId}`) have corresponding parameter definitions with `in='path'`. + +**Checks:** +- All template params must have corresponding parameter definitions +- All path parameters must be used in the template +- Works with parameters at PathItem level (inherited) and Operation level (can override) +- Resolves external references to parameters + +**Example:** + +```yaml +# ✅ Valid +paths: + /users/{userId}: + get: + parameters: + - name: userId + in: path + required: true + +# ❌ Invalid - missing parameter definition +paths: + /users/{userId}: + get: + responses: + '200': + description: ok +``` + +## Usage + +### CLI + +```bash +# Lint with default configuration +openapi spec lint openapi.yaml + +# Lint with custom config +openapi spec lint --config /path/to/lint.yaml openapi.yaml + +# List all available rules +openapi spec lint --list-rules + +# Output in JSON format +openapi spec lint --format json openapi.yaml +``` + +### Programmatic + +```go +import ( + "context" + "github.com/speakeasy-api/openapi/linter" + openapiLinter "github.com/speakeasy-api/openapi/openapi/linter" +) + +// Create linter with configuration +config := &linter.Config{ + Extends: []string{"all"}, +} +lntr := openapiLinter.NewOpenAPILinter(config) + +// Lint document +docInfo := &linter.DocumentInfo[*openapi.OpenAPI]{ + Document: doc, + Location: "/path/to/openapi.yaml", +} +output, err := lntr.Lint(ctx, docInfo, nil, nil) +if err != nil { + // Handle error +} + +// Check results +if output.HasErrors() { + fmt.Println(output.FormatText()) +} +``` + +## Filtering Errors After Linting + +To apply the config filters to additional errors after the initial lint (for example, errors discovered during lazy reference resolution), use [`FilterErrors`](linter/linter.go:237): + +```go +filtered := lntr.FilterErrors(extraErrors) +``` + +## Adding New Rules + +To add a new rule: + +1. **Create the rule** in [`openapi/linter/rules/`](openapi/linter/rules/) + +```go +type MyRule struct{} + +func (r *MyRule) ID() string { return "style-my-rule" } +func (r *MyRule) Category() string { return "style" } +func (r *MyRule) Description() string { return "..." } +func (r *MyRule) Link() string { return "..." } +func (r *MyRule) DefaultSeverity() validation.Severity { + return validation.SeverityWarning +} +func (r *MyRule) Versions() []string { return nil } + +func (r *MyRule) Run(ctx context.Context, docInfo *linter.DocumentInfo[*openapi.OpenAPI], config *linter.RuleConfig) []error { + doc := docInfo.Document + // Implement rule logic + // Use openapi.Walk() to traverse the document + // Return validation.Error instances for violations + return nil +} +``` + +2. **Register the rule** in [`openapi/linter/linter.go`](openapi/linter/linter.go) + +```go +registry.Register(&rules.MyRule{}) +``` + +3. **Write tests** in [`openapi/linter/rules/my_rule_test.go`](openapi/linter/rules/) + +```go +func TestMyRule_Success(t *testing.T) { + t.Parallel() + // ... 
test implementation +} +``` + +## Custom Rule Loading + +The linter engine supports custom rule loaders that can be registered via the `RegisterCustomRuleLoader` function. This allows spec-specific linters to support custom rules written in different languages or formats. + +```go +// CustomRuleLoaderFunc loads custom rules from configuration +type CustomRuleLoaderFunc func(config *CustomRulesConfig) ([]RuleRunner[T], error) + +// Register a custom rule loader +linter.RegisterCustomRuleLoader(myLoader) +``` + +Custom rules loaded through registered loaders: + +- Are automatically registered with the rule registry +- Support the same configuration options as built-in rules (severity, disabled, match) +- Integrate seamlessly with category-based configuration + +## Design Principles + +1. **Generic Architecture** - The core linter is spec-agnostic (`Linter[T any]`) +2. **Type Safety** - Spec-specific rules use typed interfaces (`RuleRunner[*openapi.OpenAPI]`) +3. **Separation of Concerns** - Core engine, spec linters, and rules are separate packages +4. **Extensibility** - Easy to add new rules, rulesets, specs, and custom rule loaders +5. **Configuration Over Code** - Rule behavior controlled via YAML config +6. **Reference Resolution** - Automatic external reference resolution with proper error handling +7. **Testing** - Comprehensive test coverage with parallel execution diff --git a/linter/config.go b/linter/config.go new file mode 100644 index 00000000..97d199e7 --- /dev/null +++ b/linter/config.go @@ -0,0 +1,207 @@ +package linter + +import ( + "errors" + "fmt" + "regexp" + "strings" + "time" + + "github.com/speakeasy-api/openapi/references" + "github.com/speakeasy-api/openapi/validation" + "gopkg.in/yaml.v3" +) + +// Config represents the linter configuration +type Config struct { + // Extends specifies rulesets to extend (e.g., "recommended", "all") + Extends []string `yaml:"extends,omitempty" json:"extends,omitempty"` + + // Rules contains per-rule configuration + Rules []RuleEntry `yaml:"rules,omitempty" json:"rules,omitempty"` + + // Categories contains per-category configuration + Categories map[string]CategoryConfig `yaml:"categories,omitempty" json:"categories,omitempty"` + + // OutputFormat specifies the output format + OutputFormat OutputFormat `yaml:"output_format,omitempty" json:"output_format,omitempty"` + + // CustomRules configures custom rule loading (requires customrules package import) + CustomRules *CustomRulesConfig `yaml:"custom_rules,omitempty" json:"custom_rules,omitempty"` +} + +// CustomRulesConfig configures custom rule loading. +// This is the YAML-serializable configuration. The customrules package +// extends this with additional programmatic options like Logger. +type CustomRulesConfig struct { + // Paths are glob patterns for rule files (e.g., "./rules/*.ts") + Paths []string `yaml:"paths,omitempty" json:"paths,omitempty"` + + // Timeout is the maximum execution time per rule (default: 30s) + Timeout time.Duration `yaml:"timeout,omitempty" json:"timeout,omitempty"` +} + +// UnmarshalYAML supports "extends" as string or list and severity aliases. 
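+// For example, both of the following forms are accepted and normalized to a list
+// (as exercised by the config tests in this change):
+//
+//	extends: recommended
+//
+//	extends:
+//	  - recommended
+//	  - strict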
+func (c *Config) UnmarshalYAML(value *yaml.Node) error { + var raw struct { + Extends yaml.Node `yaml:"extends,omitempty"` + Rules []RuleEntry `yaml:"rules,omitempty"` + Categories map[string]CategoryConfig `yaml:"categories,omitempty"` + OutputFormat OutputFormat `yaml:"output_format,omitempty"` + CustomRules *CustomRulesConfig `yaml:"custom_rules,omitempty"` + } + if err := value.Decode(&raw); err != nil { + return err + } + + if raw.Extends.Kind != 0 { + switch raw.Extends.Kind { + case yaml.ScalarNode: + switch raw.Extends.Tag { + case "!!null": + c.Extends = nil + case "!!str", "": + c.Extends = []string{raw.Extends.Value} + default: + return errors.New("extends must be a string or list of strings") + } + case yaml.SequenceNode: + var list []string + if err := raw.Extends.Decode(&list); err != nil { + return err + } + c.Extends = list + default: + return errors.New("extends must be a string or list of strings") + } + } + + c.Rules = raw.Rules + c.Categories = raw.Categories + c.OutputFormat = raw.OutputFormat + c.CustomRules = raw.CustomRules + return nil +} + +// RuleEntry configures rule behavior in lint.yaml. +type RuleEntry struct { + ID string `yaml:"id" json:"id"` + Severity *validation.Severity `yaml:"severity,omitempty" json:"severity,omitempty"` + Disabled *bool `yaml:"disabled,omitempty" json:"disabled,omitempty"` + Match *regexp.Regexp `yaml:"match,omitempty" json:"match,omitempty"` +} + +// UnmarshalYAML allows severity aliases (warn, info) in rule entries. +func (r *RuleEntry) UnmarshalYAML(value *yaml.Node) error { + var raw struct { + ID string `yaml:"id"` + Severity *string `yaml:"severity,omitempty"` + Disabled *bool `yaml:"disabled,omitempty"` + Match *regexp.Regexp `yaml:"match,omitempty"` + } + if err := value.Decode(&raw); err != nil { + return err + } + + r.ID = raw.ID + r.Disabled = raw.Disabled + r.Match = raw.Match + if raw.Severity != nil { + sev, err := parseSeverity(*raw.Severity) + if err != nil { + return err + } + r.Severity = &sev + } + return nil +} + +// Validate checks for missing rule IDs in the configuration. +func (c *Config) Validate() error { + for _, entry := range c.Rules { + if strings.TrimSpace(entry.ID) == "" { + return errors.New("rule entry missing id") + } + } + return nil +} + +// RuleConfig configures a specific rule +type RuleConfig struct { + // Enabled controls whether the rule is active + Enabled *bool `yaml:"enabled,omitempty" json:"enabled,omitempty"` + + // Severity overrides the default severity + Severity *validation.Severity `yaml:"severity,omitempty" json:"severity,omitempty"` + + // ResolveOptions contains runtime options for reference resolution (not serialized) + // These are set by the linter engine when running rules + ResolveOptions *references.ResolveOptions `yaml:"-" json:"-"` +} + +// GetSeverity returns the effective severity, falling back to default if not overridden +func (c *RuleConfig) GetSeverity(defaultSeverity validation.Severity) validation.Severity { + if c != nil && c.Severity != nil { + return *c.Severity + } + return defaultSeverity +} + +// CategoryConfig configures an entire category of rules +type CategoryConfig struct { + // Enabled controls whether all rules in the category are active + Enabled *bool `yaml:"enabled,omitempty" json:"enabled,omitempty"` + + // Severity overrides the default severity for all rules in the category + Severity *validation.Severity `yaml:"severity,omitempty" json:"severity,omitempty"` +} + +// UnmarshalYAML allows severity aliases (warn, info) in categories. 
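+// For example:
+//
+//	categories:
+//	  style:
+//	    severity: warn  # parsed as warning
+//	  security:
+//	    severity: info  # parsed as hint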
+func (c *CategoryConfig) UnmarshalYAML(value *yaml.Node) error { + var raw struct { + Enabled *bool `yaml:"enabled,omitempty"` + Severity *string `yaml:"severity,omitempty"` + } + if err := value.Decode(&raw); err != nil { + return err + } + if raw.Severity != nil { + sev, err := parseSeverity(*raw.Severity) + if err != nil { + return err + } + c.Severity = &sev + } + c.Enabled = raw.Enabled + return nil +} + +type OutputFormat string + +const ( + OutputFormatText OutputFormat = "text" + OutputFormatJSON OutputFormat = "json" +) + +// NewConfig creates a new default configuration +func NewConfig() *Config { + return &Config{ + Extends: []string{"all"}, + Rules: []RuleEntry{}, + Categories: make(map[string]CategoryConfig), + OutputFormat: OutputFormatText, + } +} + +func parseSeverity(value string) (validation.Severity, error) { + switch strings.ToLower(strings.TrimSpace(value)) { + case "error": + return validation.SeverityError, nil + case "warn", "warning": + return validation.SeverityWarning, nil + case "hint", "info": + return validation.SeverityHint, nil + default: + return "", fmt.Errorf("unknown severity %q", value) + } +} diff --git a/linter/config_loader.go b/linter/config_loader.go new file mode 100644 index 00000000..0400cdf1 --- /dev/null +++ b/linter/config_loader.go @@ -0,0 +1,51 @@ +package linter + +import ( + "fmt" + "io" + "os" + + "gopkg.in/yaml.v3" +) + +// LoadConfig loads lint configuration from a YAML reader. +func LoadConfig(r io.Reader) (*Config, error) { + data, err := io.ReadAll(r) + if err != nil { + return nil, fmt.Errorf("failed to read config: %w", err) + } + + var cfg Config + if err := yaml.Unmarshal(data, &cfg); err != nil { + return nil, fmt.Errorf("failed to parse config: %w", err) + } + + if len(cfg.Extends) == 0 { + cfg.Extends = []string{"all"} + } + if cfg.Categories == nil { + cfg.Categories = make(map[string]CategoryConfig) + } + if cfg.Rules == nil { + cfg.Rules = []RuleEntry{} + } + if cfg.OutputFormat == "" { + cfg.OutputFormat = OutputFormatText + } + if err := cfg.Validate(); err != nil { + return nil, err + } + + return &cfg, nil +} + +// LoadConfigFromFile loads lint configuration from a YAML file. 
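+// A minimal usage sketch (the file name is illustrative):
+//
+//	cfg, err := linter.LoadConfigFromFile("lint.yaml")
+//	if err != nil {
+//		// handle the error
+//	}
+//	// When unset in the file, cfg.Extends defaults to ["all"] and cfg.OutputFormat to "text".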
+func LoadConfigFromFile(path string) (*Config, error) { + f, err := os.Open(path) //nolint:gosec + if err != nil { + return nil, fmt.Errorf("failed to open config file: %w", err) + } + defer f.Close() + + return LoadConfig(f) +} diff --git a/linter/config_test.go b/linter/config_test.go new file mode 100644 index 00000000..4f6009e3 --- /dev/null +++ b/linter/config_test.go @@ -0,0 +1,284 @@ +package linter_test + +import ( + "os" + "regexp" + "strings" + "testing" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/validation" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestRuleConfig_GetSeverity(t *testing.T) { + t.Parallel() + + t.Run("returns configured severity when set", func(t *testing.T) { + t.Parallel() + + warningSeverity := validation.SeverityWarning + config := linter.RuleConfig{ + Severity: &warningSeverity, + } + + assert.Equal(t, validation.SeverityWarning, config.GetSeverity(validation.SeverityError)) + }) + + t.Run("returns default severity when not set", func(t *testing.T) { + t.Parallel() + + config := linter.RuleConfig{} + + assert.Equal(t, validation.SeverityError, config.GetSeverity(validation.SeverityError)) + }) + + t.Run("returns configured severity overriding different default", func(t *testing.T) { + t.Parallel() + + hintSeverity := validation.SeverityHint + config := linter.RuleConfig{ + Severity: &hintSeverity, + } + + assert.Equal(t, validation.SeverityHint, config.GetSeverity(validation.SeverityWarning)) + }) +} + +func TestNewConfig(t *testing.T) { + t.Parallel() + + config := linter.NewConfig() + assert.NotNil(t, config) + assert.Equal(t, linter.OutputFormatText, config.OutputFormat) + assert.NotNil(t, config.Rules) + assert.NotNil(t, config.Categories) + assert.NotNil(t, config.Extends) +} + +func TestLoadConfig_ExtendsString(t *testing.T) { + t.Parallel() + + configYAML := `extends: recommended` + config, err := linter.LoadConfig(strings.NewReader(configYAML)) + require.NoError(t, err) + assert.Equal(t, []string{"recommended"}, config.Extends) +} + +func TestLoadConfig_ExtendsList(t *testing.T) { + t.Parallel() + + configYAML := `extends: + - recommended + - strict` + config, err := linter.LoadConfig(strings.NewReader(configYAML)) + require.NoError(t, err) + assert.Equal(t, []string{"recommended", "strict"}, config.Extends) +} + +func TestLoadConfig_MatchRegex(t *testing.T) { + t.Parallel() + + configYAML := `rules: + - id: validation-required + match: ".*title.*"` + config, err := linter.LoadConfig(strings.NewReader(configYAML)) + require.NoError(t, err) + require.Len(t, config.Rules, 1) + require.NotNil(t, config.Rules[0].Match) + assert.Equal(t, regexp.MustCompile(".*title.*").String(), config.Rules[0].Match.String()) +} + +func TestLoadConfig_CustomRulesRoundTrip(t *testing.T) { + t.Parallel() + + configYAML := `extends: all +custom_rules: + paths: + - "./rules/*.ts" + - "./extra/*.ts"` + config, err := linter.LoadConfig(strings.NewReader(configYAML)) + require.NoError(t, err, "should load config with custom_rules") + require.NotNil(t, config.CustomRules, "custom_rules should survive UnmarshalYAML round-trip") + assert.Equal(t, []string{"./rules/*.ts", "./extra/*.ts"}, config.CustomRules.Paths, "custom_rules.paths should be preserved") +} + +func TestLoadConfig_CategorySeverityAliases(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + expectedSeverity validation.Severity + }{ + { + name: "error severity", + yaml: `categories: + style: + 
severity: error`, + expectedSeverity: validation.SeverityError, + }, + { + name: "warn alias for warning", + yaml: `categories: + style: + severity: warn`, + expectedSeverity: validation.SeverityWarning, + }, + { + name: "warning severity", + yaml: `categories: + style: + severity: warning`, + expectedSeverity: validation.SeverityWarning, + }, + { + name: "hint severity", + yaml: `categories: + style: + severity: hint`, + expectedSeverity: validation.SeverityHint, + }, + { + name: "info alias for hint", + yaml: `categories: + style: + severity: info`, + expectedSeverity: validation.SeverityHint, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + config, err := linter.LoadConfig(strings.NewReader(tt.yaml)) + require.NoError(t, err) + require.NotNil(t, config.Categories["style"].Severity, "severity should be set") + assert.Equal(t, tt.expectedSeverity, *config.Categories["style"].Severity, "severity should match expected") + }) + } +} + +func TestLoadConfig_CategoryEnabled(t *testing.T) { + t.Parallel() + + configYAML := `categories: + security: + enabled: false` + config, err := linter.LoadConfig(strings.NewReader(configYAML)) + require.NoError(t, err) + require.NotNil(t, config.Categories["security"].Enabled, "enabled should be set") + assert.False(t, *config.Categories["security"].Enabled, "security category should be disabled") +} + +func TestLoadConfig_CategoryInvalidSeverity(t *testing.T) { + t.Parallel() + + configYAML := `categories: + style: + severity: critical` + _, err := linter.LoadConfig(strings.NewReader(configYAML)) + require.Error(t, err) + assert.Contains(t, err.Error(), "unknown severity") +} + +func TestLoadConfig_RuleSeverityAliases(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + expectedSeverity validation.Severity + }{ + { + name: "warn alias", + yaml: `rules: + - id: test-rule + severity: warn`, + expectedSeverity: validation.SeverityWarning, + }, + { + name: "info alias", + yaml: `rules: + - id: test-rule + severity: info`, + expectedSeverity: validation.SeverityHint, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + config, err := linter.LoadConfig(strings.NewReader(tt.yaml)) + require.NoError(t, err) + require.Len(t, config.Rules, 1) + require.NotNil(t, config.Rules[0].Severity, "severity should be set") + assert.Equal(t, tt.expectedSeverity, *config.Rules[0].Severity, "severity should match expected") + }) + } +} + +func TestLoadConfig_RuleInvalidSeverity(t *testing.T) { + t.Parallel() + + configYAML := `rules: + - id: test-rule + severity: critical` + _, err := linter.LoadConfig(strings.NewReader(configYAML)) + require.Error(t, err) + assert.Contains(t, err.Error(), "unknown severity") +} + +func TestLoadConfig_ExtendsInvalidType(t *testing.T) { + t.Parallel() + + configYAML := `extends: + key: value` + _, err := linter.LoadConfig(strings.NewReader(configYAML)) + require.Error(t, err) + assert.Contains(t, err.Error(), "extends must be a string or list of strings") +} + +func TestLoadConfig_ExtendsNull(t *testing.T) { + t.Parallel() + + configYAML := `extends: null` + config, err := linter.LoadConfig(strings.NewReader(configYAML)) + require.NoError(t, err) + assert.Equal(t, []string{"all"}, config.Extends, "null extends should default to all") +} + +func TestLoadConfigFromFile_Success(t *testing.T) { + t.Parallel() + + tmpFile := t.TempDir() + "/lint.yaml" + err := os.WriteFile(tmpFile, []byte("extends: recommended\n"), 0644) + 
require.NoError(t, err) + + config, err := linter.LoadConfigFromFile(tmpFile) + require.NoError(t, err) + assert.Equal(t, []string{"recommended"}, config.Extends) +} + +func TestLoadConfigFromFile_Error(t *testing.T) { + t.Parallel() + + _, err := linter.LoadConfigFromFile("/nonexistent/path/lint.yaml") + require.Error(t, err) + assert.Contains(t, err.Error(), "failed to open config file") +} + +func TestConfig_ValidateMissingRuleID(t *testing.T) { + t.Parallel() + + config := &linter.Config{ + Rules: []linter.RuleEntry{{}}, + } + + err := config.Validate() + require.Error(t, err) + assert.Contains(t, err.Error(), "rule entry missing id") +} diff --git a/linter/doc.go b/linter/doc.go new file mode 100644 index 00000000..efb743f8 --- /dev/null +++ b/linter/doc.go @@ -0,0 +1,267 @@ +package linter + +import ( + "encoding/json" + "fmt" + "io" + "strings" +) + +// DocGenerator generates documentation from registered rules +type DocGenerator[T any] struct { + registry *Registry[T] +} + +// NewDocGenerator creates a new documentation generator +func NewDocGenerator[T any](registry *Registry[T]) *DocGenerator[T] { + return &DocGenerator[T]{registry: registry} +} + +// RuleDoc represents documentation for a single rule +type RuleDoc struct { + ID string `json:"id" yaml:"id"` + Category string `json:"category" yaml:"category"` + Summary string `json:"summary" yaml:"summary"` + Description string `json:"description" yaml:"description"` + Rationale string `json:"rationale,omitempty" yaml:"rationale,omitempty"` + Link string `json:"link,omitempty" yaml:"link,omitempty"` + DefaultSeverity string `json:"default_severity" yaml:"default_severity"` + Versions []string `json:"versions,omitempty" yaml:"versions,omitempty"` + GoodExample string `json:"good_example,omitempty" yaml:"good_example,omitempty"` + BadExample string `json:"bad_example,omitempty" yaml:"bad_example,omitempty"` + FixAvailable bool `json:"fix_available" yaml:"fix_available"` + ConfigSchema map[string]any `json:"config_schema,omitempty" yaml:"config_schema,omitempty"` + ConfigDefaults map[string]any `json:"config_defaults,omitempty" yaml:"config_defaults,omitempty"` + Rulesets []string `json:"rulesets" yaml:"rulesets"` +} + +// GenerateRuleDoc generates documentation for a single rule +func (g *DocGenerator[T]) GenerateRuleDoc(rule RuleRunner[T]) *RuleDoc { + doc := &RuleDoc{ + ID: rule.ID(), + Category: rule.Category(), + Summary: rule.Summary(), + Description: rule.Description(), + Link: rule.Link(), + DefaultSeverity: rule.DefaultSeverity().String(), + Versions: rule.Versions(), + Rulesets: g.registry.RulesetsContaining(rule.ID()), + } + + // Check for optional documentation interface + if documented, ok := any(rule).(DocumentedRule); ok { + doc.GoodExample = documented.GoodExample() + doc.BadExample = documented.BadExample() + doc.Rationale = documented.Rationale() + doc.FixAvailable = documented.FixAvailable() + } + + // Check for configuration interface + if configurable, ok := any(rule).(ConfigurableRule); ok { + doc.ConfigSchema = configurable.ConfigSchema() + doc.ConfigDefaults = configurable.ConfigDefaults() + } + + return doc +} + +// GenerateAllRuleDocs generates documentation for all registered rules +func (g *DocGenerator[T]) GenerateAllRuleDocs() []*RuleDoc { + var docs []*RuleDoc + for _, rule := range g.registry.AllRules() { + docs = append(docs, g.GenerateRuleDoc(rule)) + } + return docs +} + +// GenerateCategoryDocs groups rules by category +func (g *DocGenerator[T]) GenerateCategoryDocs() map[string][]*RuleDoc { + 
categories := make(map[string][]*RuleDoc) + for _, rule := range g.registry.AllRules() { + doc := g.GenerateRuleDoc(rule) + categories[doc.Category] = append(categories[doc.Category], doc) + } + return categories +} + +// WriteJSON writes rule documentation as JSON +func (g *DocGenerator[T]) WriteJSON(w io.Writer) error { + docs := g.GenerateAllRuleDocs() + enc := json.NewEncoder(w) + enc.SetIndent("", " ") + return enc.Encode(map[string]any{ + "rules": docs, + "categories": g.registry.AllCategories(), + "rulesets": g.registry.AllRulesets(), + }) +} + +// WriteMarkdown writes rule documentation as Markdown +func (g *DocGenerator[T]) WriteMarkdown(w io.Writer) error { + docs := g.GenerateCategoryDocs() + + if err := writeLine(w, "# Lint Rules Reference"); err != nil { + return err + } + if err := writeEmptyLine(w); err != nil { + return err + } + + // Table of contents + if err := writeLine(w, "## Categories"); err != nil { + return err + } + if err := writeEmptyLine(w); err != nil { + return err + } + for category := range docs { + if err := writeF(w, "- [%s](#%s)\n", category, category); err != nil { + return err + } + } + if err := writeEmptyLine(w); err != nil { + return err + } + + // Rules by category + for category, rules := range docs { + if err := writeF(w, "## %s\n\n", category); err != nil { + return err + } + + for _, rule := range rules { + if err := g.writeRuleMarkdown(w, rule); err != nil { + return err + } + } + } + + return nil +} + +func (g *DocGenerator[T]) writeRuleMarkdown(w io.Writer, rule *RuleDoc) error { + if err := writeF(w, "### %s\n\n", rule.ID); err != nil { + return err + } + if err := writeF(w, "**Severity:** %s \n", rule.DefaultSeverity); err != nil { + return err + } + if err := writeF(w, "**Category:** %s \n", rule.Category); err != nil { + return err + } + if rule.Summary != "" { + if err := writeF(w, "**Summary:** %s \n", rule.Summary); err != nil { + return err + } + } + + if len(rule.Versions) > 0 { + if err := writeF(w, "**Applies to:** %s \n", strings.Join(rule.Versions, ", ")); err != nil { + return err + } + } + + if rule.FixAvailable { + if err := writeLine(w, "**Auto-fix available:** Yes "); err != nil { + return err + } + } + if err := writeEmptyLine(w); err != nil { + return err + } + + if err := writeF(w, "%s\n\n", rule.Description); err != nil { + return err + } + + if rule.Rationale != "" { + if err := writeF(w, "#### Rationale\n\n%s\n\n", rule.Rationale); err != nil { + return err + } + } + + if rule.BadExample != "" { + if err := writeLine(w, "#### ❌ Incorrect"); err != nil { + return err + } + if err := writeLine(w, "```yaml"); err != nil { + return err + } + if err := writeLine(w, rule.BadExample); err != nil { + return err + } + if err := writeLine(w, "```"); err != nil { + return err + } + if err := writeEmptyLine(w); err != nil { + return err + } + } + + if rule.GoodExample != "" { + if err := writeLine(w, "#### ✅ Correct"); err != nil { + return err + } + if err := writeLine(w, "```yaml"); err != nil { + return err + } + if err := writeLine(w, rule.GoodExample); err != nil { + return err + } + if err := writeLine(w, "```"); err != nil { + return err + } + if err := writeEmptyLine(w); err != nil { + return err + } + } + + if len(rule.ConfigSchema) > 0 { + if err := writeLine(w, "#### Configuration"); err != nil { + return err + } + if err := writeEmptyLine(w); err != nil { + return err + } + if err := writeLine(w, "| Option | Type | Default | Description |"); err != nil { + return err + } + if err := writeLine(w, 
"|--------|------|---------|-------------|"); err != nil { + return err + } + // Write config options table + if err := writeEmptyLine(w); err != nil { + return err + } + } + + if rule.Link != "" { + if err := writeF(w, "[Documentation →](%s)\n\n", rule.Link); err != nil { + return err + } + } + + if err := writeLine(w, "---"); err != nil { + return err + } + if err := writeEmptyLine(w); err != nil { + return err + } + + return nil +} + +func writeLine(w io.Writer, text string) error { + _, err := fmt.Fprintln(w, text) + return err +} + +func writeEmptyLine(w io.Writer) error { + _, err := fmt.Fprintln(w) + return err +} + +func writeF(w io.Writer, format string, args ...any) error { + _, err := fmt.Fprintf(w, format, args...) + return err +} diff --git a/linter/doc_test.go b/linter/doc_test.go new file mode 100644 index 00000000..5ebfb605 --- /dev/null +++ b/linter/doc_test.go @@ -0,0 +1,280 @@ +package linter_test + +import ( + "bytes" + "encoding/json" + "testing" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/validation" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestDocGenerator_GenerateRuleDoc(t *testing.T) { + t.Parallel() + + t.Run("basic rule documentation", func(t *testing.T) { + t.Parallel() + + registry := linter.NewRegistry[*MockDoc]() + registry.Register(&mockRule{ + id: "test-rule", + category: "style", + summary: "Test rule summary", + description: "Test rule description", + link: "https://example.com/rules/test-rule", + defaultSeverity: validation.SeverityError, + versions: []string{"3.1.0", "3.2.0"}, + }) + + generator := linter.NewDocGenerator(registry) + rule, _ := registry.GetRule("test-rule") + doc := generator.GenerateRuleDoc(rule) + + assert.Equal(t, "test-rule", doc.ID) + assert.Equal(t, "style", doc.Category) + assert.Equal(t, "Test rule summary", doc.Summary) + assert.Equal(t, "Test rule description", doc.Description) + assert.Equal(t, "https://example.com/rules/test-rule", doc.Link) + assert.Equal(t, "error", doc.DefaultSeverity) + assert.Equal(t, []string{"3.1.0", "3.2.0"}, doc.Versions) + assert.Contains(t, doc.Rulesets, "all") + }) + + t.Run("documented rule with examples", func(t *testing.T) { + t.Parallel() + + registry := linter.NewRegistry[*MockDoc]() + registry.Register(&documentedMockRule{ + mockRule: mockRule{ + id: "documented-rule", + category: "style", + summary: "Documented rule summary", + description: "Rule with examples", + defaultSeverity: validation.SeverityWarning, + }, + goodExample: "good:\n example: value", + badExample: "bad:\n example: value", + rationale: "This is why the rule exists", + fixAvailable: true, + }) + + generator := linter.NewDocGenerator(registry) + rule, _ := registry.GetRule("documented-rule") + doc := generator.GenerateRuleDoc(rule) + + assert.Equal(t, "good:\n example: value", doc.GoodExample) + assert.Equal(t, "bad:\n example: value", doc.BadExample) + assert.Equal(t, "This is why the rule exists", doc.Rationale) + assert.True(t, doc.FixAvailable) + }) + + t.Run("configurable rule with schema", func(t *testing.T) { + t.Parallel() + + registry := linter.NewRegistry[*MockDoc]() + registry.Register(&configurableMockRule{ + mockRule: mockRule{ + id: "configurable-rule", + category: "style", + summary: "Configurable rule summary", + description: "Configurable rule", + defaultSeverity: validation.SeverityError, + }, + configSchema: map[string]any{ + "maxLength": map[string]any{"type": "integer"}, + }, + configDefaults: map[string]any{ + 
"maxLength": 100, + }, + }) + + generator := linter.NewDocGenerator(registry) + rule, _ := registry.GetRule("configurable-rule") + doc := generator.GenerateRuleDoc(rule) + + assert.NotNil(t, doc.ConfigSchema) + assert.Contains(t, doc.ConfigSchema, "maxLength") + assert.NotNil(t, doc.ConfigDefaults) + assert.Equal(t, 100, doc.ConfigDefaults["maxLength"]) + }) +} + +func TestDocGenerator_GenerateAllRuleDocs(t *testing.T) { + t.Parallel() + + registry := linter.NewRegistry[*MockDoc]() + registry.Register(&mockRule{id: "rule-1", category: "style", defaultSeverity: validation.SeverityError, description: "Rule 1"}) + registry.Register(&mockRule{id: "rule-2", category: "security", defaultSeverity: validation.SeverityWarning, description: "Rule 2"}) + registry.Register(&mockRule{id: "rule-3", category: "style", defaultSeverity: validation.SeverityHint, description: "Rule 3"}) + + generator := linter.NewDocGenerator(registry) + docs := generator.GenerateAllRuleDocs() + + assert.Len(t, docs, 3) + + // Verify all rules are documented + ids := make([]string, len(docs)) + for i, doc := range docs { + ids[i] = doc.ID + } + assert.ElementsMatch(t, []string{"rule-1", "rule-2", "rule-3"}, ids) +} + +func TestDocGenerator_GenerateCategoryDocs(t *testing.T) { + t.Parallel() + + registry := linter.NewRegistry[*MockDoc]() + registry.Register(&mockRule{id: "style-1", category: "style", defaultSeverity: validation.SeverityError, description: "Style 1"}) + registry.Register(&mockRule{id: "style-2", category: "style", defaultSeverity: validation.SeverityError, description: "Style 2"}) + registry.Register(&mockRule{id: "security-1", category: "security", defaultSeverity: validation.SeverityError, description: "Security 1"}) + + generator := linter.NewDocGenerator(registry) + categoryDocs := generator.GenerateCategoryDocs() + + assert.Len(t, categoryDocs, 2) + assert.Len(t, categoryDocs["style"], 2) + assert.Len(t, categoryDocs["security"], 1) + + // Verify correct grouping + styleIDs := []string{categoryDocs["style"][0].ID, categoryDocs["style"][1].ID} + assert.ElementsMatch(t, []string{"style-1", "style-2"}, styleIDs) + assert.Equal(t, "security-1", categoryDocs["security"][0].ID) +} + +func TestDocGenerator_WriteJSON(t *testing.T) { + t.Parallel() + + registry := linter.NewRegistry[*MockDoc]() + registry.Register(&mockRule{ + id: "test-rule", + category: "style", + summary: "Test rule summary", + description: "Test description", + link: "https://example.com", + defaultSeverity: validation.SeverityError, + }) + _ = registry.RegisterRuleset("recommended", []string{"test-rule"}) + + generator := linter.NewDocGenerator(registry) + + var buf bytes.Buffer + err := generator.WriteJSON(&buf) + require.NoError(t, err) + + // Verify valid JSON + var result map[string]any + err = json.Unmarshal(buf.Bytes(), &result) + require.NoError(t, err) + + // Verify structure + assert.Contains(t, result, "rules") + assert.Contains(t, result, "categories") + assert.Contains(t, result, "rulesets") + + // Verify rules array + rules, ok := result["rules"].([]any) + require.True(t, ok) + assert.Len(t, rules, 1) + + // Verify rule details + ruleMap, ok := rules[0].(map[string]any) + require.True(t, ok) + assert.Equal(t, "test-rule", ruleMap["id"]) + assert.Equal(t, "style", ruleMap["category"]) +} + +func TestDocGenerator_WriteMarkdown(t *testing.T) { + t.Parallel() + + registry := linter.NewRegistry[*MockDoc]() + registry.Register(&documentedMockRule{ + mockRule: mockRule{ + id: "test-rule", + category: "style", + summary: "Test rule 
summary", + description: "Test rule description", + link: "https://docs.example.com/rules/test-rule", + defaultSeverity: validation.SeverityError, + }, + goodExample: "good:\n value: correct", + badExample: "bad:\n value: incorrect", + rationale: "This rule ensures consistency", + fixAvailable: true, + }) + + generator := linter.NewDocGenerator(registry) + + var buf bytes.Buffer + err := generator.WriteMarkdown(&buf) + require.NoError(t, err) + + output := buf.String() + + // Verify markdown structure + assert.Contains(t, output, "# Lint Rules Reference") + assert.Contains(t, output, "## Categories") + assert.Contains(t, output, "## style") // Category header + assert.Contains(t, output, "### test-rule") // Rule header + assert.Contains(t, output, "**Severity:** error") + assert.Contains(t, output, "**Category:** style") + assert.Contains(t, output, "**Summary:** Test rule summary") + assert.Contains(t, output, "Test rule description") + assert.Contains(t, output, "#### Rationale") + assert.Contains(t, output, "This rule ensures consistency") + assert.Contains(t, output, "#### ❌ Incorrect") + assert.Contains(t, output, "bad:\n value: incorrect") + assert.Contains(t, output, "#### ✅ Correct") + assert.Contains(t, output, "good:\n value: correct") + assert.Contains(t, output, "**Auto-fix available:** Yes") + assert.Contains(t, output, "[Documentation →](https://docs.example.com/rules/test-rule)") + assert.Contains(t, output, "---") // Separator +} + +func TestDocGenerator_WriteMarkdown_WithVersions(t *testing.T) { + t.Parallel() + + registry := linter.NewRegistry[*MockDoc]() + registry.Register(&mockRule{ + id: "versioned-rule", + category: "validation", + summary: "Versioned rule summary", + description: "Version-specific rule", + defaultSeverity: validation.SeverityError, + versions: []string{"3.1.0", "3.2.0"}, + }) + + generator := linter.NewDocGenerator(registry) + + var buf bytes.Buffer + err := generator.WriteMarkdown(&buf) + require.NoError(t, err) + + output := buf.String() + assert.Contains(t, output, "**Applies to:** 3.1.0, 3.2.0") +} + +// documentedMockRule implements DocumentedRule interface +type documentedMockRule struct { + mockRule + goodExample string + badExample string + rationale string + fixAvailable bool +} + +func (r *documentedMockRule) GoodExample() string { return r.goodExample } +func (r *documentedMockRule) BadExample() string { return r.badExample } +func (r *documentedMockRule) Rationale() string { return r.rationale } +func (r *documentedMockRule) FixAvailable() bool { return r.fixAvailable } + +// configurableMockRule implements ConfigurableRule interface +type configurableMockRule struct { + mockRule + configSchema map[string]any + configDefaults map[string]any +} + +func (r *configurableMockRule) ConfigSchema() map[string]any { return r.configSchema } +func (r *configurableMockRule) ConfigDefaults() map[string]any { return r.configDefaults } diff --git a/linter/document.go b/linter/document.go new file mode 100644 index 00000000..b4fe8c86 --- /dev/null +++ b/linter/document.go @@ -0,0 +1,48 @@ +package linter + +import ( + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/references" +) + +// DocumentInfo contains a document and its metadata for linting +type DocumentInfo[T any] struct { + // Document is the parsed document to lint + Document T + + // Location is the absolute location (URL or file path) of the document + // This is used for resolving relative references + Location string + + // Index contains an index of various 
nodes from the provided document + Index *openapi.Index +} + +// NewDocumentInfo creates a new DocumentInfo with the given document and location +func NewDocumentInfo[T any](doc T, location string) *DocumentInfo[T] { + return &DocumentInfo[T]{ + Document: doc, + Location: location, + } +} + +// NewDocumentInfoWithIndex creates a new DocumentInfo with a pre-computed index +func NewDocumentInfoWithIndex[T any](doc T, location string, index *openapi.Index) *DocumentInfo[T] { + return &DocumentInfo[T]{ + Document: doc, + Location: location, + Index: index, + } +} + +// LintOptions contains runtime options for linting +type LintOptions struct { + // ResolveOptions contains options for reference resolution + // If nil, default options will be used + ResolveOptions *references.ResolveOptions + + // VersionFilter is the document version (e.g., "3.0", "3.1") + // If set, only rules that apply to this version will be run + // Rules with nil/empty Versions() apply to all versions + VersionFilter *string +} diff --git a/linter/document_test.go b/linter/document_test.go new file mode 100644 index 00000000..89b8d45d --- /dev/null +++ b/linter/document_test.go @@ -0,0 +1,38 @@ +package linter_test + +import ( + "testing" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/stretchr/testify/assert" +) + +func TestNewDocumentInfo(t *testing.T) { + t.Parallel() + + doc := &MockDoc{ID: "test-doc"} + location := "/path/to/openapi.yaml" + + docInfo := linter.NewDocumentInfo(doc, location) + + assert.NotNil(t, docInfo) + assert.Equal(t, doc, docInfo.Document) + assert.Equal(t, location, docInfo.Location) + assert.Nil(t, docInfo.Index) +} + +func TestNewDocumentInfoWithIndex(t *testing.T) { + t.Parallel() + + doc := &MockDoc{ID: "test-doc"} + location := "/path/to/openapi.yaml" + index := &openapi.Index{} + + docInfo := linter.NewDocumentInfoWithIndex(doc, location, index) + + assert.NotNil(t, docInfo) + assert.Equal(t, doc, docInfo.Document) + assert.Equal(t, location, docInfo.Location) + assert.Equal(t, index, docInfo.Index) +} diff --git a/linter/format/format_test.go b/linter/format/format_test.go new file mode 100644 index 00000000..766d7452 --- /dev/null +++ b/linter/format/format_test.go @@ -0,0 +1,140 @@ +package format_test + +import ( + "errors" + "strings" + "testing" + + "github.com/speakeasy-api/openapi/linter/format" + "github.com/speakeasy-api/openapi/validation" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "gopkg.in/yaml.v3" +) + +func TestTextFormatter_Format(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + errors []error + contains []string + }{ + { + name: "empty errors", + errors: []error{}, + contains: []string{}, + }, + { + name: "single error", + errors: []error{ + validation.NewValidationError(validation.SeverityError, "test-rule", errors.New("test error message"), nil), + }, + contains: []string{"error", "test-rule", "test error message"}, + }, + { + name: "multiple errors with different severities", + errors: []error{ + validation.NewValidationError(validation.SeverityError, "error-rule", errors.New("error message"), nil), + validation.NewValidationError(validation.SeverityWarning, "warning-rule", errors.New("warning message"), nil), + validation.NewValidationError(validation.SeverityHint, "hint-rule", errors.New("hint message"), nil), + }, + contains: []string{ + "error", "error-rule", "error message", + "warning", "warning-rule", "warning message", + "hint", "hint-rule", "hint 
message", + }, + }, + { + name: "error with line number", + errors: []error{ + &validation.Error{ + UnderlyingError: errors.New("at specific location"), + Node: &yaml.Node{Line: 42, Column: 10}, + Severity: validation.SeverityError, + Rule: "location-rule", + }, + }, + contains: []string{"42", "10", "location-rule"}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + formatter := format.NewTextFormatter() + result, err := formatter.Format(tt.errors) + require.NoError(t, err) + + for _, substr := range tt.contains { + assert.Contains(t, result, substr, "output should contain %q", substr) + } + }) + } +} + +func TestJSONFormatter_Format(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + errors []error + contains []string + }{ + { + name: "empty errors", + errors: []error{}, + contains: []string{`"results"`, `"summary"`}, + }, + { + name: "single error", + errors: []error{ + validation.NewValidationError(validation.SeverityError, "test-rule", errors.New("test error message"), nil), + }, + contains: []string{`"error"`, `"test-rule"`, `"test error message"`}, + }, + { + name: "multiple errors", + errors: []error{ + validation.NewValidationError(validation.SeverityError, "rule-1", errors.New("error 1"), nil), + validation.NewValidationError(validation.SeverityWarning, "rule-2", errors.New("error 2"), nil), + }, + contains: []string{ + `"rule-1"`, `"error 1"`, + `"rule-2"`, `"error 2"`, + `"warning"`, + }, + }, + { + name: "error with location", + errors: []error{ + &validation.Error{ + UnderlyingError: errors.New("located error"), + Node: &yaml.Node{Line: 15, Column: 25}, + Severity: validation.SeverityError, + Rule: "location-rule", + }, + }, + contains: []string{`"line": 15`, `"column": 25`, `"location-rule"`}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + formatter := format.NewJSONFormatter() + result, err := formatter.Format(tt.errors) + require.NoError(t, err) + + // Verify it's valid JSON by checking structure (it's an object, not an array) + assert.True(t, strings.HasPrefix(strings.TrimSpace(result), "{"), "should start with {") + assert.True(t, strings.HasSuffix(strings.TrimSpace(result), "}"), "should end with }") + + for _, substr := range tt.contains { + assert.Contains(t, result, substr, "JSON should contain %q", substr) + } + }) + } +} diff --git a/linter/format/formatter.go b/linter/format/formatter.go new file mode 100644 index 00000000..fac6c55a --- /dev/null +++ b/linter/format/formatter.go @@ -0,0 +1,5 @@ +package format + +type Formatter interface { + Format(results []error) (string, error) +} diff --git a/linter/format/json.go b/linter/format/json.go new file mode 100644 index 00000000..6a60e199 --- /dev/null +++ b/linter/format/json.go @@ -0,0 +1,113 @@ +package format + +import ( + "encoding/json" + "errors" + "strings" + + "github.com/speakeasy-api/openapi/validation" +) + +type JSONFormatter struct{} + +func NewJSONFormatter() *JSONFormatter { + return &JSONFormatter{} +} + +type jsonOutput struct { + Results []jsonResult `json:"results"` + Summary jsonSummary `json:"summary"` +} + +type jsonResult struct { + Rule string `json:"rule"` + Category string `json:"category"` + Severity string `json:"severity"` + Message string `json:"message"` + Location jsonLocation `json:"location"` + Document string `json:"document,omitempty"` + Fix *jsonFix `json:"fix,omitempty"` +} + +type jsonLocation struct { + Line int `json:"line"` + Column int `json:"column"` + 
Pointer string `json:"pointer,omitempty"` // TODO: Add pointer support +} + +type jsonFix struct { + Description string `json:"description"` +} + +type jsonSummary struct { + Total int `json:"total"` + Errors int `json:"errors"` + Warnings int `json:"warnings"` + Hints int `json:"hints"` +} + +func (f *JSONFormatter) Format(results []error) (string, error) { + output := jsonOutput{ + Results: make([]jsonResult, 0, len(results)), + } + + for _, err := range results { + var vErr *validation.Error + if errors.As(err, &vErr) { + category := "unknown" + if idx := strings.Index(vErr.Rule, "-"); idx > 0 { + category = vErr.Rule[:idx] + } + + result := jsonResult{ + Rule: vErr.Rule, + Category: category, + Severity: vErr.Severity.String(), + Message: vErr.UnderlyingError.Error(), + Location: jsonLocation{ + Line: vErr.GetLineNumber(), + Column: vErr.GetColumnNumber(), + }, + } + + if vErr.DocumentLocation != "" { + result.Document = vErr.DocumentLocation + } + + if vErr.Fix != nil { + result.Fix = &jsonFix{ + Description: vErr.Fix.FixDescription(), + } + } + + output.Results = append(output.Results, result) + + switch vErr.Severity { + case validation.SeverityError: + output.Summary.Errors++ + case validation.SeverityWarning: + output.Summary.Warnings++ + case validation.SeverityHint: + output.Summary.Hints++ + } + } else { + // Non-validation error + output.Results = append(output.Results, jsonResult{ + Rule: "internal", + Category: "internal", + Severity: "error", + Message: err.Error(), + }) + output.Summary.Errors++ + } + } + + output.Summary.Total = len(results) + + bytes, err := json.MarshalIndent(output, "", " ") + if err != nil { + return "", err + } + + return string(bytes), nil +} diff --git a/linter/format/text.go b/linter/format/text.go new file mode 100644 index 00000000..cdf30f8c --- /dev/null +++ b/linter/format/text.go @@ -0,0 +1,59 @@ +package format + +import ( + "errors" + "fmt" + "strings" + + "github.com/speakeasy-api/openapi/validation" +) + +type TextFormatter struct{} + +func NewTextFormatter() *TextFormatter { + return &TextFormatter{} +} + +func (f *TextFormatter) Format(results []error) (string, error) { + var sb strings.Builder + + errorCount := 0 + warningCount := 0 + hintCount := 0 + + for _, err := range results { + var vErr *validation.Error + if errors.As(err, &vErr) { + line := vErr.GetLineNumber() + col := vErr.GetColumnNumber() + severity := vErr.Severity + rule := vErr.Rule + msg := vErr.UnderlyingError.Error() + if vErr.DocumentLocation != "" { + msg = fmt.Sprintf("%s (document: %s)", msg, vErr.DocumentLocation) + } + + sb.WriteString(fmt.Sprintf("%d:%d\t%s\t%s\t%s\n", line, col, severity, rule, msg)) + + switch severity { + case validation.SeverityError: + errorCount++ + case validation.SeverityWarning: + warningCount++ + case validation.SeverityHint: + hintCount++ + } + } else { + // Non-validation error + sb.WriteString(fmt.Sprintf("-\t-\terror\tinternal\t%s\n", err.Error())) + errorCount++ + } + } + + if len(results) > 0 { + sb.WriteString("\n") + sb.WriteString(fmt.Sprintf("✖ %d problems (%d errors, %d warnings, %d hints)\n", len(results), errorCount, warningCount, hintCount)) + } + + return sb.String(), nil +} diff --git a/linter/linter.go b/linter/linter.go new file mode 100644 index 00000000..8d067bda --- /dev/null +++ b/linter/linter.go @@ -0,0 +1,407 @@ +package linter + +import ( + "context" + "errors" + "regexp" + "sort" + "sync" + + "github.com/speakeasy-api/openapi/linter/format" + "github.com/speakeasy-api/openapi/validation" +) + +// Linter is 
the main linting engine +type Linter[T any] struct { + config *Config + registry *Registry[T] +} + +type ruleOverride struct { + present bool + severity *validation.Severity + disabled *bool +} + +type matchFilter struct { + ruleID string + pattern *regexp.Regexp + severity *validation.Severity + disabled *bool +} + +// NewLinter creates a new linter with the given configuration +func NewLinter[T any](config *Config, registry *Registry[T]) *Linter[T] { + return &Linter[T]{ + config: config, + registry: registry, + } +} + +// Registry returns the rule registry for documentation generation +func (l *Linter[T]) Registry() *Registry[T] { + return l.registry +} + +// Lint runs all configured rules against the document +func (l *Linter[T]) Lint(ctx context.Context, docInfo *DocumentInfo[T], preExistingErrors []error, opts *LintOptions) (*Output, error) { + var allErrs []error + + if len(preExistingErrors) > 0 { + allErrs = append(allErrs, preExistingErrors...) + } + + // Run lint rules - these also return validation.Error instances + lintErrs := l.runRules(ctx, docInfo, opts) + allErrs = append(allErrs, lintErrs...) + + // Apply severity overrides from config + allErrs = l.applySeverityOverrides(allErrs) + + allErrs = l.FilterErrors(allErrs) + + // Sort errors by location + validation.SortValidationErrors(allErrs) + + // Format output + return l.formatOutput(allErrs), nil +} + +func (l *Linter[T]) runRules(ctx context.Context, docInfo *DocumentInfo[T], opts *LintOptions) []error { + // Determine enabled rules + enabledRules := l.getEnabledRules() + + // Run rules in parallel for better performance + var ( + mu sync.Mutex + errs []error + wg sync.WaitGroup + ) + + for _, rule := range enabledRules { + ruleConfig := l.getRuleConfig(rule.ID()) + + // Skip if disabled (though getEnabledRules should handle this, double check) + if ruleConfig.Enabled != nil && !*ruleConfig.Enabled { + continue + } + + // Filter rules based on version if VersionFilter is set + if opts != nil && opts.VersionFilter != nil && *opts.VersionFilter != "" { + ruleVersions := rule.Versions() + // If rule specifies versions, check if current version matches + if len(ruleVersions) > 0 { + versionMatches := false + for _, ruleVersion := range ruleVersions { + // Match against rule's supported versions + // Support both "3.1" and "3.1.0" formats + if ruleVersion == *opts.VersionFilter || + (len(*opts.VersionFilter) > len(ruleVersion) && + (*opts.VersionFilter)[:len(ruleVersion)] == ruleVersion) { + versionMatches = true + break + } + } + if !versionMatches { + continue // Skip this rule - doesn't apply to this version + } + } + // If rule.Versions() is nil/empty, it applies to all versions + } + + // Set resolve options if provided + if opts != nil && opts.ResolveOptions != nil { + resolveOpts := *opts.ResolveOptions + // Set document location as target location if not already set + if resolveOpts.TargetLocation == "" && docInfo.Location != "" { + resolveOpts.TargetLocation = docInfo.Location + } + ruleConfig.ResolveOptions = &resolveOpts + } + + // Run rule in parallel + wg.Add(1) + go func(r RuleRunner[T], cfg RuleConfig) { + defer wg.Done() + + ruleErrs := r.Run(ctx, docInfo, &cfg) + + mu.Lock() + errs = append(errs, ruleErrs...) 
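+			// errs is shared by all rule goroutines; the append above is guarded by mu.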
+ mu.Unlock() + }(rule, ruleConfig) + } + + wg.Wait() + return errs +} + +func (l *Linter[T]) getEnabledRules() []RuleRunner[T] { + // Start with all rules if "all" is extended (default) + // Or specific rulesets + + // For now, simple implementation: check config for enabled rules + // If config.Extends contains "all", include all rules unless disabled + + // Map to track enabled status: ruleID -> enabled + ruleStatus := make(map[string]bool) + + // Apply rulesets + for _, ruleset := range l.config.Extends { + if ids, ok := l.registry.GetRuleset(ruleset); ok { + for _, id := range ids { + ruleStatus[id] = true + } + } + } + + // Apply category config + // Category config overrides ruleset config but is overridden by individual rule config + for _, rule := range l.registry.AllRules() { + if catConfig, ok := l.config.Categories[rule.Category()]; ok { + if catConfig.Enabled != nil { + ruleStatus[rule.ID()] = *catConfig.Enabled + } + } + } + + // Apply rule config from list entries without match + for id, override := range l.ruleOverrides() { + if override.disabled != nil { + ruleStatus[id] = !*override.disabled + continue + } + if override.present { + ruleStatus[id] = true + } + } + + var enabled []RuleRunner[T] + for id, enabledFlag := range ruleStatus { + if enabledFlag { + if rule, ok := l.registry.GetRule(id); ok { + enabled = append(enabled, rule) + } + } + } + + // Sort for deterministic order + sort.Slice(enabled, func(i, j int) bool { + return enabled[i].ID() < enabled[j].ID() + }) + + return enabled +} + +func (l *Linter[T]) getRuleConfig(ruleID string) RuleConfig { + // Start with default config + config := RuleConfig{} + + // Apply category config + if rule, ok := l.registry.GetRule(ruleID); ok { + if catConfig, ok := l.config.Categories[rule.Category()]; ok { + if catConfig.Severity != nil { + config.Severity = catConfig.Severity + } + } + } + + // Apply rule config from list entries without match + if override, ok := l.ruleOverrides()[ruleID]; ok { + if override.severity != nil { + config.Severity = override.severity + } + if override.disabled != nil { + enabled := !*override.disabled + config.Enabled = &enabled + } + } + + return config +} + +func (l *Linter[T]) applySeverityOverrides(errs []error) []error { + for _, err := range errs { + var vErr *validation.Error + if errors.As(err, &vErr) { + config := l.getRuleConfig(vErr.Rule) + if config.Severity != nil { + vErr.Severity = *config.Severity + } + } + } + return errs +} + +// FilterErrors applies rule-level overrides and match filters to any errors. 
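+// Overrides come from the config's rule entries: entries with a Match pattern
+// only apply to errors whose message matches the pattern, and when several
+// entries match the same error the last one wins. Entries that disable a rule
+// drop matching errors from the returned slice entirely.
+//
+// A minimal usage sketch (variable names are illustrative):
+//
+//	filtered := lntr.FilterErrors(validationErrs)
+//	for _, err := range filtered {
+//		fmt.Println(err)
+//	}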
+func (l *Linter[T]) FilterErrors(errs []error) []error { + filters := l.buildMatchFilters() + + var filtered []error + for _, err := range errs { + var vErr *validation.Error + if !errors.As(err, &vErr) { + filtered = append(filtered, err) + continue + } + + updatedErr, include := applyMatchFilters(vErr, filters) + if include { + filtered = append(filtered, updatedErr) + } + } + + return filtered +} + +func (l *Linter[T]) formatOutput(errs []error) *Output { + return &Output{ + Results: errs, + Format: l.config.OutputFormat, + } +} + +func (l *Linter[T]) ruleOverrides() map[string]ruleOverride { + overrides := make(map[string]ruleOverride) + for _, entry := range l.config.Rules { + if entry.Match != nil { + continue + } + if entry.ID == "" { + continue + } + override := overrides[entry.ID] + override.present = true + if entry.Severity != nil { + override.severity = entry.Severity + } + if entry.Disabled != nil { + override.disabled = entry.Disabled + } + overrides[entry.ID] = override + } + return overrides +} + +func (l *Linter[T]) buildMatchFilters() []matchFilter { + var filters []matchFilter + for _, entry := range l.config.Rules { + if entry.ID == "" { + continue + } + + if entry.Match == nil { + if entry.Severity == nil && entry.Disabled == nil { + continue + } + filters = append(filters, matchFilter{ + ruleID: entry.ID, + pattern: nil, + severity: entry.Severity, + disabled: entry.Disabled, + }) + continue + } + filters = append(filters, matchFilter{ + ruleID: entry.ID, + pattern: entry.Match, + severity: entry.Severity, + disabled: entry.Disabled, + }) + } + return filters +} + +func applyMatchFilters(vErr *validation.Error, filters []matchFilter) (*validation.Error, bool) { + var ( + matched bool + severity *validation.Severity + disabled *bool + ) + + message := "" + if vErr.UnderlyingError != nil { + message = vErr.UnderlyingError.Error() + } + + for _, filter := range filters { + if filter.ruleID != "" && filter.ruleID != vErr.Rule { + continue + } + if filter.pattern != nil && !filter.pattern.MatchString(message) { + continue + } + + matched = true + if filter.severity != nil { + severity = filter.severity + } + if filter.disabled != nil { + disabled = filter.disabled + } + } + + if !matched { + return vErr, true + } + + if disabled != nil && *disabled { + return nil, false + } + + if severity != nil { + modifiedErr := *vErr + modifiedErr.Severity = *severity + return &modifiedErr, true + } + + return vErr, true +} + +// Output represents the result of linting +type Output struct { + Results []error + Format OutputFormat +} + +func (o *Output) HasErrors() bool { + for _, err := range o.Results { + var vErr *validation.Error + if errors.As(err, &vErr) { + if vErr.Severity == validation.SeverityError { + return true + } + } else { + // Non-validation errors are treated as errors + return true + } + } + return false +} + +func (o *Output) ErrorCount() int { + count := 0 + for _, err := range o.Results { + var vErr *validation.Error + if errors.As(err, &vErr) { + if vErr.Severity == validation.SeverityError { + count++ + } + } else { + count++ + } + } + return count +} + +func (o *Output) FormatText() string { + f := format.NewTextFormatter() + s, _ := f.Format(o.Results) + return s +} + +func (o *Output) FormatJSON() string { + f := format.NewJSONFormatter() + s, _ := f.Format(o.Results) + return s +} diff --git a/linter/linter_test.go b/linter/linter_test.go new file mode 100644 index 00000000..1259a808 --- /dev/null +++ b/linter/linter_test.go @@ -0,0 +1,701 @@ +package 
linter_test + +import ( + "context" + "errors" + "fmt" + "regexp" + "testing" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/pointer" + "github.com/speakeasy-api/openapi/validation" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +// Mock document type for testing +type MockDoc struct { + ID string +} + +// Mock rule for testing +type mockRule struct { + id string + category string + description string + link string + defaultSeverity validation.Severity + versions []string + summary string + runFunc func(ctx context.Context, docInfo *linter.DocumentInfo[*MockDoc], config *linter.RuleConfig) []error +} + +func (r *mockRule) ID() string { return r.id } +func (r *mockRule) Category() string { return r.category } +func (r *mockRule) Summary() string { return r.summary } +func (r *mockRule) Description() string { return r.description } +func (r *mockRule) Link() string { return r.link } +func (r *mockRule) DefaultSeverity() validation.Severity { return r.defaultSeverity } +func (r *mockRule) Versions() []string { return r.versions } + +func (r *mockRule) Run(ctx context.Context, docInfo *linter.DocumentInfo[*MockDoc], config *linter.RuleConfig) []error { + if r.runFunc != nil { + return r.runFunc(ctx, docInfo, config) + } + return nil +} + +func TestLinter_RuleSelection(t *testing.T) { + t.Parallel() + + t.Run("extends all includes all rules", func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + registry := linter.NewRegistry[*MockDoc]() + registry.Register(&mockRule{ + id: "test-rule-1", + category: "style", + defaultSeverity: validation.SeverityError, + runFunc: func(_ context.Context, _ *linter.DocumentInfo[*MockDoc], _ *linter.RuleConfig) []error { + return []error{validation.NewValidationError(validation.SeverityError, "test-rule-1", errors.New("test error"), nil)} + }, + }) + registry.Register(&mockRule{ + id: "test-rule-2", + category: "security", + defaultSeverity: validation.SeverityWarning, + runFunc: func(_ context.Context, _ *linter.DocumentInfo[*MockDoc], _ *linter.RuleConfig) []error { + return []error{validation.NewValidationError(validation.SeverityWarning, "test-rule-2", errors.New("test warning"), nil)} + }, + }) + + config := &linter.Config{ + Extends: []string{"all"}, + } + + lntr := linter.NewLinter(config, registry) + docInfo := &linter.DocumentInfo[*MockDoc]{ + Document: &MockDoc{ID: "test"}, + } + + output, err := lntr.Lint(ctx, docInfo, nil, nil) + require.NoError(t, err) + + // Should have errors from both rules + assert.Len(t, output.Results, 2) + }) + + t.Run("disabled rule not executed", func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + registry := linter.NewRegistry[*MockDoc]() + registry.Register(&mockRule{ + id: "test-rule-1", + category: "style", + defaultSeverity: validation.SeverityError, + runFunc: func(_ context.Context, _ *linter.DocumentInfo[*MockDoc], _ *linter.RuleConfig) []error { + return []error{validation.NewValidationError(validation.SeverityError, "test-rule-1", errors.New("test error"), nil)} + }, + }) + + config := &linter.Config{ + Extends: []string{"all"}, + Rules: []linter.RuleEntry{ + { + ID: "test-rule-1", + Disabled: pointer.From(true), + }, + }, + } + + lntr := linter.NewLinter(config, registry) + docInfo := &linter.DocumentInfo[*MockDoc]{ + Document: &MockDoc{ID: "test"}, + } + + output, err := lntr.Lint(ctx, docInfo, nil, nil) + require.NoError(t, err) + + // Should have no errors since rule is disabled + assert.Empty(t, output.Results) + }) + + 
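+	// Category-level `enabled: false` should cascade to every rule registered under that category.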
t.Run("category disabled affects all rules in category", func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + registry := linter.NewRegistry[*MockDoc]() + registry.Register(&mockRule{ + id: "style-rule-1", + category: "style", + defaultSeverity: validation.SeverityError, + runFunc: func(_ context.Context, _ *linter.DocumentInfo[*MockDoc], _ *linter.RuleConfig) []error { + return []error{validation.NewValidationError(validation.SeverityError, "style-rule-1", errors.New("style error 1"), nil)} + }, + }) + registry.Register(&mockRule{ + id: "style-rule-2", + category: "style", + defaultSeverity: validation.SeverityError, + runFunc: func(_ context.Context, _ *linter.DocumentInfo[*MockDoc], _ *linter.RuleConfig) []error { + return []error{validation.NewValidationError(validation.SeverityError, "style-rule-2", errors.New("style error 2"), nil)} + }, + }) + registry.Register(&mockRule{ + id: "security-rule-1", + category: "security", + defaultSeverity: validation.SeverityError, + runFunc: func(_ context.Context, _ *linter.DocumentInfo[*MockDoc], _ *linter.RuleConfig) []error { + return []error{validation.NewValidationError(validation.SeverityError, "security-rule-1", errors.New("security error"), nil)} + }, + }) + + falseVal := false + config := &linter.Config{ + Extends: []string{"all"}, + Categories: map[string]linter.CategoryConfig{ + "style": { + Enabled: &falseVal, + }, + }, + } + + lntr := linter.NewLinter(config, registry) + docInfo := &linter.DocumentInfo[*MockDoc]{ + Document: &MockDoc{ID: "test"}, + } + + output, err := lntr.Lint(ctx, docInfo, nil, nil) + require.NoError(t, err) + + // Should only have security error, style rules disabled + require.Len(t, output.Results, 1) + assert.Contains(t, output.Results[0].Error(), "security-rule-1") + }) +} + +func TestLinter_SeverityOverrides(t *testing.T) { + t.Parallel() + + t.Run("rule severity override", func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + registry := linter.NewRegistry[*MockDoc]() + registry.Register(&mockRule{ + id: "test-rule", + category: "style", + defaultSeverity: validation.SeverityError, + runFunc: func(_ context.Context, _ *linter.DocumentInfo[*MockDoc], _ *linter.RuleConfig) []error { + return []error{validation.NewValidationError(validation.SeverityError, "test-rule", errors.New("test error"), nil)} + }, + }) + + warningSeverity := validation.SeverityWarning + config := &linter.Config{ + Extends: []string{"all"}, + Rules: []linter.RuleEntry{ + { + ID: "test-rule", + Severity: &warningSeverity, + }, + }, + } + + lntr := linter.NewLinter(config, registry) + docInfo := &linter.DocumentInfo[*MockDoc]{ + Document: &MockDoc{ID: "test"}, + } + + output, err := lntr.Lint(ctx, docInfo, nil, nil) + require.NoError(t, err) + + require.Len(t, output.Results, 1) + var vErr *validation.Error + require.ErrorAs(t, output.Results[0], &vErr) + assert.Equal(t, validation.SeverityWarning, vErr.Severity) + }) + + t.Run("category severity override", func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + registry := linter.NewRegistry[*MockDoc]() + registry.Register(&mockRule{ + id: "style-rule", + category: "style", + defaultSeverity: validation.SeverityError, + runFunc: func(_ context.Context, _ *linter.DocumentInfo[*MockDoc], _ *linter.RuleConfig) []error { + return []error{validation.NewValidationError(validation.SeverityError, "style-rule", errors.New("style error"), nil)} + }, + }) + + warningSeverity := validation.SeverityWarning + config := &linter.Config{ + Extends: []string{"all"}, + Categories: 
map[string]linter.CategoryConfig{ + "style": { + Severity: &warningSeverity, + }, + }, + } + + lntr := linter.NewLinter(config, registry) + docInfo := &linter.DocumentInfo[*MockDoc]{ + Document: &MockDoc{ID: "test"}, + } + + output, err := lntr.Lint(ctx, docInfo, nil, nil) + require.NoError(t, err) + + require.Len(t, output.Results, 1) + var vErr *validation.Error + require.ErrorAs(t, output.Results[0], &vErr) + assert.Equal(t, validation.SeverityWarning, vErr.Severity) + }) + + t.Run("rule severity override takes precedence over category", func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + registry := linter.NewRegistry[*MockDoc]() + registry.Register(&mockRule{ + id: "style-rule", + category: "style", + defaultSeverity: validation.SeverityError, + runFunc: func(_ context.Context, _ *linter.DocumentInfo[*MockDoc], _ *linter.RuleConfig) []error { + return []error{validation.NewValidationError(validation.SeverityError, "style-rule", errors.New("style error"), nil)} + }, + }) + + warningSeverity := validation.SeverityWarning + hintSeverity := validation.SeverityHint + config := &linter.Config{ + Extends: []string{"all"}, + Categories: map[string]linter.CategoryConfig{ + "style": { + Severity: &warningSeverity, + }, + }, + Rules: []linter.RuleEntry{ + { + ID: "style-rule", + Severity: &hintSeverity, + }, + }, + } + + lntr := linter.NewLinter(config, registry) + docInfo := &linter.DocumentInfo[*MockDoc]{ + Document: &MockDoc{ID: "test"}, + } + + output, err := lntr.Lint(ctx, docInfo, nil, nil) + require.NoError(t, err) + + require.Len(t, output.Results, 1) + var vErr *validation.Error + require.ErrorAs(t, output.Results[0], &vErr) + // Rule severity should override category severity + assert.Equal(t, validation.SeverityHint, vErr.Severity) + }) +} + +func TestLinter_PreExistingErrors(t *testing.T) { + t.Parallel() + ctx := t.Context() + + registry := linter.NewRegistry[*MockDoc]() + registry.Register(&mockRule{ + id: "test-rule", + category: "style", + defaultSeverity: validation.SeverityError, + runFunc: func(_ context.Context, _ *linter.DocumentInfo[*MockDoc], _ *linter.RuleConfig) []error { + return []error{validation.NewValidationError(validation.SeverityError, "test-rule", errors.New("lint error"), nil)} + }, + }) + + config := &linter.Config{ + Extends: []string{"all"}, + } + + lntr := linter.NewLinter(config, registry) + docInfo := &linter.DocumentInfo[*MockDoc]{ + Document: &MockDoc{ID: "test"}, + } + + preExistingErrs := []error{ + validation.NewValidationError(validation.SeverityError, "validation-required", errors.New("validation error"), nil), + } + + output, err := lntr.Lint(ctx, docInfo, preExistingErrs, nil) + require.NoError(t, err) + + // Should include both pre-existing and lint errors + assert.Len(t, output.Results, 2) +} + +func TestLinter_FilterErrors_RuleLevelOverride(t *testing.T) { + t.Parallel() + + warningSeverity := validation.SeverityWarning + config := &linter.Config{ + Extends: []string{"all"}, + Rules: []linter.RuleEntry{ + { + ID: "validation-required", + Severity: &warningSeverity, + }, + }, + } + + lntr := linter.NewLinter(config, linter.NewRegistry[*MockDoc]()) + input := []error{ + validation.NewValidationError(validation.SeverityError, "validation-required", errors.New("validation error"), nil), + } + + filtered := lntr.FilterErrors(input) + require.Len(t, filtered, 1) + + var vErr *validation.Error + require.ErrorAs(t, filtered[0], &vErr) + assert.Equal(t, validation.SeverityWarning, vErr.Severity) +} + +func 
TestLinter_FilterErrors_UnknownRuleNoMatch_Passthrough(t *testing.T) { + t.Parallel() + + config := &linter.Config{ + Extends: []string{"all"}, + Rules: []linter.RuleEntry{ + { + ID: "validation-required", + }, + }, + } + + lntr := linter.NewLinter(config, linter.NewRegistry[*MockDoc]()) + input := []error{ + validation.NewValidationError(validation.SeverityError, "validation-required", errors.New("validation error"), nil), + } + + filtered := lntr.FilterErrors(input) + require.Len(t, filtered, 1) + + var vErr *validation.Error + require.ErrorAs(t, filtered[0], &vErr) + assert.Equal(t, validation.SeverityError, vErr.Severity) +} + +func TestLinter_FilterErrors_MatchOrder_LastWins(t *testing.T) { + t.Parallel() + + warningSeverity := validation.SeverityWarning + hintSeverity := validation.SeverityHint + config := &linter.Config{ + Extends: []string{"all"}, + Rules: []linter.RuleEntry{ + { + ID: "validation-required", + Match: regexp.MustCompile(".*title.*"), + Severity: &warningSeverity, + }, + { + ID: "validation-required", + Match: regexp.MustCompile(".*title.*"), + Severity: &hintSeverity, + }, + }, + } + + lntr := linter.NewLinter(config, linter.NewRegistry[*MockDoc]()) + input := []error{ + validation.NewValidationError(validation.SeverityError, "validation-required", errors.New("info.title is required"), nil), + } + + filtered := lntr.FilterErrors(input) + require.Len(t, filtered, 1) + + var vErr *validation.Error + require.ErrorAs(t, filtered[0], &vErr) + assert.Equal(t, validation.SeverityHint, vErr.Severity) +} + +func TestLinter_FilterErrors_MatchDisable(t *testing.T) { + t.Parallel() + + disabled := true + config := &linter.Config{ + Extends: []string{"all"}, + Rules: []linter.RuleEntry{ + { + ID: "validation-required", + Match: regexp.MustCompile(".*title.*"), + Disabled: &disabled, + }, + }, + } + + lntr := linter.NewLinter(config, linter.NewRegistry[*MockDoc]()) + input := []error{ + validation.NewValidationError(validation.SeverityError, "validation-required", errors.New("info.title is required"), nil), + } + + filtered := lntr.FilterErrors(input) + assert.Empty(t, filtered) +} + +func TestLinter_ParallelExecution(t *testing.T) { + t.Parallel() + ctx := t.Context() + + registry := linter.NewRegistry[*MockDoc]() + + // Create multiple rules that all run + for i := 0; i < 10; i++ { + ruleID := fmt.Sprintf("test-rule-%d", i) + registry.Register(&mockRule{ + id: ruleID, + category: "test", + defaultSeverity: validation.SeverityError, + runFunc: func(_ context.Context, _ *linter.DocumentInfo[*MockDoc], _ *linter.RuleConfig) []error { + return []error{validation.NewValidationError(validation.SeverityError, ruleID, fmt.Errorf("error from %s", ruleID), nil)} + }, + }) + } + + config := &linter.Config{ + Extends: []string{"all"}, + } + + lntr := linter.NewLinter(config, registry) + docInfo := &linter.DocumentInfo[*MockDoc]{ + Document: &MockDoc{ID: "test"}, + } + + output, err := lntr.Lint(ctx, docInfo, nil, nil) + require.NoError(t, err) + + // Should have errors from all 10 rules + assert.Len(t, output.Results, 10) + + // Verify all rules executed + foundRules := make(map[string]bool) + for _, result := range output.Results { + var vErr *validation.Error + if errors.As(result, &vErr) { + foundRules[vErr.Rule] = true + } + } + assert.Len(t, foundRules, 10, "all rules should have executed") +} + +func TestOutput_HasErrors(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + results []error + hasErrors bool + }{ + { + name: "no errors", + results: []error{}, + 
hasErrors: false, + }, + { + name: "only warnings", + results: []error{ + validation.NewValidationError(validation.SeverityWarning, "test-rule", errors.New("warning"), nil), + }, + hasErrors: false, + }, + { + name: "only hints", + results: []error{ + validation.NewValidationError(validation.SeverityHint, "test-rule", errors.New("hint"), nil), + }, + hasErrors: false, + }, + { + name: "has error severity", + results: []error{ + validation.NewValidationError(validation.SeverityError, "test-rule", errors.New("error"), nil), + }, + hasErrors: true, + }, + { + name: "mixed severities with error", + results: []error{ + validation.NewValidationError(validation.SeverityWarning, "test-rule", errors.New("warning"), nil), + validation.NewValidationError(validation.SeverityError, "test-rule", errors.New("error"), nil), + }, + hasErrors: true, + }, + { + name: "non-validation error treated as error", + results: []error{ + errors.New("plain error"), + }, + hasErrors: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + output := &linter.Output{ + Results: tt.results, + } + + assert.Equal(t, tt.hasErrors, output.HasErrors()) + }) + } +} + +func TestOutput_ErrorCount(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + results []error + errorCount int + }{ + { + name: "no errors", + results: []error{}, + errorCount: 0, + }, + { + name: "only warnings", + results: []error{ + validation.NewValidationError(validation.SeverityWarning, "test-rule", errors.New("warning"), nil), + }, + errorCount: 0, + }, + { + name: "one error", + results: []error{ + validation.NewValidationError(validation.SeverityError, "test-rule", errors.New("error"), nil), + }, + errorCount: 1, + }, + { + name: "mixed severities", + results: []error{ + validation.NewValidationError(validation.SeverityWarning, "test-rule", errors.New("warning"), nil), + validation.NewValidationError(validation.SeverityError, "test-rule-1", errors.New("error 1"), nil), + validation.NewValidationError(validation.SeverityHint, "test-rule", errors.New("hint"), nil), + validation.NewValidationError(validation.SeverityError, "test-rule-2", errors.New("error 2"), nil), + }, + errorCount: 2, + }, + { + name: "non-validation errors counted", + results: []error{ + errors.New("plain error 1"), + validation.NewValidationError(validation.SeverityWarning, "test-rule", errors.New("warning"), nil), + errors.New("plain error 2"), + }, + errorCount: 2, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + output := &linter.Output{ + Results: tt.results, + } + + assert.Equal(t, tt.errorCount, output.ErrorCount()) + }) + } +} + +func TestOutput_Formatting(t *testing.T) { + t.Parallel() + + output := &linter.Output{ + Results: []error{ + validation.NewValidationError(validation.SeverityError, "test-rule", errors.New("test error"), nil), + }, + Format: linter.OutputFormatText, + } + + t.Run("format text non-empty", func(t *testing.T) { + t.Parallel() + text := output.FormatText() + assert.NotEmpty(t, text) + assert.Contains(t, text, "test-rule") + }) + + t.Run("format json non-empty", func(t *testing.T) { + t.Parallel() + json := output.FormatJSON() + assert.NotEmpty(t, json) + assert.Contains(t, json, "test-rule") + }) +} + +func TestLinter_ErrorSorting(t *testing.T) { + t.Parallel() + ctx := t.Context() + + registry := linter.NewRegistry[*MockDoc]() + registry.Register(&mockRule{ + id: "test-rule", + category: "style", + defaultSeverity: validation.SeverityError, + 
runFunc: func(_ context.Context, _ *linter.DocumentInfo[*MockDoc], _ *linter.RuleConfig) []error { + // Return errors in unsorted order + return []error{ + validation.NewValidationError(validation.SeverityError, "test-rule", errors.New("error 3"), nil), + validation.NewValidationError(validation.SeverityError, "test-rule", errors.New("error 1"), nil), + validation.NewValidationError(validation.SeverityError, "test-rule", errors.New("error 2"), nil), + } + }, + }) + + config := &linter.Config{ + Extends: []string{"all"}, + } + + lntr := linter.NewLinter(config, registry) + docInfo := &linter.DocumentInfo[*MockDoc]{ + Document: &MockDoc{ID: "test"}, + } + + output, err := lntr.Lint(ctx, docInfo, nil, nil) + require.NoError(t, err) + + // Errors should be sorted by validation.SortValidationErrors + assert.Len(t, output.Results, 3) +} + +func TestLinter_Registry(t *testing.T) { + t.Parallel() + + registry := linter.NewRegistry[*MockDoc]() + registry.Register(&mockRule{ + id: "test-rule", + category: "style", + defaultSeverity: validation.SeverityError, + }) + + config := &linter.Config{} + lntr := linter.NewLinter(config, registry) + + // Should be able to access registry for documentation + reg := lntr.Registry() + require.NotNil(t, reg) + + rule, exists := reg.GetRule("test-rule") + assert.True(t, exists) + assert.Equal(t, "test-rule", rule.ID()) +} diff --git a/linter/registry.go b/linter/registry.go new file mode 100644 index 00000000..4366e8ca --- /dev/null +++ b/linter/registry.go @@ -0,0 +1,125 @@ +package linter + +import ( + "fmt" + "sort" +) + +// Registry holds registered rules +type Registry[T any] struct { + rules map[string]RuleRunner[T] + rulesets map[string][]string // ruleset name -> rule IDs +} + +// NewRegistry creates a new rule registry +func NewRegistry[T any]() *Registry[T] { + return &Registry[T]{ + rules: make(map[string]RuleRunner[T]), + rulesets: make(map[string][]string), + } +} + +// Register registers a rule +func (r *Registry[T]) Register(rule RuleRunner[T]) { + r.rules[rule.ID()] = rule +} + +// RegisterRuleset registers a ruleset +func (r *Registry[T]) RegisterRuleset(name string, ruleIDs []string) error { + if _, exists := r.rulesets[name]; exists { + return fmt.Errorf("ruleset %q already registered", name) + } + + // Validate rule IDs + for _, id := range ruleIDs { + if _, exists := r.rules[id]; !exists { + return fmt.Errorf("rule %q in ruleset %q not found", id, name) + } + } + + r.rulesets[name] = ruleIDs + return nil +} + +// GetRule returns a rule by ID +func (r *Registry[T]) GetRule(id string) (RuleRunner[T], bool) { + rule, ok := r.rules[id] + return rule, ok +} + +// GetRuleset returns rule IDs for a ruleset +func (r *Registry[T]) GetRuleset(name string) ([]string, bool) { + if name == "all" { + return r.AllRuleIDs(), true + } + ids, ok := r.rulesets[name] + return ids, ok +} + +// AllRules returns all registered rules +func (r *Registry[T]) AllRules() []RuleRunner[T] { + rules := make([]RuleRunner[T], 0, len(r.rules)) + for _, rule := range r.rules { + rules = append(rules, rule) + } + // Sort for deterministic order + sort.Slice(rules, func(i, j int) bool { + return rules[i].ID() < rules[j].ID() + }) + return rules +} + +// AllRuleIDs returns all registered rule IDs +func (r *Registry[T]) AllRuleIDs() []string { + ids := make([]string, 0, len(r.rules)) + for id := range r.rules { + ids = append(ids, id) + } + sort.Strings(ids) + return ids +} + +// AllCategories returns all unique categories +func (r *Registry[T]) AllCategories() []string { + 
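+	// Collect categories into a set to deduplicate, then sort for deterministic output.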
categories := make(map[string]bool) + for _, rule := range r.rules { + categories[rule.Category()] = true + } + + cats := make([]string, 0, len(categories)) + for cat := range categories { + cats = append(cats, cat) + } + sort.Strings(cats) + return cats +} + +// AllRulesets returns all registered ruleset names +func (r *Registry[T]) AllRulesets() []string { + names := make([]string, 0, len(r.rulesets)+1) + names = append(names, "all") + for name := range r.rulesets { + names = append(names, name) + } + sort.Strings(names) + return names +} + +// RulesetsContaining returns names of rulesets that contain the given rule ID +func (r *Registry[T]) RulesetsContaining(ruleID string) []string { + var sets []string + + // "all" always contains everything + sets = append(sets, "all") + + for name, ids := range r.rulesets { + for _, id := range ids { + if id == ruleID { + sets = append(sets, name) + break + } + } + } + sort.Strings(sets) + return sets +} diff --git a/linter/registry_test.go b/linter/registry_test.go new file mode 100644 index 00000000..232123b6 --- /dev/null +++ b/linter/registry_test.go @@ -0,0 +1,127 @@ +package linter_test + +import ( + "testing" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/validation" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestRegistry_RegisterRuleset(t *testing.T) { + t.Parallel() + + t.Run("successfully register ruleset", func(t *testing.T) { + t.Parallel() + + registry := linter.NewRegistry[*MockDoc]() + registry.Register(&mockRule{id: "rule-1", category: "style", defaultSeverity: validation.SeverityError}) + registry.Register(&mockRule{id: "rule-2", category: "style", defaultSeverity: validation.SeverityError}) + + err := registry.RegisterRuleset("recommended", []string{"rule-1", "rule-2"}) + require.NoError(t, err) + + ruleIDs, exists := registry.GetRuleset("recommended") + assert.True(t, exists) + assert.ElementsMatch(t, []string{"rule-1", "rule-2"}, ruleIDs) + }) + + t.Run("error when rule not found", func(t *testing.T) { + t.Parallel() + + registry := linter.NewRegistry[*MockDoc]() + registry.Register(&mockRule{id: "rule-1", category: "style", defaultSeverity: validation.SeverityError}) + + err := registry.RegisterRuleset("test", []string{"rule-1", "nonexistent"}) + require.Error(t, err) + assert.Contains(t, err.Error(), "nonexistent") + assert.Contains(t, err.Error(), "not found") + }) + + t.Run("error when ruleset already registered", func(t *testing.T) { + t.Parallel() + + registry := linter.NewRegistry[*MockDoc]() + registry.Register(&mockRule{id: "rule-1", category: "style", defaultSeverity: validation.SeverityError}) + + err := registry.RegisterRuleset("test", []string{"rule-1"}) + require.NoError(t, err) + + err = registry.RegisterRuleset("test", []string{"rule-1"}) + require.Error(t, err) + assert.Contains(t, err.Error(), "already registered") + }) +} + +func TestRegistry_AllCategories(t *testing.T) { + t.Parallel() + + registry := linter.NewRegistry[*MockDoc]() + registry.Register(&mockRule{id: "rule-1", category: "style", defaultSeverity: validation.SeverityError}) + registry.Register(&mockRule{id: "rule-2", category: "style", defaultSeverity: validation.SeverityError}) + registry.Register(&mockRule{id: "rule-3", category: "security", defaultSeverity: validation.SeverityError}) + registry.Register(&mockRule{id: "rule-4", category: "best-practices", defaultSeverity: validation.SeverityError}) + + categories := registry.AllCategories() + // Should be sorted + 
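+	// (alphabetically, independent of registration order)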
assert.Equal(t, []string{"best-practices", "security", "style"}, categories) +} + +func TestRegistry_AllRulesets(t *testing.T) { + t.Parallel() + + registry := linter.NewRegistry[*MockDoc]() + registry.Register(&mockRule{id: "rule-1", category: "style", defaultSeverity: validation.SeverityError}) + require.NoError(t, registry.RegisterRuleset("recommended", []string{"rule-1"})) + require.NoError(t, registry.RegisterRuleset("strict", []string{"rule-1"})) + + rulesets := registry.AllRulesets() + assert.Contains(t, rulesets, "all") + assert.Contains(t, rulesets, "recommended") + assert.Contains(t, rulesets, "strict") + // Should be sorted + assert.Equal(t, "all", rulesets[0]) +} + +func TestRegistry_RulesetsContaining(t *testing.T) { + t.Parallel() + + registry := linter.NewRegistry[*MockDoc]() + registry.Register(&mockRule{id: "rule-1", category: "style", defaultSeverity: validation.SeverityError}) + registry.Register(&mockRule{id: "rule-2", category: "security", defaultSeverity: validation.SeverityError}) + require.NoError(t, registry.RegisterRuleset("recommended", []string{"rule-1"})) + require.NoError(t, registry.RegisterRuleset("strict", []string{"rule-1", "rule-2"})) + + t.Run("rule in multiple rulesets", func(t *testing.T) { + t.Parallel() + rulesets := registry.RulesetsContaining("rule-1") + assert.Contains(t, rulesets, "all") + assert.Contains(t, rulesets, "recommended") + assert.Contains(t, rulesets, "strict") + }) + + t.Run("rule in subset of rulesets", func(t *testing.T) { + t.Parallel() + rulesets := registry.RulesetsContaining("rule-2") + assert.Contains(t, rulesets, "all") + assert.Contains(t, rulesets, "strict") + assert.NotContains(t, rulesets, "recommended") + }) +} + +func TestRegistry_GetRuleset_UnknownReturnsFalse(t *testing.T) { + t.Parallel() + + registry := linter.NewRegistry[*MockDoc]() + _, exists := registry.GetRuleset("nonexistent") + assert.False(t, exists) +} + +func TestRegistry_GetRule_UnknownReturnsFalse(t *testing.T) { + t.Parallel() + + registry := linter.NewRegistry[*MockDoc]() + _, exists := registry.GetRule("nonexistent") + assert.False(t, exists) +} diff --git a/linter/rule.go b/linter/rule.go new file mode 100644 index 00000000..dd53c623 --- /dev/null +++ b/linter/rule.go @@ -0,0 +1,70 @@ +package linter + +import ( + "context" + + "github.com/speakeasy-api/openapi/validation" +) + +// Rule represents a single linting rule +type Rule interface { + // ID returns the unique identifier for this rule (e.g., "style-path-params") + ID() string + + // Category returns the rule category (e.g., "style", "validation", "security") + Category() string + + // Description returns a human-readable description of what the rule checks + Description() string + + // Summary returns a short summary of what the rule checks + Summary() string + + // Link returns an optional URL to documentation for this rule + Link() string + + // DefaultSeverity returns the default severity level for this rule + DefaultSeverity() validation.Severity + + // Versions returns the spec versions this rule applies to (nil = all versions) + Versions() []string +} + +// RuleRunner is the interface rules must implement to execute their logic +// This is separate from Rule to allow different runner types for different specs +type RuleRunner[T any] interface { + Rule + + // Run executes the rule against the provided document + // DocumentInfo provides both the document and its location for resolving external references + // Returns any issues found as validation errors + Run(ctx context.Context, 
docInfo *DocumentInfo[T], config *RuleConfig) []error +} + +// DocumentedRule provides extended documentation for a rule +type DocumentedRule interface { + Rule + + // GoodExample returns YAML showing correct usage + GoodExample() string + + // BadExample returns YAML showing incorrect usage + BadExample() string + + // Rationale explains why this rule exists + Rationale() string + + // FixAvailable returns true if the rule provides auto-fix suggestions + FixAvailable() bool +} + +// ConfigurableRule indicates a rule has configurable options +type ConfigurableRule interface { + Rule + + // ConfigSchema returns JSON Schema for rule-specific options + ConfigSchema() map[string]any + + // ConfigDefaults returns default values for options + ConfigDefaults() map[string]any +} diff --git a/marshaller/model.go b/marshaller/model.go index daee7cec..ec7feae7 100644 --- a/marshaller/model.go +++ b/marshaller/model.go @@ -49,8 +49,9 @@ type Model[T any] struct { Valid bool core T - objectCache *sync.Map - documentCache *sync.Map + objectCache *sync.Map + documentCache *sync.Map + externalDocumentCache *sync.Map } // GetCore will return the low level representation of the model. @@ -119,37 +120,45 @@ func (m *Model[T]) GetRootNodeColumn() int { return -1 } -func (m *Model[T]) GetPropertyLine(prop string) int { +func (m *Model[T]) GetPropertyNode(prop string) *yaml.Node { // Use reflection to find the property in the core and then see if it is a marshaller.Node and if it is get the line of the key node if set if m == nil { - return -1 + return nil } // Get reflection value of the core coreValue := reflect.ValueOf(&m.core).Elem() if !coreValue.IsValid() { - return -1 + return nil } // Find the field by name fieldValue := coreValue.FieldByName(prop) if !fieldValue.IsValid() { - return -1 + return nil } // Check if the field implements the interface we need to get the key node // We need to check if it has a GetKeyNode method or if it's a Node type fieldInterface := fieldValue.Interface() + var keyNode *yaml.Node + // Try to cast to a Node-like interface that has GetKeyNode method if nodeWithKeyNode, ok := fieldInterface.(interface{ GetKeyNode() *yaml.Node }); ok { - keyNode := nodeWithKeyNode.GetKeyNode() - if keyNode != nil { - return keyNode.Line - } + keyNode = nodeWithKeyNode.GetKeyNode() + } - return -1 + return keyNode +} + +func (m *Model[T]) GetPropertyLine(prop string) int { + node := m.GetPropertyNode(prop) + if node == nil { + return -1 + } + return node.Line } // SetCore implements CoreAccessor interface @@ -195,6 +204,17 @@ func (m *Model[T]) StoreReferenceDocumentInCache(key string, doc []byte) { m.documentCache.Store(key, doc) } +func (m *Model[T]) GetCachedExternalDocument(key string) (any, bool) { + if m == nil || m.externalDocumentCache == nil { + return nil, false + } + return m.externalDocumentCache.Load(key) +} + +func (m *Model[T]) StoreExternalDocumentInCache(key string, doc any) { + m.externalDocumentCache.Store(key, doc) +} + func (m *Model[T]) InitCache() { if m.objectCache == nil { m.objectCache = &sync.Map{} @@ -202,4 +222,7 @@ func (m *Model[T]) InitCache() { if m.documentCache == nil { m.documentCache = &sync.Map{} } + if m.externalDocumentCache == nil { + m.externalDocumentCache = &sync.Map{} + } } diff --git a/marshaller/model_test.go b/marshaller/model_test.go index c96e4dee..1e1822c1 100644 --- a/marshaller/model_test.go +++ b/marshaller/model_test.go @@ -9,6 +9,153 @@ import ( "gopkg.in/yaml.v3" ) +// TestModel_GetPropertyNode_Success tests the GetPropertyNode method 
with valid inputs +func TestModel_GetPropertyNode_Success(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + setup func() *marshaller.Model[core.TestPrimitiveModel] + prop string + expected int + }{ + { + name: "property with key node returns line number", + setup: func() *marshaller.Model[core.TestPrimitiveModel] { + keyNode := &yaml.Node{Line: 42} + coreModel := core.TestPrimitiveModel{ + StringField: marshaller.Node[string]{ + KeyNode: keyNode, + Key: "stringField", + Value: "testValue", + Present: true, + }, + } + model := &marshaller.Model[core.TestPrimitiveModel]{ + Valid: true, + } + model.SetCore(&coreModel) + return model + }, + prop: "StringField", + expected: 42, + }, + { + name: "property with nil key node returns -1", + setup: func() *marshaller.Model[core.TestPrimitiveModel] { + coreModel := core.TestPrimitiveModel{ + StringField: marshaller.Node[string]{ + KeyNode: nil, + Key: "stringField", + Value: "testValue", + Present: true, + }, + } + model := &marshaller.Model[core.TestPrimitiveModel]{ + Valid: true, + } + model.SetCore(&coreModel) + return model + }, + prop: "StringField", + expected: -1, + }, + { + name: "bool field with key node returns line number", + setup: func() *marshaller.Model[core.TestPrimitiveModel] { + keyNode := &yaml.Node{Line: 15} + coreModel := core.TestPrimitiveModel{ + BoolField: marshaller.Node[bool]{ + KeyNode: keyNode, + Key: "boolField", + Value: true, + Present: true, + }, + } + model := &marshaller.Model[core.TestPrimitiveModel]{ + Valid: true, + } + model.SetCore(&coreModel) + return model + }, + prop: "BoolField", + expected: 15, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + model := tt.setup() + actual := model.GetPropertyNode(tt.prop) + line := -1 + if actual != nil { + line = actual.Line + } + assert.Equal(t, tt.expected, line, "line number should match expected value") + }) + } +} + +// TestModel_GetPropertyNode_Error tests the GetPropertyNode method with error conditions +func TestModel_GetPropertyNode_Error(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + setup func() *marshaller.Model[core.TestPrimitiveModel] + prop string + expected int + }{ + { + name: "nil model returns -1", + setup: func() *marshaller.Model[core.TestPrimitiveModel] { + return nil + }, + prop: "StringField", + expected: -1, + }, + { + name: "non-existent property returns -1", + setup: func() *marshaller.Model[core.TestPrimitiveModel] { + return &marshaller.Model[core.TestPrimitiveModel]{} + }, + prop: "NonExistentField", + expected: -1, + }, + { + name: "property that is not a Node returns -1", + setup: func() *marshaller.Model[core.TestPrimitiveModel] { + coreModel := core.TestPrimitiveModel{ + CoreModel: marshaller.CoreModel{}, // This field doesn't implement GetKeyNode + } + model := &marshaller.Model[core.TestPrimitiveModel]{ + Valid: true, + } + model.SetCore(&coreModel) + return model + }, + prop: "CoreModel", + expected: -1, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + model := tt.setup() + actual := model.GetPropertyNode(tt.prop) + if actual == nil { + assert.Equal(t, tt.expected, -1, "should return -1 for error conditions") + } else { + assert.Equal(t, tt.expected, actual.Line, "line number should match expected value") + } + }) + } +} + // TestModel_GetPropertyLine_Success tests the GetPropertyLine method with valid inputs func TestModel_GetPropertyLine_Success(t *testing.T) { t.Parallel() diff --git 
a/marshaller/nodecollector.go b/marshaller/nodecollector.go new file mode 100644 index 00000000..39d1b5e2 --- /dev/null +++ b/marshaller/nodecollector.go @@ -0,0 +1,238 @@ +package marshaller + +import ( + "reflect" + + "gopkg.in/yaml.v3" +) + +// NodeCollector provides utilities for collecting yaml.Node pointers from core models. +// This is useful for features that need to map nodes to contexts (like operation tracking). + +// CollectLeafNodes extracts all KeyNode and ValueNode pointers from marshaller.Node fields +// within a core model. It only returns nodes for "leaf" fields - those whose values are +// primitive types or slices/maps of primitives, not nested core models (which get visited +// separately by the walk). +// +// The returned nodes can be used for features like node-to-operation mapping where you +// need to track all yaml.Nodes within a model's scope. +func CollectLeafNodes(core any) []*yaml.Node { + if core == nil { + return nil + } + + var nodes []*yaml.Node + collectLeafNodesRecursive(reflect.ValueOf(core), &nodes, make(map[uintptr]bool)) + return nodes +} + +// collectLeafNodesRecursive traverses the struct using reflection to find marshaller.Node fields +func collectLeafNodesRecursive(v reflect.Value, nodes *[]*yaml.Node, visited map[uintptr]bool) { + // Handle pointers and interfaces + for v.Kind() == reflect.Ptr || v.Kind() == reflect.Interface { + if v.IsNil() { + return + } + v = v.Elem() + } + + // Only process structs + if v.Kind() != reflect.Struct { + return + } + + // Check for cycles (using pointer address of the struct) + if v.CanAddr() { + ptr := v.Addr().Pointer() + if visited[ptr] { + return + } + visited[ptr] = true + } + + t := v.Type() + + // Iterate through all fields + for i := 0; i < v.NumField(); i++ { + field := v.Field(i) + fieldType := t.Field(i) + + // Skip unexported fields + if !fieldType.IsExported() { + continue + } + + // Check if it's a marshaller.Node type by looking for KeyNode/ValueNode fields + if isNodeType(fieldType.Type) { + collectFromNodeField(field, nodes) + continue + } + + // Recurse into embedded structs (like CoreModel) + if fieldType.Anonymous { + collectLeafNodesRecursive(field, nodes, visited) + } + } +} + +// isNodeType checks if a type is marshaller.Node[T] by looking for characteristic fields +func isNodeType(t reflect.Type) bool { + // Handle pointers + for t.Kind() == reflect.Ptr { + t = t.Elem() + } + + if t.Kind() != reflect.Struct { + return false + } + + // Check for the characteristic fields of marshaller.Node + hasKeyNode := false + hasValueNode := false + hasPresent := false + + for i := 0; i < t.NumField(); i++ { + field := t.Field(i) + switch field.Name { + case "KeyNode": + if field.Type == reflect.TypeOf((*yaml.Node)(nil)) { + hasKeyNode = true + } + case "ValueNode": + if field.Type == reflect.TypeOf((*yaml.Node)(nil)) { + hasValueNode = true + } + case "Present": + if field.Type.Kind() == reflect.Bool { + hasPresent = true + } + } + } + + return hasKeyNode && hasValueNode && hasPresent +} + +// collectFromNodeField extracts nodes from a marshaller.Node field +func collectFromNodeField(field reflect.Value, nodes *[]*yaml.Node) { + // Handle pointers + for field.Kind() == reflect.Ptr { + if field.IsNil() { + return + } + field = field.Elem() + } + + if field.Kind() != reflect.Struct { + return + } + + // Get KeyNode and ValueNode fields + keyNodeField := field.FieldByName("KeyNode") + valueNodeField := field.FieldByName("ValueNode") + presentField := field.FieldByName("Present") + valueField := 
field.FieldByName("Value") + + // Only collect if present + if presentField.IsValid() && !presentField.Bool() { + return + } + + // Add KeyNode if not nil + if keyNodeField.IsValid() && !keyNodeField.IsNil() { + if node, ok := keyNodeField.Interface().(*yaml.Node); ok && node != nil { + *nodes = append(*nodes, node) + } + } + + // Add ValueNode if not nil + if valueNodeField.IsValid() && !valueNodeField.IsNil() { + if node, ok := valueNodeField.Interface().(*yaml.Node); ok && node != nil { + *nodes = append(*nodes, node) + + // If the Value is a primitive type (or slice/map of primitives), + // also collect child nodes from the ValueNode + if valueField.IsValid() && isLeafValueType(valueField.Type()) { + collectYAMLNodeChildren(node, nodes) + } + } + } +} + +// isLeafValueType returns true if the type represents a leaf value (primitive or container of primitives) +// rather than a core model that will be walked separately +func isLeafValueType(t reflect.Type) bool { + // Handle pointers + for t.Kind() == reflect.Ptr { + t = t.Elem() + } + + switch t.Kind() { + case reflect.Bool, reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, + reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, + reflect.Float32, reflect.Float64, reflect.String: + return true + + case reflect.Slice: + elemType := t.Elem() + // Slices of primitives are leaf types + // Slices of core models are not (they get walked) + return isLeafValueType(elemType) + + case reflect.Map: + // Maps with primitive keys and values are leaf types + return isLeafValueType(t.Key()) && isLeafValueType(t.Elem()) + + case reflect.Struct: + // Check if it's a CoreModeler (has GetRootNode method) + // If so, it's not a leaf - it will be walked separately + if hasCoreModelerMethod(t) { + return false + } + // Check if it's a marshaller.Node type + if isNodeType(t) { + // Get the inner value type and check that + valueField, found := t.FieldByName("Value") + if found { + return isLeafValueType(valueField.Type) + } + } + // Other structs might be leaf types (like custom value types) + return true + + case reflect.Interface: + // Can't determine at compile time - assume not leaf + return false + + default: + return false + } +} + +// hasCoreModelerMethod checks if a type implements GetRootNode() *yaml.Node +func hasCoreModelerMethod(t reflect.Type) bool { + // Check both value and pointer receiver + _, hasMethod := t.MethodByName("GetRootNode") + if hasMethod { + return true + } + if t.Kind() != reflect.Ptr { + ptrType := reflect.PointerTo(t) + _, hasMethod = ptrType.MethodByName("GetRootNode") + } + return hasMethod +} + +// collectYAMLNodeChildren adds all direct children of a YAML node to the nodes slice +// This is used for simple values like slices of strings where the individual items +// aren't core models but we still want to track their nodes +func collectYAMLNodeChildren(node *yaml.Node, nodes *[]*yaml.Node) { + if node == nil || node.Content == nil { + return + } + + for _, child := range node.Content { + if child != nil { + *nodes = append(*nodes, child) + } + } +} diff --git a/marshaller/nodecollector_test.go b/marshaller/nodecollector_test.go new file mode 100644 index 00000000..edc24445 --- /dev/null +++ b/marshaller/nodecollector_test.go @@ -0,0 +1,412 @@ +package marshaller_test + +import ( + "testing" + + "github.com/speakeasy-api/openapi/marshaller" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "gopkg.in/yaml.v3" +) + +// Test models to verify 
CollectLeafNodes behavior + +// SimpleModel has only primitive leaf fields - all should be collected +type SimpleModel struct { + marshaller.CoreModel + + StringField marshaller.Node[*string] `key:"stringField"` + IntField marshaller.Node[*int] `key:"intField"` + BoolField marshaller.Node[*bool] `key:"boolField"` +} + +// ModelWithSlice has a slice of primitives - all items should be collected +type ModelWithSlice struct { + marshaller.CoreModel + + Items marshaller.Node[[]string] `key:"items"` +} + +// ModelWithNodeSlice has a slice of Node[string] - all items should be collected +type ModelWithNodeSlice struct { + marshaller.CoreModel + + Tags marshaller.Node[[]marshaller.Node[string]] `key:"tags"` +} + +// NestedCoreModel represents a model that would be walked separately +type NestedCoreModel struct { + marshaller.CoreModel + + Name marshaller.Node[*string] `key:"name"` +} + +func (n *NestedCoreModel) GetRootNode() *yaml.Node { + return n.RootNode +} + +// ModelWithNestedCore has a nested core model - the nested model's nodes should NOT be collected +type ModelWithNestedCore struct { + marshaller.CoreModel + + Title marshaller.Node[*string] `key:"title"` + Nested marshaller.Node[*NestedCoreModel] `key:"nested"` +} + +// ModelWithSliceOfCoreModels has a slice of core models - those nodes should NOT be collected +type ModelWithSliceOfCoreModels struct { + marshaller.CoreModel + + Description marshaller.Node[*string] `key:"description"` + Children marshaller.Node[[]*NestedCoreModel] `key:"children"` +} + +func TestCollectLeafNodes_NilInput_Success(t *testing.T) { + t.Parallel() + + nodes := marshaller.CollectLeafNodes(nil) + assert.Nil(t, nodes, "should return nil for nil input") +} + +func TestCollectLeafNodes_SimpleModel_CollectsAllNodes(t *testing.T) { + t.Parallel() + + // Create YAML nodes + stringKeyNode := &yaml.Node{Kind: yaml.ScalarNode, Value: "stringField"} + stringValueNode := &yaml.Node{Kind: yaml.ScalarNode, Value: "hello"} + intKeyNode := &yaml.Node{Kind: yaml.ScalarNode, Value: "intField"} + intValueNode := &yaml.Node{Kind: yaml.ScalarNode, Value: "42"} + boolKeyNode := &yaml.Node{Kind: yaml.ScalarNode, Value: "boolField"} + boolValueNode := &yaml.Node{Kind: yaml.ScalarNode, Value: "true"} + + str := "hello" + intVal := 42 + boolVal := true + + model := &SimpleModel{ + StringField: marshaller.Node[*string]{ + KeyNode: stringKeyNode, + ValueNode: stringValueNode, + Value: &str, + Present: true, + }, + IntField: marshaller.Node[*int]{ + KeyNode: intKeyNode, + ValueNode: intValueNode, + Value: &intVal, + Present: true, + }, + BoolField: marshaller.Node[*bool]{ + KeyNode: boolKeyNode, + ValueNode: boolValueNode, + Value: &boolVal, + Present: true, + }, + } + + nodes := marshaller.CollectLeafNodes(model) + + // Should have 6 nodes (KeyNode + ValueNode for each of 3 fields) + require.Len(t, nodes, 6, "should collect all key and value nodes") + + // Verify all nodes are collected + nodeSet := make(map[*yaml.Node]bool) + for _, n := range nodes { + nodeSet[n] = true + } + + assert.True(t, nodeSet[stringKeyNode], "should include stringField key node") + assert.True(t, nodeSet[stringValueNode], "should include stringField value node") + assert.True(t, nodeSet[intKeyNode], "should include intField key node") + assert.True(t, nodeSet[intValueNode], "should include intField value node") + assert.True(t, nodeSet[boolKeyNode], "should include boolField key node") + assert.True(t, nodeSet[boolValueNode], "should include boolField value node") +} + +func 
TestCollectLeafNodes_NotPresent_SkipsField(t *testing.T) { + t.Parallel() + + stringKeyNode := &yaml.Node{Kind: yaml.ScalarNode, Value: "stringField"} + stringValueNode := &yaml.Node{Kind: yaml.ScalarNode, Value: "hello"} + intKeyNode := &yaml.Node{Kind: yaml.ScalarNode, Value: "intField"} + intValueNode := &yaml.Node{Kind: yaml.ScalarNode, Value: "42"} + + str := "hello" + + model := &SimpleModel{ + StringField: marshaller.Node[*string]{ + KeyNode: stringKeyNode, + ValueNode: stringValueNode, + Value: &str, + Present: true, + }, + IntField: marshaller.Node[*int]{ + KeyNode: intKeyNode, + ValueNode: intValueNode, + Value: nil, + Present: false, // Not present - should be skipped + }, + } + + nodes := marshaller.CollectLeafNodes(model) + + // Should have 2 nodes (only StringField) + require.Len(t, nodes, 2, "should only collect present fields") + + nodeSet := make(map[*yaml.Node]bool) + for _, n := range nodes { + nodeSet[n] = true + } + + assert.True(t, nodeSet[stringKeyNode], "should include present field key node") + assert.True(t, nodeSet[stringValueNode], "should include present field value node") + assert.False(t, nodeSet[intKeyNode], "should not include non-present field key node") + assert.False(t, nodeSet[intValueNode], "should not include non-present field value node") +} + +func TestCollectLeafNodes_SliceOfPrimitives_CollectsChildren(t *testing.T) { + t.Parallel() + + itemsKeyNode := &yaml.Node{Kind: yaml.ScalarNode, Value: "items"} + item1Node := &yaml.Node{Kind: yaml.ScalarNode, Value: "item1"} + item2Node := &yaml.Node{Kind: yaml.ScalarNode, Value: "item2"} + item3Node := &yaml.Node{Kind: yaml.ScalarNode, Value: "item3"} + itemsValueNode := &yaml.Node{ + Kind: yaml.SequenceNode, + Content: []*yaml.Node{item1Node, item2Node, item3Node}, + } + + model := &ModelWithSlice{ + Items: marshaller.Node[[]string]{ + KeyNode: itemsKeyNode, + ValueNode: itemsValueNode, + Value: []string{"item1", "item2", "item3"}, + Present: true, + }, + } + + nodes := marshaller.CollectLeafNodes(model) + + // Should have: keyNode + valueNode + 3 child nodes = 5 + require.Len(t, nodes, 5, "should collect key, value, and child nodes") + + nodeSet := make(map[*yaml.Node]bool) + for _, n := range nodes { + nodeSet[n] = true + } + + assert.True(t, nodeSet[itemsKeyNode], "should include items key node") + assert.True(t, nodeSet[itemsValueNode], "should include items value node") + assert.True(t, nodeSet[item1Node], "should include item1 node") + assert.True(t, nodeSet[item2Node], "should include item2 node") + assert.True(t, nodeSet[item3Node], "should include item3 node") +} + +func TestCollectLeafNodes_NestedCoreModel_DoesNotCollectNestedNodes(t *testing.T) { + t.Parallel() + + // Parent's leaf field + titleKeyNode := &yaml.Node{Kind: yaml.ScalarNode, Value: "title"} + titleValueNode := &yaml.Node{Kind: yaml.ScalarNode, Value: "My Title"} + + // Nested model's field - should NOT be collected + nestedNameKeyNode := &yaml.Node{Kind: yaml.ScalarNode, Value: "name"} + nestedNameValueNode := &yaml.Node{Kind: yaml.ScalarNode, Value: "Nested Name"} + nestedKeyNode := &yaml.Node{Kind: yaml.ScalarNode, Value: "nested"} + nestedValueNode := &yaml.Node{ + Kind: yaml.MappingNode, + Content: []*yaml.Node{ + nestedNameKeyNode, + nestedNameValueNode, + }, + } + + nestedName := "Nested Name" + title := "My Title" + + nestedCore := &NestedCoreModel{ + Name: marshaller.Node[*string]{ + KeyNode: nestedNameKeyNode, + ValueNode: nestedNameValueNode, + Value: &nestedName, + Present: true, + }, + } + nestedCore.RootNode = 
nestedValueNode + + model := &ModelWithNestedCore{ + Title: marshaller.Node[*string]{ + KeyNode: titleKeyNode, + ValueNode: titleValueNode, + Value: &title, + Present: true, + }, + Nested: marshaller.Node[*NestedCoreModel]{ + KeyNode: nestedKeyNode, + ValueNode: nestedValueNode, + Value: nestedCore, + Present: true, + }, + } + + nodes := marshaller.CollectLeafNodes(model) + + nodeSet := make(map[*yaml.Node]bool) + for _, n := range nodes { + nodeSet[n] = true + } + + // Should collect Title field nodes (leaf) + assert.True(t, nodeSet[titleKeyNode], "should include title key node") + assert.True(t, nodeSet[titleValueNode], "should include title value node") + + // Should NOT collect nested model's internal field nodes + // (the nested model itself will be walked separately) + assert.False(t, nodeSet[nestedNameKeyNode], "should NOT include nested model's internal key node") + assert.False(t, nodeSet[nestedNameValueNode], "should NOT include nested model's internal value node") +} + +func TestCollectLeafNodes_SliceOfCoreModels_DoesNotCollectNestedNodes(t *testing.T) { + t.Parallel() + + // Parent's leaf field + descKeyNode := &yaml.Node{Kind: yaml.ScalarNode, Value: "description"} + descValueNode := &yaml.Node{Kind: yaml.ScalarNode, Value: "A description"} + + // Child 1 - should NOT be collected + child1NameKeyNode := &yaml.Node{Kind: yaml.ScalarNode, Value: "name"} + child1NameValueNode := &yaml.Node{Kind: yaml.ScalarNode, Value: "Child 1"} + child1RootNode := &yaml.Node{ + Kind: yaml.MappingNode, + Content: []*yaml.Node{ + child1NameKeyNode, + child1NameValueNode, + }, + } + + // Child 2 - should NOT be collected + child2NameKeyNode := &yaml.Node{Kind: yaml.ScalarNode, Value: "name"} + child2NameValueNode := &yaml.Node{Kind: yaml.ScalarNode, Value: "Child 2"} + child2RootNode := &yaml.Node{ + Kind: yaml.MappingNode, + Content: []*yaml.Node{ + child2NameKeyNode, + child2NameValueNode, + }, + } + + childrenKeyNode := &yaml.Node{Kind: yaml.ScalarNode, Value: "children"} + childrenValueNode := &yaml.Node{ + Kind: yaml.SequenceNode, + Content: []*yaml.Node{child1RootNode, child2RootNode}, + } + + desc := "A description" + child1Name := "Child 1" + child2Name := "Child 2" + + child1 := &NestedCoreModel{ + Name: marshaller.Node[*string]{ + KeyNode: child1NameKeyNode, + ValueNode: child1NameValueNode, + Value: &child1Name, + Present: true, + }, + } + child1.RootNode = child1RootNode + + child2 := &NestedCoreModel{ + Name: marshaller.Node[*string]{ + KeyNode: child2NameKeyNode, + ValueNode: child2NameValueNode, + Value: &child2Name, + Present: true, + }, + } + child2.RootNode = child2RootNode + + model := &ModelWithSliceOfCoreModels{ + Description: marshaller.Node[*string]{ + KeyNode: descKeyNode, + ValueNode: descValueNode, + Value: &desc, + Present: true, + }, + Children: marshaller.Node[[]*NestedCoreModel]{ + KeyNode: childrenKeyNode, + ValueNode: childrenValueNode, + Value: []*NestedCoreModel{child1, child2}, + Present: true, + }, + } + + nodes := marshaller.CollectLeafNodes(model) + + nodeSet := make(map[*yaml.Node]bool) + for _, n := range nodes { + nodeSet[n] = true + } + + // Should collect Description field nodes (leaf) + assert.True(t, nodeSet[descKeyNode], "should include description key node") + assert.True(t, nodeSet[descValueNode], "should include description value node") + + // Should NOT collect Children array's child model nodes + // (they will be walked separately) + assert.False(t, nodeSet[child1NameKeyNode], "should NOT include child1's name key node") + assert.False(t, 
nodeSet[child1NameValueNode], "should NOT include child1's name value node") + assert.False(t, nodeSet[child2NameKeyNode], "should NOT include child2's name key node") + assert.False(t, nodeSet[child2NameValueNode], "should NOT include child2's name value node") +} + +func TestCollectLeafNodes_NilKeyNode_SkipsKeyNode(t *testing.T) { + t.Parallel() + + valueNode := &yaml.Node{Kind: yaml.ScalarNode, Value: "hello"} + str := "hello" + + model := &SimpleModel{ + StringField: marshaller.Node[*string]{ + KeyNode: nil, // No key node + ValueNode: valueNode, + Value: &str, + Present: true, + }, + } + + nodes := marshaller.CollectLeafNodes(model) + + require.Len(t, nodes, 1, "should only collect value node") + assert.Equal(t, valueNode, nodes[0], "should collect value node") +} + +func TestCollectLeafNodes_NilValueNode_SkipsValueNode(t *testing.T) { + t.Parallel() + + keyNode := &yaml.Node{Kind: yaml.ScalarNode, Value: "stringField"} + str := "hello" + + model := &SimpleModel{ + StringField: marshaller.Node[*string]{ + KeyNode: keyNode, + ValueNode: nil, // No value node + Value: &str, + Present: true, + }, + } + + nodes := marshaller.CollectLeafNodes(model) + + require.Len(t, nodes, 1, "should only collect key node") + assert.Equal(t, keyNode, nodes[0], "should collect key node") +} + +func TestCollectLeafNodes_EmptyModel_ReturnsEmpty(t *testing.T) { + t.Parallel() + + model := &SimpleModel{} + + nodes := marshaller.CollectLeafNodes(model) + + assert.Empty(t, nodes, "should return empty for model with no present fields") +} diff --git a/marshaller/populator.go b/marshaller/populator.go index 5d94a752..d5b75eb5 100644 --- a/marshaller/populator.go +++ b/marshaller/populator.go @@ -100,7 +100,7 @@ func PopulateModelWithContext(source any, target any, ctx *PopulationContext) er } if s.Kind() != reflect.Struct { - return fmt.Errorf("expected struct, got %s", s.Kind()) + return fmt.Errorf("expected `struct`, got `%s`", s.Kind()) } sType := s.Type() diff --git a/marshaller/sequencedmap.go b/marshaller/sequencedmap.go index bf55c1a8..0bbb583c 100644 --- a/marshaller/sequencedmap.go +++ b/marshaller/sequencedmap.go @@ -30,7 +30,7 @@ func unmarshalSequencedMap(ctx context.Context, parentName string, node *yaml.No // Check if the node is actually a mapping node if resolvedNode.Kind != yaml.MappingNode { validationErr := validation.NewTypeMismatchError(parentName, "expected mapping node for sequenced map, got %v", resolvedNode.Kind) - return []error{validation.NewValidationError(validationErr, resolvedNode)}, nil + return []error{validation.NewValidationError(validation.SeverityError, validation.RuleValidationTypeMismatch, validationErr, resolvedNode)}, nil } target.Init() @@ -57,7 +57,9 @@ func unmarshalSequencedMap(ctx context.Context, parentName string, node *yaml.No indicesToSkip[existing.lastIndex] = true // Create validation error for the earlier occurrence duplicateKeyErrs = append(duplicateKeyErrs, validation.NewValidationError( - validation.NewValueValidationError("mapping key %q at line %d is a duplicate; previous definition at line %d", key, keyNode.Line, existing.firstLine), + validation.SeverityWarning, + validation.RuleValidationDuplicateKey, + fmt.Errorf("mapping key %q at line %d is a duplicate; previous definition at line %d", key, keyNode.Line, existing.firstLine), keyNode, )) // Update to point to current (last) occurrence diff --git a/marshaller/syncer.go b/marshaller/syncer.go index a9b622fb..ea529340 100644 --- a/marshaller/syncer.go +++ b/marshaller/syncer.go @@ -128,7 +128,7 @@ func 
syncChanges(ctx context.Context, source any, target any, valueNode *yaml.No t = getUnderlyingValue(t) if sUnderlying.Kind() != reflect.Struct { - return nil, fmt.Errorf("syncChanges expected struct, got %s", s.Type()) + return nil, fmt.Errorf("syncChanges expected `struct`, got `%s`", s.Type()) } valid := true diff --git a/marshaller/unmarshaller.go b/marshaller/unmarshaller.go index c066e88b..42c6faeb 100644 --- a/marshaller/unmarshaller.go +++ b/marshaller/unmarshaller.go @@ -85,7 +85,7 @@ func UnmarshalCore(ctx context.Context, parentName string, node *yaml.Node, out var documentNode *yaml.Node if node.Kind == yaml.DocumentNode { if len(node.Content) != 1 { - return nil, fmt.Errorf("expected 1 node, got %d at line %d, column %d", len(node.Content), node.Line, node.Column) + return nil, fmt.Errorf("expected 1 node, got `%d` at line `%d`, column `%d`", len(node.Content), node.Line, node.Column) } // Save the document node for potential use by CoreModeler implementations @@ -168,7 +168,7 @@ func unmarshal(ctx context.Context, parentName string, node *yaml.Node, out refl nodeMutator, ok := out.Interface().(NodeMutator) if !ok { - return nil, fmt.Errorf("expected NodeMutator, got %s at line %d, column %d", out.Type(), resolvedNode.Line, resolvedNode.Column) + return nil, fmt.Errorf("expected NodeMutator, got `%s` at line `%d`, column `%d`", out.Type(), resolvedNode.Line, resolvedNode.Column) } return nodeMutator.Unmarshal(ctx, parentName, nil, node) @@ -189,10 +189,21 @@ func unmarshal(ctx context.Context, parentName string, node *yaml.Node, out refl unmarshallable, ok := out.Interface().(Unmarshallable) if !ok { - return nil, fmt.Errorf("expected Unmarshallable, got %s at line %d, column %d", out.Type(), resolvedNode.Line, resolvedNode.Column) + return nil, fmt.Errorf("expected Unmarshallable, got `%s` at line `%d`, column `%d`", out.Type(), resolvedNode.Line, resolvedNode.Column) } - return unmarshallable.Unmarshal(ctx, parentName, node) + validationErrs, err := unmarshallable.Unmarshal(ctx, parentName, node) + if err != nil { + return nil, err + } + + if implementsInterface(out, coreModelerType) { + if coreModeler, ok := out.Interface().(CoreModeler); ok { + coreModeler.SetRootNode(node) + } + } + + return validationErrs, nil } if implementsInterface(out, sequencedMapType) { @@ -206,7 +217,7 @@ func unmarshal(ctx context.Context, parentName string, node *yaml.Node, out refl seqMapInterface, ok := out.Interface().(interfaces.SequencedMapInterface) if !ok { - return nil, fmt.Errorf("expected sequencedMapInterface, got %s at line %d, column %d", out.Type(), resolvedNode.Line, resolvedNode.Column) + return nil, fmt.Errorf("expected sequencedMapInterface, got `%s` at line `%d`, column `%d`", out.Type(), resolvedNode.Line, resolvedNode.Column) } return unmarshalSequencedMap(ctx, parentName, node, seqMapInterface) @@ -268,9 +279,9 @@ func unmarshalMapping(ctx context.Context, parentName string, node *yaml.Node, o return unmarshalStruct(ctx, parentName, node, out.Addr().Interface()) } case out.Kind() == reflect.Map: - return nil, fmt.Errorf("currently unsupported out kind: %v (type: %s) at line %d, column %d", out.Kind(), out.Type(), resolvedNode.Line, resolvedNode.Column) + return nil, fmt.Errorf("currently unsupported out kind: `%v` (type: `%s`) at line `%d`, column `%d`", out.Kind(), out.Type(), resolvedNode.Line, resolvedNode.Column) default: - return nil, fmt.Errorf("expected struct or map, got %s (type: %s) at line %d, column %d", out.Kind(), out.Type(), resolvedNode.Line, 
resolvedNode.Column) + return nil, fmt.Errorf("expected struct or map, got `%s` (type: `%s`) at line `%d`, column `%d`", out.Kind(), out.Type(), resolvedNode.Line, resolvedNode.Column) } } @@ -287,27 +298,27 @@ func unmarshalModel(ctx context.Context, parentName string, node *yaml.Node, str } if out.Kind() != reflect.Struct { - return nil, fmt.Errorf("expected a struct, got %s (type: %s) at line %d, column %d", out.Kind(), out.Type(), resolvedNode.Line, resolvedNode.Column) + return nil, fmt.Errorf("expected a struct, got `%s` (type: `%s`) at line `%d`, column `%d`", out.Kind(), out.Type(), resolvedNode.Line, resolvedNode.Column) } structType := out.Type() // Get the "model" tag value from the embedded CoreModel field which should be the first field always if structType.NumField() < 1 { - return nil, fmt.Errorf("expected embedded CoreModel field, got %s at line %d, column %d", out.Type(), resolvedNode.Line, resolvedNode.Column) + return nil, fmt.Errorf("expected embedded CoreModel field, got `%s` at line `%d`, column `%d`", out.Type(), resolvedNode.Line, resolvedNode.Column) } field := structType.Field(0) if field.Type != reflect.TypeOf(CoreModel{}) { - return nil, fmt.Errorf("expected embedded CoreModel field to be of type CoreModel, got %s at line %d, column %d", out.Type(), resolvedNode.Line, resolvedNode.Column) + return nil, fmt.Errorf("expected embedded CoreModel field to be of type CoreModel, got `%s` at line `%d`, column `%d`", out.Type(), resolvedNode.Line, resolvedNode.Column) } modelTag := field.Tag.Get("model") if modelTag == "" { - return nil, fmt.Errorf("expected embedded CoreModel field to have a 'model' tag, got %s at line %d, column %d", out.Type(), resolvedNode.Line, resolvedNode.Column) + return nil, fmt.Errorf("expected embedded CoreModel field to have a 'model' tag, got `%s` at line `%d`, column `%d`", out.Type(), resolvedNode.Line, resolvedNode.Column) } if resolvedNode.Kind != yaml.MappingNode { return []error{ - validation.NewValidationError(validation.NewTypeMismatchError(parentName, "expected object, got %s", yml.NodeKindToString(resolvedNode.Kind)), resolvedNode), + validation.NewValidationError(validation.SeverityError, validation.RuleValidationTypeMismatch, validation.NewTypeMismatchError(parentName, "expected `object`, got `%s`", yml.NodeKindToString(resolvedNode.Kind)), resolvedNode), }, nil } @@ -318,10 +329,10 @@ func unmarshalModel(ctx context.Context, parentName string, node *yaml.Node, str var ok bool unmarshallable, ok = out.Addr().Interface().(CoreModeler) if !ok { - return nil, fmt.Errorf("expected CoreModeler, got %s at line %d, column %d", out.Type(), resolvedNode.Line, resolvedNode.Column) + return nil, fmt.Errorf("expected CoreModeler, got `%s` at line `%d`, column `%d`", out.Type(), resolvedNode.Line, resolvedNode.Column) } } else { - return nil, fmt.Errorf("expected struct to implement CoreModeler, got %s at line %d, column %d", out.Type(), resolvedNode.Line, resolvedNode.Column) + return nil, fmt.Errorf("expected struct to implement CoreModeler, got `%s` at line `%d`, column `%d`", out.Type(), resolvedNode.Line, resolvedNode.Column) } unmarshallable.SetRootNode(node) @@ -368,7 +379,9 @@ func unmarshalModel(ctx context.Context, parentName string, node *yaml.Node, str indicesToSkip[info.lastIndex] = true // Create validation error for the earlier occurrence duplicateKeyErrs = append(duplicateKeyErrs, validation.NewValidationError( - validation.NewValueValidationError("mapping key %q at line %d is a duplicate; previous definition at line %d", key, 
keyNode.Line, info.firstLine), + validation.SeverityWarning, + validation.RuleValidationDuplicateKey, + fmt.Errorf("mapping key `%q` at line `%d` is a duplicate; previous definition at line `%d`", key, keyNode.Line, info.firstLine), keyNode, )) // Update to track this as the new last occurrence @@ -460,7 +473,7 @@ func unmarshalModel(ctx context.Context, parentName string, node *yaml.Node, str foundRequiredFields.Store(key, true) } } else { - return fmt.Errorf("expected field '%s' to be marshaller.Node, got %s at line %d, column %d (key: %s)", cachedField.Name, fieldVal.Type(), keyNode.Line, keyNode.Column, key) + return fmt.Errorf("expected field `%s` to be marshaller.Node, got `%s` at line `%d`, column `%d` (key: `%s`)", cachedField.Name, fieldVal.Type(), keyNode.Line, keyNode.Column, key) } } @@ -489,7 +502,7 @@ func unmarshalModel(ctx context.Context, parentName string, node *yaml.Node, str // Check for missing required fields using cached required field info for tag := range fieldMap.RequiredFields { if _, ok := foundRequiredFields.Load(tag); !ok { - validationErrs = append(validationErrs, validation.NewValidationError(validation.NewMissingFieldError("%s.%s is missing", modelTag, tag), resolvedNode)) + validationErrs = append(validationErrs, validation.NewValidationError(validation.SeverityError, validation.RuleValidationRequiredField, fmt.Errorf("`%s.%s` is required", modelTag, tag), resolvedNode)) } } @@ -532,7 +545,7 @@ func decodeNode(_ context.Context, parentName string, node *yaml.Node, out any) // Check if this is a type mismatch error if yamlTypeErr := asTypeMismatchError(err); yamlTypeErr != nil { // Convert type mismatch to validation error - validationErr := validation.NewValidationError(validation.NewTypeMismatchError(parentName, strings.Join(yamlTypeErr.Errors, ", ")), resolvedNode) + validationErr := validation.NewValidationError(validation.SeverityError, validation.RuleValidationTypeMismatch, validation.NewTypeMismatchError(parentName, strings.Join(yamlTypeErr.Errors, ", ")), resolvedNode) return []error{validationErr}, nil //nolint:nilerr } @@ -547,7 +560,7 @@ func unmarshalSequence(ctx context.Context, parentName string, node *yaml.Node, } if out.Kind() != reflect.Slice { - return nil, fmt.Errorf("expected slice, got %s (type: %s) at line %d, column %d", out.Kind(), out.Type(), resolvedNode.Line, resolvedNode.Column) + return nil, fmt.Errorf("expected `slice`, got `%s` (type: `%s`) at line `%d`, column `%d`", out.Kind(), out.Type(), resolvedNode.Line, resolvedNode.Column) } out.Set(reflect.MakeSlice(out.Type(), len(resolvedNode.Content), len(resolvedNode.Content))) @@ -604,13 +617,13 @@ func unmarshalNode(ctx context.Context, parentName string, keyNode, valueNode *y out.Set(reflect.New(out.Type().Elem())) ref = out.Elem().Addr() } else { - return nil, fmt.Errorf("field %s is a nil pointer and cannot be set at line %d, column %d", fieldName, resolvedKeyNode.Line, resolvedKeyNode.Column) + return nil, fmt.Errorf("field `%s` is a nil pointer and cannot be set at line `%d`, column `%d`", fieldName, resolvedKeyNode.Line, resolvedKeyNode.Column) } } unmarshallable, ok := ref.Interface().(NodeMutator) if !ok { - return nil, fmt.Errorf("expected field '%s' to be marshaller.Node, got %s at line %d, column %d", fieldName, ref.Type(), resolvedKeyNode.Line, resolvedKeyNode.Column) + return nil, fmt.Errorf("expected field `%s` to be marshaller.Node, got `%s` at line `%d`, column `%d`", fieldName, ref.Type(), resolvedKeyNode.Line, resolvedKeyNode.Column) } validationErrs, err := 
unmarshallable.Unmarshal(ctx, parentName, keyNode, valueNode) @@ -678,7 +691,7 @@ func isMapType(out reflect.Value) bool { // validateNodeKind checks if the node kind matches the expected kind and returns appropriate error func validateNodeKind(resolvedNode *yaml.Node, expectedKind yaml.Kind, parentName string, reflectType reflect.Type, expectedType string) error { if resolvedNode == nil { - return validation.NewValidationError(validation.NewTypeMismatchError(parentName, "expected %s, got nil", yml.NodeKindToString(expectedKind)), nil) + return validation.NewValidationError(validation.SeverityError, validation.RuleValidationTypeMismatch, validation.NewTypeMismatchError(parentName, "expected `%s`, got nil", yml.NodeKindToString(expectedKind)), nil) } // Check if the node kind matches @@ -723,13 +736,15 @@ func validateNodeKind(resolvedNode *yaml.Node, expectedKind yaml.Kind, parentNam value = value[:maxLen] + "..." } actualKindStr = fmt.Sprintf("`%s`", value) + } else { + actualKindStr = fmt.Sprintf("`%s`", actualKindStr) } - return validation.NewValidationError(validation.NewTypeMismatchError(parentName, "expected %s, got %s", expectedType, actualKindStr), resolvedNode) + return validation.NewValidationError(validation.SeverityError, validation.RuleValidationTypeMismatch, validation.NewTypeMismatchError(parentName, "expected `%s`, got %s", expectedType, actualKindStr), resolvedNode) } if !tagMatches { - return validation.NewValidationError(validation.NewTypeMismatchError(parentName, "expected %s, got %s", expectedType, yml.NodeTagToString(resolvedNode.Tag)), resolvedNode) + return validation.NewValidationError(validation.SeverityError, validation.RuleValidationTypeMismatch, validation.NewTypeMismatchError(parentName, "expected `%s`, got `%s`", expectedType, yml.NodeTagToString(resolvedNode.Tag)), resolvedNode) } return nil } diff --git a/marshaller/unmarshalling_test.go b/marshaller/unmarshalling_test.go index aa0ab3de..fc64e1f5 100644 --- a/marshaller/unmarshalling_test.go +++ b/marshaller/unmarshalling_test.go @@ -153,10 +153,10 @@ func TestUnmarshal_PrimitiveTypes_Error(t *testing.T) { stringPtrField: "optional field" `, wantErrs: []string{ - "[2:1] testPrimitiveModel.boolField is missing", - "[2:1] testPrimitiveModel.float64Field is missing", - "[2:1] testPrimitiveModel.intField is missing", - "[2:1] testPrimitiveModel.stringField is missing", + "[2:1] error validation-required-field `testPrimitiveModel.boolField` is required", + "[2:1] error validation-required-field `testPrimitiveModel.float64Field` is required", + "[2:1] error validation-required-field `testPrimitiveModel.intField` is required", + "[2:1] error validation-required-field `testPrimitiveModel.stringField` is required", }, }, { @@ -167,7 +167,7 @@ boolField: true intField: 42 float64Field: 3.14 `, - wantErrs: []string{"[2:14] testPrimitiveModel.stringField expected string, got sequence"}, + wantErrs: []string{"[2:14] error validation-type-mismatch testPrimitiveModel.stringField expected `string`, got `sequence`"}, }, { name: "type mismatch - bool field gets string", @@ -177,7 +177,7 @@ boolField: "not a bool" intField: 42 float64Field: 3.14 `, - wantErrs: []string{"[3:12] testPrimitiveModel.boolField line 3: cannot unmarshal !!str `not a bool` into bool"}, + wantErrs: []string{"[3:12] error validation-type-mismatch testPrimitiveModel.boolField line 3: cannot unmarshal !!str `not a bool` into bool"}, }, { name: "type mismatch - int field gets string", @@ -187,7 +187,7 @@ boolField: true intField: "not an int" 
float64Field: 3.14 `, - wantErrs: []string{"[4:11] testPrimitiveModel.intField line 4: cannot unmarshal !!str `not an int` into int"}, + wantErrs: []string{"[4:11] error validation-type-mismatch testPrimitiveModel.intField line 4: cannot unmarshal !!str `not an int` into int"}, }, { name: "type mismatch - float field gets string", @@ -197,7 +197,7 @@ boolField: true intField: 42 float64Field: "not a float" `, - wantErrs: []string{"[5:15] testPrimitiveModel.float64Field line 5: cannot unmarshal !!str `not a f...` into float64"}, + wantErrs: []string{"[5:15] error validation-type-mismatch testPrimitiveModel.float64Field line 5: cannot unmarshal !!str `not a f...` into float64"}, }, { name: "multiple validation errors", @@ -206,10 +206,10 @@ boolField: "not a bool" intField: "not an int" `, wantErrs: []string{ - "[2:1] testPrimitiveModel.float64Field is missing", - "[2:1] testPrimitiveModel.stringField is missing", - "[2:12] testPrimitiveModel.boolField line 2: cannot unmarshal !!str `not a bool` into bool", - "[3:11] testPrimitiveModel.intField line 3: cannot unmarshal !!str `not an int` into int", + "[2:1] error validation-required-field `testPrimitiveModel.float64Field` is required", + "[2:1] error validation-required-field `testPrimitiveModel.stringField` is required", + "[2:12] error validation-type-mismatch testPrimitiveModel.boolField line 2: cannot unmarshal !!str `not a bool` into bool", + "[3:11] error validation-type-mismatch testPrimitiveModel.intField line 3: cannot unmarshal !!str `not an int` into int", }, }, } @@ -358,9 +358,9 @@ nestedModel: # missing required stringField, boolField, float64Field `, wantErrs: []string{ - "[8:3] testPrimitiveModel.stringField is missing", - "[8:3] testPrimitiveModel.boolField is missing", - "[8:3] testPrimitiveModel.float64Field is missing", + "[8:3] error validation-required-field `testPrimitiveModel.stringField` is required", + "[8:3] error validation-required-field `testPrimitiveModel.boolField` is required", + "[8:3] error validation-required-field `testPrimitiveModel.float64Field` is required", }, }, { @@ -374,7 +374,7 @@ nestedModelValue: nestedModel: - "this should be an object" `, - wantErrs: []string{"[8:3] testComplexModel.nestedModel expected object, got sequence"}, + wantErrs: []string{"[8:3] error validation-type-mismatch testComplexModel.nestedModel expected `object`, got `sequence`"}, }, { name: "type mismatch - array field gets object", @@ -387,7 +387,7 @@ nestedModelValue: arrayField: key: "this should be an array" `, - wantErrs: []string{"[8:3] testComplexModel.arrayField expected sequence, got object"}, + wantErrs: []string{"[8:3] error validation-type-mismatch testComplexModel.arrayField expected `sequence`, got `object`"}, }, { name: "deeply nested validation error", @@ -407,7 +407,7 @@ structArrayField: float64Field: 4.56 # missing required stringField in second element `, - wantErrs: []string{"[12:5] testPrimitiveModel.stringField is missing"}, + wantErrs: []string{"[12:5] error validation-required-field `testPrimitiveModel.stringField` is required"}, }, } @@ -658,7 +658,7 @@ func TestUnmarshal_RequiredPointer_Error(t *testing.T) { yml: ` optionalPtr: "only optional set" `, - wantErrs: []string{"[2:1] testRequiredPointerModel.requiredPtr is missing"}, + wantErrs: []string{"[2:1] error validation-required-field `testRequiredPointerModel.requiredPtr` is required"}, }, { name: "required pointer field with null value should be valid", @@ -768,12 +768,12 @@ func TestUnmarshal_RequiredNilableTypes_Error(t *testing.T) { 
optionalPtr: "only optional set" `, wantErrs: []string{ - "[2:1] testRequiredNilableModel.requiredEither is missing", - "[2:1] testRequiredNilableModel.requiredMap is missing", - "[2:1] testRequiredNilableModel.requiredPtr is missing", - "[2:1] testRequiredNilableModel.requiredRawNode is missing", - "[2:1] testRequiredNilableModel.requiredSlice is missing", - "[2:1] testRequiredNilableModel.requiredStruct is missing", + "[2:1] error validation-required-field `testRequiredNilableModel.requiredEither` is required", + "[2:1] error validation-required-field `testRequiredNilableModel.requiredMap` is required", + "[2:1] error validation-required-field `testRequiredNilableModel.requiredPtr` is required", + "[2:1] error validation-required-field `testRequiredNilableModel.requiredRawNode` is required", + "[2:1] error validation-required-field `testRequiredNilableModel.requiredSlice` is required", + "[2:1] error validation-required-field `testRequiredNilableModel.requiredStruct` is required", }, }, { @@ -784,10 +784,10 @@ requiredSlice: ["item1"] # missing requiredMap, requiredStruct, requiredEither, requiredRawNode `, wantErrs: []string{ - "[2:1] testRequiredNilableModel.requiredEither is missing", - "[2:1] testRequiredNilableModel.requiredMap is missing", - "[2:1] testRequiredNilableModel.requiredRawNode is missing", - "[2:1] testRequiredNilableModel.requiredStruct is missing", + "[2:1] error validation-required-field `testRequiredNilableModel.requiredEither` is required", + "[2:1] error validation-required-field `testRequiredNilableModel.requiredMap` is required", + "[2:1] error validation-required-field `testRequiredNilableModel.requiredRawNode` is required", + "[2:1] error validation-required-field `testRequiredNilableModel.requiredStruct` is required", }, }, { @@ -804,10 +804,10 @@ requiredEither: "string value" requiredRawNode: "raw value" `, wantErrs: []string{ - "[8:3] testPrimitiveModel.boolField is missing", - "[8:3] testPrimitiveModel.float64Field is missing", - "[8:3] testPrimitiveModel.intField is missing", - "[8:3] testPrimitiveModel.stringField is missing", + "[8:3] error validation-required-field `testPrimitiveModel.boolField` is required", + "[8:3] error validation-required-field `testPrimitiveModel.float64Field` is required", + "[8:3] error validation-required-field `testPrimitiveModel.intField` is required", + "[8:3] error validation-required-field `testPrimitiveModel.stringField` is required", }, }, } diff --git a/mise-tasks/test b/mise-tasks/test index 8dee87e7..357c46c1 100755 --- a/mise-tasks/test +++ b/mise-tasks/test @@ -19,6 +19,8 @@ else echo "🧪 Running tests in separate modules..." (cd jsonschema/oas3/tests && GOWORK=off gotestsum --format testname -- -race ./...) + (cd openapi/linter/customrules && GOWORK=off gotestsum --format testname -- -race ./...) + (cd openapi/linter/converter/tests && GOWORK=off gotestsum --format testname -- -race ./...) fi echo "✅ All tests passed!" \ No newline at end of file diff --git a/mise-tasks/update-lint-docs b/mise-tasks/update-lint-docs new file mode 100755 index 00000000..d32e476c --- /dev/null +++ b/mise-tasks/update-lint-docs @@ -0,0 +1,18 @@ +#!/usr/bin/env bash +set -euo pipefail + +# Update Lint Docs - Automatically update lint rule documentation in READMEs +# This script uses a Go program to generate a rules table from registered linter rules +# and updates the corresponding README.md files between the lint rules tags. + +echo "🔄 Updating lint rules in README files..." + +echo "🚀 Running update-lint-docs tool..." 
+go run ./cmd/update-lint-docs + +echo "" +echo "📋 Summary:" +echo " • Updated openapi/linter/README.md with rules from the OpenAPI linter registry" +echo "" +echo "💡 Rule documentation is automatically generated from the Rule interface methods." +echo " To update the docs, modify the rule's Description(), Link(), etc. methods and re-run this task." diff --git a/openapi/bundle.go b/openapi/bundle.go index 84be0452..abce65de 100644 --- a/openapi/bundle.go +++ b/openapi/bundle.go @@ -299,7 +299,7 @@ func bundleSchema(ctx context.Context, schema *oas3.JSONSchema[oas3.Referenceabl if err := bundleObject(ctx, resolvedRefSchema, namingStrategy, references.ResolveOptions{ RootDocument: opts.RootDocument, TargetDocument: targetDocInfo.ResolvedDocument, - TargetLocation: targetDocInfo.AbsoluteReference, + TargetLocation: targetDocInfo.AbsoluteDocumentPath, }, componentStorage); err != nil { return fmt.Errorf("failed to bundle nested references in %s: %w", ref, err) } @@ -702,12 +702,12 @@ func bundleGenericReference[T any, V interfaces.Validator[T], C marshaller.CoreM if targetDocInfo == nil { return fmt.Errorf("failed to get resolution info for %s reference %s", componentType, refStr) } - componentStorage.componentLocations[componentType+"/"+componentName] = targetDocInfo.AbsoluteReference + componentStorage.componentLocations[componentType+"/"+componentName] = targetDocInfo.AbsoluteDocumentPath if err := bundleObject(ctx, bundledRef, namingStrategy, references.ResolveOptions{ RootDocument: opts.RootDocument, TargetDocument: targetDocInfo.ResolvedDocument, - TargetLocation: targetDocInfo.AbsoluteReference, + TargetLocation: targetDocInfo.AbsoluteDocumentPath, }, componentStorage); err != nil { return fmt.Errorf("failed to bundle nested references in %s: %w", ref.GetReference(), err) } @@ -736,7 +736,7 @@ func getFinalAbsoluteRef[T any, V interfaces.Validator[T], C marshaller.CoreMode nextRefInfo := resInfo.Object.GetReferenceResolutionInfo() if nextRefInfo != nil { // Build the absolute reference from the final resolution - finalRef := nextRefInfo.AbsoluteReference + finalRef := nextRefInfo.AbsoluteDocumentPath if nextRefInfo.Object != nil && nextRefInfo.Object.Reference != nil { // Add the fragment from the chained reference fragment := string(nextRefInfo.Object.Reference.GetJSONPointer()) diff --git a/openapi/callbacks.go b/openapi/callbacks.go index c6b770cf..b17bb62f 100644 --- a/openapi/callbacks.go +++ b/openapi/callbacks.go @@ -2,6 +2,7 @@ package openapi import ( "context" + "fmt" "github.com/speakeasy-api/openapi/expression" "github.com/speakeasy-api/openapi/extensions" @@ -64,7 +65,7 @@ func (c *Callback) Validate(ctx context.Context, opts ...validation.Option) []er } } - errs = append(errs, validation.NewValidationError(validation.NewValueValidationError("callback expression is invalid: %s", err.Error()), node)) + errs = append(errs, validation.NewValidationError(validation.SeverityWarning, validation.RuleValidationInvalidFormat, fmt.Errorf("callback expression is invalid: %w", err), node)) } errs = append(errs, pathItem.Validate(ctx, opts...)...) 
diff --git a/openapi/callbacks_validate_test.go b/openapi/callbacks_validate_test.go index bb5e8161..41d5ee90 100644 --- a/openapi/callbacks_validate_test.go +++ b/openapi/callbacks_validate_test.go @@ -117,7 +117,7 @@ func TestCallback_Validate_Error(t *testing.T) { '200': description: Webhook received `, - wantErrs: []string{"[2:1] callback expression is invalid: expression is not valid, must begin with $: request.body#/webhookUrl"}, + wantErrs: []string{"[2:1] warning validation-invalid-format callback expression is invalid: expression is not valid, must begin with $: request.body#/webhookUrl"}, }, { name: "invalid_expression_unknown_type", @@ -129,7 +129,7 @@ func TestCallback_Validate_Error(t *testing.T) { '200': description: Webhook received `, - wantErrs: []string{"[2:1] callback expression is invalid: expression is not valid, must begin with one of [url, method, statusCode, request, response, inputs, outputs, steps, workflows, sourceDescriptions, components]: {$unknown.body#/webhookUrl}"}, + wantErrs: []string{"[2:1] warning validation-invalid-format callback expression is invalid: expression is not valid, must begin with one of [url, method, statusCode, request, response, inputs, outputs, steps, workflows, sourceDescriptions, components]: {$unknown.body#/webhookUrl}"}, }, { name: "invalid_expression_url_with_extra_parts", @@ -141,7 +141,7 @@ func TestCallback_Validate_Error(t *testing.T) { '200': description: Webhook received `, - wantErrs: []string{"[2:1] callback expression is invalid: expression is not valid, extra characters after $url: {$url.extra}"}, + wantErrs: []string{"[2:1] warning validation-invalid-format callback expression is invalid: expression is not valid, extra characters after $url: {$url.extra}"}, }, { name: "invalid_expression_request_without_reference", @@ -153,7 +153,7 @@ func TestCallback_Validate_Error(t *testing.T) { '200': description: Webhook received `, - wantErrs: []string{"[2:1] callback expression is invalid: expression is not valid, expected one of [header, query, path, body] after $request: {$request}"}, + wantErrs: []string{"[2:1] warning validation-invalid-format callback expression is invalid: expression is not valid, expected one of [header, query, path, body] after $request: {$request}"}, }, { name: "invalid_expression_request_unknown_reference", @@ -165,7 +165,7 @@ func TestCallback_Validate_Error(t *testing.T) { '200': description: Webhook received `, - wantErrs: []string{"[2:1] callback expression is invalid: expression is not valid, expected one of [header, query, path, body] after $request: {$request.unknown}"}, + wantErrs: []string{"[2:1] warning validation-invalid-format callback expression is invalid: expression is not valid, expected one of [header, query, path, body] after $request: {$request.unknown}"}, }, { name: "invalid_expression_request_header_missing_token", @@ -177,7 +177,7 @@ func TestCallback_Validate_Error(t *testing.T) { '200': description: Webhook received `, - wantErrs: []string{"[2:1] callback expression is invalid: expression is not valid, expected token after $request.header: {$request.header}"}, + wantErrs: []string{"[2:1] warning validation-invalid-format callback expression is invalid: expression is not valid, expected token after $request.header: {$request.header}"}, }, { name: "invalid_expression_request_header_invalid_token", @@ -189,7 +189,7 @@ func TestCallback_Validate_Error(t *testing.T) { '200': description: Webhook received `, - wantErrs: []string{"[2:1] callback expression is invalid: header reference 
must be a valid token [^[!#$%&'*+\\-.^_`|~\\dA-Za-z]+$]: {$request.header.some@header}"}, + wantErrs: []string{"[2:1] warning validation-invalid-format callback expression is invalid: header reference must be a valid token [^[!#$%&'*+\\-.^_`|~\\dA-Za-z]+$]: {$request.header.some@header}"}, }, { name: "invalid_expression_request_query_missing_name", @@ -201,7 +201,7 @@ func TestCallback_Validate_Error(t *testing.T) { '200': description: Webhook received `, - wantErrs: []string{"[2:1] callback expression is invalid: expression is not valid, expected name after $request.query: {$request.query}"}, + wantErrs: []string{"[2:1] warning validation-invalid-format callback expression is invalid: expression is not valid, expected name after $request.query: {$request.query}"}, }, { name: "invalid_expression_request_path_missing_name", @@ -213,7 +213,7 @@ func TestCallback_Validate_Error(t *testing.T) { '200': description: Webhook received `, - wantErrs: []string{"[2:1] callback expression is invalid: expression is not valid, expected name after $request.path: {$request.path}"}, + wantErrs: []string{"[2:1] warning validation-invalid-format callback expression is invalid: expression is not valid, expected name after $request.path: {$request.path}"}, }, { name: "invalid_expression_request_body_with_extra_parts", @@ -225,7 +225,7 @@ func TestCallback_Validate_Error(t *testing.T) { '200': description: Webhook received `, - wantErrs: []string{"[2:1] callback expression is invalid: expression is not valid, only json pointers are allowed after $request.body: {$request.body.extra}"}, + wantErrs: []string{"[2:1] warning validation-invalid-format callback expression is invalid: expression is not valid, only json pointers are allowed after $request.body: {$request.body.extra}"}, }, { name: "invalid_expression_invalid_json_pointer", @@ -237,7 +237,7 @@ func TestCallback_Validate_Error(t *testing.T) { '200': description: Webhook received `, - wantErrs: []string{"[2:1] callback expression is invalid: validation error -- jsonpointer must start with /: some/path}"}, + wantErrs: []string{"[2:1] warning validation-invalid-format callback expression is invalid: validation error -- jsonpointer must start with /: some/path}"}, }, { name: "invalid_nested_pathitem_invalid_server", @@ -251,7 +251,7 @@ func TestCallback_Validate_Error(t *testing.T) { '200': description: Webhook received `, - wantErrs: []string{"[4:7] server.url is missing"}, + wantErrs: []string{"[4:7] error validation-required-field `server.url` is required"}, }, } diff --git a/openapi/components_validate_test.go b/openapi/components_validate_test.go index 01bffe8a..390fc5ee 100644 --- a/openapi/components_validate_test.go +++ b/openapi/components_validate_test.go @@ -255,7 +255,7 @@ securitySchemes: InvalidScheme: description: Some scheme `, - wantErrs: []string{"[4:5] securityScheme.type is missing"}, + wantErrs: []string{"[4:5] error validation-required-field `securityScheme.type` is required"}, }, } diff --git a/openapi/core/reference.go b/openapi/core/reference.go index 988e63a7..ee5d2882 100644 --- a/openapi/core/reference.go +++ b/openapi/core/reference.go @@ -34,7 +34,7 @@ func (r *Reference[T]) Unmarshal(ctx context.Context, parentName string, node *y if resolvedNode.Kind != yaml.MappingNode { r.SetValid(false, false) - return []error{validation.NewValidationError(validation.NewTypeMismatchError(parentName, "reference expected object, got %s", yml.NodeKindToString(resolvedNode.Kind)), resolvedNode)}, nil + return 
[]error{validation.NewValidationError(validation.SeverityError, validation.RuleValidationTypeMismatch, validation.NewTypeMismatchError(parentName, "reference expected `object`, got %s", yml.NodeKindToString(resolvedNode.Kind)), resolvedNode)}, nil } if _, _, ok := yml.GetMapElementNodes(ctx, resolvedNode, "$ref"); ok { diff --git a/openapi/encoding.go b/openapi/encoding.go index 50b000f1..ca1fdc9b 100644 --- a/openapi/encoding.go +++ b/openapi/encoding.go @@ -128,7 +128,7 @@ func (e *Encoding) Validate(ctx context.Context, opts ...validation.Option) []er for _, mediaType := range mediaTypes { _, _, err := mime.ParseMediaType(mediaType) if err != nil { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError(fmt.Sprintf("encoding.contentType %s is not a valid media type: %s", mediaType, err)), core, core.ContentType)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("encoding.contentType %s is not a valid media type: %w", mediaType, err), core, core.ContentType)) } } } @@ -140,7 +140,7 @@ func (e *Encoding) Validate(ctx context.Context, opts ...validation.Option) []er if core.Style.Present { allowedStyles := []string{string(SerializationStyleForm), string(SerializationStyleSpaceDelimited), string(SerializationStylePipeDelimited), string(SerializationStyleDeepObject)} if !slices.Contains(allowedStyles, string(*e.Style)) { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError(fmt.Sprintf("encoding.style must be one of [%s]", strings.Join(allowedStyles, ", "))), core, core.Style)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationAllowedValues, fmt.Errorf("encoding.style must be one of [`%s`]", strings.Join(allowedStyles, ", ")), core, core.Style)) } } diff --git a/openapi/encoding_validate_test.go b/openapi/encoding_validate_test.go index ea99cad0..5e16f4e1 100644 --- a/openapi/encoding_validate_test.go +++ b/openapi/encoding_validate_test.go @@ -146,7 +146,7 @@ func TestEncoding_Validate_Error(t *testing.T) { yml: ` style: invalidStyle `, - expectedErr: "style must be one of [form, spaceDelimited, pipeDelimited, deepObject]", + expectedErr: "style must be one of [`form, spaceDelimited, pipeDelimited, deepObject`]", }, } diff --git a/openapi/examples.go b/openapi/examples.go index f2bc01ba..2e8c064f 100644 --- a/openapi/examples.go +++ b/openapi/examples.go @@ -2,6 +2,7 @@ package openapi import ( "context" + "errors" "fmt" "net/url" @@ -104,27 +105,27 @@ func (e *Example) Validate(ctx context.Context, opts ...validation.Option) []err // Check mutual exclusivity: value and externalValue if core.Value.Present && core.ExternalValue.Present { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("example.value and externalValue are mutually exclusive"), core, core.Value)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationMutuallyExclusiveFields, errors.New("example.value and example.externalValue are mutually exclusive"), core, core.Value)) } // Check mutual exclusivity: dataValue and value if core.DataValue.Present && core.Value.Present { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("example.dataValue and value are mutually exclusive"), core, core.DataValue)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationMutuallyExclusiveFields, errors.New("example.dataValue 
and example.value are mutually exclusive"), core, core.DataValue)) } // Check mutual exclusivity: serializedValue and value if core.SerializedValue.Present && core.Value.Present { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("example.serializedValue and value are mutually exclusive"), core, core.SerializedValue)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationMutuallyExclusiveFields, errors.New("example.serializedValue and example.value are mutually exclusive"), core, core.SerializedValue)) } // Check mutual exclusivity: serializedValue and externalValue if core.SerializedValue.Present && core.ExternalValue.Present { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("example.serializedValue and externalValue are mutually exclusive"), core, core.SerializedValue)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationMutuallyExclusiveFields, errors.New("example.serializedValue and example.externalValue are mutually exclusive"), core, core.SerializedValue)) } if core.ExternalValue.Present { if _, err := url.Parse(*e.ExternalValue); err != nil { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError(fmt.Sprintf("example.externalValue is not a valid uri: %s", err)), core, core.ExternalValue)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("example.externalValue is not a valid uri: %w", err), core, core.ExternalValue)) } } diff --git a/openapi/examples_validate_test.go b/openapi/examples_validate_test.go index dd1ac9a6..7a7c670a 100644 --- a/openapi/examples_validate_test.go +++ b/openapi/examples_validate_test.go @@ -157,14 +157,14 @@ func TestExample_Validate_Error(t *testing.T) { summary: Example with invalid URL externalValue: ":invalid" `, - wantErrs: []string{"[3:16] example.externalValue is not a valid uri: parse \":invalid\": missing protocol scheme"}, + wantErrs: []string{"[3:16] error validation-invalid-format example.externalValue is not a valid uri: parse \":invalid\": missing protocol scheme"}, }, { name: "invalid external value URL with spaces", yml: ` externalValue: ":invalid url" `, - wantErrs: []string{"[2:16] example.externalValue is not a valid uri: parse \":invalid url\": missing protocol scheme"}, + wantErrs: []string{"[2:16] error validation-invalid-format example.externalValue is not a valid uri: parse \":invalid url\": missing protocol scheme"}, }, { name: "both value and external value provided", @@ -173,7 +173,7 @@ summary: Invalid example value: "test" externalValue: "https://example.com/test.json" `, - wantErrs: []string{"[3:8] example.value and externalValue are mutually exclusive"}, + wantErrs: []string{"[3:8] error validation-mutually-exclusive-fields example.value and example.externalValue are mutually exclusive"}, }, { name: "multiple validation errors", @@ -182,8 +182,8 @@ value: "test" externalValue: ":invalid" `, wantErrs: []string{ - "[2:8] example.value and externalValue are mutually exclusive", - "[3:16] example.externalValue is not a valid uri: parse \":invalid\": missing protocol scheme", + "[2:8] error validation-mutually-exclusive-fields example.value and example.externalValue are mutually exclusive", + "[3:16] error validation-invalid-format example.externalValue is not a valid uri: parse \":invalid\": missing protocol scheme", }, }, { @@ -194,7 +194,7 @@ dataValue: id: 123 value: "test" 
`, - wantErrs: []string{"example.dataValue and value are mutually exclusive"}, + wantErrs: []string{"error validation-mutually-exclusive-fields example.dataValue and example.value are mutually exclusive"}, }, { name: "serializedValue and value are mutually exclusive", @@ -203,7 +203,7 @@ summary: Invalid example serializedValue: "test=123" value: "test" `, - wantErrs: []string{"example.serializedValue and value are mutually exclusive"}, + wantErrs: []string{"error validation-mutually-exclusive-fields example.serializedValue and example.value are mutually exclusive"}, }, { name: "serializedValue and externalValue are mutually exclusive", @@ -212,23 +212,23 @@ summary: Invalid example serializedValue: "test=123" externalValue: https://example.com/test.json `, - wantErrs: []string{"example.serializedValue and externalValue are mutually exclusive"}, + wantErrs: []string{"error validation-mutually-exclusive-fields example.serializedValue and example.externalValue are mutually exclusive"}, }, { name: "multiple mutual exclusivity violations", yml: ` summary: Invalid example dataValue: - id: 123 + id: 123 value: "test" serializedValue: "test=123" externalValue: https://example.com/test.json `, wantErrs: []string{ - "example.value and externalValue are mutually exclusive", - "example.dataValue and value are mutually exclusive", - "example.serializedValue and value are mutually exclusive", - "example.serializedValue and externalValue are mutually exclusive", + "error validation-mutually-exclusive-fields example.value and example.externalValue are mutually exclusive", + "error validation-mutually-exclusive-fields example.dataValue and example.value are mutually exclusive", + "error validation-mutually-exclusive-fields example.serializedValue and example.value are mutually exclusive", + "error validation-mutually-exclusive-fields example.serializedValue and example.externalValue are mutually exclusive", }, }, } diff --git a/openapi/header.go b/openapi/header.go index 36591af4..bcae281b 100644 --- a/openapi/header.go +++ b/openapi/header.go @@ -2,6 +2,7 @@ package openapi import ( "context" + "fmt" "slices" "strings" @@ -131,7 +132,7 @@ func (h *Header) Validate(ctx context.Context, opts ...validation.Option) []erro if core.Style.Present { allowedStyles := []string{string(SerializationStyleSimple)} if !slices.Contains(allowedStyles, string(*h.Style)) { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("header.style must be one of [%s]", strings.Join(allowedStyles, ", ")), core, core.Style)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationAllowedValues, fmt.Errorf("header.style must be one of [`%s`]", strings.Join(allowedStyles, ", ")), core, core.Style)) } } diff --git a/openapi/header_validate_test.go b/openapi/header_validate_test.go index 4e229c64..b5174a05 100644 --- a/openapi/header_validate_test.go +++ b/openapi/header_validate_test.go @@ -128,8 +128,8 @@ schema: description: Header with invalid schema `, wantErrs: []string{ - "[3:9] schema.type value must be one of 'array', 'boolean', 'integer', 'null', 'number', 'object', 'string'", - "[3:9] schema.type expected array, got string", + "[3:9] error validation-invalid-schema schema.type value must be one of 'array', 'boolean', 'integer', 'null', 'number', 'object', 'string'", + "[3:9] error validation-type-mismatch schema.type expected `array`, got `string`", }, }, } diff --git a/openapi/index.go b/openapi/index.go new file mode 100644 index 00000000..c8a1eb87 --- 
/dev/null +++ b/openapi/index.go @@ -0,0 +1,2528 @@ +package openapi + +import ( + "context" + "errors" + "fmt" + "strings" + + "github.com/speakeasy-api/openapi/internal/interfaces" + "github.com/speakeasy-api/openapi/jsonschema/oas3" + "github.com/speakeasy-api/openapi/marshaller" + "github.com/speakeasy-api/openapi/pointer" + "github.com/speakeasy-api/openapi/references" + "github.com/speakeasy-api/openapi/validation" + "gopkg.in/yaml.v3" +) + +// CircularClassification represents the classification of a circular reference. +type CircularClassification int + +const ( + // CircularUnclassified means the circular reference has not been classified yet. + CircularUnclassified CircularClassification = iota + // CircularValid means the circular reference is valid (has a termination point). + CircularValid + // CircularInvalid means the circular reference is invalid (no termination point). + CircularInvalid + // CircularPending means the circular reference is part of polymorphic and needs post-processing. + CircularPending +) + +// CircularPathSegment represents a segment of the path through the schema tree. +// It captures constraint information needed to determine if a circular reference can terminate. +type CircularPathSegment struct { + Field string // e.g., "properties", "items", "allOf", "oneOf", "anyOf", "additionalProperties" + PropertyName string // Set if Field == "properties" + IsRequired bool // Set if this property is in parent's Required array + ArrayMinItems int64 // Parent's MinItems value (0 means empty array terminates) + MinProperties int64 // Parent's MinProperties value (0 means empty object terminates) + BranchIndex int // Index in oneOf/anyOf/allOf array + IsNullable bool // True if this schema allows null (termination point) + ParentSchema *oas3.JSONSchemaReferenceable // The parent schema (for polymorphic cases) +} + +// SchemaVisitInfo tracks the visitation state of a schema during indexing. +type SchemaVisitInfo struct { + Location Locations // Location where first seen + InCurrentPath bool // True while actively walking this schema's children + CircularType CircularClassification // Classification result +} + +// PolymorphicCircularRef tracks a polymorphic schema with recursive branches. +// Used for post-processing to determine if all branches recurse. +type PolymorphicCircularRef struct { + ParentSchema *oas3.JSONSchemaReferenceable // The parent with oneOf/anyOf/allOf + ParentLocation Locations // Location of the parent + Field string // "oneOf", "anyOf", or "allOf" + BranchResults map[int]CircularClassification // Index -> classification per branch + TotalBranches int // Total number of branches +} + +// referenceStackEntry tracks a schema in the active reference resolution chain. +// Uses JSON pointer strings for identity to handle type differences. +type referenceStackEntry struct { + refTarget string // The $ref target (JSON pointer or URI) + location Locations // Where this reference was encountered +} + +type Descriptioner interface { + GetDescription() string +} + +type Summarizer interface { + GetSummary() string +} + +type DescriptionAndSummary interface { + GetDescription() string + GetSummary() string +} + +func (i *Index) currentDocumentPath() string { + if i == nil { + return "" + } + if len(i.currentDocumentStack) == 0 { + return "" + } + return i.currentDocumentStack[len(i.currentDocumentStack)-1] +} + +// Index represents a pre-computed index of an OpenAPI document. 
+// It provides efficient access to document elements without repeated full traversals. +type Index struct { + Doc *OpenAPI + + ExternalDocumentation []*IndexNode[*oas3.ExternalDocumentation] // All external documentation nodes + + Tags []*IndexNode[*Tag] // All tags defined in the document + + Servers []*IndexNode[*Server] // All servers defined in the document + ServerVariables []*IndexNode[*ServerVariable] // All server variables from all servers + + BooleanSchemas []*IndexNode[*oas3.JSONSchemaReferenceable] // Boolean schema values (true/false) + InlineSchemas []*IndexNode[*oas3.JSONSchemaReferenceable] // Schemas defined inline (properties, items, etc.) + ComponentSchemas []*IndexNode[*oas3.JSONSchemaReferenceable] // Schemas in /components/schemas/ of main document + ExternalSchemas []*IndexNode[*oas3.JSONSchemaReferenceable] // Top-level schemas in external documents + SchemaReferences []*IndexNode[*oas3.JSONSchemaReferenceable] // All $ref pointers + + InlinePathItems []*IndexNode[*ReferencedPathItem] // PathItems defined inline (in paths map) + ComponentPathItems []*IndexNode[*ReferencedPathItem] // PathItems in /components/pathItems/ + ExternalPathItems []*IndexNode[*ReferencedPathItem] // Top-level PathItems in external documents + PathItemReferences []*IndexNode[*ReferencedPathItem] // All PathItem $ref pointers + + Operations []*IndexNode[*Operation] // All operations (GET, POST, PUT, DELETE, PATCH, HEAD, OPTIONS, TRACE, etc.) + + InlineParameters []*IndexNode[*ReferencedParameter] // Parameters defined inline in operations/path items + ComponentParameters []*IndexNode[*ReferencedParameter] // Parameters in /components/parameters/ + ExternalParameters []*IndexNode[*ReferencedParameter] // Top-level Parameters in external documents + ParameterReferences []*IndexNode[*ReferencedParameter] // All Parameter $ref pointers + + Responses []*IndexNode[*Responses] // All Responses containers (operation.responses) + + InlineResponses []*IndexNode[*ReferencedResponse] // Responses defined inline in operations + ComponentResponses []*IndexNode[*ReferencedResponse] // Responses in /components/responses/ + ExternalResponses []*IndexNode[*ReferencedResponse] // Top-level Responses in external documents + ResponseReferences []*IndexNode[*ReferencedResponse] // All Response $ref pointers + + InlineRequestBodies []*IndexNode[*ReferencedRequestBody] // RequestBodies defined inline in operations + ComponentRequestBodies []*IndexNode[*ReferencedRequestBody] // RequestBodies in /components/requestBodies/ + ExternalRequestBodies []*IndexNode[*ReferencedRequestBody] // Top-level RequestBodies in external documents + RequestBodyReferences []*IndexNode[*ReferencedRequestBody] // All RequestBody $ref pointers + + InlineHeaders []*IndexNode[*ReferencedHeader] // Headers defined inline + ComponentHeaders []*IndexNode[*ReferencedHeader] // Headers in /components/headers/ + ExternalHeaders []*IndexNode[*ReferencedHeader] // Top-level Headers in external documents + HeaderReferences []*IndexNode[*ReferencedHeader] // All Header $ref pointers + + InlineExamples []*IndexNode[*ReferencedExample] // Examples defined inline + ComponentExamples []*IndexNode[*ReferencedExample] // Examples in /components/examples/ + ExternalExamples []*IndexNode[*ReferencedExample] // Top-level Examples in external documents + ExampleReferences []*IndexNode[*ReferencedExample] // All Example $ref pointers + + InlineLinks []*IndexNode[*ReferencedLink] // Links defined inline in responses + ComponentLinks 
[]*IndexNode[*ReferencedLink] // Links in /components/links/ + ExternalLinks []*IndexNode[*ReferencedLink] // Top-level Links in external documents + LinkReferences []*IndexNode[*ReferencedLink] // All Link $ref pointers + + InlineCallbacks []*IndexNode[*ReferencedCallback] // Callbacks defined inline in operations + ComponentCallbacks []*IndexNode[*ReferencedCallback] // Callbacks in /components/callbacks/ + ExternalCallbacks []*IndexNode[*ReferencedCallback] // Top-level Callbacks in external documents + CallbackReferences []*IndexNode[*ReferencedCallback] // All Callback $ref pointers + + ComponentSecuritySchemes []*IndexNode[*ReferencedSecurityScheme] // SecuritySchemes in /components/securitySchemes/ + SecuritySchemeReferences []*IndexNode[*ReferencedSecurityScheme] // All SecurityScheme $ref pointers + SecurityRequirements []*IndexNode[*SecurityRequirement] // All security requirement objects + + Discriminators []*IndexNode[*oas3.Discriminator] // All discriminator objects in schemas + XMLs []*IndexNode[*oas3.XML] // All XML metadata in schemas + MediaTypes []*IndexNode[*MediaType] // All media types in request/response bodies + Encodings []*IndexNode[*Encoding] // All encoding objects in media types + OAuthFlows []*IndexNode[*OAuthFlows] // All OAuth flows containers + OAuthFlowItems []*IndexNode[*OAuthFlow] // Individual OAuth flow objects (implicit, password, clientCredentials, authorizationCode) + + DescriptionNodes []*IndexNode[Descriptioner] // All nodes that have a Description field + SummaryNodes []*IndexNode[Summarizer] // All nodes that have a Summary field + DescriptionAndSummaryNodes []*IndexNode[DescriptionAndSummary] // All nodes that have both Description and Summary fields + + // NodeToOperations maps yaml.Node pointers to the operations that reference them. + // A node may be referenced by multiple operations (e.g., shared schemas via $ref). + // This is only populated when BuildIndex is called with WithNodeOperationMap(). + // nil when the feature is disabled. 
+ NodeToOperations map[*yaml.Node][]*IndexNode[*Operation] + + validationErrs []error + resolutionErrs []error + circularErrs []error + + validCircularRefs int // Count of valid (terminating) circular references + invalidCircularRefs int // Count of invalid (non-terminating) circular references + + resolveOpts references.ResolveOptions + + // Circular reference tracking (internal) + indexedSchemas map[*oas3.JSONSchemaReferenceable]bool // Tracks which schemas have been fully indexed + indexedParameters map[*Parameter]bool // Tracks which parameters have been fully indexed + indexedResponses map[*Response]bool // Tracks which responses have been fully indexed + indexedRequestBodies map[*RequestBody]bool // Tracks which request bodies have been fully indexed + indexedHeaders map[*Header]bool // Tracks which headers have been fully indexed + indexedExamples map[*Example]bool // Tracks which examples have been fully indexed + indexedLinks map[*Link]bool // Tracks which links have been fully indexed + indexedCallbacks map[*Callback]bool // Tracks which callbacks have been fully indexed + indexedPathItems map[*PathItem]bool // Tracks which path items have been fully indexed + referenceStack []referenceStackEntry // Active reference resolution chain (by ref target) + polymorphicRefs []*PolymorphicCircularRef // Pending polymorphic circulars + visitedRefs map[string]bool // Tracks visited ref targets to avoid duplicates + indexedReferences map[any]bool // Tracks indexed reference objects to ensure each $ref appears once + reportedUnknownProps map[marshaller.CoreModeler]map[string]bool // Tracks which unknown properties have been reported per core model + currentDocumentStack []string // Stack of document paths being walked (for determining external vs main) + buildNodeOperationMap bool // Whether to build the node-to-operation map + currentOperation *IndexNode[*Operation] // Current operation being walked (for node-to-operation mapping) + operationLocationDepth int // Location depth when we entered the current operation +} + +// IndexNode wraps a node with its location in the document. +type IndexNode[T any] struct { + Node T + + Location Locations +} + +// IndexOptions configures optional features when building the index. +type IndexOptions struct { + // BuildNodeOperationMap enables building the NodeToOperations map + // which tracks which operations reference each yaml.Node. + // This is disabled by default as it adds overhead. + // Enable this when you need to determine which operations are affected + // by issues found on specific nodes (e.g., for validity tracking). + BuildNodeOperationMap bool +} + +// IndexOption is a function that configures IndexOptions. +type IndexOption func(*IndexOptions) + +// WithNodeOperationMap enables building the node-to-operation mapping. +func WithNodeOperationMap() IndexOption { + return func(opts *IndexOptions) { + opts.BuildNodeOperationMap = true + } +} + +// IsWebhookLocation returns true if this location is within the webhooks section. +func IsWebhookLocation(loc Locations) bool { + for _, l := range loc { + if l.ParentField == "webhooks" { + return true + } + } + return false +} + +// ExtractOperationInfo extracts path/webhook name, method, and whether it's a webhook +// from a location. Works for any location within an operation's subtree. 
+func ExtractOperationInfo(loc Locations) (path, method string, isWebhook bool) { + for i := len(loc) - 1; i >= 0; i-- { + l := loc[i] + parentType := GetParentType(l) + + switch parentType { + case "Paths": + if l.ParentKey != nil { + path = *l.ParentKey + } + case "PathItem", "ReferencedPathItem": + if l.ParentKey != nil { + method = *l.ParentKey + } + } + + if l.ParentField == "webhooks" { + isWebhook = true + if l.ParentKey != nil { + path = *l.ParentKey + } + } + } + return +} + +// BuildIndex creates a new Index by walking the entire OpenAPI document. +// It resolves references and detects circular reference patterns. +// Requires resolveOpts to have RootDocument, TargetDocument, and TargetLocation set. +// Optional features can be enabled via IndexOption functions. +func BuildIndex(ctx context.Context, doc *OpenAPI, resolveOpts references.ResolveOptions, opts ...IndexOption) *Index { + if resolveOpts.RootDocument == nil { + panic("BuildIndex: resolveOpts.RootDocument is required") + } + if resolveOpts.TargetDocument == nil { + panic("BuildIndex: resolveOpts.TargetDocument is required") + } + if resolveOpts.TargetLocation == "" { + panic("BuildIndex: resolveOpts.TargetLocation is required") + } + + // Apply options + var options IndexOptions + for _, opt := range opts { + opt(&options) + } + + idx := &Index{ + Doc: doc, + resolveOpts: resolveOpts, + indexedSchemas: make(map[*oas3.JSONSchemaReferenceable]bool), + indexedParameters: make(map[*Parameter]bool), + indexedResponses: make(map[*Response]bool), + indexedRequestBodies: make(map[*RequestBody]bool), + indexedHeaders: make(map[*Header]bool), + indexedExamples: make(map[*Example]bool), + indexedLinks: make(map[*Link]bool), + indexedCallbacks: make(map[*Callback]bool), + indexedPathItems: make(map[*PathItem]bool), + referenceStack: make([]referenceStackEntry, 0), + polymorphicRefs: make([]*PolymorphicCircularRef, 0), + visitedRefs: make(map[string]bool), + indexedReferences: make(map[any]bool), + reportedUnknownProps: make(map[marshaller.CoreModeler]map[string]bool), + currentDocumentStack: []string{resolveOpts.TargetLocation}, // Start with main document + buildNodeOperationMap: options.BuildNodeOperationMap, + } + + // Initialize the node-to-operation map if enabled + if options.BuildNodeOperationMap { + idx.NodeToOperations = make(map[*yaml.Node][]*IndexNode[*Operation]) + } + + // Phase 1: Walk and index everything + _ = buildIndex(ctx, idx, doc) + + // Phase 2: Post-process polymorphic circular refs + idx.finalizePolymorphicCirculars() + + return idx +} + +// GetAllSchemas returns all schemas in the index (boolean, inline, component, external, and references). +func (i *Index) GetAllSchemas() []*IndexNode[*oas3.JSONSchemaReferenceable] { + if i == nil { + return nil + } + + allSchemas := make([]*IndexNode[*oas3.JSONSchemaReferenceable], 0, len(i.BooleanSchemas)+ + len(i.InlineSchemas)+ + len(i.ComponentSchemas)+ + len(i.ExternalSchemas), + ) + allSchemas = append(allSchemas, i.BooleanSchemas...) + allSchemas = append(allSchemas, i.InlineSchemas...) + allSchemas = append(allSchemas, i.ComponentSchemas...) + allSchemas = append(allSchemas, i.ExternalSchemas...) + return allSchemas +} + +// GetAllPathItems returns all path items in the index (inline, component, and external). 
+func (i *Index) GetAllPathItems() []*IndexNode[*ReferencedPathItem] { + if i == nil { + return nil + } + + allPathItems := make([]*IndexNode[*ReferencedPathItem], 0, len(i.InlinePathItems)+ + len(i.ComponentPathItems)+ + len(i.ExternalPathItems), + ) + allPathItems = append(allPathItems, i.InlinePathItems...) + allPathItems = append(allPathItems, i.ComponentPathItems...) + allPathItems = append(allPathItems, i.ExternalPathItems...) + return allPathItems +} + +// GetAllParameters returns all parameters in the index (inline, component, and external). +func (i *Index) GetAllParameters() []*IndexNode[*ReferencedParameter] { + if i == nil { + return nil + } + + allParameters := make([]*IndexNode[*ReferencedParameter], 0, len(i.InlineParameters)+ + len(i.ComponentParameters)+ + len(i.ExternalParameters), + ) + allParameters = append(allParameters, i.InlineParameters...) + allParameters = append(allParameters, i.ComponentParameters...) + allParameters = append(allParameters, i.ExternalParameters...) + return allParameters +} + +// GetAllResponses returns all responses in the index (inline, component, and external). +func (i *Index) GetAllResponses() []*IndexNode[*ReferencedResponse] { + if i == nil { + return nil + } + + allResponses := make([]*IndexNode[*ReferencedResponse], 0, len(i.InlineResponses)+ + len(i.ComponentResponses)+ + len(i.ExternalResponses), + ) + allResponses = append(allResponses, i.InlineResponses...) + allResponses = append(allResponses, i.ComponentResponses...) + allResponses = append(allResponses, i.ExternalResponses...) + return allResponses +} + +// GetAllRequestBodies returns all request bodies in the index (inline, component, and external). +func (i *Index) GetAllRequestBodies() []*IndexNode[*ReferencedRequestBody] { + if i == nil { + return nil + } + + allRequestBodies := make([]*IndexNode[*ReferencedRequestBody], 0, len(i.InlineRequestBodies)+ + len(i.ComponentRequestBodies)+ + len(i.ExternalRequestBodies), + ) + allRequestBodies = append(allRequestBodies, i.InlineRequestBodies...) + allRequestBodies = append(allRequestBodies, i.ComponentRequestBodies...) + allRequestBodies = append(allRequestBodies, i.ExternalRequestBodies...) + return allRequestBodies +} + +// GetAllHeaders returns all headers in the index (inline, component, and external). +func (i *Index) GetAllHeaders() []*IndexNode[*ReferencedHeader] { + if i == nil { + return nil + } + + allHeaders := make([]*IndexNode[*ReferencedHeader], 0, len(i.InlineHeaders)+ + len(i.ComponentHeaders)+ + len(i.ExternalHeaders), + ) + allHeaders = append(allHeaders, i.InlineHeaders...) + allHeaders = append(allHeaders, i.ComponentHeaders...) + allHeaders = append(allHeaders, i.ExternalHeaders...) + return allHeaders +} + +// GetAllExamples returns all examples in the index (inline, component, and external). +func (i *Index) GetAllExamples() []*IndexNode[*ReferencedExample] { + if i == nil { + return nil + } + + allExamples := make([]*IndexNode[*ReferencedExample], 0, len(i.InlineExamples)+ + len(i.ComponentExamples)+ + len(i.ExternalExamples), + ) + allExamples = append(allExamples, i.InlineExamples...) + allExamples = append(allExamples, i.ComponentExamples...) + allExamples = append(allExamples, i.ExternalExamples...) + return allExamples +} + +// GetAllLinks returns all links in the index (inline, component, and external). 
+func (i *Index) GetAllLinks() []*IndexNode[*ReferencedLink] { + if i == nil { + return nil + } + + allLinks := make([]*IndexNode[*ReferencedLink], 0, len(i.InlineLinks)+ + len(i.ComponentLinks)+ + len(i.ExternalLinks), + ) + allLinks = append(allLinks, i.InlineLinks...) + allLinks = append(allLinks, i.ComponentLinks...) + allLinks = append(allLinks, i.ExternalLinks...) + return allLinks +} + +// GetAllCallbacks returns all callbacks in the index (inline, component, and external). +func (i *Index) GetAllCallbacks() []*IndexNode[*ReferencedCallback] { + if i == nil { + return nil + } + + allCallbacks := make([]*IndexNode[*ReferencedCallback], 0, len(i.InlineCallbacks)+ + len(i.ComponentCallbacks)+ + len(i.ExternalCallbacks), + ) + allCallbacks = append(allCallbacks, i.InlineCallbacks...) + allCallbacks = append(allCallbacks, i.ComponentCallbacks...) + allCallbacks = append(allCallbacks, i.ExternalCallbacks...) + return allCallbacks +} + +// ReferenceNode represents any node that can be a reference in an OpenAPI document. +// This interface is satisfied by both Reference[T, V, C] types (PathItem, Parameter, Response, etc.) +// and JSONSchemaReferenceable. +type ReferenceNode interface { + GetReference() references.Reference + IsReference() bool + GetRootNode() *yaml.Node +} + +// GetAllReferences returns all reference nodes in the index across all reference types. +// This includes SchemaReferences, PathItemReferences, ParameterReferences, ResponseReferences, +// RequestBodyReferences, HeaderReferences, ExampleReferences, LinkReferences, CallbackReferences, +// and SecuritySchemeReferences. +func (i *Index) GetAllReferences() []*IndexNode[ReferenceNode] { + if i == nil { + return nil + } + + totalCount := len(i.SchemaReferences) + + len(i.PathItemReferences) + + len(i.ParameterReferences) + + len(i.ResponseReferences) + + len(i.RequestBodyReferences) + + len(i.HeaderReferences) + + len(i.ExampleReferences) + + len(i.LinkReferences) + + len(i.CallbackReferences) + + len(i.SecuritySchemeReferences) + + allReferences := make([]*IndexNode[ReferenceNode], 0, totalCount) + + // Add schema references + for _, ref := range i.SchemaReferences { + allReferences = append(allReferences, &IndexNode[ReferenceNode]{ + Node: ref.Node, + Location: ref.Location, + }) + } + + // Add path item references + for _, ref := range i.PathItemReferences { + allReferences = append(allReferences, &IndexNode[ReferenceNode]{ + Node: ref.Node, + Location: ref.Location, + }) + } + + // Add parameter references + for _, ref := range i.ParameterReferences { + allReferences = append(allReferences, &IndexNode[ReferenceNode]{ + Node: ref.Node, + Location: ref.Location, + }) + } + + // Add response references + for _, ref := range i.ResponseReferences { + allReferences = append(allReferences, &IndexNode[ReferenceNode]{ + Node: ref.Node, + Location: ref.Location, + }) + } + + // Add request body references + for _, ref := range i.RequestBodyReferences { + allReferences = append(allReferences, &IndexNode[ReferenceNode]{ + Node: ref.Node, + Location: ref.Location, + }) + } + + // Add header references + for _, ref := range i.HeaderReferences { + allReferences = append(allReferences, &IndexNode[ReferenceNode]{ + Node: ref.Node, + Location: ref.Location, + }) + } + + // Add example references + for _, ref := range i.ExampleReferences { + allReferences = append(allReferences, &IndexNode[ReferenceNode]{ + Node: ref.Node, + Location: ref.Location, + }) + } + + // Add link references + for _, ref := range i.LinkReferences { + 
allReferences = append(allReferences, &IndexNode[ReferenceNode]{ + Node: ref.Node, + Location: ref.Location, + }) + } + + // Add callback references + for _, ref := range i.CallbackReferences { + allReferences = append(allReferences, &IndexNode[ReferenceNode]{ + Node: ref.Node, + Location: ref.Location, + }) + } + + // Add security scheme references + for _, ref := range i.SecuritySchemeReferences { + allReferences = append(allReferences, &IndexNode[ReferenceNode]{ + Node: ref.Node, + Location: ref.Location, + }) + } + + return allReferences +} + +// GetValidationErrors returns validation errors from resolution operations. +func (i *Index) GetValidationErrors() []error { + if i == nil { + return nil + } + return i.validationErrs +} + +// GetResolutionErrors returns errors from failed reference resolution. +func (i *Index) GetResolutionErrors() []error { + if i == nil { + return nil + } + return i.resolutionErrs +} + +// GetCircularReferenceErrors returns invalid (non-terminating) circular reference errors. +func (i *Index) GetCircularReferenceErrors() []error { + if i == nil { + return nil + } + return i.circularErrs +} + +// GetAllErrors returns all errors collected during indexing. +func (i *Index) GetAllErrors() []error { + if i == nil { + return nil + } + all := make([]error, 0, len(i.validationErrs)+len(i.resolutionErrs)+len(i.circularErrs)) + all = append(all, i.validationErrs...) + all = append(all, i.resolutionErrs...) + all = append(all, i.circularErrs...) + return all +} + +// HasErrors returns true if any errors were collected during indexing. +func (i *Index) HasErrors() bool { + if i == nil { + return false + } + return len(i.validationErrs) > 0 || len(i.resolutionErrs) > 0 || len(i.circularErrs) > 0 +} + +// GetValidCircularRefCount returns the count of valid (terminating) circular references found during indexing. +func (i *Index) GetValidCircularRefCount() int { + if i == nil { + return 0 + } + return i.validCircularRefs +} + +// GetInvalidCircularRefCount returns the count of invalid (non-terminating) circular references found during indexing. +func (i *Index) GetInvalidCircularRefCount() int { + if i == nil { + return 0 + } + return i.invalidCircularRefs +} + +// GetNodeOperations returns the operations that reference a given yaml.Node. +// Returns nil if the node was not found or if the node-to-operation mapping was not enabled. +// Enable this feature by passing WithNodeOperationMap() to BuildIndex. +func (i *Index) GetNodeOperations(node *yaml.Node) []*IndexNode[*Operation] { + if i == nil || i.NodeToOperations == nil || node == nil { + return nil + } + return i.NodeToOperations[node] +} + +// registerNodeWithOperation adds a node-operation mapping, avoiding duplicates. 
+func (i *Index) registerNodeWithOperation(node *yaml.Node, op *IndexNode[*Operation]) { + if node == nil || op == nil || i.NodeToOperations == nil { + return + } + // Check for duplicates + existing := i.NodeToOperations[node] + for _, existingOp := range existing { + if existingOp == op { + return + } + } + i.NodeToOperations[node] = append(existing, op) +} + +func buildIndex[T any](ctx context.Context, index *Index, obj *T) error { + for item := range Walk(ctx, obj) { + if err := item.Match(Matcher{ + ExternalDocs: func(ed *oas3.ExternalDocumentation) error { + index.indexExternalDocs(ctx, item.Location, ed) + return nil + }, + Tag: func(t *Tag) error { index.indexTag(ctx, item.Location, t); return nil }, + Server: func(s *Server) error { index.indexServer(ctx, item.Location, s); return nil }, + ServerVariable: func(sv *ServerVariable) error { index.indexServerVariable(ctx, item.Location, sv); return nil }, + ReferencedPathItem: func(rpi *ReferencedPathItem) error { + index.indexReferencedPathItem(ctx, item.Location, rpi) + return nil + }, + ReferencedParameter: func(rp *ReferencedParameter) error { + index.indexReferencedParameter(ctx, item.Location, rp) + return nil + }, + Schema: func(j *oas3.JSONSchemaReferenceable) error { + return index.indexSchema(ctx, item.Location, j) + }, + Discriminator: func(d *oas3.Discriminator) error { + index.indexDiscriminator(ctx, item.Location, d) + return nil + }, + XML: func(x *oas3.XML) error { + index.indexXML(ctx, item.Location, x) + return nil + }, + MediaType: func(mt *MediaType) error { + index.indexMediaType(ctx, item.Location, mt) + return nil + }, + Encoding: func(enc *Encoding) error { + index.indexEncoding(ctx, item.Location, enc) + return nil + }, + ReferencedHeader: func(rh *ReferencedHeader) error { + index.indexReferencedHeader(ctx, item.Location, rh) + return nil + }, + ReferencedExample: func(re *ReferencedExample) error { + index.indexReferencedExample(ctx, item.Location, re) + return nil + }, + Operation: func(op *Operation) error { + index.indexOperation(ctx, item.Location, op) + return nil + }, + ReferencedRequestBody: func(rb *ReferencedRequestBody) error { + index.indexReferencedRequestBody(ctx, item.Location, rb) + return nil + }, + Responses: func(r *Responses) error { + index.indexResponses(ctx, item.Location, r) + return nil + }, + ReferencedResponse: func(rr *ReferencedResponse) error { + index.indexReferencedResponse(ctx, item.Location, rr) + return nil + }, + ReferencedLink: func(rl *ReferencedLink) error { + index.indexReferencedLink(ctx, item.Location, rl) + return nil + }, + ReferencedCallback: func(rc *ReferencedCallback) error { + index.indexReferencedCallback(ctx, item.Location, rc) + return nil + }, + ReferencedSecurityScheme: func(rss *ReferencedSecurityScheme) error { + index.indexReferencedSecurityScheme(ctx, item.Location, rss) + return nil + }, + Security: func(req *SecurityRequirement) error { + index.indexSecurityRequirement(ctx, item.Location, req) + return nil + }, + OAuthFlows: func(of *OAuthFlows) error { + index.indexOAuthFlows(ctx, item.Location, of) + return nil + }, + OAuthFlow: func(of *OAuthFlow) error { + index.indexOAuthFlow(ctx, item.Location, of) + return nil + }, + Any: func(a any) error { + // Node-to-operation mapping: check if we've exited the current operation's subtree + // Only check location depth when NOT in a reference walk + // During reference walks (len(referenceStack) > 0), location depth resets for the resolved target + // but we should continue associating nodes with the 
current operation + if index.buildNodeOperationMap && index.currentOperation != nil && len(index.referenceStack) == 0 { + if len(item.Location) < index.operationLocationDepth { + // We've moved to a shallower level - no longer under the operation + index.currentOperation = nil + } + } + + // Register nodes with current operation if applicable + if index.buildNodeOperationMap && index.currentOperation != nil { + // Register the root node + if rootNode := getRootNodeFromAny(a); rootNode != nil { + index.registerNodeWithOperation(rootNode, index.currentOperation) + } + // Also register all leaf nodes from the core model + // This ensures scalar values (like items: true) are also mapped + if core := getCoreModelFromAny(a); core != nil { + for _, node := range marshaller.CollectLeafNodes(core) { + index.registerNodeWithOperation(node, index.currentOperation) + } + } + } + + // Check for unknown properties on any model with a core + if core := getCoreModelFromAny(a); core != nil { + if coreModeler, ok := core.(marshaller.CoreModeler); ok { + index.checkUnknownProperties(ctx, coreModeler) + } + } + + if d, ok := a.(Descriptioner); ok { + index.indexDescriptionNode(ctx, item.Location, d) + } + if s, ok := a.(Summarizer); ok { + index.indexSummaryNode(ctx, item.Location, s) + } + if ds, ok := a.(DescriptionAndSummary); ok { + index.indexDescriptionAndSummaryNode(ctx, item.Location, ds) + } + return nil + }, + }); err != nil { + return err + } + } + + return nil +} + +func (i *Index) indexSchema(ctx context.Context, loc Locations, schema *oas3.JSONSchemaReferenceable) error { + // Resolve if needed (do this first to get the resolved schema for tracking) + if !schema.IsResolved() { + vErrs, err := schema.Resolve(ctx, i.getCurrentResolveOptions()) + if err != nil { + i.resolutionErrs = append(i.resolutionErrs, validation.NewValidationErrorWithDocumentLocation( + validation.SeverityError, + "resolution-json-schema", + err, + getSchemaErrorNode(schema), + i.documentPathForSchema(schema), + )) + return nil + } + i.validationErrs = append(i.validationErrs, i.applyDocumentLocation(vErrs, i.documentPathForSchema(schema))...) + if resolved := schema.GetResolvedSchema(); resolved != nil && i.Doc != nil { + opts := i.referenceValidationOptions() + schemaErrs := resolved.Validate(ctx, opts...) + i.validationErrs = append(i.validationErrs, i.applyDocumentLocation(schemaErrs, i.documentPathForSchema(schema))...) 
+ } + } + + // Index the schema based on its type + if schema.IsBool() { + if !i.indexedSchemas[schema] { + i.BooleanSchemas = append(i.BooleanSchemas, &IndexNode[*oas3.JSONSchemaReferenceable]{ + Node: schema, + Location: loc, + }) + i.indexedSchemas[schema] = true + } + return nil + } + + if schema.IsReference() { + // Add to references list only if this exact schema object hasn't been indexed yet + // This ensures each $ref in the source document is indexed exactly once + if !i.indexedSchemas[schema] { + i.SchemaReferences = append(i.SchemaReferences, &IndexNode[*oas3.JSONSchemaReferenceable]{ + Node: schema, + Location: loc, + }) + i.indexedSchemas[schema] = true + } + + // Get the $ref target for tracking + refTarget := getRefTarget(schema) + if refTarget == "" { + return nil // Can't track without a ref target + } + + // IMPORTANT: Check circular reference BEFORE walking + // A schema might be visited AND currently in the reference stack (circular case) + for stackIdx, entry := range i.referenceStack { + if entry.refTarget == refTarget { + // CIRCULAR REFERENCE DETECTED - this is the SECOND+ encounter + // Build path segments from first occurrence to current + pathSegments := i.buildPathSegmentsFromStack(stackIdx, loc) + externalDocumentPath := "" + currentDocPath := i.currentDocumentPath() + if currentDocPath != i.resolveOpts.TargetLocation { + externalDocumentPath = currentDocPath + } + circularChain := i.buildCircularReferenceChain(stackIdx, refTarget) + + // Classify the circular reference + classification, polymorphicInfo := i.classifyCircularPath(schema, pathSegments, loc) + + switch classification { + case CircularInvalid: + i.invalidCircularRefs++ + err := fmt.Errorf("non-terminating circular reference detected: %s", joinReferenceChainWithArrows(circularChain)) + i.circularErrs = append(i.circularErrs, validation.NewValidationErrorWithDocumentLocation( + validation.SeverityError, + "circular-reference-invalid", + err, + getSchemaErrorNode(schema), + externalDocumentPath, + )) + case CircularPending: + if polymorphicInfo != nil { + i.recordPolymorphicBranch(polymorphicInfo) + } + case CircularValid: + i.validCircularRefs++ + case CircularUnclassified: + // No action needed for unclassified circulars + } + + // Stop processing this branch - don't walk the same schema again + return nil + } + } + + // Get the document path for the resolved schema + info := schema.GetReferenceResolutionInfo() + var docPath string + if info != nil { + docPath = info.AbsoluteDocumentPath + } + + // Push ref target onto reference stack + i.referenceStack = append(i.referenceStack, referenceStackEntry{ + refTarget: refTarget, + location: copyLocations(loc), + }) + + // Push document path onto document stack BEFORE walking + // This allows nested resolved documents (including returning to main) to + // attribute errors to the correct document. 
+ currentDoc := "" + if len(i.currentDocumentStack) > 0 { + currentDoc = i.currentDocumentStack[len(i.currentDocumentStack)-1] + } + if docPath != "" && docPath != currentDoc { + i.currentDocumentStack = append(i.currentDocumentStack, docPath) + defer func() { + // Pop from document stack + if len(i.currentDocumentStack) > 1 { + i.currentDocumentStack = i.currentDocumentStack[:len(i.currentDocumentStack)-1] + } + }() + } + + // Get the resolved schema and recursively walk it + // Walk API doesn't walk resolved references automatically - we must walk them + resolved := schema.GetResolvedSchema() + if resolved != nil { + // Convert Concrete to Referenceable for walking + refableResolved := oas3.ConcreteToReferenceable(resolved) + if err := buildIndex(ctx, i, refableResolved); err != nil { + i.referenceStack = i.referenceStack[:len(i.referenceStack)-1] + return err + } + } + + // Pop from reference stack + i.referenceStack = i.referenceStack[:len(i.referenceStack)-1] + + return nil + } + + // Non-reference schema (component, external, or inline) + // Note: We don't use indexedSchemas check here because schemas can be referenced + // from multiple paths and should be indexed for each occurrence + + // Check if this is a top-level component in the main document + if isTopLevelComponent(loc, "schemas") { + if !i.indexedSchemas[schema] { + i.ComponentSchemas = append(i.ComponentSchemas, &IndexNode[*oas3.JSONSchemaReferenceable]{ + Node: schema, + Location: loc, + }) + i.indexedSchemas[schema] = true + } + return nil + } + + // Check if this is a top-level schema in an external document + // Important: Only mark as external if it's NOT from the main document + if isTopLevelExternalSchema(loc) { + if !i.isFromMainDocument() && !i.indexedSchemas[schema] { + i.ExternalSchemas = append(i.ExternalSchemas, &IndexNode[*oas3.JSONSchemaReferenceable]{ + Node: schema, + Location: loc, + }) + i.indexedSchemas[schema] = true + } + return nil + } + + // Everything else is an inline schema + // Inline schemas can appear multiple times (e.g., same property type in different schemas) + // but we only index each unique schema object once + if !i.indexedSchemas[schema] { + i.InlineSchemas = append(i.InlineSchemas, &IndexNode[*oas3.JSONSchemaReferenceable]{ + Node: schema, + Location: loc, + }) + i.indexedSchemas[schema] = true + } + + return nil +} + +// isTopLevelExternalSchema checks if the location represents a top-level schema +// in an external document (i.e., at the root of an external document, not under /components/). +func isTopLevelExternalSchema(loc Locations) bool { + // Top-level external schemas appear at location "/" (root of external doc) + // They have 0 location contexts (empty Locations slice) + if len(loc) == 0 { + return true + } + + // Single context with no ParentField (or empty ParentField) also indicates root + if len(loc) == 1 && loc[0].ParentField == "" { + return true + } + + return false +} + +// isFromMainDocument checks if we're currently walking the main document +// by checking the current document stack. +func (i *Index) isFromMainDocument() bool { + if len(i.currentDocumentStack) == 0 { + return true // Safety fallback - assume main document + } + + currentDoc := i.currentDocumentStack[len(i.currentDocumentStack)-1] + mainDoc := i.resolveOpts.TargetLocation + + return currentDoc == mainDoc +} + +// buildPathSegmentsFromStack builds path segments from a point in the reference stack to current location. 
+func (i *Index) buildPathSegmentsFromStack(startStackIdx int, currentLoc Locations) []CircularPathSegment { + // Collect only the segments WITHIN the circular loop. + // Skip referenceStack[startStackIdx].location because it contains the path + // leading TO the circular loop start (outside the loop), not the path within it. + // Only include entries after startStackIdx (intermediate refs in the loop) plus currentLoc. + var segments []CircularPathSegment + + for stackIdx := startStackIdx + 1; stackIdx < len(i.referenceStack); stackIdx++ { + entry := i.referenceStack[stackIdx] + for _, locCtx := range entry.location { + segments = append(segments, buildPathSegment(locCtx)) + } + } + + // Add segments from current location + for _, locCtx := range currentLoc { + segments = append(segments, buildPathSegment(locCtx)) + } + + return segments +} + +func (i *Index) buildCircularReferenceChain(startStackIdx int, refTarget string) []string { + chain := make([]string, 0, len(i.referenceStack)-startStackIdx+1) + for stackIdx := startStackIdx; stackIdx < len(i.referenceStack); stackIdx++ { + chain = append(chain, i.referenceStack[stackIdx].refTarget) + } + chain = append(chain, refTarget) + return chain +} + +// checkUnknownProperties checks for unknown properties in a core model and adds warnings. +func (i *Index) checkUnknownProperties(_ context.Context, core marshaller.CoreModeler) { + if core == nil { + return + } + + unknownProps := core.GetUnknownProperties() + if len(unknownProps) == 0 { + return + } + + // Initialize the map for this core model if not already present + if i.reportedUnknownProps[core] == nil { + i.reportedUnknownProps[core] = make(map[string]bool) + } + + docPath := "" + if len(i.currentDocumentStack) > 0 { + currentDoc := i.currentDocumentStack[len(i.currentDocumentStack)-1] + if currentDoc != i.resolveOpts.TargetLocation { + docPath = currentDoc + } + } + + for _, prop := range unknownProps { + // Skip if this property has already been reported for this core model + if i.reportedUnknownProps[core][prop] { + continue + } + + // Mark as reported + i.reportedUnknownProps[core][prop] = true + + err := fmt.Errorf("unknown property `%s` found", prop) + i.validationErrs = append(i.validationErrs, validation.NewValidationErrorWithDocumentLocation( + validation.SeverityWarning, + "validation-unknown-properties", + err, + core.GetRootNode(), + docPath, + )) + } +} + +func (i *Index) indexExternalDocs(_ context.Context, loc Locations, ed *oas3.ExternalDocumentation) { + i.ExternalDocumentation = append(i.ExternalDocumentation, &IndexNode[*oas3.ExternalDocumentation]{ + Node: ed, + Location: loc, + }) +} + +func (i *Index) indexTag(_ context.Context, loc Locations, tag *Tag) { + i.Tags = append(i.Tags, &IndexNode[*Tag]{ + Node: tag, + Location: loc, + }) +} + +func (i *Index) indexServer(_ context.Context, loc Locations, server *Server) { + i.Servers = append(i.Servers, &IndexNode[*Server]{ + Node: server, + Location: loc, + }) +} + +func (i *Index) indexServerVariable(_ context.Context, loc Locations, serverVariable *ServerVariable) { + i.ServerVariables = append(i.ServerVariables, &IndexNode[*ServerVariable]{ + Node: serverVariable, + Location: loc, + }) +} + +func (i *Index) indexReferencedPathItem(ctx context.Context, loc Locations, pathItem *ReferencedPathItem) { + if pathItem == nil { + return + } + + if pathItem.IsReference() && !pathItem.IsResolved() { + resolveAndValidateReference(i, ctx, pathItem) + } + + // Index description and summary if both are present + // For PathItems 
wrapped in References, we need to get the underlying PathItem + obj := pathItem.GetObject() + if obj != nil { + desc := obj.GetDescription() + summary := obj.GetSummary() + + if desc != "" { + i.indexDescriptionNode(ctx, loc, obj) + } + if summary != "" { + i.indexSummaryNode(ctx, loc, obj) + } + if desc != "" && summary != "" { + i.indexDescriptionAndSummaryNode(ctx, loc, obj) + } + } + + // Categorize path items similarly to schemas + if pathItem.IsReference() { + // Add to references list only if this exact reference object hasn't been indexed + if !i.indexedReferences[pathItem] { + i.PathItemReferences = append(i.PathItemReferences, &IndexNode[*ReferencedPathItem]{ + Node: pathItem, + Location: loc, + }) + i.indexedReferences[pathItem] = true + } + + // Get the document path for the resolved path item + info := pathItem.GetReferenceResolutionInfo() + var docPath string + if info != nil { + docPath = info.AbsoluteDocumentPath + } + + // Push document path onto document stack BEFORE walking + currentDoc := "" + if len(i.currentDocumentStack) > 0 { + currentDoc = i.currentDocumentStack[len(i.currentDocumentStack)-1] + } + if docPath != "" && docPath != currentDoc { + i.currentDocumentStack = append(i.currentDocumentStack, docPath) + defer func() { + // Pop from document stack + if len(i.currentDocumentStack) > 1 { + i.currentDocumentStack = i.currentDocumentStack[:len(i.currentDocumentStack)-1] + } + }() + } + + // If resolved, explicitly walk the resolved content (similar to how schemas are handled) + resolved := pathItem.GetObject() + if resolved != nil { + // Wrap the resolved PathItem back into a ReferencedPathItem for walking + wrapped := &ReferencedPathItem{Object: resolved} + _ = buildIndex(ctx, i, wrapped) + } + return + } + + if obj == nil { + return + } + + // Check if this is a component path item + if isTopLevelComponent(loc, "pathItems") { + if !i.indexedPathItems[obj] { + i.ComponentPathItems = append(i.ComponentPathItems, &IndexNode[*ReferencedPathItem]{ + Node: pathItem, + Location: loc, + }) + i.indexedPathItems[obj] = true + } + return + } + + // Check if this is a top-level path item in an external document + // External path items appear at location "/" (root of external doc) + if isTopLevelExternalSchema(loc) { + if !i.indexedPathItems[obj] { + i.ExternalPathItems = append(i.ExternalPathItems, &IndexNode[*ReferencedPathItem]{ + Node: pathItem, + Location: loc, + }) + i.indexedPathItems[obj] = true + } + return + } + + // Everything else is an inline path item + if !i.indexedPathItems[obj] { + i.InlinePathItems = append(i.InlinePathItems, &IndexNode[*ReferencedPathItem]{ + Node: pathItem, + Location: loc, + }) + i.indexedPathItems[obj] = true + } +} + +func (i *Index) indexOperation(_ context.Context, loc Locations, operation *Operation) { + if operation == nil { + return + } + + indexNode := &IndexNode[*Operation]{ + Node: operation, + Location: loc, + } + i.Operations = append(i.Operations, indexNode) + + // Track current operation for node-to-operation mapping + if i.buildNodeOperationMap { + i.currentOperation = indexNode + i.operationLocationDepth = len(loc) + } +} + +func (i *Index) indexReferencedParameter(ctx context.Context, loc Locations, param *ReferencedParameter) { + if param == nil { + return + } + + if param.IsReference() && !param.IsResolved() { + resolveAndValidateReference(i, ctx, param) + } + + if param.IsReference() { + // Add to references list only if this exact reference object hasn't been indexed + if !i.indexedReferences[param] { + 
i.ParameterReferences = append(i.ParameterReferences, &IndexNode[*ReferencedParameter]{ + Node: param, + Location: loc, + }) + i.indexedReferences[param] = true + } + + // Get the document path for the resolved parameter + info := param.GetReferenceResolutionInfo() + var docPath string + if info != nil { + docPath = info.AbsoluteDocumentPath + } + + // Push document path onto document stack BEFORE walking + currentDoc := "" + if len(i.currentDocumentStack) > 0 { + currentDoc = i.currentDocumentStack[len(i.currentDocumentStack)-1] + } + if docPath != "" && docPath != currentDoc { + i.currentDocumentStack = append(i.currentDocumentStack, docPath) + defer func() { + // Pop from document stack + if len(i.currentDocumentStack) > 1 { + i.currentDocumentStack = i.currentDocumentStack[:len(i.currentDocumentStack)-1] + } + }() + } + + // If resolved, explicitly walk the resolved content + resolved := param.GetObject() + if resolved != nil { + wrapped := &ReferencedParameter{Object: resolved} + _ = buildIndex(ctx, i, wrapped) + } + return + } + + obj := param.GetObject() + if obj == nil { + return + } + + if isTopLevelComponent(loc, "parameters") { + if !i.indexedParameters[obj] { + i.ComponentParameters = append(i.ComponentParameters, &IndexNode[*ReferencedParameter]{ + Node: param, + Location: loc, + }) + i.indexedParameters[obj] = true + } + return + } + + // Check if this is a top-level parameter in an external document + // Important: Only mark as external if it's NOT from the main document + if isTopLevelExternalSchema(loc) { + if !i.isFromMainDocument() && !i.indexedParameters[obj] { + i.ExternalParameters = append(i.ExternalParameters, &IndexNode[*ReferencedParameter]{ + Node: param, + Location: loc, + }) + i.indexedParameters[obj] = true + } + return + } + + // Everything else is an inline parameter + if !i.indexedParameters[obj] { + i.InlineParameters = append(i.InlineParameters, &IndexNode[*ReferencedParameter]{ + Node: param, + Location: loc, + }) + i.indexedParameters[obj] = true + } +} + +func (i *Index) indexResponses(_ context.Context, loc Locations, responses *Responses) { + if responses == nil { + return + } + i.Responses = append(i.Responses, &IndexNode[*Responses]{ + Node: responses, + Location: loc, + }) +} + +func (i *Index) indexReferencedResponse(ctx context.Context, loc Locations, resp *ReferencedResponse) { + if resp == nil { + return + } + + if resp.IsReference() && !resp.IsResolved() { + resolveAndValidateReference(i, ctx, resp) + } + + if resp.IsReference() { + // Add to references list only if this exact reference object hasn't been indexed + if !i.indexedReferences[resp] { + i.ResponseReferences = append(i.ResponseReferences, &IndexNode[*ReferencedResponse]{ + Node: resp, + Location: loc, + }) + i.indexedReferences[resp] = true + } + + // Get the document path for the resolved response + info := resp.GetReferenceResolutionInfo() + var docPath string + if info != nil { + docPath = info.AbsoluteDocumentPath + } + + // Push document path onto document stack BEFORE walking + currentDoc := "" + if len(i.currentDocumentStack) > 0 { + currentDoc = i.currentDocumentStack[len(i.currentDocumentStack)-1] + } + if docPath != "" && docPath != currentDoc { + i.currentDocumentStack = append(i.currentDocumentStack, docPath) + defer func() { + // Pop from document stack + if len(i.currentDocumentStack) > 1 { + i.currentDocumentStack = i.currentDocumentStack[:len(i.currentDocumentStack)-1] + } + }() + } + + // If resolved, explicitly walk the resolved content + resolved := 
resp.GetObject() + if resolved != nil { + wrapped := &ReferencedResponse{Object: resolved} + _ = buildIndex(ctx, i, wrapped) + } + return + } + + obj := resp.GetObject() + if obj == nil { + return + } + + if isTopLevelComponent(loc, "responses") { + if !i.indexedResponses[obj] { + i.ComponentResponses = append(i.ComponentResponses, &IndexNode[*ReferencedResponse]{ + Node: resp, + Location: loc, + }) + i.indexedResponses[obj] = true + } + return + } + + // Check if this is a top-level response in an external document + // Important: Only mark as external if it's NOT from the main document + if isTopLevelExternalSchema(loc) { + if !i.isFromMainDocument() && !i.indexedResponses[obj] { + i.ExternalResponses = append(i.ExternalResponses, &IndexNode[*ReferencedResponse]{ + Node: resp, + Location: loc, + }) + i.indexedResponses[obj] = true + } + return + } + + // Everything else is an inline response + if !i.indexedResponses[obj] { + i.InlineResponses = append(i.InlineResponses, &IndexNode[*ReferencedResponse]{ + Node: resp, + Location: loc, + }) + i.indexedResponses[obj] = true + } +} + +func (i *Index) indexReferencedRequestBody(ctx context.Context, loc Locations, rb *ReferencedRequestBody) { + if rb == nil { + return + } + + if rb.IsReference() && !rb.IsResolved() { + resolveAndValidateReference(i, ctx, rb) + } + + if rb.IsReference() { + // Add to references list only if this exact reference object hasn't been indexed + if !i.indexedReferences[rb] { + i.RequestBodyReferences = append(i.RequestBodyReferences, &IndexNode[*ReferencedRequestBody]{ + Node: rb, + Location: loc, + }) + i.indexedReferences[rb] = true + } + + // Get the document path for the resolved request body + info := rb.GetReferenceResolutionInfo() + var docPath string + if info != nil { + docPath = info.AbsoluteDocumentPath + } + + // Push document path onto document stack BEFORE walking + currentDoc := "" + if len(i.currentDocumentStack) > 0 { + currentDoc = i.currentDocumentStack[len(i.currentDocumentStack)-1] + } + if docPath != "" && docPath != currentDoc { + i.currentDocumentStack = append(i.currentDocumentStack, docPath) + defer func() { + // Pop from document stack + if len(i.currentDocumentStack) > 1 { + i.currentDocumentStack = i.currentDocumentStack[:len(i.currentDocumentStack)-1] + } + }() + } + + // If resolved, explicitly walk the resolved content + resolved := rb.GetObject() + if resolved != nil { + wrapped := &ReferencedRequestBody{Object: resolved} + _ = buildIndex(ctx, i, wrapped) + } + return + } + + obj := rb.GetObject() + if obj == nil { + return + } + + if isTopLevelComponent(loc, "requestBodies") { + if !i.indexedRequestBodies[obj] { + i.ComponentRequestBodies = append(i.ComponentRequestBodies, &IndexNode[*ReferencedRequestBody]{ + Node: rb, + Location: loc, + }) + i.indexedRequestBodies[obj] = true + } + return + } + + // Check if this is a top-level request body in an external document + // Important: Only mark as external if it's NOT from the main document + if isTopLevelExternalSchema(loc) { + if !i.isFromMainDocument() && !i.indexedRequestBodies[obj] { + i.ExternalRequestBodies = append(i.ExternalRequestBodies, &IndexNode[*ReferencedRequestBody]{ + Node: rb, + Location: loc, + }) + i.indexedRequestBodies[obj] = true + } + return + } + + // Everything else is an inline request body + if !i.indexedRequestBodies[obj] { + i.InlineRequestBodies = append(i.InlineRequestBodies, &IndexNode[*ReferencedRequestBody]{ + Node: rb, + Location: loc, + }) + i.indexedRequestBodies[obj] = true + } +} + +func (i *Index) 
indexReferencedHeader(ctx context.Context, loc Locations, header *ReferencedHeader) { + if header == nil { + return + } + + if header.IsReference() && !header.IsResolved() { + resolveAndValidateReference(i, ctx, header) + } + + if header.IsReference() { + // Add to references list only if this exact reference object hasn't been indexed + if !i.indexedReferences[header] { + i.HeaderReferences = append(i.HeaderReferences, &IndexNode[*ReferencedHeader]{ + Node: header, + Location: loc, + }) + i.indexedReferences[header] = true + } + + // Get the document path for the resolved header + info := header.GetReferenceResolutionInfo() + var docPath string + if info != nil { + docPath = info.AbsoluteDocumentPath + } + + // Push document path onto document stack BEFORE walking + currentDoc := "" + if len(i.currentDocumentStack) > 0 { + currentDoc = i.currentDocumentStack[len(i.currentDocumentStack)-1] + } + if docPath != "" && docPath != currentDoc { + i.currentDocumentStack = append(i.currentDocumentStack, docPath) + defer func() { + // Pop from document stack + if len(i.currentDocumentStack) > 1 { + i.currentDocumentStack = i.currentDocumentStack[:len(i.currentDocumentStack)-1] + } + }() + } + + // If resolved, explicitly walk the resolved content + resolved := header.GetObject() + if resolved != nil { + wrapped := &ReferencedHeader{Object: resolved} + _ = buildIndex(ctx, i, wrapped) + } + return + } + + obj := header.GetObject() + if obj == nil { + return + } + + if isTopLevelComponent(loc, "headers") { + if !i.indexedHeaders[obj] { + i.ComponentHeaders = append(i.ComponentHeaders, &IndexNode[*ReferencedHeader]{ + Node: header, + Location: loc, + }) + i.indexedHeaders[obj] = true + } + return + } + + // Check if this is a top-level header in an external document + // Important: Only mark as external if it's NOT from the main document + if isTopLevelExternalSchema(loc) { + if !i.isFromMainDocument() && !i.indexedHeaders[obj] { + i.ExternalHeaders = append(i.ExternalHeaders, &IndexNode[*ReferencedHeader]{ + Node: header, + Location: loc, + }) + i.indexedHeaders[obj] = true + } + return + } + + // Everything else is an inline header + if !i.indexedHeaders[obj] { + i.InlineHeaders = append(i.InlineHeaders, &IndexNode[*ReferencedHeader]{ + Node: header, + Location: loc, + }) + i.indexedHeaders[obj] = true + } +} + +func (i *Index) indexReferencedExample(ctx context.Context, loc Locations, example *ReferencedExample) { + if example == nil { + return + } + + if example.IsReference() && !example.IsResolved() { + resolveAndValidateReference(i, ctx, example) + } + + if example.IsReference() { + // Add to references list only if this exact reference object hasn't been indexed + if !i.indexedReferences[example] { + i.ExampleReferences = append(i.ExampleReferences, &IndexNode[*ReferencedExample]{ + Node: example, + Location: loc, + }) + i.indexedReferences[example] = true + } + + // Get the document path for the resolved example + info := example.GetReferenceResolutionInfo() + var docPath string + if info != nil { + docPath = info.AbsoluteDocumentPath + } + + // Push document path onto document stack BEFORE walking + currentDoc := "" + if len(i.currentDocumentStack) > 0 { + currentDoc = i.currentDocumentStack[len(i.currentDocumentStack)-1] + } + if docPath != "" && docPath != currentDoc { + i.currentDocumentStack = append(i.currentDocumentStack, docPath) + defer func() { + // Pop from document stack + if len(i.currentDocumentStack) > 1 { + i.currentDocumentStack = 
i.currentDocumentStack[:len(i.currentDocumentStack)-1] + } + }() + } + + // If resolved, explicitly walk the resolved content + resolved := example.GetObject() + if resolved != nil { + wrapped := &ReferencedExample{Object: resolved} + _ = buildIndex(ctx, i, wrapped) + } + return + } + + obj := example.GetObject() + if obj == nil { + return + } + + if isTopLevelComponent(loc, "examples") { + if !i.indexedExamples[obj] { + i.ComponentExamples = append(i.ComponentExamples, &IndexNode[*ReferencedExample]{ + Node: example, + Location: loc, + }) + i.indexedExamples[obj] = true + } + return + } + + // Check if this is a top-level example in an external document + // Important: Only mark as external if it's NOT from the main document + if isTopLevelExternalSchema(loc) { + if !i.isFromMainDocument() && !i.indexedExamples[obj] { + i.ExternalExamples = append(i.ExternalExamples, &IndexNode[*ReferencedExample]{ + Node: example, + Location: loc, + }) + i.indexedExamples[obj] = true + } + return + } + + // Everything else is an inline example + if !i.indexedExamples[obj] { + i.InlineExamples = append(i.InlineExamples, &IndexNode[*ReferencedExample]{ + Node: example, + Location: loc, + }) + i.indexedExamples[obj] = true + } +} + +func (i *Index) indexReferencedLink(ctx context.Context, loc Locations, link *ReferencedLink) { + if link == nil { + return + } + + if link.IsReference() && !link.IsResolved() { + resolveAndValidateReference(i, ctx, link) + } + + if link.IsReference() { + // Add to references list only if this exact reference object hasn't been indexed + if !i.indexedReferences[link] { + i.LinkReferences = append(i.LinkReferences, &IndexNode[*ReferencedLink]{ + Node: link, + Location: loc, + }) + i.indexedReferences[link] = true + } + + // Get the document path for the resolved link + info := link.GetReferenceResolutionInfo() + var docPath string + if info != nil { + docPath = info.AbsoluteDocumentPath + } + + // Push document path onto document stack BEFORE walking + currentDoc := "" + if len(i.currentDocumentStack) > 0 { + currentDoc = i.currentDocumentStack[len(i.currentDocumentStack)-1] + } + if docPath != "" && docPath != currentDoc { + i.currentDocumentStack = append(i.currentDocumentStack, docPath) + defer func() { + // Pop from document stack + if len(i.currentDocumentStack) > 1 { + i.currentDocumentStack = i.currentDocumentStack[:len(i.currentDocumentStack)-1] + } + }() + } + + // If resolved, explicitly walk the resolved content + resolved := link.GetObject() + if resolved != nil { + wrapped := &ReferencedLink{Object: resolved} + _ = buildIndex(ctx, i, wrapped) + } + return + } + + obj := link.GetObject() + if obj == nil { + return + } + + if isTopLevelComponent(loc, "links") { + if !i.indexedLinks[obj] { + i.ComponentLinks = append(i.ComponentLinks, &IndexNode[*ReferencedLink]{ + Node: link, + Location: loc, + }) + i.indexedLinks[obj] = true + } + return + } + + // Check if this is a top-level link in an external document + // Important: Only mark as external if it's NOT from the main document + if isTopLevelExternalSchema(loc) { + if !i.isFromMainDocument() && !i.indexedLinks[obj] { + i.ExternalLinks = append(i.ExternalLinks, &IndexNode[*ReferencedLink]{ + Node: link, + Location: loc, + }) + i.indexedLinks[obj] = true + } + return + } + + // Everything else is an inline link + if !i.indexedLinks[obj] { + i.InlineLinks = append(i.InlineLinks, &IndexNode[*ReferencedLink]{ + Node: link, + Location: loc, + }) + i.indexedLinks[obj] = true + } +} + +func (i *Index) 
indexReferencedCallback(ctx context.Context, loc Locations, callback *ReferencedCallback) { + if callback == nil { + return + } + + if callback.IsReference() && !callback.IsResolved() { + resolveAndValidateReference(i, ctx, callback) + } + + if callback.IsReference() { + // Add to references list only if this exact reference object hasn't been indexed + if !i.indexedReferences[callback] { + i.CallbackReferences = append(i.CallbackReferences, &IndexNode[*ReferencedCallback]{ + Node: callback, + Location: loc, + }) + i.indexedReferences[callback] = true + } + + // Get the document path for the resolved callback + info := callback.GetReferenceResolutionInfo() + var docPath string + if info != nil { + docPath = info.AbsoluteDocumentPath + } + + // Push document path onto document stack BEFORE walking + currentDoc := "" + if len(i.currentDocumentStack) > 0 { + currentDoc = i.currentDocumentStack[len(i.currentDocumentStack)-1] + } + if docPath != "" && docPath != currentDoc { + i.currentDocumentStack = append(i.currentDocumentStack, docPath) + defer func() { + // Pop from document stack + if len(i.currentDocumentStack) > 1 { + i.currentDocumentStack = i.currentDocumentStack[:len(i.currentDocumentStack)-1] + } + }() + } + + // If resolved, explicitly walk the resolved content + resolved := callback.GetObject() + if resolved != nil { + wrapped := &ReferencedCallback{Object: resolved} + _ = buildIndex(ctx, i, wrapped) + } + return + } + + obj := callback.GetObject() + if obj == nil { + return + } + + if isTopLevelComponent(loc, "callbacks") { + if !i.indexedCallbacks[obj] { + i.ComponentCallbacks = append(i.ComponentCallbacks, &IndexNode[*ReferencedCallback]{ + Node: callback, + Location: loc, + }) + i.indexedCallbacks[obj] = true + } + return + } + + // Check if this is a top-level callback in an external document + // Important: Only mark as external if it's NOT from the main document + if isTopLevelExternalSchema(loc) { + if !i.isFromMainDocument() && !i.indexedCallbacks[obj] { + i.ExternalCallbacks = append(i.ExternalCallbacks, &IndexNode[*ReferencedCallback]{ + Node: callback, + Location: loc, + }) + i.indexedCallbacks[obj] = true + } + return + } + + // Everything else is an inline callback + if !i.indexedCallbacks[obj] { + i.InlineCallbacks = append(i.InlineCallbacks, &IndexNode[*ReferencedCallback]{ + Node: callback, + Location: loc, + }) + i.indexedCallbacks[obj] = true + } +} + +func (i *Index) indexReferencedSecurityScheme(ctx context.Context, loc Locations, ss *ReferencedSecurityScheme) { + if ss == nil { + return + } + + if ss.IsReference() && !ss.IsResolved() { + resolveAndValidateReference(i, ctx, ss) + } + + if ss.IsReference() { + // Add to references list only if this exact reference object hasn't been indexed + if !i.indexedReferences[ss] { + i.SecuritySchemeReferences = append(i.SecuritySchemeReferences, &IndexNode[*ReferencedSecurityScheme]{ + Node: ss, + Location: loc, + }) + i.indexedReferences[ss] = true + } + return + } + + // SecuritySchemes are always components (no inline security schemes) + i.ComponentSecuritySchemes = append(i.ComponentSecuritySchemes, &IndexNode[*ReferencedSecurityScheme]{ + Node: ss, + Location: loc, + }) +} + +func (i *Index) indexSecurityRequirement(_ context.Context, loc Locations, req *SecurityRequirement) { + if req == nil { + return + } + + i.SecurityRequirements = append(i.SecurityRequirements, &IndexNode[*SecurityRequirement]{ + Node: req, + Location: loc, + }) +} + +func (i *Index) indexDiscriminator(_ context.Context, loc Locations, 
discriminator *oas3.Discriminator) { + if discriminator == nil { + return + } + i.Discriminators = append(i.Discriminators, &IndexNode[*oas3.Discriminator]{ + Node: discriminator, + Location: loc, + }) +} + +func (i *Index) indexXML(_ context.Context, loc Locations, xml *oas3.XML) { + if xml == nil { + return + } + i.XMLs = append(i.XMLs, &IndexNode[*oas3.XML]{ + Node: xml, + Location: loc, + }) +} + +func (i *Index) indexMediaType(_ context.Context, loc Locations, mediaType *MediaType) { + if mediaType == nil { + return + } + i.MediaTypes = append(i.MediaTypes, &IndexNode[*MediaType]{ + Node: mediaType, + Location: loc, + }) +} + +func (i *Index) indexEncoding(_ context.Context, loc Locations, encoding *Encoding) { + if encoding == nil { + return + } + i.Encodings = append(i.Encodings, &IndexNode[*Encoding]{ + Node: encoding, + Location: loc, + }) +} + +func (i *Index) indexOAuthFlows(_ context.Context, loc Locations, flows *OAuthFlows) { + if flows == nil { + return + } + i.OAuthFlows = append(i.OAuthFlows, &IndexNode[*OAuthFlows]{ + Node: flows, + Location: loc, + }) +} + +func (i *Index) indexOAuthFlow(_ context.Context, loc Locations, flow *OAuthFlow) { + if flow == nil { + return + } + i.OAuthFlowItems = append(i.OAuthFlowItems, &IndexNode[*OAuthFlow]{ + Node: flow, + Location: loc, + }) +} + +func (i *Index) indexDescriptionNode(_ context.Context, loc Locations, d Descriptioner) { + if d == nil { + return + } + i.DescriptionNodes = append(i.DescriptionNodes, &IndexNode[Descriptioner]{ + Node: d, + Location: loc, + }) +} + +func (i *Index) indexSummaryNode(_ context.Context, loc Locations, s Summarizer) { + if s == nil { + return + } + i.SummaryNodes = append(i.SummaryNodes, &IndexNode[Summarizer]{ + Node: s, + Location: loc, + }) +} + +func (i *Index) indexDescriptionAndSummaryNode(_ context.Context, loc Locations, ds DescriptionAndSummary) { + if ds == nil { + return + } + i.DescriptionAndSummaryNodes = append(i.DescriptionAndSummaryNodes, &IndexNode[DescriptionAndSummary]{ + Node: ds, + Location: loc, + }) +} + +func (i *Index) documentPathForSchema(schema *oas3.JSONSchemaReferenceable) string { + if i == nil || schema == nil { + return "" + } + + if info := schema.GetReferenceResolutionInfo(); info != nil { + if info.AbsoluteDocumentPath != i.resolveOpts.TargetLocation { + return info.AbsoluteDocumentPath + } + if len(i.currentDocumentStack) > 0 { + current := i.currentDocumentStack[len(i.currentDocumentStack)-1] + if current != i.resolveOpts.TargetLocation { + return current + } + } + return "" + } + + if len(i.currentDocumentStack) > 0 { + current := i.currentDocumentStack[len(i.currentDocumentStack)-1] + if current != i.resolveOpts.TargetLocation { + return current + } + return "" + } + + return "" +} + +func (i *Index) applyDocumentLocation(errs []error, documentPath string) []error { + if len(errs) == 0 || documentPath == "" { + return errs + } + + updated := make([]error, 0, len(errs)) + for _, err := range errs { + if err == nil { + continue + } + var vErr *validation.Error + if errors.As(err, &vErr) && vErr != nil { + if vErr.DocumentLocation == "" { + vErr.DocumentLocation = documentPath + } + updated = append(updated, vErr) + continue + } + updated = append(updated, err) + } + + return updated +} + +func (i *Index) referenceValidationOptions() []validation.Option { + if i == nil || i.Doc == nil { + return nil + } + + return []validation.Option{ + validation.WithContextObject(i.Doc), + validation.WithContextObject(&oas3.ParentDocumentVersion{OpenAPI: 
pointer.From(i.Doc.OpenAPI)}), + } +} + +// getCurrentResolveOptions returns ResolveOptions appropriate for the current document context. +// CRITICAL FIX for multi-file specs: When processing schemas/references in external files, +// this ensures they resolve internal references against the external file's YAML structure, +// not the main document. Without this, references like #/components/schemas/... in external +// files would fail with "source is nil" errors. +func (i *Index) getCurrentResolveOptions() references.ResolveOptions { + resolveOpts := i.resolveOpts + + if len(i.currentDocumentStack) > 0 { + currentDoc := i.currentDocumentStack[len(i.currentDocumentStack)-1] + // If we're in a different document than the original target, use that document's context + if currentDoc != i.resolveOpts.TargetLocation { + // Check if we have a cached parsed YAML node for this external document + if cachedDoc, ok := i.resolveOpts.RootDocument.GetCachedExternalDocument(currentDoc); ok { + // Use the cached YAML node as the TargetDocument for this resolution + // This allows internal references to navigate through the external file's structure + resolveOpts.TargetDocument = cachedDoc + resolveOpts.TargetLocation = currentDoc + } + } + } + + return resolveOpts +} + +func documentPathForReference[T any, V interfaces.Validator[T], C marshaller.CoreModeler](i *Index, ref *Reference[T, V, C]) string { + if i == nil || ref == nil { + return "" + } + + if info := ref.GetReferenceResolutionInfo(); info != nil { + if info.AbsoluteDocumentPath != i.resolveOpts.TargetLocation { + return info.AbsoluteDocumentPath + } + return "" + } + + return "" +} + +func resolveAndValidateReference[T any, V interfaces.Validator[T], C marshaller.CoreModeler](i *Index, ctx context.Context, ref *Reference[T, V, C]) { + if i == nil || ref == nil { + return + } + + if _, err := ref.Resolve(ctx, i.getCurrentResolveOptions()); err != nil { + i.resolutionErrs = append(i.resolutionErrs, validation.NewValidationErrorWithDocumentLocation( + validation.SeverityError, + "resolution-openapi-reference", + err, + nil, + documentPathForReference(i, ref), + )) + return + } + + obj := ref.GetObject() + if obj == nil || i.Doc == nil { + return + } + + var validator V + if v, ok := any(obj).(V); ok { + validator = v + validationErrs := validator.Validate(ctx, i.referenceValidationOptions()...) + i.validationErrs = append(i.validationErrs, i.applyDocumentLocation(validationErrs, documentPathForReference(i, ref))...) + } +} + +// isTopLevelComponent checks if the location represents a top-level component definition. +// A top-level component has the path: /components/{componentType}/{name} +func isTopLevelComponent(loc Locations, componentType string) bool { + // Location should be exactly: /components/{componentType}/{name} + // Length 2: [components context, {componentType}/{name} context] + if len(loc) != 2 { + return false + } + + // First element: ParentField = "components" + if loc[0].ParentField != "components" { + return false + } + + // Second element: ParentField = componentType, ParentKey = name + if loc[1].ParentField != componentType || loc[1].ParentKey == nil { + return false + } + + return true +} + +// getParentSchema extracts the parent schema from a LocationContext using the ParentMatchFunc. 
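+// It unwraps references to their resolved schema and returns nil for boolean schemas or
+// when the matched parent is not a schema.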
+func getParentSchema(loc LocationContext) *oas3.Schema { + var parentSchema *oas3.Schema + + // Use the ParentMatchFunc to capture the parent node + _ = loc.ParentMatchFunc(Matcher{ + Schema: func(s *oas3.JSONSchemaReferenceable) error { + if s == nil { + return nil + } + if !s.IsBool() && !s.IsReference() { + parentSchema = s.GetSchema() + } else if s.IsReference() { + // For references, get the resolved schema + if resolved := s.GetResolvedSchema(); resolved != nil && !resolved.IsBool() { + parentSchema = resolved.GetSchema() + } + } + return nil + }, + }) + + return parentSchema +} + +// buildPathSegment creates a CircularPathSegment with constraint info from the parent schema. +func buildPathSegment(loc LocationContext) CircularPathSegment { + segment := CircularPathSegment{ + Field: loc.ParentField, + } + + if loc.ParentKey != nil { + segment.PropertyName = *loc.ParentKey + } + if loc.ParentIndex != nil { + segment.BranchIndex = *loc.ParentIndex + } + + // Get the parent schema for this segment + var parentSchemaRef *oas3.JSONSchemaReferenceable + _ = loc.ParentMatchFunc(Matcher{ + Schema: func(s *oas3.JSONSchemaReferenceable) error { + parentSchemaRef = s + return nil + }, + }) + segment.ParentSchema = parentSchemaRef + + parent := getParentSchema(loc) + if parent == nil { + return segment + } + + // Check if parent schema is nullable (termination point) + segment.IsNullable = isNullable(parent) + + // Extract constraints based on field type + switch loc.ParentField { + case "properties": + if loc.ParentKey != nil { + // Check if property is required + for _, req := range parent.GetRequired() { + if req == *loc.ParentKey { + segment.IsRequired = true + break + } + } + } + case "items": + segment.ArrayMinItems = parent.GetMinItems() // Returns 0 if nil (default) + case "additionalProperties": + if minProps := parent.GetMinProperties(); minProps != nil { + segment.MinProperties = *minProps + } + // Default is 0 (empty object allowed) + } + + return segment +} + +// isNullable checks if a schema allows null values (termination point for circular refs). +func isNullable(schema *oas3.Schema) bool { + if schema == nil { + return false + } + + // OAS 3.0 style: nullable: true + if schema.GetNullable() { + return true + } + + // OAS 3.1 style: type includes "null" + types := schema.GetType() + for _, t := range types { + if t == oas3.SchemaTypeNull { + return true + } + } + + return false +} + +// classifyCircularPath determines if the path allows termination. +// Returns (classification, polymorphicInfo) where polymorphicInfo is set if pending. 
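+// A path is classified as valid when any segment can break the cycle: a nullable schema, an
+// optional property, an items keyword with minItems 0, or additionalProperties with
+// minProperties 0. oneOf/anyOf segments are returned as pending so that all branches can be
+// evaluated together later in finalizePolymorphicCirculars.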
+func (i *Index) classifyCircularPath(schema *oas3.JSONSchemaReferenceable, segments []CircularPathSegment, loc Locations) (CircularClassification, *PolymorphicCircularRef) { + // Check if any segment allows termination + for segIdx, segment := range segments { + // Check nullable at any point in the path + if segment.IsNullable { + return CircularValid, nil + } + + switch segment.Field { + case "properties": + // Optional property = valid termination + if !segment.IsRequired { + return CircularValid, nil + } + + case "items": + // Empty array terminates if minItems == 0 (default) + if segment.ArrayMinItems == 0 { + return CircularValid, nil + } + + case "additionalProperties": + // Empty object terminates if minProperties == 0 (default) + if segment.MinProperties == 0 { + return CircularValid, nil + } + + case "oneOf", "anyOf": + // Mark for post-processing - need to check ALL branches + // Create polymorphic tracking info + parentLocLen := len(loc) - len(segments) + segIdx + if parentLocLen < 0 { + parentLocLen = 0 + } + parentLoc := copyLocations(loc[:parentLocLen]) + + // Use the ParentSchema from the segment (which has the oneOf/anyOf) + // instead of the schema parameter (which is the $ref) + parentSchema := segment.ParentSchema + if parentSchema == nil { + parentSchema = schema // Fallback to old behavior if ParentSchema not set + } + + totalBranches := countPolymorphicBranches(parentSchema, segment.Field) + polymorphicInfo := &PolymorphicCircularRef{ + ParentSchema: parentSchema, + ParentLocation: parentLoc, + Field: segment.Field, + BranchResults: make(map[int]CircularClassification), + TotalBranches: totalBranches, + } + // Record this branch as potentially invalid (recurses) + polymorphicInfo.BranchResults[segment.BranchIndex] = CircularInvalid + return CircularPending, polymorphicInfo + + case "allOf": + // For allOf, if ANY branch has invalid circular ref, the whole thing is invalid + // because ALL branches must be satisfied + // Check if rest of path allows termination + remaining := segments[segIdx+1:] + if !pathAllowsTermination(remaining) { + return CircularInvalid, nil + } + } + } + + // No termination point found in non-polymorphic path + return CircularInvalid, nil +} + +// countPolymorphicBranches counts the number of branches in a oneOf/anyOf schema. +func countPolymorphicBranches(schema *oas3.JSONSchemaReferenceable, field string) int { + if schema == nil || schema.IsBool() { + return 0 + } + + innerSchema := schema.GetSchema() + if innerSchema == nil { + return 0 + } + + switch field { + case "oneOf": + if oneOf := innerSchema.GetOneOf(); oneOf != nil { + return len(oneOf) + } + case "anyOf": + if anyOf := innerSchema.GetAnyOf(); anyOf != nil { + return len(anyOf) + } + case "allOf": + if allOf := innerSchema.GetAllOf(); allOf != nil { + return len(allOf) + } + } + + return 0 +} + +// pathAllowsTermination checks if any segment in the remaining path allows termination. 
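+// Unlike classifyCircularPath, oneOf/anyOf segments are treated optimistically here, assuming
+// at least one branch may provide a base case.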
+func pathAllowsTermination(segments []CircularPathSegment) bool { + for _, seg := range segments { + if seg.IsNullable { + return true + } + + switch seg.Field { + case "properties": + if !seg.IsRequired { + return true + } + case "items": + if seg.ArrayMinItems == 0 { + return true + } + case "additionalProperties": + if seg.MinProperties == 0 { + return true + } + case "oneOf", "anyOf": + // Assume polymorphic branches might provide termination + return true + } + } + return false +} + +func joinReferenceChainWithArrows(chain []string) string { + if len(chain) == 0 { + return "" + } + if len(chain) == 1 { + return chain[0] + } + + var result strings.Builder + result.WriteString(chain[0]) + for i := 1; i < len(chain); i++ { + result.WriteString(" -> ") + result.WriteString(chain[i]) + } + return result.String() +} + +// recordPolymorphicBranch records a polymorphic branch for post-processing. +func (i *Index) recordPolymorphicBranch(info *PolymorphicCircularRef) { + if info == nil { + return + } + i.polymorphicRefs = append(i.polymorphicRefs, info) +} + +// finalizePolymorphicCirculars is called after all walking completes. +// It analyzes polymorphic schemas to determine if ALL branches recurse. +func (i *Index) finalizePolymorphicCirculars() { + // Group by parent schema + grouped := make(map[*oas3.JSONSchemaReferenceable]*PolymorphicCircularRef) + + for _, ref := range i.polymorphicRefs { + existing, found := grouped[ref.ParentSchema] + if found { + // Merge branch results + for idx, classification := range ref.BranchResults { + existing.BranchResults[idx] = classification + } + } else { + grouped[ref.ParentSchema] = ref + } + } + + // Analyze each polymorphic schema + for _, ref := range grouped { + switch ref.Field { + case "oneOf", "anyOf": + // Invalid only if ALL branches have invalid circular refs + allInvalid := true + for branchIdx := 0; branchIdx < ref.TotalBranches; branchIdx++ { + classification, found := ref.BranchResults[branchIdx] + if !found || classification != CircularInvalid { + // This branch either doesn't recurse or has valid termination + allInvalid = false + break + } + } + + if allInvalid && ref.TotalBranches > 0 { + i.invalidCircularRefs++ + i.circularErrs = append(i.circularErrs, validation.NewValidationErrorWithDocumentLocation( + validation.SeverityError, + "circular-reference-invalid", + fmt.Errorf("non-terminating circular reference: all %s branches recurse with no base case", ref.Field), + getSchemaErrorNode(ref.ParentSchema), + i.documentPathForSchema(ref.ParentSchema), + )) + } else if !allInvalid && ref.TotalBranches > 0 { + // At least one branch allows termination - this is a valid circular ref + i.validCircularRefs++ + } + + case "allOf": + // Invalid if ANY branch has invalid circular ref (already handled inline in classifyCircularPath) + // This case is here for completeness if we need cross-branch tracking + } + } +} + +// copyLocations creates a copy of the Locations slice. +func copyLocations(loc Locations) Locations { + if loc == nil { + return nil + } + result := make(Locations, len(loc)) + copy(result, loc) + return result +} + +// getRefTarget extracts the absolute $ref target string from a schema reference. +// Uses the resolved AbsoluteReference from resolution cache for normalization. 
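+// It panics when called on an unresolved reference, since the absolute target is only known
+// after resolution, and returns an empty string for non-references or missing resolution info.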
+func getRefTarget(schema *oas3.JSONSchemaReferenceable) string { + if schema == nil || !schema.IsReference() { + return "" + } + + if !schema.IsResolved() { + panic("getRefTarget called on unresolved schema reference") + } + + info := schema.GetReferenceResolutionInfo() + if info == nil { + return "" + } + + return info.AbsoluteReference.String() +} + +// getSchemaErrorNode returns an appropriate YAML node for error reporting. +func getSchemaErrorNode(schema *oas3.JSONSchemaReferenceable) *yaml.Node { + if schema == nil { + return nil + } + if schema.IsBool() { + return nil + } + innerSchema := schema.GetSchema() + if innerSchema == nil { + return nil + } + // Try to get the $ref node if it's a reference + if core := innerSchema.GetCore(); core != nil && core.Ref.Present { + return core.Ref.GetKeyNodeOrRoot(innerSchema.GetRootNode()) + } + return innerSchema.GetRootNode() +} diff --git a/openapi/index_external_test.go b/openapi/index_external_test.go new file mode 100644 index 00000000..75a6f762 --- /dev/null +++ b/openapi/index_external_test.go @@ -0,0 +1,903 @@ +package openapi_test + +import ( + "errors" + "fmt" + "io" + "io/fs" + "net/http" + "strings" + "testing" + + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/references" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +// MockVirtualFS implements system.VirtualFS for testing external file references +type MockVirtualFS struct { + files map[string]string +} + +func NewMockVirtualFS() *MockVirtualFS { + return &MockVirtualFS{ + files: make(map[string]string), + } +} + +func (m *MockVirtualFS) AddFile(path, content string) { + m.files[path] = content +} + +func (m *MockVirtualFS) Open(name string) (fs.File, error) { + content, exists := m.files[name] + if !exists { + return nil, fmt.Errorf("file not found: %s", name) + } + return &MockFile{content: content}, nil +} + +// MockFile implements fs.File +type MockFile struct { + content string + pos int +} + +func (m *MockFile) Read(p []byte) (n int, err error) { + if m.pos >= len(m.content) { + return 0, io.EOF + } + n = copy(p, m.content[m.pos:]) + m.pos += n + return n, nil +} + +func (m *MockFile) Close() error { + return nil +} + +func (m *MockFile) Stat() (fs.FileInfo, error) { + return nil, errors.New("not implemented") +} + +// MockHTTPClient implements system.Client for testing external HTTP references +type MockHTTPClient struct { + responses map[string]string +} + +func NewMockHTTPClient() *MockHTTPClient { + return &MockHTTPClient{ + responses: make(map[string]string), + } +} + +func (m *MockHTTPClient) AddResponse(url, body string) { + m.responses[url] = body +} + +func (m *MockHTTPClient) Do(req *http.Request) (*http.Response, error) { + url := req.URL.String() + body, exists := m.responses[url] + if !exists { + return nil, fmt.Errorf("no response configured for URL: %s", url) + } + return &http.Response{ + StatusCode: http.StatusOK, + Body: io.NopCloser(strings.NewReader(body)), + Header: make(http.Header), + }, nil +} + +// setupComprehensiveExternalRefs creates a complete test environment with: +// - File-based external references +// - HTTP-based external references +// - Valid and invalid circular references +// - Referenced and unreferenced schemas +func setupComprehensiveExternalRefs(t *testing.T) (*openapi.Index, *MockVirtualFS, *MockHTTPClient) { + t.Helper() + ctx := t.Context() + + vfs := NewMockVirtualFS() + httpClient := NewMockHTTPClient() + + // Expected index counts (verified by tests): + // 
ExternalDocumentation: 2 (main doc + users tag) + // Tags: 2 (users, products) + // Servers: 2 (production, staging) + // ServerVariables: 1 (version variable) + // BooleanSchemas: 2 (true, false from additionalProperties) + // InlineSchemas: 10 (9 from external + 1 from LocalSchema.id property) + // ComponentSchemas: 2 (LocalSchema, AnotherLocal) + // ExternalSchemas: 6 (UserResponse, User, Address, Product, Category, TreeNode) + // SchemaReferences: 9 (all $ref pointers including circulars) + // CircularErrors: 1 (Product<->Category invalid circular) + + // TODO: PathItems indexing (currently marked TODO in buildIndex) + + // Main API document + vfs.AddFile("/api/openapi.yaml", ` +openapi: "3.1.0" +info: + title: Comprehensive API + version: 1.0.0 +externalDocs: + url: https://docs.example.com + description: Main API Documentation +tags: + - name: users + description: User operations + externalDocs: + url: https://docs.example.com/users + - name: products + description: Product operations +servers: + - url: https://api.example.com/{version} + description: Production server + variables: + version: + default: v1 + enum: [v1, v2] + - url: https://staging.example.com + description: Staging server +paths: + /users: + get: + operationId: getUsers + responses: + "200": + description: Users response + content: + application/json: + schema: + $ref: 'schemas/user.yaml#/UserResponse' + /products: + get: + operationId: getProducts + responses: + "200": + description: Products response + content: + application/json: + schema: + $ref: 'https://schemas.example.com/product.yaml#/Product' + /trees: + get: + operationId: getTrees + responses: + "200": + description: Trees response + content: + application/json: + schema: + $ref: 'schemas/tree.yaml#/TreeNode' +components: + schemas: + LocalSchema: + type: object + additionalProperties: true + properties: + id: + type: integer + AnotherLocal: + type: object + additionalProperties: false +`) + + // External file: User schemas with valid circular (optional property) + vfs.AddFile("/api/schemas/user.yaml", ` +UserResponse: + type: object + properties: + user: + $ref: '#/User' +User: + type: object + required: [id, name] + properties: + id: + type: integer + name: + type: string + address: + $ref: '#/Address' +Address: + type: object + properties: + street: + type: string + user: + $ref: '#/User' +# Unreferenced schema in external file +UnreferencedUser: + type: object + properties: + neverUsed: + type: string +`) + + // External file: Tree with valid self-reference (array with minItems=0) + vfs.AddFile("/api/schemas/tree.yaml", ` +TreeNode: + type: object + properties: + value: + type: string + children: + type: array + items: + $ref: '#/TreeNode' +# Another unreferenced schema +UnusedTreeType: + type: object + properties: + unusedProp: + type: boolean +`) + + // Unreferenced file - nothing from here should appear in index + vfs.AddFile("/api/schemas/completely-unreferenced.yaml", ` +TotallyUnused: + type: object + properties: + shouldNotAppear: + type: string +`) + + // External HTTP: Product with invalid circular (required + minItems) + httpClient.AddResponse("https://schemas.example.com/product.yaml", ` +Product: + type: object + required: [id, category] + properties: + id: + type: integer + name: + type: string + category: + $ref: '#/Category' +Category: + type: object + required: [products] + properties: + name: + type: string + products: + type: array + minItems: 1 + items: + $ref: '#/Product' +# Unreferenced in HTTP document +UnreferencedCategory: + 
type: object + properties: + alsoNeverUsed: + type: integer +`) + + // Unmarshal and build index + doc, validationErrs, err := openapi.Unmarshal(ctx, strings.NewReader(vfs.files["/api/openapi.yaml"])) + require.NoError(t, err) + require.Empty(t, validationErrs) + + resolveOpts := references.ResolveOptions{ + TargetLocation: "/api/openapi.yaml", + RootDocument: doc, + TargetDocument: doc, + VirtualFS: vfs, + HTTPClient: httpClient, + } + idx := openapi.BuildIndex(ctx, doc, resolveOpts) + require.NotNil(t, idx) + + return idx, vfs, httpClient +} + +func TestBuildIndex_ExternalReferences_Comprehensive(t *testing.T) { + t.Parallel() + + idx, _, _ := setupComprehensiveExternalRefs(t) + + tests := []struct { + name string + assertion func(t *testing.T, idx *openapi.Index) + }{ + { + name: "external schemas count correct", + assertion: func(t *testing.T, idx *openapi.Index) { + t.Helper() + // External schemas: UserResponse, User, Address, Product, Category, TreeNode (6) + assert.Len(t, idx.ExternalSchemas, 6, "should have exactly 6 external schemas") + }, + }, + { + name: "external documentation count correct", + assertion: func(t *testing.T, idx *openapi.Index) { + t.Helper() + // ExternalDocs: main doc + users tag + assert.Len(t, idx.ExternalDocumentation, 2, "should have exactly 2 external documentation") + }, + }, + { + name: "tags count correct", + assertion: func(t *testing.T, idx *openapi.Index) { + t.Helper() + // Tags: users, products + assert.Len(t, idx.Tags, 2, "should have exactly 2 tags") + }, + }, + { + name: "servers count correct", + assertion: func(t *testing.T, idx *openapi.Index) { + t.Helper() + // Servers: production, staging + assert.Len(t, idx.Servers, 2, "should have exactly 2 servers") + }, + }, + { + name: "server variables count correct", + assertion: func(t *testing.T, idx *openapi.Index) { + t.Helper() + // ServerVariables: version + assert.Len(t, idx.ServerVariables, 1, "should have exactly 1 server variable") + }, + }, + { + name: "boolean schemas count correct", + assertion: func(t *testing.T, idx *openapi.Index) { + t.Helper() + // BooleanSchemas: true, false from additionalProperties + assert.Len(t, idx.BooleanSchemas, 2, "should have exactly 2 boolean schemas") + }, + }, + { + name: "component schemas count correct", + assertion: func(t *testing.T, idx *openapi.Index) { + t.Helper() + // ComponentSchemas: LocalSchema, AnotherLocal + assert.Len(t, idx.ComponentSchemas, 2, "should have exactly 2 component schemas") + }, + }, + { + name: "schema references count correct", + assertion: func(t *testing.T, idx *openapi.Index) { + t.Helper() + // Schema references: 9 $ref pointers total + assert.Len(t, idx.SchemaReferences, 9, "should have exactly 9 schema references") + }, + }, + { + name: "inline property schemas count correct", + assertion: func(t *testing.T, idx *openapi.Index) { + t.Helper() + // Inline schemas: 9 from external + 1 from LocalSchema.id + assert.Len(t, idx.InlineSchemas, 10, "should have exactly 10 inline schemas") + }, + }, + { + name: "inline path items count correct", + assertion: func(t *testing.T, idx *openapi.Index) { + t.Helper() + // InlinePathItems: /users, /products, /trees + assert.Len(t, idx.InlinePathItems, 3, "should have exactly 3 inline path items") + }, + }, + { + name: "operations count correct", + assertion: func(t *testing.T, idx *openapi.Index) { + t.Helper() + // Operations: getUsers, getProducts, getTrees + assert.Len(t, idx.Operations, 3, "should have exactly 3 operations") + }, + }, + { + name: "inline responses count 
correct", + assertion: func(t *testing.T, idx *openapi.Index) { + t.Helper() + // InlineResponses: 200 response for each operation + assert.Len(t, idx.InlineResponses, 3, "should have exactly 3 inline responses") + }, + }, + { + name: "circular error count correct", + assertion: func(t *testing.T, idx *openapi.Index) { + t.Helper() + // Should detect 1 invalid circular: Product<->Category + assert.Len(t, idx.GetCircularReferenceErrors(), 1, "should have exactly 1 circular error") + }, + }, + { + name: "no errors for valid references", + assertion: func(t *testing.T, idx *openapi.Index) { + t.Helper() + // Should have no resolution errors + assert.Empty(t, idx.GetResolutionErrors(), "should have no resolution errors") + assert.Empty(t, idx.GetValidationErrors(), "should have no validation errors") + }, + }, + { + name: "unreferenced schemas in external files not indexed", + assertion: func(t *testing.T, idx *openapi.Index) { + t.Helper() + for _, schema := range idx.GetAllSchemas() { + loc := string(schema.Location.ToJSONPointer()) + assert.NotContains(t, loc, "UnreferencedUser", "UnreferencedUser should not be indexed") + assert.NotContains(t, loc, "UnusedTreeType", "UnusedTreeType should not be indexed") + assert.NotContains(t, loc, "TotallyUnused", "TotallyUnused should not be indexed") + assert.NotContains(t, loc, "UnreferencedCategory", "UnreferencedCategory should not be indexed") + } + }, + }, + { + name: "valid circular reference via optional property", + assertion: func(t *testing.T, idx *openapi.Index) { + t.Helper() + circularErrs := idx.GetCircularReferenceErrors() + for _, err := range circularErrs { + errStr := err.Error() + // User<->Address should not have circular error (address is optional) + if strings.Contains(errStr, "User") && strings.Contains(errStr, "Address") { + t.Errorf("User<->Address circular via optional property should be valid, got error: %v", err) + } + } + }, + }, + { + name: "valid circular reference via array minItems=0", + assertion: func(t *testing.T, idx *openapi.Index) { + t.Helper() + circularErrs := idx.GetCircularReferenceErrors() + for _, err := range circularErrs { + errStr := err.Error() + // TreeNode self-reference should not have circular error + if strings.Contains(errStr, "TreeNode") { + t.Errorf("TreeNode self-reference via array should be valid, got error: %v", err) + } + } + }, + }, + { + name: "schema references tracked with locations", + assertion: func(t *testing.T, idx *openapi.Index) { + t.Helper() + assert.NotEmpty(t, idx.SchemaReferences, "should have schema references") + for _, ref := range idx.SchemaReferences { + assert.NotNil(t, ref.Location, "reference should have location") + assert.NotNil(t, ref.Node, "reference should have node") + } + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + tt.assertion(t, idx) + }) + } +} + +// TestExternalPathItemReferencesWithOperations verifies that: +// 1. External path item references are resolved correctly +// 2. Operations within external path items are indexed +// 3. 
Walk descends into resolved external path items +func TestExternalPathItemReferencesWithOperations(t *testing.T) { + t.Parallel() + ctx := t.Context() + + // Create external file with path items containing operations + externalSpec := ` +a: + get: + operationId: op-a + responses: + '200': + description: OK + post: + operationId: op-a-post + responses: + '201': + description: Created +b: + get: + operationId: op-b + responses: + '200': + description: OK +` + + // Create main spec that references the external path items + mainSpec := ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /a: + $ref: "./external.yaml#/a" + /b: + $ref: "./external.yaml#/b" +` + + // Parse main document + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(mainSpec)) + require.NoError(t, err) + require.NotNil(t, doc) + + // Setup virtual filesystem with external file + // Use absolute path and matching reference in spec + vfs := NewMockVirtualFS() + vfs.AddFile("/test/external.yaml", externalSpec) + + // Build index with external reference resolution + resolveOpts := references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "/test/main.yaml", // Absolute path so relative refs resolve correctly + VirtualFS: vfs, + } + + idx := openapi.BuildIndex(ctx, doc, resolveOpts) + require.NotNil(t, idx) + + // Verify external path item references were resolved + assert.Len(t, idx.PathItemReferences, 2, "should have 2 external path item references") + + // Verify operations from external path items are indexed + assert.Len(t, idx.Operations, 3, "should have 3 operations (2 from /a, 1 from /b)") + + // Verify operation IDs are correct + operationIDs := make([]string, len(idx.Operations)) + for i, op := range idx.Operations { + operationIDs[i] = op.Node.GetOperationID() + } + assert.Contains(t, operationIDs, "op-a", "should contain op-a") + assert.Contains(t, operationIDs, "op-a-post", "should contain op-a-post") + assert.Contains(t, operationIDs, "op-b", "should contain op-b") + + // Verify no resolution errors + assert.Empty(t, idx.GetResolutionErrors(), "should have no resolution errors") +} + +// TestExternalReferencedComponentsWithinOperations verifies that: +// 1. External parameter, requestBody, response, header, and example references are resolved +// 2. Walk descends into resolved external references within operations +// 3. 
Referenced components are properly indexed +func TestExternalReferencedComponentsWithinOperations(t *testing.T) { + t.Parallel() + ctx := t.Context() + + // Create external file with reusable components + componentsSpec := ` +UserParam: + name: userId + in: path + required: true + schema: + type: string + +CreateRequest: + required: true + content: + application/json: + schema: + type: object + properties: + name: + type: string + +SuccessResponse: + description: Success + headers: + X-Request-ID: + description: Request ID header + schema: + type: string + content: + application/json: + schema: + type: object + examples: + example1: + value: + status: success +` + + // Create main spec with operations that reference external components + mainSpec := ` +openapi: 3.1.0 +info: + title: Test API + version: 1.0.0 +paths: + /users/{userId}: + parameters: + - $ref: "./components.yaml#/UserParam" + post: + operationId: createUser + requestBody: + $ref: "./components.yaml#/CreateRequest" + responses: + '200': + $ref: "./components.yaml#/SuccessResponse" +` + + // Parse main document + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(mainSpec)) + require.NoError(t, err) + require.NotNil(t, doc) + + // Setup virtual filesystem + vfs := NewMockVirtualFS() + vfs.AddFile("/test/components.yaml", componentsSpec) + + // Build index + resolveOpts := references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "/test/main.yaml", + VirtualFS: vfs, + } + + idx := openapi.BuildIndex(ctx, doc, resolveOpts) + require.NotNil(t, idx) + + // Verify operations were indexed + assert.Len(t, idx.Operations, 1, "should have 1 operation") + + // Verify external parameter reference was resolved and indexed + assert.NotEmpty(t, idx.ParameterReferences, "should have parameter references") + + // Verify external request body reference was resolved + assert.NotEmpty(t, idx.RequestBodyReferences, "should have request body references") + + // Verify external response reference was resolved + assert.NotEmpty(t, idx.ResponseReferences, "should have response references") + + // Verify headers within resolved response are indexed (inline headers, not references) + assert.NotEmpty(t, idx.InlineHeaders, "should have inline headers from resolved response") + + // Verify examples within resolved response are indexed (inline examples, not references) + assert.NotEmpty(t, idx.InlineExamples, "should have inline examples from resolved response") + + // Verify no resolution errors + assert.Empty(t, idx.GetResolutionErrors(), "should have no resolution errors") +} +func TestBuildIndex_ExternalReferencesForAllTypes_Success(t *testing.T) { + t.Parallel() + ctx := t.Context() + + vfs := NewMockVirtualFS() + + // Main API document with references to external components + vfs.AddFile("/api/openapi.yaml", ` +openapi: "3.1.0" +info: + title: External Components Test API + version: 1.0.0 +paths: + /users: + get: + operationId: getUsers + parameters: + - $ref: 'components.yaml#/PageSize' + responses: + "200": + $ref: 'components.yaml#/UsersResponse' + callbacks: + onUpdate: + $ref: 'components.yaml#/UpdateCallback' + post: + operationId: createUser + requestBody: + $ref: 'components.yaml#/UserRequestBody' + responses: + "201": + description: Created +`) + + // External components file with all types at top level + vfs.AddFile("/api/components.yaml", ` +PageSize: + name: pageSize + in: query + schema: + type: integer + +UsersResponse: + description: Users response + headers: + X-Total-Count: + $ref: 
'#/TotalCountHeader' + content: + application/json: + schema: + type: array + items: + type: object + examples: + singleUser: + $ref: '#/SingleUserExample' + links: + GetUserById: + $ref: '#/UserLink' + +UserRequestBody: + description: User request body + content: + application/json: + schema: + type: object + +TotalCountHeader: + description: Total count header + schema: + type: integer + +SingleUserExample: + value: + id: 1 + name: John Doe + +UserLink: + operationId: getUsers + description: Link to get users + +UpdateCallback: + '{$request.body#/callbackUrl}': + post: + requestBody: + description: Update notification + content: + application/json: + schema: + type: object + responses: + "200": + description: OK +`) + + // Unmarshal and build index + doc, validationErrs, err := openapi.Unmarshal(ctx, strings.NewReader(vfs.files["/api/openapi.yaml"])) + require.NoError(t, err) + require.Empty(t, validationErrs) + + resolveOpts := references.ResolveOptions{ + TargetLocation: "/api/openapi.yaml", + RootDocument: doc, + TargetDocument: doc, + VirtualFS: vfs, + } + idx := openapi.BuildIndex(ctx, doc, resolveOpts) + require.NotNil(t, idx) + + // Test External Parameters + assert.Len(t, idx.ExternalParameters, 1, "should have 1 external parameter (PageSize)") + assert.Len(t, idx.ParameterReferences, 1, "should have 1 parameter reference") + assert.Empty(t, idx.ComponentParameters, "should have 0 component parameters (PageSize is external)") + assert.Empty(t, idx.InlineParameters, "should have 0 inline parameters") + + // Test External Responses + assert.Len(t, idx.ExternalResponses, 1, "should have 1 external response (UsersResponse)") + assert.Len(t, idx.ResponseReferences, 1, "should have 1 response reference") + assert.Empty(t, idx.ComponentResponses, "should have 0 component responses (UsersResponse is external)") + assert.Len(t, idx.InlineResponses, 2, "should have 2 inline responses (201 Created + default from callback)") + + // Test External RequestBodies + assert.Len(t, idx.ExternalRequestBodies, 1, "should have 1 external request body (UserRequestBody)") + assert.Len(t, idx.RequestBodyReferences, 1, "should have 1 request body reference") + assert.Empty(t, idx.ComponentRequestBodies, "should have 0 component request bodies") + assert.Len(t, idx.InlineRequestBodies, 1, "should have 1 inline request body (from callback)") + + // Test External Headers + // FIXED: Header references inside external files CAN now be resolved! + assert.Len(t, idx.ExternalHeaders, 1, "should have 1 external header (TotalCountHeader)") + assert.Len(t, idx.HeaderReferences, 1, "should have 1 header reference") + assert.Empty(t, idx.ComponentHeaders, "should have 0 component headers") + assert.Empty(t, idx.InlineHeaders, "should have 0 inline headers") + + // Test External Examples + // FIXED: Example references inside external files CAN now be resolved! + assert.Len(t, idx.ExternalExamples, 1, "should have 1 external example (SingleUserExample)") + assert.Len(t, idx.ExampleReferences, 1, "should have 1 example reference") + assert.Empty(t, idx.ComponentExamples, "should have 0 component examples") + assert.Empty(t, idx.InlineExamples, "should have 0 inline examples") + + // Test External Links + // FIXED: Link references inside external files CAN now be resolved! 
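+	// UserLink is defined at the top level of components.yaml and referenced via the internal
+	// pointer '#/UserLink', so it is classified as external rather than inline.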
+ assert.Len(t, idx.ExternalLinks, 1, "should have 1 external link (UserLink)") + assert.Len(t, idx.LinkReferences, 1, "should have 1 link reference") + assert.Empty(t, idx.ComponentLinks, "should have 0 component links") + assert.Empty(t, idx.InlineLinks, "should have 0 inline links") + + // Test External Callbacks + assert.Len(t, idx.ExternalCallbacks, 1, "should have 1 external callback (UpdateCallback)") + assert.Len(t, idx.CallbackReferences, 1, "should have 1 callback reference") + assert.Empty(t, idx.ComponentCallbacks, "should have 0 component callbacks") + assert.Empty(t, idx.InlineCallbacks, "should have 0 inline callbacks") + + // Test GetAll* methods include external items (but not references) + allParameters := idx.GetAllParameters() + assert.Len(t, allParameters, 1, "GetAllParameters should return external (not reference)") + + allResponses := idx.GetAllResponses() + assert.Len(t, allResponses, 3, "GetAllResponses should return external + 2 inline (not reference)") + + allRequestBodies := idx.GetAllRequestBodies() + assert.Len(t, allRequestBodies, 2, "GetAllRequestBodies should return external + inline (not reference)") + + allHeaders := idx.GetAllHeaders() + assert.Len(t, allHeaders, 1, "GetAllHeaders should have 1 (TotalCountHeader - internal refs now work!)") + + allExamples := idx.GetAllExamples() + assert.Len(t, allExamples, 1, "GetAllExamples should have 1 (SingleUserExample - internal refs now work!)") + + allLinks := idx.GetAllLinks() + assert.Len(t, allLinks, 1, "GetAllLinks should have 1 (UserLink - internal refs now work!)") + + allCallbacks := idx.GetAllCallbacks() + assert.Len(t, allCallbacks, 1, "GetAllCallbacks should return external (not reference)") + + // FIXED: No more resolution errors! Internal references in external files now work correctly + assert.False(t, idx.HasErrors(), "should have no errors after multi-file reference fix") + assert.Empty(t, idx.GetResolutionErrors(), "should have 0 resolution errors (bug is fixed!)") +} +func TestDebugExternalParameter(t *testing.T) { + t.Parallel() + ctx := t.Context() + + vfs := NewMockVirtualFS() + + // Main document + vfs.AddFile("/api/main.yaml", ` +openapi: "3.1.0" +info: + title: Test + version: 1.0.0 +paths: + /test: + get: + operationId: test + parameters: + - $ref: 'external.yaml#/PageSize' + responses: + "200": + description: OK +`) + + // External parameter + vfs.AddFile("/api/external.yaml", ` +PageSize: + name: pageSize + in: query + schema: + type: integer +`) + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(vfs.files["/api/main.yaml"])) + require.NoError(t, err) + + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + TargetLocation: "/api/main.yaml", + RootDocument: doc, + TargetDocument: doc, + VirtualFS: vfs, + }) + + t.Logf("ComponentParameters: %d", len(idx.ComponentParameters)) + t.Logf("ExternalParameters: %d", len(idx.ExternalParameters)) + t.Logf("InlineParameters: %d", len(idx.InlineParameters)) + t.Logf("ParameterReferences: %d", len(idx.ParameterReferences)) + + if len(idx.ExternalParameters) > 0 { + t.Logf("External parameter location: %s", idx.ExternalParameters[0].Location.ToJSONPointer()) + } + if len(idx.InlineParameters) > 0 { + t.Logf("Inline parameter location: %s", idx.InlineParameters[0].Location.ToJSONPointer()) + } + + t.Logf("Errors: %v", idx.HasErrors()) + for _, err := range idx.GetAllErrors() { + t.Logf("Error: %v", err) + } +} diff --git a/openapi/index_node_operation_test.go b/openapi/index_node_operation_test.go new file mode 100644 index 
00000000..c539f661 --- /dev/null +++ b/openapi/index_node_operation_test.go @@ -0,0 +1,796 @@ +package openapi_test + +import ( + "strings" + "testing" + + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/references" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "gopkg.in/yaml.v3" +) + +func TestBuildIndex_NodeToOperations_WithOption_Success(t *testing.T) { + t.Parallel() + ctx := t.Context() + + yml := ` +openapi: "3.1.0" +info: + title: Test API + version: 1.0.0 +paths: + /users: + get: + operationId: getUsers + responses: + "200": + description: Success + content: + application/json: + schema: + $ref: '#/components/schemas/User' + post: + operationId: createUser + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/User' + responses: + "201": + description: Created +components: + schemas: + User: + type: object + properties: + id: + type: integer + name: + type: string +` + doc := unmarshalOpenAPI(t, ctx, yml) + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }, openapi.WithNodeOperationMap()) + + require.NotNil(t, idx, "index should not be nil") + assert.False(t, idx.HasErrors(), "should have no errors") + + // Map should be populated + require.NotNil(t, idx.NodeToOperations, "NodeToOperations map should be initialized") + assert.NotEmpty(t, idx.NodeToOperations, "NodeToOperations should have entries when enabled") + + // Should have operations indexed + assert.Len(t, idx.Operations, 2, "should have 2 operations") +} + +func TestBuildIndex_NodeToOperations_Disabled_Success(t *testing.T) { + t.Parallel() + ctx := t.Context() + + yml := ` +openapi: "3.1.0" +info: + title: Test API + version: 1.0.0 +paths: + /users: + get: + operationId: getUsers + responses: + "200": + description: Success +` + doc := unmarshalOpenAPI(t, ctx, yml) + // Don't pass WithNodeOperationMap() option + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + + require.NotNil(t, idx, "index should not be nil") + assert.False(t, idx.HasErrors(), "should have no errors") + + // Map should be nil when disabled (default) + assert.Nil(t, idx.NodeToOperations, "NodeToOperations should be nil when disabled") + + // GetNodeOperations should return nil for any node + assert.Nil(t, idx.GetNodeOperations(nil), "GetNodeOperations should return nil when disabled") +} + +func TestBuildIndex_NodeToOperations_SharedSchema_Success(t *testing.T) { + t.Parallel() + ctx := t.Context() + + yml := ` +openapi: "3.1.0" +info: + title: Test API + version: 1.0.0 +paths: + /users: + get: + operationId: getUsers + responses: + "200": + description: Success + content: + application/json: + schema: + $ref: '#/components/schemas/User' + /admin/users: + get: + operationId: getAdminUsers + responses: + "200": + description: Success + content: + application/json: + schema: + $ref: '#/components/schemas/User' +components: + schemas: + User: + type: object + properties: + id: + type: integer +` + doc := unmarshalOpenAPI(t, ctx, yml) + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }, openapi.WithNodeOperationMap()) + + require.NotNil(t, idx, "index should not be nil") + assert.False(t, idx.HasErrors(), "should have no errors") + + // Should have 2 operations + assert.Len(t, 
idx.Operations, 2, "should have 2 operations") + + // The User schema is referenced by both operations + // Get the User schema node + require.Len(t, idx.ComponentSchemas, 1, "should have 1 component schema") + userSchema := idx.ComponentSchemas[0] + require.NotNil(t, userSchema, "User schema should exist") + + userNode := userSchema.Node.GetRootNode() + require.NotNil(t, userNode, "User schema should have a root node") + + // Check that the User schema is mapped to both operations + ops := idx.GetNodeOperations(userNode) + assert.Len(t, ops, 2, "User schema should be referenced by 2 operations") +} + +func TestBuildIndex_NodeToOperations_Webhooks_Success(t *testing.T) { + t.Parallel() + ctx := t.Context() + + yml := ` +openapi: "3.1.0" +info: + title: Test API + version: 1.0.0 +paths: {} +webhooks: + newUser: + post: + operationId: userCreatedWebhook + requestBody: + content: + application/json: + schema: + type: object + properties: + userId: + type: string + responses: + "200": + description: OK +` + doc := unmarshalOpenAPI(t, ctx, yml) + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }, openapi.WithNodeOperationMap()) + + require.NotNil(t, idx, "index should not be nil") + assert.False(t, idx.HasErrors(), "should have no errors") + + // Should have 1 webhook operation + assert.Len(t, idx.Operations, 1, "should have 1 operation from webhook") + + // Check that nodes are mapped to the webhook operation + assert.NotEmpty(t, idx.NodeToOperations, "NodeToOperations should have entries") + + // Verify the operation location indicates it's a webhook + op := idx.Operations[0] + require.NotNil(t, op, "operation should exist") + assert.True(t, openapi.IsWebhookLocation(op.Location), "operation should be identified as webhook") +} + +func TestBuildIndex_GetNodeOperations_NilCases_Success(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + test func(t *testing.T) + }{ + { + name: "nil index", + test: func(t *testing.T) { + t.Helper() + var idx *openapi.Index + result := idx.GetNodeOperations(nil) + assert.Nil(t, result, "should return nil for nil index") + }, + }, + { + name: "nil node", + test: func(t *testing.T) { + t.Helper() + ctx := t.Context() + yml := ` +openapi: "3.1.0" +info: + title: Test + version: 1.0.0 +paths: {} +` + doc := unmarshalOpenAPI(t, ctx, yml) + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }, openapi.WithNodeOperationMap()) + + result := idx.GetNodeOperations(nil) + assert.Nil(t, result, "should return nil for nil node") + }, + }, + { + name: "node not found", + test: func(t *testing.T) { + t.Helper() + ctx := t.Context() + yml := ` +openapi: "3.1.0" +info: + title: Test + version: 1.0.0 +paths: {} +` + doc := unmarshalOpenAPI(t, ctx, yml) + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }, openapi.WithNodeOperationMap()) + + // Create a node that's not in the document + unknownNode := &yaml.Node{Kind: yaml.ScalarNode, Value: "unknown"} + result := idx.GetNodeOperations(unknownNode) + assert.Nil(t, result, "should return nil for unknown node") + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + tt.test(t) + }) + } +} + +func TestIsWebhookLocation_Success(t *testing.T) { + t.Parallel() + ctx := t.Context() + + tests := []struct { + 
name string + yml string + isWebhook bool + opId string + }{ + { + name: "path operation is not webhook", + yml: ` +openapi: "3.1.0" +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + operationId: getUsers + responses: + "200": + description: OK +`, + isWebhook: false, + opId: "getUsers", + }, + { + name: "webhook operation is webhook", + yml: ` +openapi: "3.1.0" +info: + title: Test + version: 1.0.0 +paths: {} +webhooks: + userCreated: + post: + operationId: userCreatedWebhook + responses: + "200": + description: OK +`, + isWebhook: true, + opId: "userCreatedWebhook", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + doc := unmarshalOpenAPI(t, ctx, tt.yml) + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + + require.NotNil(t, idx, "index should not be nil") + require.Len(t, idx.Operations, 1, "should have 1 operation") + + op := idx.Operations[0] + assert.Equal(t, tt.isWebhook, openapi.IsWebhookLocation(op.Location), + "IsWebhookLocation should return %v for %s", tt.isWebhook, tt.opId) + }) + } +} + +func TestExtractOperationInfo_Success(t *testing.T) { + t.Parallel() + ctx := t.Context() + + tests := []struct { + name string + yml string + expectedPath string + expectedMethod string + isWebhook bool + }{ + { + name: "path operation", + yml: ` +openapi: "3.1.0" +info: + title: Test + version: 1.0.0 +paths: + /users/{id}: + get: + operationId: getUser + responses: + "200": + description: OK +`, + expectedPath: "/users/{id}", + expectedMethod: "get", + isWebhook: false, + }, + { + name: "webhook operation", + yml: ` +openapi: "3.1.0" +info: + title: Test + version: 1.0.0 +paths: {} +webhooks: + orderCreated: + post: + operationId: orderWebhook + responses: + "200": + description: OK +`, + expectedPath: "orderCreated", + expectedMethod: "post", + isWebhook: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + doc := unmarshalOpenAPI(t, ctx, tt.yml) + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + + require.NotNil(t, idx, "index should not be nil") + require.Len(t, idx.Operations, 1, "should have 1 operation") + + op := idx.Operations[0] + path, method, isWebhook := openapi.ExtractOperationInfo(op.Location) + + assert.Equal(t, tt.expectedPath, path, "path should match") + assert.Equal(t, tt.expectedMethod, method, "method should match") + assert.Equal(t, tt.isWebhook, isWebhook, "isWebhook should match") + }) + } +} + +func TestBuildIndex_NodeToOperations_ComponentsNotMapped_Success(t *testing.T) { + t.Parallel() + ctx := t.Context() + + // This test verifies that schemas defined in components but not referenced + // by any operation are NOT in the NodeToOperations map + yml := ` +openapi: "3.1.0" +info: + title: Test API + version: 1.0.0 +paths: + /users: + get: + operationId: getUsers + responses: + "200": + description: Success + content: + application/json: + schema: + $ref: '#/components/schemas/User' +components: + schemas: + User: + type: object + UnusedSchema: + type: object + properties: + unused: + type: string +` + doc := unmarshalOpenAPI(t, ctx, yml) + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }, openapi.WithNodeOperationMap()) + + require.NotNil(t, idx, "index should not be nil") + 
assert.False(t, idx.HasErrors(), "should have no errors") + + // Get the User schema - should be mapped to the operation + require.Len(t, idx.ComponentSchemas, 2, "should have 2 component schemas") + + // Find the User and UnusedSchema + var userOps, unusedOps []*openapi.IndexNode[*openapi.Operation] + for _, schema := range idx.ComponentSchemas { + if schema == nil || schema.Node == nil { + continue + } + node := schema.Node.GetRootNode() + if node == nil { + continue + } + ops := idx.GetNodeOperations(node) + // Check location to identify which schema this is + jp := schema.Location.ToJSONPointer() + if strings.Contains(jp.String(), "User") { + userOps = ops + } else if strings.Contains(jp.String(), "UnusedSchema") { + unusedOps = ops + } + } + + // User should be mapped to 1 operation + assert.Len(t, userOps, 1, "User schema should be mapped to 1 operation") + + // UnusedSchema should NOT be mapped to any operations + // (it's after paths in the walk order, so currentOperation is nil) + assert.Empty(t, unusedOps, "UnusedSchema should not be mapped to any operations") +} + +func TestBuildIndex_NodeToOperations_NestedSchemaNodes_Success(t *testing.T) { + t.Parallel() + ctx := t.Context() + + // This test verifies that nested nodes WITHIN a component schema + // are also mapped to operations that reference the parent schema via $ref + yml := ` +openapi: "3.1.0" +info: + title: Test API + version: 1.0.0 +paths: + /test: + get: + operationId: getTest + responses: + "200": + description: OK + content: + application/json: + schema: + $ref: '#/components/schemas/MySchema' +components: + schemas: + MySchema: + type: array + items: + type: object + properties: + id: + type: integer +` + doc := unmarshalOpenAPI(t, ctx, yml) + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }, openapi.WithNodeOperationMap()) + + require.NotNil(t, idx, "index should not be nil") + assert.False(t, idx.HasErrors(), "should have no errors") + + // Should have 1 operation + require.Len(t, idx.Operations, 1, "should have 1 operation") + + // Get the component schema (MySchema) + require.Len(t, idx.ComponentSchemas, 1, "should have 1 component schema") + mySchema := idx.ComponentSchemas[0] + require.NotNil(t, mySchema, "MySchema should exist") + + // The root node of MySchema should be mapped to the operation + mySchemaNode := mySchema.Node.GetRootNode() + require.NotNil(t, mySchemaNode, "MySchema should have a root node") + + rootOps := idx.GetNodeOperations(mySchemaNode) + require.Len(t, rootOps, 1, "MySchema root should be mapped to 1 operation") + assert.Equal(t, "getTest", *rootOps[0].Node.OperationID, "should be getTest operation") + + // Now check nested nodes - the items schema should also be mapped + // Find an inline schema that's within MySchema (like the items schema) + var itemsSchemaOps []*openapi.IndexNode[*openapi.Operation] + for _, schema := range idx.InlineSchemas { + if schema == nil || schema.Node == nil { + continue + } + node := schema.Node.GetRootNode() + if node == nil { + continue + } + ops := idx.GetNodeOperations(node) + if len(ops) > 0 { + // This is an inline schema that's mapped to operations + itemsSchemaOps = ops + break + } + } + + // At least one inline schema (like items or id property) should be mapped + assert.NotEmpty(t, itemsSchemaOps, "nested inline schemas should be mapped to operations") +} + +func TestBuildIndex_NodeToOperations_BooleanSchema_Success(t *testing.T) { + t.Parallel() + ctx 
:= t.Context()
+
+	// This test reproduces the exact scenario the user described:
+	// - Operation references a component schema via $ref
+	// - The component schema has `items: true` (boolean schema)
+	// - We need to verify that the items node is mapped to the operation
+	yml := `
+openapi: "3.1.0"
+info:
+  title: Test
+  version: 1.0.0
+paths:
+  /test:
+    get:
+      operationId: getTest
+      responses:
+        "200":
+          description: OK
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/MySchema'
+components:
+  schemas:
+    MySchema:
+      type: array
+      items: true
+`
+	doc := unmarshalOpenAPI(t, ctx, yml)
+	idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+		RootDocument:   doc,
+		TargetDocument: doc,
+		TargetLocation: "test.yaml",
+	}, openapi.WithNodeOperationMap())
+
+	require.NotNil(t, idx, "index should not be nil")
+	assert.False(t, idx.HasErrors(), "should have no errors")
+
+	// Should have 1 operation
+	require.Len(t, idx.Operations, 1, "should have 1 operation")
+
+	// Find the boolean schema (items: true)
+	require.NotEmpty(t, idx.BooleanSchemas, "should have boolean schemas")
+
+	// Check if the boolean schema is mapped to the operation
+	var boolSchemaOps []*openapi.IndexNode[*openapi.Operation]
+	for _, boolSchema := range idx.BooleanSchemas {
+		if boolSchema == nil || boolSchema.Node == nil {
+			continue
+		}
+		node := boolSchema.Node.GetRootNode()
+		if node != nil {
+			ops := idx.GetNodeOperations(node)
+			if len(ops) > 0 {
+				boolSchemaOps = ops
+				break
+			}
+		}
+	}
+
+	// The boolean schema should be mapped to the getTest operation
+	require.Len(t, boolSchemaOps, 1, "boolean schema should be mapped to 1 operation")
+	assert.Equal(t, "getTest", *boolSchemaOps[0].Node.OperationID, "should be getTest operation")
+}
+
+func TestBuildIndex_NodeToOperations_LeafValueNode_Success(t *testing.T) {
+	t.Parallel()
+	ctx := t.Context()
+
+	// This test verifies that GetNodeOperations works for leaf VALUE nodes,
+	// not just root nodes. For example, when a linter finds an issue on
+	// the `true` value node in `items: true`, GetNodeOperations should
+	// return the operations that reference the parent schema.
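+	//
+	// A rough sketch of the consumer-side lookup this enables (illustrative
+	// only; `flaggedNode` stands for whatever *yaml.Node a rule reported on):
+	//
+	//	for _, op := range idx.GetNodeOperations(flaggedNode) {
+	//		path, method, _ := openapi.ExtractOperationInfo(op.Location)
+	//		fmt.Printf("affects %s %s (operationId=%s)\n", method, path, *op.Node.OperationID)
+	//	}
+	//
+	// Only GetNodeOperations, ExtractOperationInfo, Location and OperationID are
+	// taken from the APIs exercised in these tests; the fmt output is made up.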
+	yml := `
+openapi: "3.1.0"
+info:
+  title: Test
+  version: 1.0.0
+paths:
+  /test:
+    get:
+      operationId: getTest
+      responses:
+        "200":
+          description: OK
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/MyArray'
+components:
+  schemas:
+    MyArray:
+      type: array
+      items: true
+`
+	doc := unmarshalOpenAPI(t, ctx, yml)
+	idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+		RootDocument:   doc,
+		TargetDocument: doc,
+		TargetLocation: "test.yaml",
+	}, openapi.WithNodeOperationMap())
+
+	require.NotNil(t, idx, "index should not be nil")
+	assert.False(t, idx.HasErrors(), "should have no errors")
+	require.Len(t, idx.Operations, 1, "should have 1 operation")
+
+	// Find the boolean schema (representing items: true)
+	// This is the scenario where a linter gets a value node
+	require.NotEmpty(t, idx.BooleanSchemas, "should have boolean schemas")
+
+	// The boolean schema's root node is the actual value node (`true`)
+	boolSchema := idx.BooleanSchemas[0]
+	require.NotNil(t, boolSchema, "boolean schema should exist")
+	require.NotNil(t, boolSchema.Node, "boolean schema node should not be nil")
+
+	// Get the boolean value node - this is what a linter would get
+	// when it finds an issue on `items: true`
+	boolValueNode := boolSchema.Node.GetRootNode()
+	require.NotNil(t, boolValueNode, "boolean value node should not be nil")
+
+	// Verify this is actually the `true` value node
+	assert.Equal(t, yaml.ScalarNode, boolValueNode.Kind, "should be a scalar node")
+	assert.Equal(t, "true", boolValueNode.Value, "should have value 'true'")
+
+	// Now verify GetNodeOperations works for this leaf value node
+	ops := idx.GetNodeOperations(boolValueNode)
+	require.Len(t, ops, 1, "leaf value node should be mapped to 1 operation")
+	assert.Equal(t, "getTest", *ops[0].Node.OperationID, "should be getTest operation")
+}
+
+func TestBuildIndex_NodeToOperations_LeafKeyNode_Success(t *testing.T) {
+	t.Parallel()
+	ctx := t.Context()
+
+	// This test verifies that GetNodeOperations works for leaf KEY nodes.
+	// For example, when a linter reports an issue on the key `type` in
+	// a schema, GetNodeOperations should return the associated operations.
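+	//
+	// Key nodes and value nodes taken from the core model can be passed to
+	// GetNodeOperations directly, e.g. idx.GetNodeOperations(core.Type.KeyNode)
+	// as below; nodes that were never indexed simply return nil (see the
+	// "node not found" case earlier in this file).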
+ yml := ` +openapi: "3.1.0" +info: + title: Test + version: 1.0.0 +paths: + /pets: + get: + operationId: getPets + responses: + "200": + description: OK + content: + application/json: + schema: + $ref: '#/components/schemas/Pet' +components: + schemas: + Pet: + type: object + properties: + name: + type: string +` + doc := unmarshalOpenAPI(t, ctx, yml) + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }, openapi.WithNodeOperationMap()) + + require.NotNil(t, idx, "index should not be nil") + assert.False(t, idx.HasErrors(), "should have no errors") + require.Len(t, idx.Operations, 1, "should have 1 operation") + + // Find the component schema (Pet) + require.Len(t, idx.ComponentSchemas, 1, "should have 1 component schema") + petSchema := idx.ComponentSchemas[0] + require.NotNil(t, petSchema, "Pet schema should exist") + + // Get the actual schema to access the core model's Type field + schema := petSchema.Node.GetSchema() + require.NotNil(t, schema, "schema should not be nil") + + core := schema.GetCore() + require.NotNil(t, core, "core should not be nil") + + // Access the Type field's key node directly + // This tests that leaf key nodes are registered + typeKeyNode := core.Type.KeyNode + if typeKeyNode != nil { + ops := idx.GetNodeOperations(typeKeyNode) + require.Len(t, ops, 1, "type key node should be mapped to 1 operation") + assert.Equal(t, "getPets", *ops[0].Node.OperationID, "should be getPets operation") + } + + // Also test the value node of the Type field + typeValueNode := core.Type.ValueNode + if typeValueNode != nil { + ops := idx.GetNodeOperations(typeValueNode) + require.Len(t, ops, 1, "type value node should be mapped to 1 operation") + assert.Equal(t, "getPets", *ops[0].Node.OperationID, "should be getPets operation") + } +} diff --git a/openapi/index_test.go b/openapi/index_test.go new file mode 100644 index 00000000..f148972c --- /dev/null +++ b/openapi/index_test.go @@ -0,0 +1,2207 @@ +package openapi_test + +import ( + "context" + "errors" + "strings" + "testing" + + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/references" + "github.com/speakeasy-api/openapi/validation" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func unmarshalOpenAPI(t *testing.T, ctx context.Context, yaml string) *openapi.OpenAPI { + t.Helper() + o, validationErrs, err := openapi.Unmarshal(ctx, strings.NewReader(yaml)) + require.NoError(t, err, "unmarshal should succeed") + require.Empty(t, validationErrs, "should have no validation errors") + return o +} + +func TestBuildIndex_EmptyDoc_Success(t *testing.T) { + t.Parallel() + ctx := t.Context() + + yaml := ` +openapi: "3.1.0" +info: + title: Empty API + version: 1.0.0 +paths: {} +` + doc := unmarshalOpenAPI(t, ctx, yaml) + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + + require.NotNil(t, idx, "index should not be nil") + assert.Empty(t, idx.GetAllSchemas(), "should have no schemas") + assert.Empty(t, idx.GetAllPathItems(), "should have no path items") + assert.False(t, idx.HasErrors(), "should have no errors") +} + +func TestBuildIndex_ComponentSchemas_Success(t *testing.T) { + t.Parallel() + ctx := t.Context() + + yaml := ` +openapi: "3.1.0" +info: + title: Test API + version: 1.0.0 +paths: {} +components: + schemas: + User: + type: object + properties: + id: + type: integer + name: + 
type: string + Pet: + type: object + properties: + name: + type: string +` + doc := unmarshalOpenAPI(t, ctx, yaml) + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + + require.NotNil(t, idx, "index should not be nil") + assert.False(t, idx.HasErrors(), "should have no errors") + + // Should have component schemas indexed + assert.Len(t, idx.ComponentSchemas, 2, "should have 2 component schemas") + + // Should have inline schemas within the components + assert.Len(t, idx.InlineSchemas, 3, "should have 3 inline schemas from properties") +} + +func TestBuildIndex_InlineSchemas_Success(t *testing.T) { + t.Parallel() + ctx := t.Context() + + yaml := ` +openapi: "3.1.0" +info: + title: Test API + version: 1.0.0 +paths: + /users: + get: + operationId: getUsers + responses: + "200": + description: Success + content: + application/json: + schema: + type: array + items: + type: object + properties: + id: + type: integer +` + doc := unmarshalOpenAPI(t, ctx, yaml) + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + + require.NotNil(t, idx, "index should not be nil") + assert.False(t, idx.HasErrors(), "should have no errors") + + // Should have inline schemas: array, object (items), integer (id property) + assert.Len(t, idx.InlineSchemas, 3, "should have 3 inline schemas") + assert.Empty(t, idx.ComponentSchemas, "should have no component schemas") + assert.Empty(t, idx.SchemaReferences, "should have no schema references") +} + +func TestBuildIndex_SchemaReferences_Success(t *testing.T) { + t.Parallel() + ctx := t.Context() + + yaml := ` +openapi: "3.1.0" +info: + title: Test API + version: 1.0.0 +paths: + /users: + get: + operationId: getUsers + responses: + "200": + description: Success + content: + application/json: + schema: + $ref: '#/components/schemas/User' +components: + schemas: + User: + type: object + properties: + id: + type: integer +` + doc := unmarshalOpenAPI(t, ctx, yaml) + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + + require.NotNil(t, idx, "index should not be nil") + assert.False(t, idx.HasErrors(), "should have no errors") + + // $ref to User schema + assert.Len(t, idx.SchemaReferences, 1, "should have 1 schema reference") + // User component schema + assert.Len(t, idx.ComponentSchemas, 1, "should have 1 component schema") + // id property inline schema + assert.Len(t, idx.InlineSchemas, 1, "should have 1 inline schema") +} + +func TestBuildIndex_BooleanSchemas_Success(t *testing.T) { + t.Parallel() + ctx := t.Context() + + yaml := ` +openapi: "3.1.0" +info: + title: Test API + version: 1.0.0 +paths: {} +components: + schemas: + AnyValue: + type: object + additionalProperties: true + NoAdditional: + type: object + additionalProperties: false +` + doc := unmarshalOpenAPI(t, ctx, yaml) + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + + require.NotNil(t, idx, "index should not be nil") + assert.False(t, idx.HasErrors(), "should have no errors") + + // Two boolean schemas (true and false for additionalProperties) + assert.Len(t, idx.BooleanSchemas, 2, "should have 2 boolean schemas") + // Two component schemas (AnyValue and NoAdditional) + assert.Len(t, idx.ComponentSchemas, 2, "should have 2 component 
schemas") + assert.Empty(t, idx.InlineSchemas, "should have no inline schemas") +} + +func TestBuildIndex_Servers_Success(t *testing.T) { + t.Parallel() + ctx := t.Context() + + yaml := ` +openapi: "3.1.0" +info: + title: Test API + version: 1.0.0 +servers: + - url: https://api.example.com + description: Production + variables: + version: + default: v1 + enum: [v1, v2] + - url: https://staging.example.com + description: Staging +paths: {} +` + doc := unmarshalOpenAPI(t, ctx, yaml) + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + + require.NotNil(t, idx, "index should not be nil") + assert.False(t, idx.HasErrors(), "should have no errors") + + assert.Len(t, idx.Servers, 2, "should have 2 servers") + assert.Len(t, idx.ServerVariables, 1, "should have 1 server variable") +} + +func TestBuildIndex_Tags_Success(t *testing.T) { + t.Parallel() + ctx := t.Context() + + yaml := ` +openapi: "3.1.0" +info: + title: Test API + version: 1.0.0 +tags: + - name: users + description: User operations + - name: pets + description: Pet operations +paths: {} +` + doc := unmarshalOpenAPI(t, ctx, yaml) + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + + require.NotNil(t, idx, "index should not be nil") + assert.False(t, idx.HasErrors(), "should have no errors") + + assert.Len(t, idx.Tags, 2, "should have 2 tags") +} + +func TestBuildIndex_ExternalDocs_Success(t *testing.T) { + t.Parallel() + ctx := t.Context() + + yaml := ` +openapi: "3.1.0" +info: + title: Test API + version: 1.0.0 +externalDocs: + url: https://docs.example.com + description: API Documentation +tags: + - name: users + externalDocs: + url: https://docs.example.com/users +paths: {} +` + doc := unmarshalOpenAPI(t, ctx, yaml) + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + + require.NotNil(t, idx, "index should not be nil") + assert.False(t, idx.HasErrors(), "should have no errors") + + assert.Len(t, idx.ExternalDocumentation, 2, "should have 2 external docs") +} + +func TestBuildIndex_GetAllSchemas_Success(t *testing.T) { + t.Parallel() + ctx := t.Context() + + yaml := ` +openapi: "3.1.0" +info: + title: Test API + version: 1.0.0 +paths: + /users: + get: + operationId: getUsers + responses: + "200": + description: Success + content: + application/json: + schema: + $ref: '#/components/schemas/User' +components: + schemas: + User: + type: object + additionalProperties: true + properties: + id: + type: integer +` + doc := unmarshalOpenAPI(t, ctx, yaml) + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + + require.NotNil(t, idx, "index should not be nil") + + allSchemas := idx.GetAllSchemas() + assert.NotEmpty(t, allSchemas, "should have schemas") + + // Should include boolean, inline, component, and external schemas (not references) + totalExpected := len(idx.BooleanSchemas) + len(idx.InlineSchemas) + + len(idx.ComponentSchemas) + len(idx.ExternalSchemas) + assert.Len(t, allSchemas, totalExpected, "GetAllSchemas should return all schema types") +} + +func TestBuildIndex_GetAllParameters_Success(t *testing.T) { + t.Parallel() + ctx := t.Context() + + yaml := ` +openapi: "3.1.0" +info: + title: Test API + version: 1.0.0 +paths: + /users/{id}: + parameters: + - name: id + 
in: path + required: true + schema: + type: integer + get: + operationId: getUser + parameters: + - $ref: '#/components/parameters/PageSize' + responses: + "200": + description: Success +components: + parameters: + PageSize: + name: pageSize + in: query + schema: + type: integer +` + doc := unmarshalOpenAPI(t, ctx, yaml) + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + + require.NotNil(t, idx, "index should not be nil") + + allParameters := idx.GetAllParameters() + assert.NotEmpty(t, allParameters, "should have parameters") + + // Should include inline, component, and external parameters (not references) + totalExpected := len(idx.InlineParameters) + len(idx.ComponentParameters) + + len(idx.ExternalParameters) + assert.Len(t, allParameters, totalExpected, "GetAllParameters should return all parameter types") +} + +func TestBuildIndex_GetAllResponses_Success(t *testing.T) { + t.Parallel() + ctx := t.Context() + + yaml := ` +openapi: "3.1.0" +info: + title: Test API + version: 1.0.0 +paths: + /users: + get: + operationId: getUsers + responses: + "200": + description: Success + "404": + $ref: '#/components/responses/NotFound' +components: + responses: + NotFound: + description: Not found +` + doc := unmarshalOpenAPI(t, ctx, yaml) + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + + require.NotNil(t, idx, "index should not be nil") + + allResponses := idx.GetAllResponses() + assert.NotEmpty(t, allResponses, "should have responses") + + // Should include inline, component, and external responses (not references) + totalExpected := len(idx.InlineResponses) + len(idx.ComponentResponses) + + len(idx.ExternalResponses) + assert.Len(t, allResponses, totalExpected, "GetAllResponses should return all response types") +} + +func TestBuildIndex_GetAllRequestBodies_Success(t *testing.T) { + t.Parallel() + ctx := t.Context() + + yaml := ` +openapi: "3.1.0" +info: + title: Test API + version: 1.0.0 +paths: + /users: + post: + operationId: createUser + requestBody: + description: User to create + content: + application/json: + schema: + type: object + responses: + "201": + description: Created + put: + operationId: updateUser + requestBody: + $ref: '#/components/requestBodies/UserBody' + responses: + "200": + description: Updated +components: + requestBodies: + UserBody: + description: User body + content: + application/json: + schema: + type: object +` + doc := unmarshalOpenAPI(t, ctx, yaml) + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + + require.NotNil(t, idx, "index should not be nil") + + allRequestBodies := idx.GetAllRequestBodies() + assert.NotEmpty(t, allRequestBodies, "should have request bodies") + + // Should include inline, component, and external request bodies (not references) + totalExpected := len(idx.InlineRequestBodies) + len(idx.ComponentRequestBodies) + + len(idx.ExternalRequestBodies) + assert.Len(t, allRequestBodies, totalExpected, "GetAllRequestBodies should return all request body types") +} + +func TestBuildIndex_GetAllHeaders_Success(t *testing.T) { + t.Parallel() + ctx := t.Context() + + yaml := ` +openapi: "3.1.0" +info: + title: Test API + version: 1.0.0 +paths: + /users: + get: + operationId: getUsers + responses: + "200": + description: Success + headers: + X-Rate-Limit: + description: 
Rate limit + schema: + type: integer + X-Custom: + $ref: '#/components/headers/CustomHeader' +components: + headers: + CustomHeader: + description: Custom header + schema: + type: string +` + doc := unmarshalOpenAPI(t, ctx, yaml) + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + + require.NotNil(t, idx, "index should not be nil") + + allHeaders := idx.GetAllHeaders() + assert.NotEmpty(t, allHeaders, "should have headers") + + // Should include inline, component, and external headers (not references) + totalExpected := len(idx.InlineHeaders) + len(idx.ComponentHeaders) + + len(idx.ExternalHeaders) + assert.Len(t, allHeaders, totalExpected, "GetAllHeaders should return all header types") +} + +func TestBuildIndex_GetAllExamples_Success(t *testing.T) { + t.Parallel() + ctx := t.Context() + + yaml := ` +openapi: "3.1.0" +info: + title: Test API + version: 1.0.0 +paths: + /users: + get: + operationId: getUsers + responses: + "200": + description: Success + content: + application/json: + examples: + inline: + value: { id: 1 } + referenced: + $ref: '#/components/examples/UserExample' +components: + examples: + UserExample: + value: { id: 2 } +` + doc := unmarshalOpenAPI(t, ctx, yaml) + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + + require.NotNil(t, idx, "index should not be nil") + + allExamples := idx.GetAllExamples() + assert.NotEmpty(t, allExamples, "should have examples") + + // Should include inline, component, and external examples (not references) + totalExpected := len(idx.InlineExamples) + len(idx.ComponentExamples) + + len(idx.ExternalExamples) + assert.Len(t, allExamples, totalExpected, "GetAllExamples should return all example types") +} + +func TestBuildIndex_GetAllLinks_Success(t *testing.T) { + t.Parallel() + ctx := t.Context() + + yaml := ` +openapi: "3.1.0" +info: + title: Test API + version: 1.0.0 +paths: + /users: + get: + operationId: getUsers + responses: + "200": + description: Success + links: + GetUserById: + operationId: getUsers + ReferencedLink: + $ref: '#/components/links/CustomLink' + /products: + get: + operationId: getProducts + responses: + "200": + description: Success +components: + links: + CustomLink: + operationId: getProducts +` + doc := unmarshalOpenAPI(t, ctx, yaml) + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + + require.NotNil(t, idx, "index should not be nil") + + allLinks := idx.GetAllLinks() + assert.NotEmpty(t, allLinks, "should have links") + + // Should include inline, component, and external links (not references) + totalExpected := len(idx.InlineLinks) + len(idx.ComponentLinks) + + len(idx.ExternalLinks) + assert.Len(t, allLinks, totalExpected, "GetAllLinks should return all link types") +} + +func TestBuildIndex_GetAllCallbacks_Success(t *testing.T) { + t.Parallel() + ctx := t.Context() + + yaml := ` +openapi: "3.1.0" +info: + title: Test API + version: 1.0.0 +paths: + /subscribe: + post: + operationId: subscribe + callbacks: + onData: + '{$request.body#/callbackUrl}': + post: + requestBody: + description: Callback + content: + application/json: + schema: + type: object + responses: + "200": + description: OK + onComplete: + $ref: '#/components/callbacks/CompleteCallback' + responses: + "201": + description: Created +components: + callbacks: + 
CompleteCallback: + '{$request.body#/callbackUrl}': + post: + requestBody: + description: Complete callback + content: + application/json: + schema: + type: object + responses: + "200": + description: OK +` + doc := unmarshalOpenAPI(t, ctx, yaml) + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + + require.NotNil(t, idx, "index should not be nil") + + allCallbacks := idx.GetAllCallbacks() + assert.NotEmpty(t, allCallbacks, "should have callbacks") + + // Should include inline, component, and external callbacks (not references) + totalExpected := len(idx.InlineCallbacks) + len(idx.ComponentCallbacks) + + len(idx.ExternalCallbacks) + assert.Len(t, allCallbacks, totalExpected, "GetAllCallbacks should return all callback types") +} + +func TestBuildIndex_NilIndex_Methods_Success(t *testing.T) { + t.Parallel() + + var idx *openapi.Index + + assert.Nil(t, idx.GetAllSchemas(), "nil index GetAllSchemas should return nil") + assert.Nil(t, idx.GetAllPathItems(), "nil index GetAllPathItems should return nil") + assert.Nil(t, idx.GetValidationErrors(), "nil index GetValidationErrors should return nil") + assert.Nil(t, idx.GetResolutionErrors(), "nil index GetResolutionErrors should return nil") + assert.Nil(t, idx.GetCircularReferenceErrors(), "nil index GetCircularReferenceErrors should return nil") + assert.Nil(t, idx.GetAllErrors(), "nil index GetAllErrors should return nil") + assert.False(t, idx.HasErrors(), "nil index HasErrors should return false") +} + +// Tests for circular reference detection + +func TestBuildIndex_CircularRef_OptionalProperty_Valid(t *testing.T) { + t.Parallel() + ctx := t.Context() + + // Optional property recursion - VALID (not required means {} is valid) + yaml := ` +openapi: "3.1.0" +info: + title: Test API + version: 1.0.0 +paths: {} +components: + schemas: + Node: + type: object + properties: + next: + $ref: '#/components/schemas/Node' +` + doc := unmarshalOpenAPI(t, ctx, yaml) + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + + require.NotNil(t, idx, "index should not be nil") + + // Optional property circular refs should be VALID (no error) + circularErrs := idx.GetCircularReferenceErrors() + assert.Empty(t, circularErrs, "optional property circular ref should be valid (no error)") +} + +func TestBuildIndex_CircularRef_RequiredProperty_Invalid(t *testing.T) { + t.Parallel() + ctx := t.Context() + + // Required property recursion - INVALID (no base case) + yaml := ` +openapi: "3.1.0" +info: + title: Test API + version: 1.0.0 +paths: {} +components: + schemas: + A: + type: object + required: [b] + properties: + b: + $ref: '#/components/schemas/B' + B: + type: object + required: [a] + properties: + a: + $ref: '#/components/schemas/A' +` + doc := unmarshalOpenAPI(t, ctx, yaml) + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + + require.NotNil(t, idx, "index should not be nil") + + // Required property circular refs should be INVALID + circularErrs := idx.GetCircularReferenceErrors() + assert.NotEmpty(t, circularErrs, "required property circular ref should be invalid") +} + +func TestBuildIndex_CircularRef_ArrayMinItemsZero_Valid(t *testing.T) { + t.Parallel() + ctx := t.Context() + + // Array with default minItems (0) - VALID (empty array terminates) + yaml := ` +openapi: "3.1.0" 
+info: + title: Test API + version: 1.0.0 +paths: {} +components: + schemas: + Category: + type: object + required: [children] + properties: + children: + type: array + items: + $ref: '#/components/schemas/Category' +` + doc := unmarshalOpenAPI(t, ctx, yaml) + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + + require.NotNil(t, idx, "index should not be nil") + + // Array with minItems=0 circular refs should be VALID + circularErrs := idx.GetCircularReferenceErrors() + assert.Empty(t, circularErrs, "array with minItems=0 circular ref should be valid") +} + +func TestBuildIndex_CircularRef_ArrayMinItemsOne_Invalid(t *testing.T) { + t.Parallel() + ctx := t.Context() + + // Array with minItems=1 - INVALID (can't have empty array) + yaml := ` +openapi: "3.1.0" +info: + title: Test API + version: 1.0.0 +paths: {} +components: + schemas: + Node: + type: object + required: [children] + properties: + children: + type: array + minItems: 1 + items: + $ref: '#/components/schemas/Node' +` + doc := unmarshalOpenAPI(t, ctx, yaml) + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + + require.NotNil(t, idx, "index should not be nil") + + // Array with minItems>=1 circular refs should be INVALID + circularErrs := idx.GetCircularReferenceErrors() + assert.NotEmpty(t, circularErrs, "array with minItems>=1 circular ref should be invalid") +} + +func TestBuildIndex_CircularRef_Nullable_Valid(t *testing.T) { + t.Parallel() + ctx := t.Context() + + // Nullable type union - VALID (null is a base case) + yaml := ` +openapi: "3.1.0" +info: + title: Test API + version: 1.0.0 +paths: {} +components: + schemas: + Node: + type: [object, "null"] + required: [next] + properties: + next: + $ref: '#/components/schemas/Node' +` + doc := unmarshalOpenAPI(t, ctx, yaml) + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + + require.NotNil(t, idx, "index should not be nil") + + // Nullable circular refs should be VALID + circularErrs := idx.GetCircularReferenceErrors() + assert.Empty(t, circularErrs, "nullable circular ref should be valid") +} + +func TestBuildIndex_CircularRef_AdditionalPropertiesMinZero_Valid(t *testing.T) { + t.Parallel() + ctx := t.Context() + + // AdditionalProperties with default minProperties (0) - VALID + yaml := ` +openapi: "3.1.0" +info: + title: Test API + version: 1.0.0 +paths: {} +components: + schemas: + TrieNode: + type: object + required: [children] + properties: + children: + type: object + additionalProperties: + $ref: '#/components/schemas/TrieNode' +` + doc := unmarshalOpenAPI(t, ctx, yaml) + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + + require.NotNil(t, idx, "index should not be nil") + + // AdditionalProperties with minProperties=0 should be VALID + circularErrs := idx.GetCircularReferenceErrors() + assert.Empty(t, circularErrs, "additionalProperties with minProperties=0 should be valid") +} + +func TestBuildIndex_CircularRef_AdditionalPropertiesMinOne_Invalid(t *testing.T) { + t.Parallel() + ctx := t.Context() + + // AdditionalProperties with minProperties>=1 - INVALID + yaml := ` +openapi: "3.1.0" +info: + title: Test API + version: 1.0.0 +paths: {} +components: + schemas: + Node: + type: object + required: 
[children] + properties: + children: + type: object + minProperties: 1 + additionalProperties: + $ref: '#/components/schemas/Node' +` + doc := unmarshalOpenAPI(t, ctx, yaml) + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + + require.NotNil(t, idx, "index should not be nil") + + // AdditionalProperties with minProperties>=1 should be INVALID + circularErrs := idx.GetCircularReferenceErrors() + assert.NotEmpty(t, circularErrs, "additionalProperties with minProperties>=1 should be invalid") +} + +func TestBuildIndex_CircularRef_OneOfWithNonRecursiveBranch_Valid(t *testing.T) { + t.Parallel() + ctx := t.Context() + + // oneOf with at least one non-recursive branch - VALID + yaml := ` +openapi: "3.1.0" +info: + title: Test API + version: 1.0.0 +paths: {} +components: + schemas: + Expr: + oneOf: + - $ref: '#/components/schemas/Literal' + - $ref: '#/components/schemas/BinaryExpr' + Literal: + type: object + properties: + value: + type: string + BinaryExpr: + type: object + required: [left, right] + properties: + left: + $ref: '#/components/schemas/Expr' + right: + $ref: '#/components/schemas/Expr' +` + doc := unmarshalOpenAPI(t, ctx, yaml) + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + + require.NotNil(t, idx, "index should not be nil") + + // oneOf with a non-recursive branch should be VALID + circularErrs := idx.GetCircularReferenceErrors() + assert.Empty(t, circularErrs, "oneOf with non-recursive branch should be valid") +} + +func TestBuildIndex_CircularRef_DirectSelfRef_Optional_Valid(t *testing.T) { + t.Parallel() + ctx := t.Context() + + // Direct self-reference through optional property - VALID + yaml := ` +openapi: "3.1.0" +info: + title: Test API + version: 1.0.0 +paths: {} +components: + schemas: + LinkedNode: + type: object + properties: + value: + type: string + next: + $ref: '#/components/schemas/LinkedNode' +` + doc := unmarshalOpenAPI(t, ctx, yaml) + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + + require.NotNil(t, idx, "index should not be nil") + + circularErrs := idx.GetCircularReferenceErrors() + assert.Empty(t, circularErrs, "direct self-ref through optional should be valid") +} + +func TestBuildIndex_CircularRef_DirectSelfRef_Required_Invalid(t *testing.T) { + t.Parallel() + ctx := t.Context() + + // Direct self-reference through required property - INVALID + yaml := ` +openapi: "3.1.0" +info: + title: Test API + version: 1.0.0 +paths: {} +components: + schemas: + InfiniteNode: + type: object + required: [self] + properties: + self: + $ref: '#/components/schemas/InfiniteNode' +` + doc := unmarshalOpenAPI(t, ctx, yaml) + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + + require.NotNil(t, idx, "index should not be nil") + + circularErrs := idx.GetCircularReferenceErrors() + assert.NotEmpty(t, circularErrs, "direct self-ref through required should be invalid") +} + +func TestBuildIndex_NoCircularRef_Success(t *testing.T) { + t.Parallel() + ctx := t.Context() + + // No circular reference - just regular refs + yaml := ` +openapi: "3.1.0" +info: + title: Test API + version: 1.0.0 +paths: + /users: + get: + operationId: getUsers + responses: + "200": + description: Success + content: + 
application/json: + schema: + $ref: '#/components/schemas/User' +components: + schemas: + User: + type: object + properties: + id: + type: integer + address: + $ref: '#/components/schemas/Address' + Address: + type: object + properties: + street: + type: string + city: + type: string +` + doc := unmarshalOpenAPI(t, ctx, yaml) + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + + require.NotNil(t, idx, "index should not be nil") + assert.False(t, idx.HasErrors(), "should have no errors") + assert.Empty(t, idx.GetCircularReferenceErrors(), "should have no circular reference errors") +} + +func TestBuildIndex_LocationInfo_Success(t *testing.T) { + t.Parallel() + ctx := t.Context() + + yaml := ` +openapi: "3.1.0" +info: + title: Test API + version: 1.0.0 +paths: {} +components: + schemas: + User: + type: object + properties: + id: + type: integer +` + doc := unmarshalOpenAPI(t, ctx, yaml) + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + + require.NotNil(t, idx, "index should not be nil") + + // Check that schemas have location information + for _, schema := range idx.ComponentSchemas { + assert.NotNil(t, schema.Location, "schema should have location") + jp := schema.Location.ToJSONPointer() + assert.NotEmpty(t, jp, "location should produce JSON pointer") + } +} + +func TestBuildIndex_Operations_Success(t *testing.T) { + t.Parallel() + ctx := t.Context() + + yaml := ` +openapi: "3.1.0" +info: + title: Test API + version: 1.0.0 +paths: + /users: + get: + operationId: getUsers + summary: Get users + responses: + "200": + description: Success + post: + operationId: createUser + summary: Create user + responses: + "201": + description: Created + /products: + get: + operationId: getProducts + summary: Get products + responses: + "200": + description: Success +` + doc := unmarshalOpenAPI(t, ctx, yaml) + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + + require.NotNil(t, idx, "index should not be nil") + assert.False(t, idx.HasErrors(), "should have no errors") + + // Should have 3 operations indexed + assert.Len(t, idx.Operations, 3, "should have 3 operations") + // Should have 2 inline path items + assert.Len(t, idx.InlinePathItems, 2, "should have 2 inline path items") +} + +func TestBuildIndex_Parameters_Success(t *testing.T) { + t.Parallel() + ctx := t.Context() + + yaml := ` +openapi: "3.1.0" +info: + title: Test API + version: 1.0.0 +paths: + /users/{id}: + parameters: + - name: id + in: path + required: true + schema: + type: integer + get: + operationId: getUser + responses: + "200": + description: Success + parameters: + - $ref: '#/components/parameters/PageSize' +components: + parameters: + PageSize: + name: pageSize + in: query + schema: + type: integer +` + doc := unmarshalOpenAPI(t, ctx, yaml) + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + + require.NotNil(t, idx, "index should not be nil") + assert.False(t, idx.HasErrors(), "should have no errors") + + // Should have 1 component parameter (PageSize) + assert.Len(t, idx.ComponentParameters, 1, "should have 1 component parameter") + // Should have 1 inline parameter (id in path) + assert.Len(t, idx.InlineParameters, 1, "should have 1 inline parameter") + 
// Should have 1 parameter reference ($ref to PageSize) + assert.Len(t, idx.ParameterReferences, 1, "should have 1 parameter reference") +} + +func TestBuildIndex_Responses_Success(t *testing.T) { + t.Parallel() + ctx := t.Context() + + yaml := ` +openapi: "3.1.0" +info: + title: Test API + version: 1.0.0 +paths: + /users: + get: + operationId: getUsers + responses: + "200": + description: Success + content: + application/json: + schema: + type: array + "404": + $ref: '#/components/responses/NotFound' +components: + responses: + NotFound: + description: Not found +` + doc := unmarshalOpenAPI(t, ctx, yaml) + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + + require.NotNil(t, idx, "index should not be nil") + assert.False(t, idx.HasErrors(), "should have no errors") + + // Should have 1 component response (NotFound) + assert.Len(t, idx.ComponentResponses, 1, "should have 1 component response") + // Should have 1 inline response (200) + assert.Len(t, idx.InlineResponses, 1, "should have 1 inline response") + // Should have 1 response reference ($ref to NotFound) + assert.Len(t, idx.ResponseReferences, 1, "should have 1 response reference") +} + +func TestBuildIndex_RequestBodies_Success(t *testing.T) { + t.Parallel() + ctx := t.Context() + + yaml := ` +openapi: "3.1.0" +info: + title: Test API + version: 1.0.0 +paths: + /users: + post: + operationId: createUser + requestBody: + description: User to create + content: + application/json: + schema: + type: object + responses: + "201": + description: Created + put: + operationId: updateUser + requestBody: + $ref: '#/components/requestBodies/UserBody' + responses: + "200": + description: Updated +components: + requestBodies: + UserBody: + description: User body + content: + application/json: + schema: + type: object +` + doc := unmarshalOpenAPI(t, ctx, yaml) + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + + require.NotNil(t, idx, "index should not be nil") + assert.False(t, idx.HasErrors(), "should have no errors") + + // Should have 1 component request body (UserBody) + assert.Len(t, idx.ComponentRequestBodies, 1, "should have 1 component request body") + // Should have 1 inline request body (POST) + assert.Len(t, idx.InlineRequestBodies, 1, "should have 1 inline request body") + // Should have 1 request body reference ($ref to UserBody) + assert.Len(t, idx.RequestBodyReferences, 1, "should have 1 request body reference") +} + +func TestBuildIndex_MediaTypes_Success(t *testing.T) { + t.Parallel() + ctx := t.Context() + + yaml := ` +openapi: "3.1.0" +info: + title: Test API + version: 1.0.0 +paths: + /users: + post: + operationId: createUser + requestBody: + content: + application/json: + schema: + type: object + application/xml: + schema: + type: object + responses: + "200": + description: Success + content: + application/json: + schema: + type: object +` + doc := unmarshalOpenAPI(t, ctx, yaml) + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + + require.NotNil(t, idx, "index should not be nil") + assert.False(t, idx.HasErrors(), "should have no errors") + + // Should have 3 media types (2 in request, 1 in response) + assert.Len(t, idx.MediaTypes, 3, "should have 3 media types") +} + +func TestBuildIndex_Discriminator_Success(t *testing.T) { + t.Parallel() + 
ctx := t.Context() + + yaml := ` +openapi: "3.1.0" +info: + title: Test API + version: 1.0.0 +paths: {} +components: + schemas: + Pet: + type: object + discriminator: + propertyName: petType + mapping: + dog: '#/components/schemas/Dog' + cat: '#/components/schemas/Cat' + properties: + petType: + type: string + Dog: + type: object + Cat: + type: object +` + doc := unmarshalOpenAPI(t, ctx, yaml) + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + + require.NotNil(t, idx, "index should not be nil") + assert.False(t, idx.HasErrors(), "should have no errors") + + // Should have 1 discriminator + assert.Len(t, idx.Discriminators, 1, "should have 1 discriminator") +} + +func TestBuildIndex_SecuritySchemes_Success(t *testing.T) { + t.Parallel() + ctx := t.Context() + + yaml := ` +openapi: "3.1.0" +info: + title: Test API + version: 1.0.0 +paths: {} +components: + securitySchemes: + apiKey: + type: apiKey + in: header + name: X-API-Key + oauth2: + type: oauth2 + flows: + implicit: + authorizationUrl: https://example.com/oauth/authorize + scopes: + read: Read access + write: Write access +` + doc := unmarshalOpenAPI(t, ctx, yaml) + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + + require.NotNil(t, idx, "index should not be nil") + assert.False(t, idx.HasErrors(), "should have no errors") + + // Should have 2 component security schemes + assert.Len(t, idx.ComponentSecuritySchemes, 2, "should have 2 component security schemes") + // Should have 1 OAuth flows container + assert.Len(t, idx.OAuthFlows, 1, "should have 1 OAuth flows") + // Should have 1 OAuth flow item (implicit) + assert.Len(t, idx.OAuthFlowItems, 1, "should have 1 OAuth flow item") +} + +func TestBuildIndex_UnknownProperties_DetectedAsWarnings(t *testing.T) { + t.Parallel() + ctx := t.Context() + + tests := []struct { + name string + yaml string + expectedWarningCount int + expectedWarningSubstr string + }{ + { + name: "MediaType with $ref property in OpenAPI 3.1", + yaml: ` +openapi: "3.1.0" +info: + title: Test API + version: 1.0.0 +paths: + /vehicles: + get: + responses: + "200": + description: Success + content: + application/json: + $ref: '#/components/schemas/VehiclesResponse' +components: + schemas: + VehiclesResponse: + type: object + properties: + vehicles: + type: array +`, + expectedWarningCount: 1, + expectedWarningSubstr: "unknown property `$ref`", + }, + { + name: "MediaType with schema property (valid)", + yaml: ` +openapi: "3.1.0" +info: + title: Test API + version: 1.0.0 +paths: + /vehicles: + get: + responses: + "200": + description: Success + content: + application/json: + schema: + $ref: '#/components/schemas/VehiclesResponse' +components: + schemas: + VehiclesResponse: + type: object +`, + expectedWarningCount: 0, + expectedWarningSubstr: "", + }, + { + name: "Operation with unknown property", + yaml: ` +openapi: "3.1.0" +info: + title: Test API + version: 1.0.0 +paths: + /test: + get: + unknownField: value + responses: + "200": + description: Success +`, + expectedWarningCount: 1, + expectedWarningSubstr: "unknown property `unknownField`", + }, + { + name: "Schema property with unknown keyword", + yaml: ` +openapi: "3.1.0" +info: + title: Test API + version: 1.0.0 +paths: + /test: + post: + requestBody: + required: true + content: + application/json: + schema: + type: object + example: foobar + properties: + test: + type: 
string + description: Test + name: foo + responses: + "204": + description: No content +`, + expectedWarningCount: 1, + expectedWarningSubstr: "unknown property `name`", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + doc := unmarshalOpenAPI(t, ctx, tt.yaml) + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + + require.NotNil(t, idx, "index should not be nil") + + allErrors := idx.GetAllErrors() + warnings := []error{} + for _, err := range allErrors { + var vErr *validation.Error + if errors.As(err, &vErr) && vErr.Severity == validation.SeverityWarning { + warnings = append(warnings, err) + } + } + + assert.Len(t, warnings, tt.expectedWarningCount, "should have expected number of warnings") + + if tt.expectedWarningCount > 0 { + found := false + for _, w := range warnings { + if strings.Contains(w.Error(), tt.expectedWarningSubstr) { + found = true + break + } + } + assert.True(t, found, "should have warning containing '%s'", tt.expectedWarningSubstr) + } + }) + } +} + +func TestBuildIndex_UnknownProperties_Deduplicated_WhenComponentReferencedMultipleTimes(t *testing.T) { + t.Parallel() + ctx := t.Context() + + // Create a schema with an unknown property that is referenced from multiple operations + yaml := ` +openapi: "3.1.0" +info: + title: Test API + version: 1.0.0 +paths: + /users: + get: + responses: + "200": + description: Get users + content: + application/json: + schema: + $ref: '#/components/schemas/User' + post: + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/User' + responses: + "201": + description: Created + /admin/users: + get: + responses: + "200": + description: Get admin users + content: + application/json: + schema: + type: array + items: + $ref: '#/components/schemas/User' +components: + schemas: + User: + type: object + unknownField: this-should-trigger-warning + properties: + id: + type: string + name: + type: string +` + + doc := unmarshalOpenAPI(t, ctx, yaml) + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + + require.NotNil(t, idx, "index should not be nil") + + // Get all warnings + allErrors := idx.GetAllErrors() + unknownPropWarnings := []error{} + for _, err := range allErrors { + var vErr *validation.Error + if errors.As(err, &vErr) && vErr.Severity == validation.SeverityWarning { + if strings.Contains(err.Error(), "unknown property `unknownField`") { + unknownPropWarnings = append(unknownPropWarnings, err) + } + } + } + + // Despite the User schema being referenced 3 times (in 3 different operations), + // we should only get 1 warning for the unknown property + assert.Len(t, unknownPropWarnings, 1, "should only have 1 warning for unknownField despite multiple references") + assert.Contains(t, unknownPropWarnings[0].Error(), "unknown property `unknownField`", "warning should mention the unknown field") +} + +func TestBuildIndex_CircularReferenceCounts_ValidCircular_Success(t *testing.T) { + t.Parallel() + ctx := t.Context() + + yaml := ` +openapi: "3.1.0" +info: + title: Test API + version: 1.0.0 +paths: {} +components: + schemas: + Node: + type: object + properties: + value: + type: string + next: + $ref: '#/components/schemas/Node' +` + doc := unmarshalOpenAPI(t, ctx, yaml) + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + 
TargetDocument: doc, + TargetLocation: "test.yaml", + }) + + require.NotNil(t, idx, "index should not be nil") + assert.Equal(t, 1, idx.GetValidCircularRefCount(), "should have 1 valid circular reference") + assert.Equal(t, 0, idx.GetInvalidCircularRefCount(), "should have 0 invalid circular references") + assert.Empty(t, idx.GetCircularReferenceErrors(), "should have no circular reference errors") +} + +func TestBuildIndex_CircularReferenceCounts_InvalidCircular_Success(t *testing.T) { + t.Parallel() + ctx := t.Context() + + yaml := ` +openapi: "3.1.0" +info: + title: Test API + version: 1.0.0 +paths: {} +components: + schemas: + BadNode: + type: object + required: + - next + properties: + next: + $ref: '#/components/schemas/BadNode' +` + doc := unmarshalOpenAPI(t, ctx, yaml) + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + + require.NotNil(t, idx, "index should not be nil") + assert.Equal(t, 0, idx.GetValidCircularRefCount(), "should have 0 valid circular references") + assert.Equal(t, 1, idx.GetInvalidCircularRefCount(), "should have 1 invalid circular reference") + assert.Len(t, idx.GetCircularReferenceErrors(), 1, "should have 1 circular reference error") +} + +func TestBuildIndex_CircularReferenceCounts_MixedCirculars_Success(t *testing.T) { + t.Parallel() + ctx := t.Context() + + yaml := ` +openapi: "3.1.0" +info: + title: Test API + version: 1.0.0 +paths: {} +components: + schemas: + GoodNode: + type: object + properties: + value: + type: string + next: + $ref: '#/components/schemas/GoodNode' + BadNode: + type: object + required: + - next + properties: + next: + $ref: '#/components/schemas/BadNode' +` + doc := unmarshalOpenAPI(t, ctx, yaml) + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + + require.NotNil(t, idx, "index should not be nil") + assert.Equal(t, 1, idx.GetValidCircularRefCount(), "should have 1 valid circular reference") + assert.Equal(t, 1, idx.GetInvalidCircularRefCount(), "should have 1 invalid circular reference") + assert.Len(t, idx.GetCircularReferenceErrors(), 1, "should have 1 circular reference error") +} + +func TestBuildIndex_CircularReferenceCounts_ArrayWithMinItems_Success(t *testing.T) { + t.Parallel() + ctx := t.Context() + + yaml := ` +openapi: "3.1.0" +info: + title: Test API + version: 1.0.0 +paths: {} +components: + schemas: + TreeNode: + type: object + properties: + children: + type: array + items: + $ref: '#/components/schemas/TreeNode' +` + doc := unmarshalOpenAPI(t, ctx, yaml) + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + + require.NotNil(t, idx, "index should not be nil") + assert.Equal(t, 1, idx.GetValidCircularRefCount(), "should have 1 valid circular reference (empty array terminates)") + assert.Equal(t, 0, idx.GetInvalidCircularRefCount(), "should have 0 invalid circular references") + assert.Empty(t, idx.GetCircularReferenceErrors(), "should have no circular reference errors") +} + +func TestBuildIndex_CircularReferenceCounts_NullableSchema_Success(t *testing.T) { + t.Parallel() + ctx := t.Context() + + yaml := ` +openapi: "3.1.0" +info: + title: Test API + version: 1.0.0 +paths: {} +components: + schemas: + NullableNode: + type: object + nullable: true + required: + - next + properties: + next: + $ref: '#/components/schemas/NullableNode' +` + doc := 
unmarshalOpenAPI(t, ctx, yaml) + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + + require.NotNil(t, idx, "index should not be nil") + assert.Equal(t, 1, idx.GetValidCircularRefCount(), "should have 1 valid circular reference (nullable terminates)") + assert.Equal(t, 0, idx.GetInvalidCircularRefCount(), "should have 0 invalid circular references") + assert.Empty(t, idx.GetCircularReferenceErrors(), "should have no circular reference errors") +} + +func TestBuildIndex_CircularReferenceCounts_OneOfValid_Success(t *testing.T) { + t.Parallel() + ctx := t.Context() + + tests := []struct { + name string + yaml string + expectedValidCircular int + }{ + { + name: "oneOf with referenced schema", + yaml: ` +openapi: "3.1.0" +info: + title: Test API + version: 1.0.0 +paths: {} +components: + schemas: + PolyNode: + oneOf: + - type: string + - $ref: '#/components/schemas/PolyNodeObject' + PolyNodeObject: + type: object + properties: + next: + $ref: '#/components/schemas/PolyNode' +`, + // 2 circular refs detected: one starting from PolyNode, one from PolyNodeObject + // Both are part of the same cycle but detected at different entry points + expectedValidCircular: 2, + }, + { + name: "oneOf with inline schema", + yaml: ` +openapi: "3.1.0" +info: + title: Test API + version: 1.0.0 +paths: {} +components: + schemas: + PolyNode: + oneOf: + - type: string + - type: object + properties: + next: + $ref: '#/components/schemas/PolyNode' +`, + // 1 circular ref: PolyNode referencing itself + expectedValidCircular: 1, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + doc := unmarshalOpenAPI(t, ctx, tt.yaml) + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + + require.NotNil(t, idx, "index should not be nil") + assert.Equal(t, tt.expectedValidCircular, idx.GetValidCircularRefCount(), "should have expected valid circular references") + assert.Equal(t, 0, idx.GetInvalidCircularRefCount(), "should have 0 invalid circular references") + assert.Empty(t, idx.GetCircularReferenceErrors(), "should have no circular reference errors") + }) + } +} + +func TestBuildIndex_CircularReferenceCounts_GettersWithNilIndex_Success(t *testing.T) { + t.Parallel() + + var idx *openapi.Index = nil + + assert.Equal(t, 0, idx.GetValidCircularRefCount(), "should return 0 for nil index") + assert.Equal(t, 0, idx.GetInvalidCircularRefCount(), "should return 0 for nil index") +} + +func TestIndex_GetAllReferences_Success(t *testing.T) { + t.Parallel() + ctx := t.Context() + + yaml := ` +openapi: "3.1.0" +info: + title: Test API + version: 1.0.0 +paths: + /users: + get: + operationId: getUsers + parameters: + - $ref: '#/components/parameters/UserIdParam' + responses: + '200': + $ref: '#/components/responses/UserResponse' + callbacks: + statusUpdate: + $ref: '#/components/callbacks/StatusCallback' +components: + parameters: + UserIdParam: + name: userId + in: query + schema: + type: string + responses: + UserResponse: + description: User response + content: + application/json: + schema: + $ref: '#/components/schemas/User' + examples: + user1: + $ref: '#/components/examples/UserExample' + headers: + X-Custom: + $ref: '#/components/headers/CustomHeader' + links: + self: + $ref: '#/components/links/SelfLink' + schemas: + User: + type: object + properties: + id: + type: string + profile: + $ref: 
'#/components/schemas/Profile' + Profile: + type: object + properties: + name: + type: string + examples: + UserExample: + value: + id: "123" + headers: + CustomHeader: + schema: + type: string + links: + SelfLink: + operationId: getUsers + requestBodies: + UserBody: + content: + application/json: + schema: + $ref: '#/components/schemas/User' + callbacks: + StatusCallback: + '{$request.body#/callbackUrl}': + post: + requestBody: + $ref: '#/components/requestBodies/UserBody' + responses: + '200': + description: Callback response + securitySchemes: + ApiKey: + type: apiKey + in: header + name: X-API-Key +` + doc := unmarshalOpenAPI(t, ctx, yaml) + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + + require.NotNil(t, idx, "index should not be nil") + assert.False(t, idx.HasErrors(), "should have no errors") + + // Get all references + allRefs := idx.GetAllReferences() + require.NotNil(t, allRefs, "GetAllReferences should not return nil") + + expectedRefCount := 10 + assert.Len(t, allRefs, expectedRefCount, "should have expected number of references") + + // Verify all returned nodes implement ReferenceNode interface + for i, ref := range allRefs { + assert.NotNil(t, ref, "reference at index %d should not be nil", i) + assert.NotNil(t, ref.Node, "reference node at index %d should not be nil", i) + + // Verify it's actually a reference + assert.True(t, ref.Node.IsReference(), "node at index %d should be a reference", i) + + // Verify it has a reference value + refVal := ref.Node.GetReference() + assert.NotEmpty(t, refVal, "node at index %d should have a reference value", i) + } + + // Verify specific reference counts + assert.Len(t, idx.SchemaReferences, 3, "should have 3 schema references") + assert.Len(t, idx.ParameterReferences, 1, "should have 1 parameter reference") + assert.Len(t, idx.ResponseReferences, 1, "should have 1 response reference") + assert.Len(t, idx.ExampleReferences, 1, "should have 1 example reference") + assert.Len(t, idx.HeaderReferences, 1, "should have 1 header reference") + assert.Len(t, idx.LinkReferences, 1, "should have 1 link reference") + assert.Len(t, idx.RequestBodyReferences, 1, "should have 1 request body reference") + assert.Len(t, idx.CallbackReferences, 1, "should have 1 callback reference") +} + +func TestIndex_GetAllReferences_EmptyDoc_Success(t *testing.T) { + t.Parallel() + ctx := t.Context() + + yaml := ` +openapi: "3.1.0" +info: + title: Empty API + version: 1.0.0 +paths: {} +` + doc := unmarshalOpenAPI(t, ctx, yaml) + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + + require.NotNil(t, idx, "index should not be nil") + + allRefs := idx.GetAllReferences() + assert.Empty(t, allRefs, "should have no references in empty doc") +} + +func TestIndex_GetAllReferences_NilIndex_Success(t *testing.T) { + t.Parallel() + + var idx *openapi.Index = nil + allRefs := idx.GetAllReferences() + assert.Nil(t, allRefs, "should return nil for nil index") +} + +func TestBuildIndex_CircularRef_OneOfSelfRefWithBaseCases_Valid(t *testing.T) { + t.Parallel() + ctx := t.Context() + + // A recursive JSON-value-like type: oneOf with self-referencing branches (object/array) + // AND non-recursive base-case branches (string/number/boolean). + // Referenced from within an inline oneOf in a path response. + // This should be VALID because the oneOf has non-recursive branches. 
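+	// In other words, a cycle such as JsonValue -> additionalProperties/items -> JsonValue
+	// can always be terminated by choosing one of the non-recursive string/number/boolean
+	// branches of the oneOf, so finite instances exist and the cycle counts as valid.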
+ yaml := ` +openapi: "3.0.3" +info: + title: Test API + version: 1.0.0 +paths: + /test: + get: + operationId: getTest + responses: + "200": + description: OK + content: + application/json: + schema: + oneOf: + - type: object + properties: + data: + $ref: '#/components/schemas/JsonValue' + - type: object + properties: + items: + $ref: '#/components/schemas/JsonValue' +components: + schemas: + JsonValue: + nullable: true + oneOf: + - type: string + - type: number + - type: object + additionalProperties: + $ref: '#/components/schemas/JsonValue' + - type: array + items: + $ref: '#/components/schemas/JsonValue' + - type: boolean +` + doc := unmarshalOpenAPI(t, ctx, yaml) + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + + require.NotNil(t, idx, "index should not be nil") + + circularErrs := idx.GetCircularReferenceErrors() + assert.Empty(t, circularErrs, "oneOf with non-recursive base-case branches should be valid") +} diff --git a/openapi/info.go b/openapi/info.go index 7ac8a587..31e4c794 100644 --- a/openapi/info.go +++ b/openapi/info.go @@ -2,6 +2,8 @@ package openapi import ( "context" + "errors" + "fmt" "net/mail" "net/url" @@ -106,16 +108,16 @@ func (i *Info) Validate(ctx context.Context, opts ...validation.Option) []error errs := []error{} if core.Title.Present && i.Title == "" { - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("info.title is required"), core, core.Title)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("`info.title` is required"), core, core.Title)) } if core.Version.Present && i.Version == "" { - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("info.version is required"), core, core.Version)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("`info.version` is required"), core, core.Version)) } if core.TermsOfService.Present { if _, err := url.Parse(*i.TermsOfService); err != nil { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("info.termsOfService is not a valid uri: %s", err), core, core.TermsOfService)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("`info.termsOfService` is not a valid uri: %w", err), core, core.TermsOfService)) } } @@ -186,13 +188,13 @@ func (c *Contact) Validate(ctx context.Context, opts ...validation.Option) []err if core.URL.Present { if _, err := url.Parse(*c.URL); err != nil { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("contact.url is not a valid uri: %s", err), core, core.URL)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("contact.url is not a valid uri: %w", err), core, core.URL)) } } if core.Email.Present { if _, err := mail.ParseAddress(*c.Email); err != nil { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("contact.email is not a valid email address: %s", err), core, core.Email)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("contact.email is not a valid email address: %w", err), core, core.Email)) } } @@ -255,12 +257,12 @@ func (l *License) Validate(ctx context.Context, opts ...validation.Option) 
[]err errs := []error{} if core.Name.Present && l.Name == "" { - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("license.name is required"), core, core.Name)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("`license.name` is required"), core, core.Name)) } if core.URL.Present { if _, err := url.Parse(*l.URL); err != nil { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("license.url is not a valid uri: %s", err), core, core.URL)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("license.url is not a valid uri: %w", err), core, core.URL)) } } diff --git a/openapi/info_validate_test.go b/openapi/info_validate_test.go index b469342f..6b0eb66d 100644 --- a/openapi/info_validate_test.go +++ b/openapi/info_validate_test.go @@ -108,7 +108,7 @@ func TestInfo_Validate_Error(t *testing.T) { yml: ` version: 1.0.0 `, - wantErrs: []string{"[2:1] info.title is missing"}, + wantErrs: []string{"[2:1] error validation-required-field `info.title` is required"}, }, { name: "empty title", @@ -116,14 +116,14 @@ version: 1.0.0 title: "" version: 1.0.0 `, - wantErrs: []string{"[2:8] info.title is required"}, + wantErrs: []string{"[2:8] error validation-required-field `info.title` is required"}, }, { name: "missing version", yml: ` title: Test API `, - wantErrs: []string{"[2:1] info.version is missing"}, + wantErrs: []string{"[2:1] error validation-required-field `info.version` is required"}, }, { name: "empty version", @@ -131,7 +131,7 @@ title: Test API title: Test API version: "" `, - wantErrs: []string{"[3:10] info.version is required"}, + wantErrs: []string{"[3:10] error validation-required-field `info.version` is required"}, }, { name: "invalid termsOfService URI", @@ -140,7 +140,7 @@ title: Test API version: 1.0.0 termsOfService: ":invalid" `, - wantErrs: []string{"[4:17] info.termsOfService is not a valid uri: parse \":invalid\": missing protocol scheme"}, + wantErrs: []string{"[4:17] error validation-invalid-format `info.termsOfService` is not a valid uri: parse \":invalid\": missing protocol scheme"}, }, { name: "invalid contact URL", @@ -151,7 +151,7 @@ contact: name: Support url: ":invalid" `, - wantErrs: []string{"[6:8] contact.url is not a valid uri: parse \":invalid\": missing protocol scheme"}, + wantErrs: []string{"[6:8] error validation-invalid-format contact.url is not a valid uri: parse \":invalid\": missing protocol scheme"}, }, { name: "invalid contact email", @@ -162,7 +162,7 @@ contact: name: Support email: "not-an-email" `, - wantErrs: []string{"[6:10] contact.email is not a valid email address: mail: missing '@' or angle-addr"}, + wantErrs: []string{"[6:10] error validation-invalid-format contact.email is not a valid email address: mail: missing '@' or angle-addr"}, }, { name: "invalid license URL", @@ -173,7 +173,7 @@ license: name: MIT url: ":invalid" `, - wantErrs: []string{"[6:8] license.url is not a valid uri: parse \":invalid\": missing protocol scheme"}, + wantErrs: []string{"[6:8] error validation-invalid-format license.url is not a valid uri: parse \":invalid\": missing protocol scheme"}, }, { name: "missing license name", @@ -183,7 +183,7 @@ version: 1.0.0 license: url: https://opensource.org/licenses/MIT `, - wantErrs: []string{"[5:3] license.name is missing"}, + wantErrs: []string{"[5:3] error validation-required-field `license.name` is required"}, }, { name: 
"multiple validation errors", @@ -196,10 +196,10 @@ license: name: "" `, wantErrs: []string{ - "[2:8] info.title is required", - "[3:10] info.version is required", - "[5:10] contact.email is not a valid email address: mail: missing '@' or angle-addr", - "[7:9] license.name is required", + "[2:8] error validation-required-field `info.title` is required", + "[3:10] error validation-required-field `info.version` is required", + "[5:10] error validation-invalid-format contact.email is not a valid email address: mail: missing '@' or angle-addr", + "[7:9] error validation-required-field `license.name` is required", }, }, } @@ -326,7 +326,7 @@ func TestContact_Validate_Error(t *testing.T) { name: Support url: ":invalid" `, - wantErrs: []string{"[3:6] contact.url is not a valid uri: parse \":invalid\": missing protocol scheme"}, + wantErrs: []string{"[3:6] error validation-invalid-format contact.url is not a valid uri: parse \":invalid\": missing protocol scheme"}, }, { name: "invalid email", @@ -334,7 +334,7 @@ url: ":invalid" name: Support email: "not-an-email" `, - wantErrs: []string{"[3:8] contact.email is not a valid email address: mail: missing '@' or angle-addr"}, + wantErrs: []string{"[3:8] error validation-invalid-format contact.email is not a valid email address: mail: missing '@' or angle-addr"}, }, { name: "invalid URL with spaces", @@ -342,7 +342,7 @@ email: "not-an-email" name: Support url: ":invalid url" `, - wantErrs: []string{"[3:6] contact.url is not a valid uri: parse \":invalid url\": missing protocol scheme"}, + wantErrs: []string{"[3:6] error validation-invalid-format contact.url is not a valid uri: parse \":invalid url\": missing protocol scheme"}, }, { name: "invalid email missing @", @@ -350,7 +350,7 @@ url: ":invalid url" name: Support email: "supportexample.com" `, - wantErrs: []string{"[3:8] contact.email is not a valid email address: mail: missing '@' or angle-addr"}, + wantErrs: []string{"[3:8] error validation-invalid-format contact.email is not a valid email address: mail: missing '@' or angle-addr"}, }, { name: "multiple validation errors", @@ -360,8 +360,8 @@ url: ":invalid" email: "invalid-email" `, wantErrs: []string{ - "[3:6] contact.url is not a valid uri: parse \":invalid\": missing protocol scheme", - "[4:8] contact.email is not a valid email address: mail: missing '@' or angle-addr", + "[3:6] error validation-invalid-format contact.url is not a valid uri: parse \":invalid\": missing protocol scheme", + "[4:8] error validation-invalid-format contact.email is not a valid email address: mail: missing '@' or angle-addr", }, }, } @@ -476,7 +476,7 @@ func TestLicense_Validate_Error(t *testing.T) { yml: ` url: https://opensource.org/licenses/MIT `, - wantErrs: []string{"[2:1] license.name is missing"}, + wantErrs: []string{"[2:1] error validation-required-field `license.name` is required"}, }, { name: "empty name", @@ -484,7 +484,7 @@ url: https://opensource.org/licenses/MIT name: "" url: https://opensource.org/licenses/MIT `, - wantErrs: []string{"[2:7] license.name is required"}, + wantErrs: []string{"[2:7] error validation-required-field `license.name` is required"}, }, { name: "invalid URL", @@ -492,7 +492,7 @@ url: https://opensource.org/licenses/MIT name: MIT url: ":invalid" `, - wantErrs: []string{"[3:6] license.url is not a valid uri: parse \":invalid\": missing protocol scheme"}, + wantErrs: []string{"[3:6] error validation-invalid-format license.url is not a valid uri: parse \":invalid\": missing protocol scheme"}, }, { name: "invalid URL with spaces", 
@@ -500,7 +500,7 @@ url: ":invalid" name: MIT url: ":invalid url" `, - wantErrs: []string{"[3:6] license.url is not a valid uri: parse \":invalid url\": missing protocol scheme"}, + wantErrs: []string{"[3:6] error validation-invalid-format license.url is not a valid uri: parse \":invalid url\": missing protocol scheme"}, }, { name: "multiple validation errors", @@ -509,8 +509,8 @@ name: "" url: ":invalid" `, wantErrs: []string{ - "[2:7] license.name is required", - "[3:6] license.url is not a valid uri: parse \":invalid\": missing protocol scheme", + "[2:7] error validation-required-field `license.name` is required", + "[3:6] error validation-invalid-format license.url is not a valid uri: parse \":invalid\": missing protocol scheme", }, }, } diff --git a/openapi/inline.go b/openapi/inline.go index a16650d2..096569d3 100644 --- a/openapi/inline.go +++ b/openapi/inline.go @@ -349,7 +349,7 @@ func inlineReference[T any, V interfaces.Validator[T], C marshaller.CoreModeler] recursiveOpts := ResolveOptions{ RootDocument: opts.RootDocument, TargetDocument: targetDocInfo.ResolvedDocument, - TargetLocation: targetDocInfo.AbsoluteReference, + TargetLocation: targetDocInfo.AbsoluteDocumentPath, } if err := inlineObject(ctx, ref, doc, recursiveOpts, collectedDefs, defHashes); err != nil { return fmt.Errorf("failed to inline nested references in %s: %w", ref.GetReference(), err) diff --git a/openapi/links.go b/openapi/links.go index f9c13d7b..5328b86e 100644 --- a/openapi/links.go +++ b/openapi/links.go @@ -2,6 +2,8 @@ package openapi import ( "context" + "errors" + "fmt" "net/url" "github.com/speakeasy-api/openapi/expression" @@ -11,7 +13,6 @@ import ( "github.com/speakeasy-api/openapi/openapi/core" "github.com/speakeasy-api/openapi/sequencedmap" "github.com/speakeasy-api/openapi/validation" - walkpkg "github.com/speakeasy-api/openapi/walk" ) type Link struct { @@ -101,70 +102,40 @@ func (l *Link) Validate(ctx context.Context, opts ...validation.Option) []error core := l.GetCore() errs := []error{} - op := validation.NewOptions(opts...) - o := validation.GetContextObject[OpenAPI](op) - if core.OperationID.Present && core.OperationRef.Present { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("operationID and operationRef are mutually exclusive"), core, core.OperationID)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationMutuallyExclusiveFields, errors.New("operationID and operationRef are mutually exclusive"), core, core.OperationID)) } - if l.OperationID != nil { - if o == nil { - panic("OpenAPI object is required to validate operationId") - } - - foundOp := false - - for item := range Walk(ctx, o) { - err := item.Match(Matcher{ - Operation: func(o *Operation) error { - if o.GetOperationID() == "" { - return nil - } - - if o.GetOperationID() == l.GetOperationID() { - foundOp = true - return walkpkg.ErrTerminate - } - return nil - }, - }) - if err != nil { - break - } - } - - if !foundOp { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("link.operationId value %s does not exist in document", *l.OperationID), core, core.OperationID)) - } - } + // Note: operationId validation has been moved to the linter rule "semantic-link-operation" + // This allows validation to occur after the index is built, enabling checks against + // operations in external documents that may be referenced later. // TODO should we validate the reference resolves here? Or as part of the resolution operation? 
Or make it optional? if l.OperationRef != nil { if _, err := url.Parse(*l.OperationRef); err != nil { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("link.operationRef is not a valid uri: %s", err), core, core.OperationRef)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("link.operationRef is not a valid uri: %w", err), core, core.OperationRef)) } } for key, exp := range l.GetParameters().All() { _, expression, err := expression.GetValueOrExpressionValue(exp) if err != nil { - errs = append(errs, validation.NewMapValueError(validation.NewValueValidationError("link.parameters expression is invalid: %s", err.Error()), core, core.Parameters, key)) + errs = append(errs, validation.NewMapValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("link.parameters expression is invalid: %w", err), core, core.Parameters, key)) } if expression != nil { if err := expression.Validate(); err != nil { - errs = append(errs, validation.NewMapValueError(validation.NewValueValidationError("link.parameters expression is invalid: %s", err.Error()), core, core.Parameters, key)) + errs = append(errs, validation.NewMapValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("link.parameters expression is invalid: %w", err), core, core.Parameters, key)) } } } _, rbe, err := expression.GetValueOrExpressionValue(l.RequestBody) if err != nil { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("link.requestBody expression is invalid: %s", err.Error()), core, core.RequestBody)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("link.requestBody expression is invalid: %w", err), core, core.RequestBody)) } if rbe != nil { if err := rbe.Validate(); err != nil { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("link.requestBody expression is invalid: %s", err.Error()), core, core.RequestBody)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("link.requestBody expression is invalid: %w", err), core, core.RequestBody)) } } diff --git a/openapi/links_validate_test.go b/openapi/links_validate_test.go index ce2e2fea..ae032790 100644 --- a/openapi/links_validate_test.go +++ b/openapi/links_validate_test.go @@ -177,7 +177,7 @@ server: description: Invalid server without URL description: Link with invalid server `, - wantErrs: []string{"[4:3] server.url is missing"}, + wantErrs: []string{"[4:3] error validation-required-field `server.url` is required"}, }, { name: "invalid_operation_ref_uri", @@ -240,30 +240,10 @@ description: Invalid request body expression syntax - empty query name } } -func TestLink_Validate_OperationID_NotFound(t *testing.T) { - t.Parallel() - - // Create a minimal OpenAPI document with operations - openAPIDoc := &openapi.OpenAPI{ - Paths: openapi.NewPaths(), - } - - // Add a path with an operation - pathItem := openapi.NewPathItem() - operation := &openapi.Operation{ - OperationID: pointer.From("existingOperation"), - } - pathItem.Set("get", operation) - openAPIDoc.Paths.Set("/users/{id}", &openapi.ReferencedPathItem{Object: pathItem}) - - link := &openapi.Link{ - OperationID: pointer.From("nonExistentOperation"), - } - - errs := link.Validate(t.Context(), validation.WithContextObject(openAPIDoc)) - require.NotEmpty(t, errs, 
"Expected validation error for non-existent operationId") - require.Contains(t, errs[0].Error(), "link.operationId value nonExistentOperation does not exist in document") -} +// Note: TestLink_Validate_OperationID_NotFound has been removed because operationId validation +// has been moved to the linter rule "semantic-link-operation" (see link_operation.go in linter/rules). +// This allows validation to occur after the index is built, enabling checks against operations +// in external documents that may be referenced later. func TestLink_Validate_OperationID_Found(t *testing.T) { t.Parallel() @@ -289,17 +269,9 @@ func TestLink_Validate_OperationID_Found(t *testing.T) { require.Empty(t, errs, "Expected no validation errors for existing operationId") } -func TestLink_Validate_OperationID_WithoutOpenAPIContext_Panics(t *testing.T) { - t.Parallel() - - link := &openapi.Link{ - OperationID: pointer.From("getUserById"), - } - - require.Panics(t, func() { - link.Validate(t.Context()) - }, "Expected panic when validating operationId without OpenAPI context") -} +// Note: TestLink_Validate_OperationID_WithoutOpenAPIContext_Panics has been removed because +// operationId validation has been moved to the linter rule "semantic-link-operation". +// Link.Validate() no longer requires OpenAPI context for operationId validation. func TestLink_Validate_ComplexExpressions(t *testing.T) { t.Parallel() diff --git a/openapi/linter/README.md b/openapi/linter/README.md new file mode 100644 index 00000000..79b6c057 --- /dev/null +++ b/openapi/linter/README.md @@ -0,0 +1,420 @@ +# OpenAPI Linter + +The OpenAPI linter validates OpenAPI specifications for style, consistency, +and best practices beyond basic spec validation. + +## Quick Start + +### CLI + +```bash +# Lint an OpenAPI specification +openapi spec lint api.yaml + +# Output as JSON +openapi spec lint --format json api.yaml + +# Disable specific rules +openapi spec lint --disable semantic-path-params api.yaml +``` + +### Go API + +```go +import ( + "context" + "fmt" + "os" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + openapiLinter "github.com/speakeasy-api/openapi/openapi/linter" +) + +func main() { + ctx := context.Background() + + // Load your OpenAPI document + f, _ := os.Open("api.yaml") + doc, validationErrors, _ := openapi.Unmarshal(ctx, f) + + // Create linter with default configuration + config := linter.NewConfig() + lint := openapiLinter.NewLinter(config) + + // Run linting + output, _ := lint.Lint(ctx, linter.NewDocumentInfo(doc, "api.yaml"), validationErrors, nil) + + // Print results + fmt.Println(output.FormatText()) +} +``` + +## Available Rules + + + +| Rule | Severity | Description | +| ----------------------------------------------------------------------------------------------- | -------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | +| `oas-schema-check` | error | Schemas must use type-appropriate constraints and have valid constraint values. For example, string types should use minLength/maxLength/pattern, numbers should use minimum/maximum/multipleOf, and constraint values must be logically valid (e.g., maxLength >= minLength). 
| +| `oas3-example-missing` | hint | Schemas, parameters, headers, and media types should include example values to illustrate expected data formats. Examples improve documentation quality, help developers understand how to use the API correctly, and enable better testing and validation. | +| `oas3-no-nullable` | warning | The nullable keyword is not supported in OpenAPI 3.1+ and should be replaced with a type array that includes null (e.g., type: [string, null]). This change aligns OpenAPI 3.1 with JSON Schema Draft 2020-12, which uses type arrays to express nullable values. | +| `owasp-additional-properties-constrained` | hint | Schemas with additionalProperties set to true or a schema should define maxProperties to limit object size. Without size limits, APIs are vulnerable to resource exhaustion attacks where clients send excessively large objects. | +| `owasp-array-limit` | error | Array schemas must specify maxItems to prevent resource exhaustion attacks. Without array size limits, malicious clients could send extremely large arrays that consume excessive memory or processing time. | +| `owasp-auth-insecure-schemes` | error | Authentication schemes using outdated or insecure methods must be avoided or upgraded. Insecure authentication schemes like API keys in query parameters or HTTP Basic over HTTP expose credentials and create security vulnerabilities. | +| `owasp-define-error-responses-401` | warning | Operations should define a 401 Unauthorized response with a proper schema to handle authentication failures. Documenting authentication error responses helps clients implement proper error handling and understand when credentials are invalid or missing. | +| `owasp-define-error-responses-429` | warning | Operations should define a 429 Too Many Requests response with a proper schema to indicate rate limiting. Rate limit responses help clients understand when they've exceeded usage thresholds and need to slow down requests. | +| `owasp-define-error-responses-500` | warning | Operations should define a 500 Internal Server Error response with a proper schema to handle unexpected failures. Documenting server error responses helps clients distinguish between client-side and server-side problems. | +| `owasp-define-error-validation` | warning | Operations should define validation error responses (400, 422, or 4XX) to indicate request data problems. Validation error responses help clients understand when and why their request data is invalid or malformed. | +| `owasp-integer-format` | error | Integer schemas must specify a format of int32 or int64 to define the expected size and range. Explicit integer formats prevent overflow vulnerabilities and ensure clients and servers agree on numeric boundaries. | +| `owasp-integer-limit` | error | Integer schemas must specify minimum and maximum values (or exclusive variants) to prevent unbounded inputs. Without numeric limits, APIs are vulnerable to overflow attacks and unexpected behavior from extreme values. | +| `owasp-jwt-best-practices` | error | Security schemes using OAuth2 or JWT must explicitly declare support for RFC8725 (JWT Best Current Practices) in the description. RFC8725 compliance ensures JWTs are validated properly and protected against common attacks like algorithm confusion. | +| `owasp-no-additional-properties` | error | Object schemas must not allow arbitrary additional properties (set additionalProperties to false or omit it). 
Allowing unexpected properties can lead to mass assignment vulnerabilities where attackers inject unintended fields. | +| `owasp-no-api-keys-in-url` | error | API keys must not be passed via URL parameters (query or path) as they are logged and cached. URL-based API keys appear in browser history, server logs, and proxy caches, creating security exposure. | +| `owasp-no-credentials-in-url` | error | URL parameters must not contain credentials like API keys, passwords, or secrets. Credentials in URLs are logged by servers, proxies, and browsers, creating significant security risks. | +| `owasp-no-http-basic` | error | Security schemes must not use HTTP Basic authentication without additional security layers. HTTP Basic sends credentials in easily-decoded base64 encoding, making it vulnerable to interception without HTTPS. | +| `owasp-no-numeric-ids` | error | Resource identifiers must use random values like UUIDs instead of sequential numeric IDs. Sequential IDs enable enumeration attacks where attackers can guess valid IDs and access unauthorized resources. | +| `owasp-protection-global-safe` | hint | Safe operations (GET, HEAD) should be protected by security schemes or explicitly marked as public. Unprotected read operations may expose sensitive data to unauthorized users. | +| `owasp-protection-global-unsafe` | error | Unsafe operations (POST, PUT, PATCH, DELETE) must be protected by security schemes to prevent unauthorized modifications. Write operations without authentication create serious security vulnerabilities allowing data tampering. | +| `owasp-protection-global-unsafe-strict` | hint | Unsafe operations (POST, PUT, PATCH, DELETE) must be protected by non-empty security schemes without explicit opt-outs. Strict authentication requirements ensure write operations cannot bypass security even with empty security arrays. | +| `owasp-rate-limit` | error | 2XX and 4XX responses must define rate limiting headers (X-RateLimit-Limit, X-RateLimit-Remaining) to prevent API overload. Rate limit headers help clients manage their usage and avoid hitting limits. | +| `owasp-rate-limit-retry-after` | error | 429 Too Many Requests responses must include a Retry-After header indicating when clients can retry. Retry-After headers prevent thundering herd problems by telling clients exactly when to resume requests. | +| `owasp-security-hosts-https-oas3` | error | Server URLs must begin with https:// as the only permitted protocol. Using HTTPS is essential for protecting API traffic from interception, tampering, and eavesdropping attacks. | +| `owasp-string-limit` | error | String schemas must specify maxLength, const, or enum to prevent unbounded data. Without string length limits, APIs are vulnerable to resource exhaustion from extremely long inputs. | +| `owasp-string-restricted` | error | String schemas must specify format, const, enum, or pattern to restrict content. String restrictions prevent injection attacks and ensure data conforms to expected formats. | +| `semantic-duplicated-enum` | warning | Enum arrays should not contain duplicate values. Duplicate enum values are redundant and can cause confusion or unexpected behavior in client code generation and validation. | +| `semantic-link-operation` | error | Link operationId must reference an existing operation in the API specification. This ensures that links point to valid operations, including those defined in external documents that are referenced in the specification. 
| +| `semantic-no-ambiguous-paths` | error | Path definitions must be unambiguous and distinguishable from each other to ensure correct request routing. Ambiguous paths like `/users/{id}` and `/users/{name}` can cause runtime routing conflicts since both match the same URL pattern. | +| `semantic-no-eval-in-markdown` | error | Markdown descriptions must not contain eval() statements, which pose serious security risks. Including eval() in documentation could enable code injection attacks if the documentation is rendered in contexts that execute JavaScript. | +| `semantic-no-script-tags-in-markdown` | error | Markdown descriptions must not contain " +paths: + /users: + get: + description: "safe" + responses: + '200': + description: ok + /admin: + get: + description: "" + responses: + '200': + description: ok +` + + expectedErrors := []string{ + "[6:16] error semantic-no-script-tags-in-markdown description contains content with ` 0) { + schema := paramObj.GetSchema() + if schema != nil { + schemasWithExamplesElsewhere[schema] = true + } + } + } + + // Collect schemas from headers with examples + allHeaders := docInfo.Index.GetAllHeaders() + for _, headerNode := range allHeaders { + header := headerNode.Node + if header == nil { + continue + } + headerObj := header.GetObject() + if headerObj == nil { + continue + } + // If header has example, mark its schema as having an example elsewhere + if headerObj.GetExample() != nil || (headerObj.GetExamples() != nil && headerObj.GetExamples().Len() > 0) { + schema := headerObj.GetSchema() + if schema != nil { + schemasWithExamplesElsewhere[schema] = true + } + } + } + + // Collect schemas from media types with examples + for _, mtNode := range docInfo.Index.MediaTypes { + mt := mtNode.Node + if mt == nil { + continue + } + // If media type has example, mark its schema as having an example elsewhere + if mt.GetExample() != nil || (mt.GetExamples() != nil && mt.GetExamples().Len() > 0) { + schema := mt.GetSchema() + if schema != nil { + schemasWithExamplesElsewhere[schema] = true + } + } + } + + // Check schemas for missing examples + for _, schemaNode := range docInfo.Index.GetAllSchemas() { + refSchema := schemaNode.Node + schema := refSchema.GetSchema() + if schema == nil { + continue + } + + // Skip if this schema is used by a parameter/header/media type that has an example + if schemasWithExamplesElsewhere[refSchema] { + continue + } + + // Skip if schema has example or examples + if schema.GetExample() != nil || len(schema.GetExamples()) > 0 { + continue + } + + // Skip if schema has const, default, or enum (these serve as implicit examples) + if schema.GetConst() != nil || schema.GetDefault() != nil || len(schema.GetEnum()) > 0 { + continue + } + + // Skip primitive types and schemas without type + types := schema.GetType() + if len(types) == 0 { + continue + } + + // Skip boolean, number, integer, string types (unless they have no constraints) + // These are often building blocks and don't need examples themselves + isPrimitive := false + for _, t := range types { + if t == "boolean" || t == "number" || t == "integer" || t == "string" { + isPrimitive = true + break + } + } + if isPrimitive { + continue + } + + // Report missing example + if rootNode := refSchema.GetRootNode(); rootNode != nil { + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + RuleOAS3ExampleMissing, + errors.New("schema is missing `example` or `examples`"), + rootNode, + )) + } + } + + // Check parameters for missing examples + for 
_, paramNode := range allParameters { + param := paramNode.Node + if param == nil { + continue + } + + paramObj := param.GetObject() + if paramObj == nil { + continue + } + + // Skip if parameter has example or examples + if paramObj.GetExample() != nil { + continue + } + paramExamples := paramObj.GetExamples() + if paramExamples != nil && paramExamples.Len() > 0 { + continue + } + + // Report missing example + if rootNode := param.GetRootNode(); rootNode != nil { + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + RuleOAS3ExampleMissing, + errors.New("parameter is missing `example` or `examples`"), + rootNode, + )) + } + } + + // Check headers for missing examples + for _, headerNode := range allHeaders { + header := headerNode.Node + if header == nil { + continue + } + + headerObj := header.GetObject() + if headerObj == nil { + continue + } + + // Skip if header has example or examples + if headerObj.GetExample() != nil { + continue + } + headerExamples := headerObj.GetExamples() + if headerExamples != nil && headerExamples.Len() > 0 { + continue + } + + // Report missing example + if rootNode := header.GetRootNode(); rootNode != nil { + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + RuleOAS3ExampleMissing, + errors.New("header is missing `example` or `examples`"), + rootNode, + )) + } + } + + // Check media types for missing examples + for _, mtNode := range docInfo.Index.MediaTypes { + mt := mtNode.Node + if mt == nil { + continue + } + + // Skip if media type has example or examples + if mt.GetExample() != nil { + continue + } + mtExamples := mt.GetExamples() + if mtExamples != nil && mtExamples.Len() > 0 { + continue + } + + // Report missing example + if rootNode := mt.GetRootNode(); rootNode != nil { + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + RuleOAS3ExampleMissing, + errors.New("media type is missing `example` or `examples`"), + rootNode, + )) + } + } + + return errs +} diff --git a/openapi/linter/rules/oas3_example_missing_test.go b/openapi/linter/rules/oas3_example_missing_test.go new file mode 100644 index 00000000..4e123ecc --- /dev/null +++ b/openapi/linter/rules/oas3_example_missing_test.go @@ -0,0 +1,296 @@ +package rules_test + +import ( + "strings" + "testing" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/openapi/linter/rules" + "github.com/speakeasy-api/openapi/references" + "github.com/speakeasy-api/openapi/validation" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestOAS3ExampleMissingRule_ValidCases(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + }{ + { + name: "parameter with example", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users/{id}: + parameters: + - name: id + in: path + required: true + schema: + type: string + example: "123" +`, + }, + { + name: "parameter with examples", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users/{id}: + parameters: + - name: id + in: path + required: true + schema: + type: string + examples: + example1: + value: "123" +`, + }, + { + name: "header with example", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '200': + description: OK + headers: + X-Rate-Limit: + schema: + type: integer + example: 100 +`, + }, 
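+		// The media type case below relies on the rule treating an example on a media type
+		// as also covering the inline schema it wraps (tracked via schemasWithExamplesElsewhere
+		// in the rule implementation), so neither the media type nor the schema is flagged.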
+ { + name: "media type with example", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + post: + requestBody: + content: + application/json: + schema: + type: object + example: + name: John +`, + }, + { + name: "schema with examples array", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + User: + type: object + properties: + name: + type: string + examples: + - name: John +`, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.OAS3ExampleMissingRule{} + config := &linter.RuleConfig{} + + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + assert.Empty(t, errs) + }) + } +} + +func TestOAS3ExampleMissingRule_Violations(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + expectedCount int + }{ + { + name: "parameter without example", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users/{id}: + parameters: + - name: id + in: path + required: true + schema: + type: string +`, + expectedCount: 1, + }, + { + name: "header without example", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '200': + description: OK + headers: + X-Rate-Limit: + schema: + type: integer +`, + expectedCount: 1, + }, + { + name: "media type without example", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + post: + requestBody: + content: + application/json: + schema: + type: object +`, + expectedCount: 2, // Both schema and media type flagged + }, + { + name: "schema without example", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + User: + type: object + properties: + name: + type: string +`, + expectedCount: 1, + }, + { + name: "multiple violations", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users/{id}: + parameters: + - name: id + in: path + required: true + schema: + type: string + get: + responses: + '200': + description: OK + headers: + X-Rate-Limit: + schema: + type: integer + content: + application/json: + schema: + type: object +`, + expectedCount: 4, // parameter, header, media type, and schema flagged + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.OAS3ExampleMissingRule{} + config := &linter.RuleConfig{} + + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + + require.Len(t, errs, tt.expectedCount) + for _, err := range errs { + assert.Contains(t, err.Error(), "missing `example` or `examples`") + } + }) + } +} + +func TestOAS3ExampleMissingRule_RuleMetadata(t *testing.T) { + t.Parallel() + + rule := &rules.OAS3ExampleMissingRule{} + + assert.Equal(t, "oas3-example-missing", rule.ID()) + assert.Equal(t, rules.CategoryStyle, rule.Category()) + assert.NotEmpty(t, 
rule.Description()) + assert.NotEmpty(t, rule.Link()) + assert.Equal(t, validation.SeverityHint, rule.DefaultSeverity()) + assert.Nil(t, rule.Versions()) +} diff --git a/openapi/linter/rules/oas3_no_nullable.go b/openapi/linter/rules/oas3_no_nullable.go new file mode 100644 index 00000000..acc3faed --- /dev/null +++ b/openapi/linter/rules/oas3_no_nullable.go @@ -0,0 +1,75 @@ +package rules + +import ( + "context" + "errors" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/validation" +) + +const RuleOAS3NoNullable = "oas3-no-nullable" + +type OAS3NoNullableRule struct{} + +func (r *OAS3NoNullableRule) ID() string { + return RuleOAS3NoNullable +} +func (r *OAS3NoNullableRule) Category() string { + return CategorySchemas +} +func (r *OAS3NoNullableRule) Description() string { + return "The `nullable` keyword is not supported in OpenAPI 3.1+ and should be replaced with a type array that includes null (e.g., `type: [string, null]`). This change aligns OpenAPI 3.1 with JSON Schema Draft 2020-12, which uses type arrays to express nullable values." +} +func (r *OAS3NoNullableRule) Summary() string { + return "OpenAPI 3.1 must not use the `nullable` keyword." +} +func (r *OAS3NoNullableRule) HowToFix() string { + return "Replace `nullable` with a type array that includes `null` (e.g., `type: [string, null]`)." +} +func (r *OAS3NoNullableRule) Link() string { + return "https://github.com/speakeasy-api/openapi/blob/main/openapi/linter/README.md#oas3-no-nullable" +} +func (r *OAS3NoNullableRule) DefaultSeverity() validation.Severity { + return validation.SeverityWarning +} +func (r *OAS3NoNullableRule) Versions() []string { + return []string{"3.1"} // Only applies to OpenAPI 3.1+ +} + +func (r *OAS3NoNullableRule) Run(ctx context.Context, docInfo *linter.DocumentInfo[*openapi.OpenAPI], config *linter.RuleConfig) []error { + if docInfo == nil || docInfo.Document == nil || docInfo.Index == nil { + return nil + } + + var errs []error + + // Check all schemas for nullable keyword + for _, schemaNode := range docInfo.Index.GetAllSchemas() { + refSchema := schemaNode.Node + schema := refSchema.GetSchema() + if schema == nil { + continue + } + + coreSchema := schema.GetCore() + if coreSchema == nil { + continue + } + + // Check if nullable field is present in the YAML + if coreSchema.Nullable.Present { + if rootNode := refSchema.GetRootNode(); rootNode != nil { + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + RuleOAS3NoNullable, + errors.New("the `nullable` keyword is not supported in OpenAPI 3.1 - use `type: [actualType, \"null\"]` instead"), + rootNode, + )) + } + } + } + + return errs +} diff --git a/openapi/linter/rules/oas3_no_nullable_test.go b/openapi/linter/rules/oas3_no_nullable_test.go new file mode 100644 index 00000000..05fc8b97 --- /dev/null +++ b/openapi/linter/rules/oas3_no_nullable_test.go @@ -0,0 +1,186 @@ +package rules_test + +import ( + "strings" + "testing" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/openapi/linter/rules" + "github.com/speakeasy-api/openapi/references" + "github.com/speakeasy-api/openapi/validation" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestOAS3NoNullableRule_ValidCases(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + }{ + { + name: "OpenAPI 3.1 with type array including null", 
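+			// A type array that includes "null" is the OpenAPI 3.1 / JSON Schema 2020-12
+			// replacement for the removed `nullable` keyword, so this case must not be flagged.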
+ yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + NullableString: + type: [string, "null"] + maxLength: 100 +paths: {} +`, + }, + { + name: "OpenAPI 3.1 without nullable", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + Name: + type: string + maxLength: 100 +paths: {} +`, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.OAS3NoNullableRule{} + config := &linter.RuleConfig{} + + // Build index for the rule + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + assert.Empty(t, errs) + }) + } +} + +func TestOAS3NoNullableRule_Violations(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + expectedCount int + }{ + { + name: "OpenAPI 3.1 with nullable true", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + NullableName: + type: string + nullable: true + maxLength: 100 +paths: {} +`, + expectedCount: 1, + }, + { + name: "OpenAPI 3.1 with nullable false also flagged", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + Name: + type: string + nullable: false + maxLength: 100 +paths: {} +`, + expectedCount: 1, + }, + { + name: "OpenAPI 3.1 with multiple nullable schemas", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + NullableString: + type: string + nullable: true + maxLength: 100 + NullableInteger: + type: integer + nullable: true + format: int32 +paths: {} +`, + expectedCount: 2, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.OAS3NoNullableRule{} + config := &linter.RuleConfig{} + + // Build index for the rule + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + + require.Len(t, errs, tt.expectedCount) + for _, err := range errs { + assert.Contains(t, err.Error(), "nullable") + assert.Contains(t, err.Error(), "3.1") + } + }) + } +} + +func TestOAS3NoNullableRule_RuleMetadata(t *testing.T) { + t.Parallel() + + rule := &rules.OAS3NoNullableRule{} + + assert.Equal(t, "oas3-no-nullable", rule.ID()) + assert.Equal(t, rules.CategorySchemas, rule.Category()) + assert.NotEmpty(t, rule.Description()) + assert.NotEmpty(t, rule.Link()) + assert.Equal(t, validation.SeverityWarning, rule.DefaultSeverity()) + assert.Equal(t, []string{"3.1"}, rule.Versions()) +} diff --git a/openapi/linter/rules/oas3_no_nullable_version_test.go b/openapi/linter/rules/oas3_no_nullable_version_test.go new file mode 100644 index 00000000..f7cad4dd --- /dev/null +++ b/openapi/linter/rules/oas3_no_nullable_version_test.go @@ -0,0 +1,123 @@ +package rules_test + +import ( + "strings" + "testing" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + openapiLinter 
"github.com/speakeasy-api/openapi/openapi/linter" + "github.com/speakeasy-api/openapi/references" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +// TestOAS3NoNullableRule_VersionFiltering verifies that the linter engine +// properly filters rules based on their Versions() method +func TestOAS3NoNullableRule_VersionFiltering(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + expectErrors bool + description string + }{ + { + name: "OpenAPI 3.1.0 - rule should run and detect violation", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + NullableName: + type: string + nullable: true +paths: {} +`, + expectErrors: true, + description: "OpenAPI 3.1.0 should trigger the rule", + }, + { + name: "OpenAPI 3.0.0 - rule should not run", + yaml: ` +openapi: 3.0.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + NullableName: + type: string + nullable: true +paths: {} +`, + expectErrors: false, + description: "OpenAPI 3.0.0 should not trigger the rule (version filtering)", + }, + { + name: "OpenAPI 3.0.3 - rule should not run", + yaml: ` +openapi: 3.0.3 +info: + title: Test + version: 1.0.0 +components: + schemas: + NullableName: + type: string + nullable: true +paths: {} +`, + expectErrors: false, + description: "OpenAPI 3.0.3 should not trigger the rule (version filtering)", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err, "unmarshal should succeed") + + // Build index + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + + // Create linter with default config + config := &linter.Config{ + Extends: []string{"all"}, + } + l, err := openapiLinter.NewLinter(config) + require.NoError(t, err, "should create linter") + + // Lint the document + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + output, err := l.Lint(ctx, docInfo, nil, nil) + require.NoError(t, err, "lint should succeed") + + // Filter results to only oas3-no-nullable rule + var ruleResults []error + for _, result := range output.Results { + // Check if this is a validation error from our rule + if strings.Contains(result.Error(), "nullable") && + strings.Contains(result.Error(), "3.1") { + ruleResults = append(ruleResults, result) + } + } + + if tt.expectErrors { + assert.NotEmpty(t, ruleResults, tt.description) + } else { + assert.Empty(t, ruleResults, tt.description) + } + }) + } +} diff --git a/openapi/linter/rules/oas_schema_check.go b/openapi/linter/rules/oas_schema_check.go new file mode 100644 index 00000000..ddd5e824 --- /dev/null +++ b/openapi/linter/rules/oas_schema_check.go @@ -0,0 +1,849 @@ +package rules + +import ( + "context" + "errors" + "fmt" + "regexp" + "strings" + + "github.com/speakeasy-api/openapi/jsonschema/oas3" + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/validation" + "gopkg.in/yaml.v3" +) + +var _ linter.Rule = (*OASSchemaCheckRule)(nil) + +// OASSchemaCheckRule validates that schemas contain appropriate constraints for their types +type OASSchemaCheckRule struct{} + +func (r *OASSchemaCheckRule) ID() string { + return "oas-schema-check" +} + +func (r *OASSchemaCheckRule) Category() string { + return CategorySchemas +} + +func (r 
*OASSchemaCheckRule) Description() string { + return "Schemas must use type-appropriate constraints and have valid constraint values. For example, `string` types should use `minLength`/`maxLength`/`pattern`, numbers should use `minimum`/`maximum`/`multipleOf`, and constraint values must be logically valid (e.g., `maxLength` >= `minLength`)." +} + +func (r *OASSchemaCheckRule) Summary() string { + return "Schemas must use type-appropriate constraints with valid values." +} + +func (r *OASSchemaCheckRule) HowToFix() string { + return "Add or correct constraints appropriate to each schema type (e.g., `minLength`/`maxLength` for strings, `minimum`/`maximum` for numbers)." +} + +func (r *OASSchemaCheckRule) Link() string { + return "https://quobix.com/vacuum/rules/schemas/oas-schema-check/" +} + +func (r *OASSchemaCheckRule) DefaultSeverity() validation.Severity { + return validation.SeverityError +} + +func (r *OASSchemaCheckRule) Versions() []string { + return nil // applies to all versions +} + +func (r *OASSchemaCheckRule) Run(ctx context.Context, docInfo *linter.DocumentInfo[*openapi.OpenAPI], config *linter.RuleConfig) []error { + var errs []error + + for _, schemaNode := range docInfo.Index.GetAllSchemas() { + refSchema := schemaNode.Node + schema := refSchema.GetSchema() + if schema == nil { + continue + } + + coreSchema := schema.GetCore() + if coreSchema == nil { + continue + } + + schemaTypes := schema.GetType() + + // Validate each type + for _, schemaType := range schemaTypes { + typeStr := string(schemaType) + switch typeStr { + case "string": + errs = append(errs, r.validateString(ctx, schema, refSchema, docInfo, config)...) + case "integer", "number": + errs = append(errs, r.validateNumber(ctx, schema, refSchema, docInfo, config)...) + case "boolean": + errs = append(errs, r.validateBoolean(ctx, schema, refSchema, docInfo, config)...) + case "array": + errs = append(errs, r.validateArray(ctx, schema, refSchema, docInfo, config)...) + case "object": + errs = append(errs, r.validateObject(ctx, schema, refSchema, docInfo, config)...) + case "null": + errs = append(errs, r.validateNull(ctx, schema, refSchema, schemaTypes, docInfo, config)...) + default: + // Unknown type + if coreSchema.Type.Present { + if rootNode := refSchema.GetRootNode(); rootNode != nil { + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + r.ID(), + fmt.Errorf("unknown schema type: `%s`", typeStr), + rootNode, + )) + } + } + } + } + + // Validate const value matches declared types + if len(schemaTypes) > 0 { + errs = append(errs, r.validateConst(ctx, schema, refSchema, schemaTypes, docInfo, config)...) + } + + // Validate enum and const are not conflicting + errs = append(errs, r.validateEnumConst(ctx, schema, refSchema, docInfo, config)...) + + // Validate discriminator property existence + errs = append(errs, r.validateDiscriminator(ctx, schema, refSchema, docInfo, config)...) + } + + return errs +} + +func (r *OASSchemaCheckRule) validateString(ctx context.Context, schema *oas3.Schema, refSchema *oas3.JSONSchemaReferenceable, docInfo *linter.DocumentInfo[*openapi.OpenAPI], config *linter.RuleConfig) []error { + var errs []error + + errs = append(errs, r.checkTypeMismatchedConstraints(ctx, schema, refSchema, "string", docInfo, config)...) 
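+	// Illustrative only (not drawn from this rule's fixtures): the checks below flag
+	// string schemas such as `minLength: -1`, a `maxLength` smaller than `minLength`,
+	// or a `pattern` that does not compile as a Go regular expression.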
+ + coreSchema := schema.GetCore() + + // Validate minLength + if coreSchema.MinLength.Present && schema.MinLength != nil { + if *schema.MinLength < 0 { + if rootNode := refSchema.GetRootNode(); rootNode != nil { + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + r.ID(), + errors.New("`minLength` should be a non-negative number"), + rootNode, + )) + } + } + } + + // Validate maxLength + if coreSchema.MaxLength.Present && schema.MaxLength != nil { + if *schema.MaxLength < 0 { + if rootNode := refSchema.GetRootNode(); rootNode != nil { + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + r.ID(), + errors.New("`maxLength` should be a non-negative number"), + rootNode, + )) + } + } + if coreSchema.MinLength.Present && schema.MinLength != nil { + if *schema.MinLength > *schema.MaxLength { + if rootNode := refSchema.GetRootNode(); rootNode != nil { + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + r.ID(), + errors.New("`maxLength` should be greater than or equal to `minLength`"), + rootNode, + )) + } + } + } + } + + // Validate pattern is valid regex + if coreSchema.Pattern.Present && schema.Pattern != nil && *schema.Pattern != "" { + _, err := regexp.Compile(*schema.Pattern) + if err != nil { + if rootNode := refSchema.GetRootNode(); rootNode != nil { + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + r.ID(), + errors.New("schema `pattern` should be a valid regular expression"), + rootNode, + )) + } + } + } + + return errs +} + +func (r *OASSchemaCheckRule) validateNumber(ctx context.Context, schema *oas3.Schema, refSchema *oas3.JSONSchemaReferenceable, docInfo *linter.DocumentInfo[*openapi.OpenAPI], config *linter.RuleConfig) []error { + var errs []error + + errs = append(errs, r.checkTypeMismatchedConstraints(ctx, schema, refSchema, "number", docInfo, config)...) 
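+	// Illustrative only: the numeric checks below reject `multipleOf: 0` (it must be > 0),
+	// a `maximum` below `minimum`, and an `exclusiveMaximum` below `exclusiveMinimum`
+	// when both exclusive bounds are given as numbers.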
+ + coreSchema := schema.GetCore() + + // Validate multipleOf + if coreSchema.MultipleOf.Present && schema.MultipleOf != nil { + if *schema.MultipleOf <= 0 { + if rootNode := refSchema.GetRootNode(); rootNode != nil { + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + r.ID(), + errors.New("`multipleOf` should be a number greater than `0`"), + rootNode, + )) + } + } + } + + // Validate maximum >= minimum + if coreSchema.Maximum.Present && schema.Maximum != nil { + if coreSchema.Minimum.Present && schema.Minimum != nil { + if *schema.Maximum < *schema.Minimum { + if rootNode := refSchema.GetRootNode(); rootNode != nil { + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + r.ID(), + errors.New("`maximum` should be a number greater than or equal to `minimum`"), + rootNode, + )) + } + } + } + } + + // Validate exclusiveMaximum >= exclusiveMinimum (only when both are numbers) + if schema.ExclusiveMaximum != nil && schema.ExclusiveMaximum.IsRight() && + schema.ExclusiveMinimum != nil && schema.ExclusiveMinimum.IsRight() { + exclusiveMax := schema.ExclusiveMaximum.GetRight() + exclusiveMin := schema.ExclusiveMinimum.GetRight() + if exclusiveMax != nil && exclusiveMin != nil && *exclusiveMax < *exclusiveMin { + if rootNode := refSchema.GetRootNode(); rootNode != nil { + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + r.ID(), + errors.New("`exclusiveMaximum` should be greater than or equal to `exclusiveMinimum`"), + rootNode, + )) + } + } + } + + return errs +} + +func (r *OASSchemaCheckRule) validateArray(ctx context.Context, schema *oas3.Schema, refSchema *oas3.JSONSchemaReferenceable, docInfo *linter.DocumentInfo[*openapi.OpenAPI], config *linter.RuleConfig) []error { + var errs []error + + errs = append(errs, r.checkTypeMismatchedConstraints(ctx, schema, refSchema, "array", docInfo, config)...) 
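+	// Illustrative only: the array checks below reject negative values for `minItems`,
+	// `maxItems`, `minContains`, and `maxContains`, and upper bounds smaller than
+	// their corresponding lower bounds.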
+ + coreSchema := schema.GetCore() + + // Validate minItems + if coreSchema.MinItems.Present && schema.MinItems != nil { + if *schema.MinItems < 0 { + if rootNode := refSchema.GetRootNode(); rootNode != nil { + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + r.ID(), + errors.New("`minItems` should be a non-negative number"), + rootNode, + )) + } + } + } + + // Validate maxItems + if coreSchema.MaxItems.Present && schema.MaxItems != nil { + if *schema.MaxItems < 0 { + if rootNode := refSchema.GetRootNode(); rootNode != nil { + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + r.ID(), + errors.New("`maxItems` should be a non-negative number"), + rootNode, + )) + } + } + if coreSchema.MinItems.Present && schema.MinItems != nil { + if *schema.MinItems > *schema.MaxItems { + if rootNode := refSchema.GetRootNode(); rootNode != nil { + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + r.ID(), + errors.New("`maxItems` should be greater than or equal to `minItems`"), + rootNode, + )) + } + } + } + } + + // Validate minContains + if coreSchema.MinContains.Present && schema.MinContains != nil { + if *schema.MinContains < 0 { + if rootNode := refSchema.GetRootNode(); rootNode != nil { + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + r.ID(), + errors.New("`minContains` should be a non-negative number"), + rootNode, + )) + } + } + } + + // Validate maxContains + if coreSchema.MaxContains.Present && schema.MaxContains != nil { + if *schema.MaxContains < 0 { + if rootNode := refSchema.GetRootNode(); rootNode != nil { + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + r.ID(), + errors.New("`maxContains` should be a non-negative number"), + rootNode, + )) + } + } + if coreSchema.MinContains.Present && schema.MinContains != nil { + if *schema.MinContains > *schema.MaxContains { + if rootNode := refSchema.GetRootNode(); rootNode != nil { + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + r.ID(), + errors.New("`maxContains` should be greater than or equal to `minContains`"), + rootNode, + )) + } + } + } + } + + return errs +} + +func (r *OASSchemaCheckRule) validateObject(ctx context.Context, schema *oas3.Schema, refSchema *oas3.JSONSchemaReferenceable, docInfo *linter.DocumentInfo[*openapi.OpenAPI], config *linter.RuleConfig) []error { + var errs []error + + errs = append(errs, r.checkTypeMismatchedConstraints(ctx, schema, refSchema, "object", docInfo, config)...) 
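+	// Illustrative only: besides the property-count bounds below, this also reports
+	// `required` entries that have no matching key in `properties` and are not
+	// supplied by an allOf/anyOf/oneOf branch.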
+ + coreSchema := schema.GetCore() + + // Validate minProperties + if coreSchema.MinProperties.Present && schema.MinProperties != nil { + if *schema.MinProperties < 0 { + if rootNode := refSchema.GetRootNode(); rootNode != nil { + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + r.ID(), + errors.New("`minProperties` should be a non-negative number"), + rootNode, + )) + } + } + } + + // Validate maxProperties + if coreSchema.MaxProperties.Present && schema.MaxProperties != nil { + if *schema.MaxProperties < 0 { + if rootNode := refSchema.GetRootNode(); rootNode != nil { + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + r.ID(), + errors.New("`maxProperties` should be a non-negative number"), + rootNode, + )) + } + } + if coreSchema.MinProperties.Present && schema.MinProperties != nil { + if *schema.MinProperties > *schema.MaxProperties { + if rootNode := refSchema.GetRootNode(); rootNode != nil { + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + r.ID(), + errors.New("`maxProperties` should be greater than or equal to `minProperties`"), + rootNode, + )) + } + } + } + } + + // Validate required fields + requiredFields := schema.Required + if len(requiredFields) > 0 { + properties := schema.Properties + + // Check if there's any polymorphic composition + polyFound := len(schema.AnyOf) > 0 || len(schema.OneOf) > 0 || len(schema.AllOf) > 0 + + // If no properties and no polymorphic composition, error + if properties == nil && !polyFound { + if rootNode := refSchema.GetRootNode(); rootNode != nil { + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + r.ID(), + errors.New("object contains `required` fields but no `properties`"), + rootNode, + )) + } + } else { + // Check each required field + for _, required := range requiredFields { + propertyExists := false + + // Check in direct properties + if properties != nil { + for propName := range properties.All() { + if propName == required { + propertyExists = true + break + } + } + } + + // Check in polymorphic schemas if not found + if !propertyExists { + propertyExists = r.checkPolymorphicProperty(schema, required) + } + + if !propertyExists { + if rootNode := refSchema.GetRootNode(); rootNode != nil { + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + r.ID(), + fmt.Errorf("required property `%s` is not defined in schema `properties`", required), + rootNode, + )) + } + } + } + } + } + + return errs +} + +func (r *OASSchemaCheckRule) validateBoolean(ctx context.Context, schema *oas3.Schema, refSchema *oas3.JSONSchemaReferenceable, docInfo *linter.DocumentInfo[*openapi.OpenAPI], config *linter.RuleConfig) []error { + return r.checkTypeMismatchedConstraints(ctx, schema, refSchema, "boolean", docInfo, config) +} + +func (r *OASSchemaCheckRule) validateNull(ctx context.Context, schema *oas3.Schema, refSchema *oas3.JSONSchemaReferenceable, schemaTypes []oas3.SchemaType, docInfo *linter.DocumentInfo[*openapi.OpenAPI], config *linter.RuleConfig) []error { + // In OAS 3.1, nullable is expressed as type: [actualType, "null"] + // Don't check constraints when null is part of a multi-type array + if len(schemaTypes) > 1 { + return nil + } + return r.checkTypeMismatchedConstraints(ctx, schema, refSchema, "null", docInfo, config) +} + +func (r *OASSchemaCheckRule) checkTypeMismatchedConstraints(_ context.Context, schema *oas3.Schema, 
refSchema *oas3.JSONSchemaReferenceable, schemaType string, _ *linter.DocumentInfo[*openapi.OpenAPI], config *linter.RuleConfig) []error { + var errs []error + coreSchema := schema.GetCore() + + // Define which constraint types are invalid for this type + var invalidConstraints []struct { + field string + validFor string + } + + switch schemaType { + case "string": + invalidConstraints = []struct { + field string + validFor string + }{ + // Number constraints + {"minimum", "number/integer"}, + {"maximum", "number/integer"}, + {"multipleOf", "number/integer"}, + {"exclusiveMinimum", "number/integer"}, + {"exclusiveMaximum", "number/integer"}, + // Array constraints + {"minItems", "array"}, + {"maxItems", "array"}, + {"uniqueItems", "array"}, + {"minContains", "array"}, + {"maxContains", "array"}, + // Object constraints + {"minProperties", "object"}, + {"maxProperties", "object"}, + } + case "number", "integer": + invalidConstraints = []struct { + field string + validFor string + }{ + // String constraints + {"pattern", "string"}, + {"minLength", "string"}, + {"maxLength", "string"}, + // Array constraints + {"minItems", "array"}, + {"maxItems", "array"}, + {"uniqueItems", "array"}, + {"minContains", "array"}, + {"maxContains", "array"}, + // Object constraints + {"minProperties", "object"}, + {"maxProperties", "object"}, + } + case "array": + invalidConstraints = []struct { + field string + validFor string + }{ + // String constraints + {"pattern", "string"}, + {"minLength", "string"}, + {"maxLength", "string"}, + // Number constraints + {"minimum", "number/integer"}, + {"maximum", "number/integer"}, + {"multipleOf", "number/integer"}, + {"exclusiveMinimum", "number/integer"}, + {"exclusiveMaximum", "number/integer"}, + // Object constraints + {"minProperties", "object"}, + {"maxProperties", "object"}, + } + case "object": + invalidConstraints = []struct { + field string + validFor string + }{ + // String constraints + {"pattern", "string"}, + {"minLength", "string"}, + {"maxLength", "string"}, + // Number constraints + {"minimum", "number/integer"}, + {"maximum", "number/integer"}, + {"multipleOf", "number/integer"}, + {"exclusiveMinimum", "number/integer"}, + {"exclusiveMaximum", "number/integer"}, + // Array constraints + {"minItems", "array"}, + {"maxItems", "array"}, + {"uniqueItems", "array"}, + {"minContains", "array"}, + {"maxContains", "array"}, + } + case "boolean", "null": + invalidConstraints = []struct { + field string + validFor string + }{ + // String constraints + {"pattern", "string"}, + {"minLength", "string"}, + {"maxLength", "string"}, + // Number constraints + {"minimum", "number/integer"}, + {"maximum", "number/integer"}, + {"multipleOf", "number/integer"}, + {"exclusiveMinimum", "number/integer"}, + {"exclusiveMaximum", "number/integer"}, + // Array constraints + {"minItems", "array"}, + {"maxItems", "array"}, + {"uniqueItems", "array"}, + {"minContains", "array"}, + {"maxContains", "array"}, + // Object constraints + {"minProperties", "object"}, + {"maxProperties", "object"}, + } + } + + // Check for mismatched constraints + for _, constraint := range invalidConstraints { + var isPresent bool + switch constraint.field { + case "pattern": + isPresent = coreSchema.Pattern.Present + case "minLength": + isPresent = coreSchema.MinLength.Present + case "maxLength": + isPresent = coreSchema.MaxLength.Present + case "minimum": + isPresent = coreSchema.Minimum.Present + case "maximum": + isPresent = coreSchema.Maximum.Present + case "multipleOf": + isPresent = 
coreSchema.MultipleOf.Present + case "exclusiveMinimum": + isPresent = coreSchema.ExclusiveMinimum.Present + case "exclusiveMaximum": + isPresent = coreSchema.ExclusiveMaximum.Present + case "minItems": + isPresent = coreSchema.MinItems.Present + case "maxItems": + isPresent = coreSchema.MaxItems.Present + case "uniqueItems": + isPresent = coreSchema.UniqueItems.Present + case "minContains": + isPresent = coreSchema.MinContains.Present + case "maxContains": + isPresent = coreSchema.MaxContains.Present + case "minProperties": + isPresent = coreSchema.MinProperties.Present + case "maxProperties": + isPresent = coreSchema.MaxProperties.Present + } + + if isPresent { + if rootNode := refSchema.GetRootNode(); rootNode != nil { + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + r.ID(), + fmt.Errorf("`%s` constraint is only applicable to %s types, not `%s`", + constraint.field, constraint.validFor, schemaType), + rootNode, + )) + } + } + } + + return errs +} + +func (r *OASSchemaCheckRule) checkPolymorphicProperty(schema *oas3.Schema, propertyName string) bool { + // Check in AnyOf schemas + for _, anyOfRef := range schema.AnyOf { + anyOfSchema := anyOfRef.GetSchema() + if anyOfSchema != nil && anyOfSchema.Properties != nil { + for propName := range anyOfSchema.Properties.All() { + if propName == propertyName { + return true + } + } + } + } + + // Check in OneOf schemas + for _, oneOfRef := range schema.OneOf { + oneOfSchema := oneOfRef.GetSchema() + if oneOfSchema != nil && oneOfSchema.Properties != nil { + for propName := range oneOfSchema.Properties.All() { + if propName == propertyName { + return true + } + } + } + } + + // Check in AllOf schemas + for _, allOfRef := range schema.AllOf { + allOfSchema := allOfRef.GetSchema() + if allOfSchema != nil && allOfSchema.Properties != nil { + for propName := range allOfSchema.Properties.All() { + if propName == propertyName { + return true + } + } + } + } + + return false +} + +func (r *OASSchemaCheckRule) validateConst(_ context.Context, schema *oas3.Schema, refSchema *oas3.JSONSchemaReferenceable, schemaTypes []oas3.SchemaType, _ *linter.DocumentInfo[*openapi.OpenAPI], config *linter.RuleConfig) []error { + var errs []error + coreSchema := schema.GetCore() + + if !coreSchema.Const.Present || schema.Const == nil { + return errs + } + + constNode := coreSchema.Const.ValueNode + if constNode == nil { + return errs + } + + // Check if const value matches any of the declared types + isValid := false + for _, schemaType := range schemaTypes { + if r.isConstNodeValidForType(constNode, string(schemaType)) { + isValid = true + break + } + } + + if !isValid { + // Convert SchemaType slice to string slice for Join with backticks + typeStrs := make([]string, len(schemaTypes)) + for i, t := range schemaTypes { + typeStrs[i] = "`" + string(t) + "`" + } + typeList := fmt.Sprintf("[%s]", strings.Join(typeStrs, ", ")) + if rootNode := refSchema.GetRootNode(); rootNode != nil { + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + r.ID(), + fmt.Errorf("`const` value type does not match schema type %s", typeList), + rootNode, + )) + } + } + + return errs +} + +func (r *OASSchemaCheckRule) validateEnumConst(_ context.Context, schema *oas3.Schema, refSchema *oas3.JSONSchemaReferenceable, _ *linter.DocumentInfo[*openapi.OpenAPI], config *linter.RuleConfig) []error { + var errs []error + + enumValues := schema.Enum + constValue := schema.Const + + if len(enumValues) == 0 || 
constValue == nil { + return errs + } + + // Check if const value exists in enum values by comparing the YAML nodes + constInEnum := false + for _, enumValue := range enumValues { + // Compare YAML node values and tags + if constValue.Value == enumValue.Value && constValue.Tag == enumValue.Tag { + constInEnum = true + break + } + } + + if !constInEnum { + if rootNode := refSchema.GetRootNode(); rootNode != nil { + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + r.ID(), + fmt.Errorf("`const` value `%v` is not present in `enum` values", constValue), + rootNode, + )) + } + } else { + // Both enum and const are present and compatible - flag as potentially redundant + if len(enumValues) == 1 { + if rootNode := refSchema.GetRootNode(); rootNode != nil { + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + r.ID(), + errors.New("schema uses both `enum` with single value and `const` - consider using only `const`"), + rootNode, + )) + } + } else { + if rootNode := refSchema.GetRootNode(); rootNode != nil { + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + r.ID(), + errors.New("schema uses both `enum` and `const` - this is likely an oversight as `const` restricts to a single value"), + rootNode, + )) + } + } + } + + return errs +} + +func (r *OASSchemaCheckRule) isConstNodeValidForType(node *yaml.Node, schemaType string) bool { + switch schemaType { + case "string": + return node.Tag == "!!str" + case "integer": + if node.Tag == "!!int" { + return true + } + // Allow float values that have no fractional part (e.g., 42.0) + if node.Tag == "!!float" { + return r.isFloatWhole(node.Value) + } + return false + case "number": + return node.Tag == "!!int" || node.Tag == "!!float" + case "boolean": + return node.Tag == "!!bool" + case "null": + return node.Tag == "!!null" + case "array": + return node.Kind == yaml.SequenceNode + case "object": + return node.Kind == yaml.MappingNode + } + return false +} + +func (r *OASSchemaCheckRule) isFloatWhole(value string) bool { + // Check if a float string represents a whole number (e.g., "42.0" -> true, "42.5" -> false) + if !strings.Contains(value, ".") { + return true + } + parts := strings.Split(value, ".") + if len(parts) != 2 { + return false + } + // Check if fractional part is all zeros + fractional := parts[1] + for _, char := range fractional { + if char != '0' { + return false + } + } + return true +} + +func (r *OASSchemaCheckRule) validateDiscriminator(_ context.Context, schema *oas3.Schema, refSchema *oas3.JSONSchemaReferenceable, _ *linter.DocumentInfo[*openapi.OpenAPI], config *linter.RuleConfig) []error { + var errs []error + + discriminator := schema.Discriminator + if discriminator == nil { + return errs + } + + propertyName := discriminator.PropertyName + + // propertyName is required per OpenAPI 3.x spec + if propertyName == "" { + if rootNode := refSchema.GetRootNode(); rootNode != nil { + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + r.ID(), + errors.New("discriminator object is missing required `propertyName` field"), + rootNode, + )) + } + return errs + } + + // Check if property exists in direct properties + propertyExists := false + if properties := schema.Properties; properties != nil { + for propName := range properties.All() { + if propName == propertyName { + propertyExists = true + break + } + } + } + + // Check polymorphic schemas if not found + if 
!propertyExists { + propertyExists = r.checkPolymorphicProperty(schema, propertyName) + } + + if !propertyExists { + if rootNode := refSchema.GetRootNode(); rootNode != nil { + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + r.ID(), + fmt.Errorf("discriminator property `%s` is not defined in schema properties", propertyName), + rootNode, + )) + } + } + + return errs +} diff --git a/openapi/linter/rules/oas_schema_check_test.go b/openapi/linter/rules/oas_schema_check_test.go new file mode 100644 index 00000000..61c660ba --- /dev/null +++ b/openapi/linter/rules/oas_schema_check_test.go @@ -0,0 +1,429 @@ +package rules + +import ( + "strings" + "testing" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/references" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestOASSchemaCheck_StringConstraints_Success(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + }{ + { + name: "valid minLength and maxLength", + yaml: ` +openapi: 3.0.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + Test: + type: string + minLength: 5 + maxLength: 10 +paths: {} +`, + }, + { + name: "valid pattern", + yaml: ` +openapi: 3.0.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + Test: + type: string + pattern: ^[a-z]+$ +paths: {} +`, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &OASSchemaCheckRule{} + config := &linter.RuleConfig{} + + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + assert.Empty(t, errs) + }) + } +} + +func TestOASSchemaCheck_StringConstraints_Error(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + expected int + }{ + { + name: "negative minLength", + yaml: ` +openapi: 3.0.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + Test: + type: string + minLength: -1 +paths: {} +`, + expected: 1, + }, + { + name: "maxLength less than minLength", + yaml: ` +openapi: 3.0.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + Test: + type: string + minLength: 10 + maxLength: 5 +paths: {} +`, + expected: 1, + }, + { + name: "invalid regex pattern", + yaml: ` +openapi: 3.0.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + Test: + type: string + pattern: "[invalid(" +paths: {} +`, + expected: 1, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &OASSchemaCheckRule{} + config := &linter.RuleConfig{} + + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + assert.Len(t, errs, tt.expected) + }) + } +} + +func TestOASSchemaCheck_NumberConstraints_Success(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + }{ + { + name: "valid minimum and 
maximum", + yaml: ` +openapi: 3.0.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + Test: + type: number + minimum: 0 + maximum: 100 +paths: {} +`, + }, + { + name: "valid multipleOf", + yaml: ` +openapi: 3.0.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + Test: + type: integer + multipleOf: 5 +paths: {} +`, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &OASSchemaCheckRule{} + config := &linter.RuleConfig{} + + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + assert.Empty(t, errs) + }) + } +} + +func TestOASSchemaCheck_NumberConstraints_Error(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + expected int + }{ + { + name: "multipleOf zero", + yaml: ` +openapi: 3.0.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + Test: + type: integer + multipleOf: 0 +paths: {} +`, + expected: 1, + }, + { + name: "maximum less than minimum", + yaml: ` +openapi: 3.0.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + Test: + type: number + minimum: 100 + maximum: 0 +paths: {} +`, + expected: 1, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &OASSchemaCheckRule{} + config := &linter.RuleConfig{} + + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + assert.Len(t, errs, tt.expected) + }) + } +} + +func TestOASSchemaCheck_TypeMismatch_Error(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + expected int + }{ + { + name: "string type with number constraints", + yaml: ` +openapi: 3.0.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + Test: + type: string + minimum: 0 + maximum: 100 +paths: {} +`, + expected: 2, // minimum and maximum + }, + { + name: "number type with string constraints", + yaml: ` +openapi: 3.0.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + Test: + type: number + minLength: 5 + pattern: ^[a-z]+$ +paths: {} +`, + expected: 2, // minLength and pattern + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &OASSchemaCheckRule{} + config := &linter.RuleConfig{} + + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + assert.Len(t, errs, tt.expected) + }) + } +} + +func TestOASSchemaCheck_ObjectRequired_Error(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + expected int + }{ + { + name: "required without properties", + yaml: ` +openapi: 3.0.0 +info: + title: Test + version: 1.0.0 
+components: + schemas: + Test: + type: object + required: + - name +paths: {} +`, + expected: 1, + }, + { + name: "required field not in properties", + yaml: ` +openapi: 3.0.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + Test: + type: object + properties: + age: + type: integer + required: + - name +paths: {} +`, + expected: 1, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &OASSchemaCheckRule{} + config := &linter.RuleConfig{} + + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + assert.Len(t, errs, tt.expected) + }) + } +} diff --git a/openapi/linter/rules/openapi_tags.go b/openapi/linter/rules/openapi_tags.go new file mode 100644 index 00000000..627874ae --- /dev/null +++ b/openapi/linter/rules/openapi_tags.go @@ -0,0 +1,66 @@ +package rules + +import ( + "context" + "errors" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/validation" +) + +const RuleStyleOpenAPITags = "style-openapi-tags" + +type OpenAPITagsRule struct{} + +func (r *OpenAPITagsRule) ID() string { + return RuleStyleOpenAPITags +} + +func (r *OpenAPITagsRule) Description() string { + return "The OpenAPI specification should define a non-empty tags array at the root level to organize and categorize API operations. Tags help structure API documentation and enable logical grouping of related endpoints." +} + +func (r *OpenAPITagsRule) Summary() string { + return "The OpenAPI object should define a non-empty tags array." +} + +func (r *OpenAPITagsRule) HowToFix() string { + return "Define at least one tag in the top-level tags array." 
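+	// For example (illustrative, not prescribed by the rule):
+	//   tags:
+	//     - name: users
+	//       description: Operations on user resources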
+} + +func (r *OpenAPITagsRule) Category() string { + return CategoryStyle +} + +func (r *OpenAPITagsRule) DefaultSeverity() validation.Severity { + return validation.SeverityWarning +} + +func (r *OpenAPITagsRule) Link() string { + return "https://github.com/speakeasy-api/openapi/blob/main/openapi/linter/README.md#style-openapi-tags" +} + +func (r *OpenAPITagsRule) Versions() []string { + return nil // applies to all versions +} + +func (r *OpenAPITagsRule) Run(ctx context.Context, docInfo *linter.DocumentInfo[*openapi.OpenAPI], config *linter.RuleConfig) []error { + if docInfo == nil || docInfo.Document == nil { + return nil + } + + doc := docInfo.Document + tags := doc.GetTags() + + if len(tags) == 0 { + return []error{validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + RuleStyleOpenAPITags, + errors.New("OpenAPI object must have a non-empty tags array"), + doc.GetRootNode(), + )} + } + + return nil +} diff --git a/openapi/linter/rules/openapi_tags_test.go b/openapi/linter/rules/openapi_tags_test.go new file mode 100644 index 00000000..33f86234 --- /dev/null +++ b/openapi/linter/rules/openapi_tags_test.go @@ -0,0 +1,122 @@ +package rules_test + +import ( + "strings" + "testing" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/openapi/linter/rules" + "github.com/speakeasy-api/openapi/validation" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestOpenAPITagsRule_ValidCases(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + }{ + { + name: "single tag defined", + yaml: ` +openapi: 3.1.0 +info: + title: Test API + version: 1.0.0 +tags: + - name: users + description: User operations +paths: {} +`, + }, + { + name: "multiple tags defined", + yaml: ` +openapi: 3.1.0 +info: + title: Test API + version: 1.0.0 +tags: + - name: users + description: User operations + - name: pets + description: Pet operations +paths: {} +`, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.OpenAPITagsRule{} + config := &linter.RuleConfig{} + docInfo := linter.NewDocumentInfo(doc, "test.yaml") + + errs := rule.Run(ctx, docInfo, config) + assert.Empty(t, errs) + }) + } +} + +func TestOpenAPITagsRule_Violations(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + expectedError string + }{ + { + name: "no tags defined", + yaml: ` +openapi: 3.1.0 +info: + title: Test API + version: 1.0.0 +paths: {} +`, + expectedError: "[2:1] warning style-openapi-tags OpenAPI object must have a non-empty tags array", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.OpenAPITagsRule{} + config := &linter.RuleConfig{} + docInfo := linter.NewDocumentInfo(doc, "test.yaml") + + errs := rule.Run(ctx, docInfo, config) + + require.NotEmpty(t, errs) + assert.Equal(t, tt.expectedError, errs[0].Error()) + }) + } +} + +func TestOpenAPITagsRule_RuleMetadata(t *testing.T) { + t.Parallel() + + rule := &rules.OpenAPITagsRule{} + + assert.Equal(t, "style-openapi-tags", rule.ID()) + assert.Equal(t, rules.CategoryStyle, rule.Category()) + assert.NotEmpty(t, rule.Description()) + assert.NotEmpty(t, 
rule.Link()) + assert.Equal(t, validation.SeverityWarning, rule.DefaultSeverity()) + assert.Nil(t, rule.Versions()) +} diff --git a/openapi/linter/rules/operation_description.go b/openapi/linter/rules/operation_description.go new file mode 100644 index 00000000..78d8d7be --- /dev/null +++ b/openapi/linter/rules/operation_description.go @@ -0,0 +1,85 @@ +package rules + +import ( + "context" + "fmt" + "strings" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/validation" +) + +const RuleStyleOperationDescription = "style-operation-description" + +type OperationDescriptionRule struct{} + +func (r *OperationDescriptionRule) ID() string { + return RuleStyleOperationDescription +} + +func (r *OperationDescriptionRule) Description() string { + return "Operations should include either a description or summary field to explain their purpose and behavior. Clear operation documentation helps developers understand what each endpoint does and how to use it effectively." +} + +func (r *OperationDescriptionRule) Summary() string { + return "Operations must include a description or summary." +} + +func (r *OperationDescriptionRule) HowToFix() string { + return "Add a summary or description to each operation." +} + +func (r *OperationDescriptionRule) Category() string { + return CategoryStyle +} + +func (r *OperationDescriptionRule) DefaultSeverity() validation.Severity { + return validation.SeverityWarning +} + +func (r *OperationDescriptionRule) Link() string { + return "https://github.com/speakeasy-api/openapi/blob/main/openapi/linter/README.md#style-operation-description" +} + +func (r *OperationDescriptionRule) Versions() []string { + return nil // applies to all versions +} + +func (r *OperationDescriptionRule) Run(ctx context.Context, docInfo *linter.DocumentInfo[*openapi.OpenAPI], config *linter.RuleConfig) []error { + if docInfo == nil || docInfo.Document == nil || docInfo.Index == nil { + return nil + } + + var errs []error + + for _, opNode := range docInfo.Index.Operations { + operation := opNode.Node + if operation == nil { + continue + } + + description := operation.GetDescription() + summary := operation.GetSummary() + + if description == "" && summary == "" { + // Get operation identifier (prefer operationId, fallback to method + path) + opIdentifier := operation.GetOperationID() + if opIdentifier == "" { + method, path := openapi.ExtractMethodAndPath(opNode.Location) + if method != "" { + opIdentifier = fmt.Sprintf("`%s` %s", strings.ToUpper(method), path) + } + } + + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + RuleStyleOperationDescription, + fmt.Errorf("the %s is missing a description or summary", opIdentifier), + operation.GetRootNode(), + )) + } + } + + return errs +} diff --git a/openapi/linter/rules/operation_description_test.go b/openapi/linter/rules/operation_description_test.go new file mode 100644 index 00000000..2e15a975 --- /dev/null +++ b/openapi/linter/rules/operation_description_test.go @@ -0,0 +1,199 @@ +package rules_test + +import ( + "strings" + "testing" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/openapi/linter/rules" + "github.com/speakeasy-api/openapi/references" + "github.com/speakeasy-api/openapi/validation" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestOperationDescriptionRule_ValidCases(t *testing.T) { + 
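+	// The passing cases below cover a description alone, a summary alone, and both
+	// together; any of these satisfies style-operation-description.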
t.Parallel() + + tests := []struct { + name string + yaml string + }{ + { + name: "operation with description", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + description: Get all users + responses: + '200': + description: ok +`, + }, + { + name: "operation with summary", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + summary: Get all users + responses: + '200': + description: ok +`, + }, + { + name: "operation with both description and summary", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + summary: Get all users + description: This endpoint retrieves all users from the database + responses: + '200': + description: ok +`, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + + rule := &rules.OperationDescriptionRule{} + config := &linter.RuleConfig{} + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + assert.Empty(t, errs) + }) + } +} + +func TestOperationDescriptionRule_Violations(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + expectedError string + }{ + { + name: "operation without description or summary", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '200': + description: ok +`, + expectedError: "[9:7] warning style-operation-description the `GET` /users is missing a description or summary", + }, + { + name: "operation with operationId but no description", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + post: + operationId: createUser + responses: + '201': + description: created +`, + expectedError: "[9:7] warning style-operation-description the createUser is missing a description or summary", + }, + { + name: "multiple operations missing description", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '200': + description: ok + /products: + get: + responses: + '200': + description: ok +`, + expectedError: "[9:7] warning style-operation-description the `GET` /users is missing a description or summary", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + + rule := &rules.OperationDescriptionRule{} + config := &linter.RuleConfig{} + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + + require.NotEmpty(t, errs) + assert.Equal(t, tt.expectedError, errs[0].Error()) + }) + } +} + +func TestOperationDescriptionRule_RuleMetadata(t *testing.T) { + t.Parallel() + + rule := &rules.OperationDescriptionRule{} + + assert.Equal(t, "style-operation-description", rule.ID()) + assert.Equal(t, rules.CategoryStyle, rule.Category()) + assert.NotEmpty(t, rule.Description()) + assert.NotEmpty(t, rule.Link()) + assert.Equal(t, validation.SeverityWarning, rule.DefaultSeverity()) 
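+	// A nil Versions() slice means the rule applies to every OpenAPI version.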
+ assert.Nil(t, rule.Versions()) +} diff --git a/openapi/linter/rules/operation_error_response.go b/openapi/linter/rules/operation_error_response.go new file mode 100644 index 00000000..6147382b --- /dev/null +++ b/openapi/linter/rules/operation_error_response.go @@ -0,0 +1,83 @@ +package rules + +import ( + "context" + "errors" + "strconv" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/validation" +) + +const RuleStyleOperationErrorResponse = "style-operation-error-response" + +type OperationErrorResponseRule struct{} + +func (r *OperationErrorResponseRule) ID() string { return RuleStyleOperationErrorResponse } +func (r *OperationErrorResponseRule) Category() string { return CategoryStyle } +func (r *OperationErrorResponseRule) Description() string { + return "Operations should define at least one 4xx error response to document potential client errors. Documenting error responses helps API consumers handle failures gracefully and understand what went wrong when requests fail." +} +func (r *OperationErrorResponseRule) Summary() string { + return "Operations must define at least one 4xx error response." +} +func (r *OperationErrorResponseRule) HowToFix() string { + return "Add at least one 4xx response definition for each operation." +} +func (r *OperationErrorResponseRule) Link() string { + return "https://github.com/speakeasy-api/openapi/blob/main/openapi/linter/README.md#style-operation-error-response" +} +func (r *OperationErrorResponseRule) DefaultSeverity() validation.Severity { + return validation.SeverityWarning +} +func (r *OperationErrorResponseRule) Versions() []string { + return nil // Applies to all OpenAPI versions +} + +func (r *OperationErrorResponseRule) Run(ctx context.Context, docInfo *linter.DocumentInfo[*openapi.OpenAPI], config *linter.RuleConfig) []error { + if docInfo == nil || docInfo.Document == nil || docInfo.Index == nil { + return nil + } + + var errs []error + + // Use the pre-computed operation index + for _, opNode := range docInfo.Index.Operations { + operation := opNode.Node + if operation == nil { + continue + } + + responses := operation.GetResponses() + if responses == nil { + continue + } + + // Check if any response code is in the 4xx range + has4xxResponse := false + if responses.Map != nil { + for code := range responses.All() { + // Try to parse the code as an integer + codeVal, err := strconv.Atoi(code) + if err == nil && codeVal >= 400 && codeVal < 500 { + has4xxResponse = true + break + } + } + } + + if !has4xxResponse { + // Get the responses node for error reporting + responsesNode := responses.GetRootNode() + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + RuleStyleOperationErrorResponse, + errors.New("operation must define at least one 4xx error response"), + responsesNode, + )) + } + } + + return errs +} diff --git a/openapi/linter/rules/operation_error_response_test.go b/openapi/linter/rules/operation_error_response_test.go new file mode 100644 index 00000000..ad76f15c --- /dev/null +++ b/openapi/linter/rules/operation_error_response_test.go @@ -0,0 +1,247 @@ +package rules_test + +import ( + "strings" + "testing" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/openapi/linter/rules" + "github.com/speakeasy-api/openapi/references" + "github.com/speakeasy-api/openapi/validation" + "github.com/stretchr/testify/assert" + 
"github.com/stretchr/testify/require" +) + +func TestOperationErrorResponseRule_ValidCases(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + }{ + { + name: "operation with 400 response", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '200': + description: Success + '400': + description: Bad Request +`, + }, + { + name: "operation with 404 response", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users/{id}: + get: + responses: + '200': + description: Success + '404': + description: Not Found +`, + }, + { + name: "operation with multiple 4xx responses", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + post: + responses: + '201': + description: Created + '400': + description: Bad Request + '401': + description: Unauthorized + '403': + description: Forbidden +`, + }, + { + name: "operation with 422 response", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + post: + responses: + '201': + description: Created + '422': + description: Unprocessable Entity +`, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.OperationErrorResponseRule{} + config := &linter.RuleConfig{} + + // Build index for the rule + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + assert.Empty(t, errs) + }) + } +} + +func TestOperationErrorResponseRule_Violations(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + expectedCount int + }{ + { + name: "operation with only success response", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '200': + description: Success +`, + expectedCount: 1, + }, + { + name: "operation with only 5xx response", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '200': + description: Success + '500': + description: Internal Server Error +`, + expectedCount: 1, + }, + { + name: "multiple operations without 4xx responses", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '200': + description: Success + post: + responses: + '201': + description: Created + /posts: + get: + responses: + '200': + description: Success +`, + expectedCount: 3, + }, + { + name: "operation with default response only", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + default: + description: Default response +`, + expectedCount: 1, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.OperationErrorResponseRule{} + config := &linter.RuleConfig{} + + // Build index for the rule + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + + 
require.Len(t, errs, tt.expectedCount) + for _, err := range errs { + assert.Contains(t, err.Error(), "operation must define at least one 4xx error response") + } + }) + } +} + +func TestOperationErrorResponseRule_RuleMetadata(t *testing.T) { + t.Parallel() + + rule := &rules.OperationErrorResponseRule{} + + assert.Equal(t, "style-operation-error-response", rule.ID()) + assert.Equal(t, rules.CategoryStyle, rule.Category()) + assert.NotEmpty(t, rule.Description()) + assert.NotEmpty(t, rule.Link()) + assert.Equal(t, validation.SeverityWarning, rule.DefaultSeverity()) + assert.Nil(t, rule.Versions()) +} diff --git a/openapi/linter/rules/operation_id.go b/openapi/linter/rules/operation_id.go new file mode 100644 index 00000000..b61ab06b --- /dev/null +++ b/openapi/linter/rules/operation_id.go @@ -0,0 +1,75 @@ +package rules + +import ( + "context" + "fmt" + "strings" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/validation" +) + +const RuleSemanticOperationOperationId = "semantic-operation-operation-id" + +type OperationIdRule struct{} + +func (r *OperationIdRule) ID() string { return RuleSemanticOperationOperationId } + +func (r *OperationIdRule) Category() string { return CategorySemantic } + +func (r *OperationIdRule) Description() string { + return "Operations should define an operationId for consistent referencing across the specification and in generated code. Operation IDs enable tooling to generate meaningful function names and provide stable identifiers for API operations." +} + +func (r *OperationIdRule) Summary() string { + return "Operations should define an operationId." +} + +func (r *OperationIdRule) HowToFix() string { + return "Add an operationId to each operation." 
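+	// e.g. `operationId: listUsers` (an illustrative name, not one the rule requires)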
+} + +func (r *OperationIdRule) Link() string { + return "https://github.com/speakeasy-api/openapi/blob/main/openapi/linter/README.md#semantic-operation-operation-id" +} + +func (r *OperationIdRule) DefaultSeverity() validation.Severity { return validation.SeverityWarning } + +func (r *OperationIdRule) Versions() []string { return nil } + +func (r *OperationIdRule) Run(ctx context.Context, docInfo *linter.DocumentInfo[*openapi.OpenAPI], config *linter.RuleConfig) []error { + if docInfo == nil || docInfo.Document == nil || docInfo.Index == nil { + return nil + } + + var errs []error + + // Use the pre-computed Operations index for efficient iteration + for _, opNode := range docInfo.Index.Operations { + op := opNode.Node + method, path := openapi.ExtractMethodAndPath(opNode.Location) + if method == "" || path == "" { + continue + } + + if op.GetOperationID() != "" { + continue + } + + errNode := op.GetRootNode() + if errNode == nil { + errNode = docInfo.Document.GetRootNode() + } + + err := validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + RuleSemanticOperationOperationId, + fmt.Errorf("the `%s` operation does not contain an `operationId`", strings.ToUpper(method)), + errNode, + ) + errs = append(errs, err) + } + + return errs +} diff --git a/openapi/linter/rules/operation_id_test.go b/openapi/linter/rules/operation_id_test.go new file mode 100644 index 00000000..d29efc23 --- /dev/null +++ b/openapi/linter/rules/operation_id_test.go @@ -0,0 +1,136 @@ +package rules_test + +import ( + "strings" + "testing" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/openapi/linter/rules" + "github.com/speakeasy-api/openapi/references" + "github.com/speakeasy-api/openapi/validation" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestOperationIdRule_ValidCases(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + }{ + { + name: "all operations have ids", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /melody: + post: + operationId: littleSong + responses: + '200': + description: ok + /ember: + get: + operationId: littleChampion + responses: + '200': + description: ok +`, + }, + { + name: "empty paths", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: {} +`, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + ctx := t.Context() + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err, "unmarshal should succeed") + + rule := &rules.OperationIdRule{} + config := &linter.RuleConfig{} + + // Build index for the rule + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + require.Empty(t, errs, "expected no lint errors") + }) + } +} + +func TestOperationIdRule_Violations(t *testing.T) { + t.Parallel() + + yamlInput := ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /melody: + post: + operationId: littleSong + responses: + '200': + description: ok + /ember: + get: + responses: + '200': + description: ok +` + + ctx := t.Context() + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(yamlInput)) + require.NoError(t, err, "unmarshal should succeed") + + rule := &rules.OperationIdRule{} + 
	config := &linter.RuleConfig{}
+
+	// Build index for the rule
+	idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+		RootDocument:   doc,
+		TargetDocument: doc,
+		TargetLocation: "test.yaml",
+	})
+	docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx)
+
+	errs := rule.Run(ctx, docInfo, config)
+
+	require.Len(t, errs, 1, "should have one lint error")
+	assert.Equal(t, "[15:7] warning semantic-operation-operation-id the `GET` operation does not contain an `operationId`", errs[0].Error())
+}
+
+func TestOperationIdRule_RuleMetadata(t *testing.T) {
+	t.Parallel()
+
+	rule := &rules.OperationIdRule{}
+
+	assert.Equal(t, "semantic-operation-operation-id", rule.ID(), "rule ID should match")
+	assert.Equal(t, rules.CategorySemantic, rule.Category(), "rule category should match")
+	assert.NotEmpty(t, rule.Description(), "rule should have description")
+	assert.NotEmpty(t, rule.Link(), "rule should have documentation link")
+	assert.Equal(t, validation.SeverityWarning, rule.DefaultSeverity(), "default severity should be warning")
+	assert.Nil(t, rule.Versions(), "versions should be nil (all versions)")
+}
diff --git a/openapi/linter/rules/operation_id_valid_in_url.go b/openapi/linter/rules/operation_id_valid_in_url.go
new file mode 100644
index 00000000..7750fbe7
--- /dev/null
+++ b/openapi/linter/rules/operation_id_valid_in_url.go
@@ -0,0 +1,88 @@
+package rules
+
+import (
+	"context"
+	"fmt"
+	"regexp"
+
+	"github.com/speakeasy-api/openapi/linter"
+	"github.com/speakeasy-api/openapi/openapi"
+	"github.com/speakeasy-api/openapi/validation"
+)
+
+const RuleSemanticOperationIDValidInURL = "semantic-operation-id-valid-in-url"
+
+// urlFriendlyPattern matches URL-friendly characters per RFC 3986 (unreserved + reserved characters)
+var urlFriendlyPattern = regexp.MustCompile(`^[A-Za-z0-9-._~:/?#\[\]@!$&'()*+,;=]*$`)
+
+type OperationIDValidInURLRule struct{}
+
+func (r *OperationIDValidInURLRule) ID() string {
+	return RuleSemanticOperationIDValidInURL
+}
+
+func (r *OperationIDValidInURLRule) Description() string {
+	return "Operation IDs must use URL-friendly characters as defined by RFC 3986 (unreserved and reserved URI characters). URL-safe operation IDs ensure compatibility with code generators and tooling that may use them in URLs or file paths."
+}
+
+func (r *OperationIDValidInURLRule) Summary() string {
+	return "Operation IDs must contain only URL-friendly characters."
+}
+
+func (r *OperationIDValidInURLRule) HowToFix() string {
+	return "Update operationId values to use only URL-friendly characters."
+} + +func (r *OperationIDValidInURLRule) Category() string { + return CategorySemantic +} + +func (r *OperationIDValidInURLRule) DefaultSeverity() validation.Severity { + return validation.SeverityError +} + +func (r *OperationIDValidInURLRule) Link() string { + return "https://github.com/speakeasy-api/openapi/blob/main/openapi/linter/README.md#semantic-operation-id-valid-in-url" +} + +func (r *OperationIDValidInURLRule) Versions() []string { + return nil // applies to all versions +} + +func (r *OperationIDValidInURLRule) Run(ctx context.Context, docInfo *linter.DocumentInfo[*openapi.OpenAPI], config *linter.RuleConfig) []error { + if docInfo == nil || docInfo.Document == nil || docInfo.Index == nil { + return nil + } + + doc := docInfo.Document + var errs []error + + // Use the pre-computed Operations index for efficient iteration + for _, opNode := range docInfo.Index.Operations { + operation := opNode.Node + if operation == nil { + continue + } + + operationID := operation.GetOperationID() + if operationID == "" { + continue + } + + if !urlFriendlyPattern.MatchString(operationID) { + node := GetFieldValueNode(operation, "operationId", doc) + if node == nil { + node = operation.GetRootNode() + } + + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + RuleSemanticOperationIDValidInURL, + fmt.Errorf("operationId `%s` contains characters that are not URL-friendly", operationID), + node, + )) + } + } + + return errs +} diff --git a/openapi/linter/rules/operation_id_valid_in_url_test.go b/openapi/linter/rules/operation_id_valid_in_url_test.go new file mode 100644 index 00000000..8a6b985b --- /dev/null +++ b/openapi/linter/rules/operation_id_valid_in_url_test.go @@ -0,0 +1,202 @@ +package rules_test + +import ( + "strings" + "testing" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/openapi/linter/rules" + "github.com/speakeasy-api/openapi/references" + "github.com/speakeasy-api/openapi/validation" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestOperationIDValidInURLRule_ValidCases(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + }{ + { + name: "URL-friendly operationId", + yaml: ` +openapi: 3.1.0 +info: + title: Test API + version: 1.0.0 +paths: + /users: + get: + operationId: get-users +`, + }, + { + name: "operationId with underscores", + yaml: ` +openapi: 3.1.0 +info: + title: Test API + version: 1.0.0 +paths: + /users: + get: + operationId: get_users_list +`, + }, + { + name: "operationId with dots", + yaml: ` +openapi: 3.1.0 +info: + title: Test API + version: 1.0.0 +paths: + /users: + get: + operationId: users.list +`, + }, + { + name: "operationId with reserved characters", + yaml: ` +openapi: 3.1.0 +info: + title: Test API + version: 1.0.0 +paths: + /users: + get: + operationId: get@users +`, + }, + { + name: "no operationId", + yaml: ` +openapi: 3.1.0 +info: + title: Test API + version: 1.0.0 +paths: + /users: + get: + summary: Get users +`, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err, "unmarshal should succeed") + + rule := &rules.OperationIDValidInURLRule{} + config := &linter.RuleConfig{} + + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: 
"test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + assert.Empty(t, errs, "should have no lint errors") + }) + } +} + +func TestOperationIDValidInURLRule_Violations(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + expectedError string + }{ + { + name: "operationId with spaces", + yaml: ` +openapi: 3.1.0 +info: + title: Test API + version: 1.0.0 +paths: + /users: + get: + operationId: get users +`, + expectedError: "[9:20] error semantic-operation-id-valid-in-url operationId `get users` contains characters that are not URL-friendly", + }, + { + name: "operationId with percent sign", + yaml: ` +openapi: 3.1.0 +info: + title: Test API + version: 1.0.0 +paths: + /users: + get: + operationId: get%users +`, + expectedError: "[9:20] error semantic-operation-id-valid-in-url operationId `get%users` contains characters that are not URL-friendly", + }, + { + name: "operationId with angle brackets", + yaml: ` +openapi: 3.1.0 +info: + title: Test API + version: 1.0.0 +paths: + /users: + get: + operationId: +`, + expectedError: "[9:20] error semantic-operation-id-valid-in-url operationId `` contains characters that are not URL-friendly", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err, "unmarshal should succeed") + + rule := &rules.OperationIDValidInURLRule{} + config := &linter.RuleConfig{} + + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + require.Len(t, errs, 1, "should have one lint error") + assert.Equal(t, tt.expectedError, errs[0].Error(), "error message should match exactly") + }) + } +} + +func TestOperationIDValidInURLRule_RuleMetadata(t *testing.T) { + t.Parallel() + + rule := &rules.OperationIDValidInURLRule{} + + assert.Equal(t, "semantic-operation-id-valid-in-url", rule.ID()) + assert.Equal(t, rules.CategorySemantic, rule.Category()) + assert.NotEmpty(t, rule.Description()) + assert.NotEmpty(t, rule.Link()) + assert.Equal(t, validation.SeverityError, rule.DefaultSeverity()) + assert.Nil(t, rule.Versions()) +} diff --git a/openapi/linter/rules/operation_singular_tag.go b/openapi/linter/rules/operation_singular_tag.go new file mode 100644 index 00000000..457454b7 --- /dev/null +++ b/openapi/linter/rules/operation_singular_tag.go @@ -0,0 +1,78 @@ +package rules + +import ( + "context" + "fmt" + "strings" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/validation" +) + +const RuleStyleOperationSingularTag = "style-operation-singular-tag" + +type OperationSingularTagRule struct{} + +func (r *OperationSingularTagRule) ID() string { return RuleStyleOperationSingularTag } +func (r *OperationSingularTagRule) Category() string { return CategoryStyle } +func (r *OperationSingularTagRule) Description() string { + return "Operations should be associated with only a single tag to maintain clear organizational boundaries. Multiple tags can create ambiguity about where an operation belongs in the API structure and complicate documentation organization." 
+} +func (r *OperationSingularTagRule) Summary() string { + return "Operations should have no more than one tag." +} +func (r *OperationSingularTagRule) HowToFix() string { + return "Limit each operation to a single tag." +} +func (r *OperationSingularTagRule) Link() string { + return "https://github.com/speakeasy-api/openapi/blob/main/openapi/linter/README.md#style-operation-singular-tag" +} +func (r *OperationSingularTagRule) DefaultSeverity() validation.Severity { + return validation.SeverityWarning +} +func (r *OperationSingularTagRule) Versions() []string { + return nil // Applies to all OpenAPI versions +} + +func (r *OperationSingularTagRule) Run(ctx context.Context, docInfo *linter.DocumentInfo[*openapi.OpenAPI], config *linter.RuleConfig) []error { + if docInfo == nil || docInfo.Document == nil || docInfo.Index == nil { + return nil + } + + var errs []error + + // Use index to iterate through all operations + for _, opNode := range docInfo.Index.Operations { + operation := opNode.Node + + // Check if operation has more than one tag + opTags := operation.GetTags() + if len(opTags) <= 1 { + continue + } + + // Get operation identifier (prefer operationId, fallback to method + path) + opIdentifier := operation.GetOperationID() + if opIdentifier == "" { + method, path := openapi.ExtractMethodAndPath(opNode.Location) + if method != "" { + opIdentifier = fmt.Sprintf("`%s` operation at path `%s`", strings.ToUpper(method), path) + } + } else { + opIdentifier = fmt.Sprintf("`%s` operation", opIdentifier) + } + if opIdentifier == "" { + continue + } + + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + RuleStyleOperationSingularTag, + fmt.Errorf("the %s contains more than one tag (%d is too many)", opIdentifier, len(opTags)), + operation.GetCore().Tags.ValueNode, + )) + } + + return errs +} diff --git a/openapi/linter/rules/operation_singular_tag_test.go b/openapi/linter/rules/operation_singular_tag_test.go new file mode 100644 index 00000000..9e720dd9 --- /dev/null +++ b/openapi/linter/rules/operation_singular_tag_test.go @@ -0,0 +1,213 @@ +package rules_test + +import ( + "strings" + "testing" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/openapi/linter/rules" + "github.com/speakeasy-api/openapi/references" + "github.com/speakeasy-api/openapi/validation" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestOperationSingularTagRule_ValidCases(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + }{ + { + name: "operation with single tag", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + tags: + - users + responses: + '200': + description: ok +`, + }, + { + name: "operation with no tags", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '200': + description: ok +`, + }, + { + name: "multiple operations each with single tag", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + tags: + - users + responses: + '200': + description: ok + /products: + get: + tags: + - products + responses: + '200': + description: ok +`, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := 
&rules.OperationSingularTagRule{} + config := &linter.RuleConfig{} + + // Build index for the rule + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + assert.Empty(t, errs) + }) + } +} + +func TestOperationSingularTagRule_Violations(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + expectedError string + }{ + { + name: "operation with two tags", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + tags: + - users + - admin + responses: + '200': + description: ok +`, + expectedError: "[10:9] warning style-operation-singular-tag the `GET` operation at path `/users` contains more than one tag (2 is too many)", + }, + { + name: "operation with three tags", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + tags: + - users + - admin + - public + responses: + '200': + description: ok +`, + expectedError: "[10:9] warning style-operation-singular-tag the `GET` operation at path `/users` contains more than one tag (3 is too many)", + }, + { + name: "operation with operationId and multiple tags", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + operationId: listUsers + tags: + - users + - admin + responses: + '200': + description: ok +`, + expectedError: "[11:9] warning style-operation-singular-tag the `listUsers` operation contains more than one tag (2 is too many)", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.OperationSingularTagRule{} + config := &linter.RuleConfig{} + + // Build index for the rule + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + + require.Len(t, errs, 1) + assert.Equal(t, tt.expectedError, errs[0].Error()) + }) + } +} + +func TestOperationSingularTagRule_RuleMetadata(t *testing.T) { + t.Parallel() + + rule := &rules.OperationSingularTagRule{} + + assert.Equal(t, "style-operation-singular-tag", rule.ID()) + assert.Equal(t, rules.CategoryStyle, rule.Category()) + assert.NotEmpty(t, rule.Description()) + assert.NotEmpty(t, rule.Link()) + assert.Equal(t, validation.SeverityWarning, rule.DefaultSeverity()) + assert.Nil(t, rule.Versions()) +} diff --git a/openapi/linter/rules/operation_success_response.go b/openapi/linter/rules/operation_success_response.go new file mode 100644 index 00000000..4a73ce74 --- /dev/null +++ b/openapi/linter/rules/operation_success_response.go @@ -0,0 +1,171 @@ +package rules + +import ( + "context" + "fmt" + "strconv" + "strings" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/validation" + "gopkg.in/yaml.v3" +) + +const RuleStyleOperationSuccessResponse = "style-operation-success-response" + +type OperationSuccessResponseRule struct{} + +func (r *OperationSuccessResponseRule) ID() string { return RuleStyleOperationSuccessResponse } + +func (r *OperationSuccessResponseRule) Category() string { return CategoryStyle } + +func (r 
*OperationSuccessResponseRule) Description() string { + return "Operations should define at least one 2xx or 3xx response code to indicate successful execution. Success responses are essential for API consumers to understand what data they'll receive when requests complete successfully." +} + +func (r *OperationSuccessResponseRule) Summary() string { + return "Operations must define at least one 2xx or 3xx response." +} + +func (r *OperationSuccessResponseRule) HowToFix() string { + return "Add at least one 2xx or 3xx response code definition per operation." +} + +func (r *OperationSuccessResponseRule) Link() string { + return "https://github.com/speakeasy-api/openapi/blob/main/openapi/linter/README.md#style-operation-success-response" +} + +func (r *OperationSuccessResponseRule) DefaultSeverity() validation.Severity { + return validation.SeverityWarning +} + +func (r *OperationSuccessResponseRule) Versions() []string { return nil } + +func (r *OperationSuccessResponseRule) Run(ctx context.Context, docInfo *linter.DocumentInfo[*openapi.OpenAPI], config *linter.RuleConfig) []error { + if docInfo == nil || docInfo.Document == nil || docInfo.Index == nil { + return nil + } + + doc := docInfo.Document + isOAS3 := strings.HasPrefix(doc.GetOpenAPI(), "3.") + + var errs []error + + // Use the pre-computed Operations index + for _, opNode := range docInfo.Index.Operations { + op := opNode.Node + + responses := op.GetResponses() + responseSeen := false + responseInvalidType := false + invalidCodes := []int{} + + if responses != nil { + for code := range responses.All() { + codeVal, err := strconv.Atoi(code) + if err == nil && codeVal >= 200 && codeVal < 400 { + responseSeen = true + } + } + + if isOAS3 { + responseInvalidType, invalidCodes = findIntegerResponseCodes(op) + if responseInvalidType { + responseSeen = true + } + } + } + + if !responseSeen || responseInvalidType { + opName := op.GetOperationID() + if opName == "" { + opName = "undefined operation (no operationId)" + } + + errNode := getOperationResponsesKeyNode(op, doc) + + if !responseSeen { + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + RuleStyleOperationSuccessResponse, + fmt.Errorf("operation `%s` must define at least a single `2xx` or `3xx` response", opName), + errNode, + )) + } + + if responseInvalidType { + for _, code := range invalidCodes { + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + RuleStyleOperationSuccessResponse, + fmt.Errorf("operation `%s` uses an `integer` instead of a `string` for response code `%d`", opName, code), + errNode, + )) + } + } + } + } + + return errs +} + +func getOperationResponsesKeyNode(op *openapi.Operation, doc *openapi.OpenAPI) *yaml.Node { + if op == nil { + if doc != nil { + return doc.GetRootNode() + } + return nil + } + + core := op.GetCore() + if core != nil && core.Responses.Present && core.Responses.KeyNode != nil { + return core.Responses.KeyNode + } + + if core != nil && core.GetRootNode() != nil { + return core.GetRootNode() + } + + if doc != nil { + return doc.GetRootNode() + } + + return nil +} + +func findIntegerResponseCodes(op *openapi.Operation) (bool, []int) { + core := op.GetCore() + if core == nil || !core.Responses.Present || core.Responses.ValueNode == nil { + return false, nil + } + + valueNode := core.Responses.ValueNode + if valueNode.Kind != yaml.MappingNode { + return false, nil + } + + invalidCodes := []int{} + for i := 0; i < len(valueNode.Content); i += 2 { + keyNode 
:= valueNode.Content[i] + if keyNode == nil || keyNode.Kind != yaml.ScalarNode { + continue + } + + if keyNode.Tag != "!!int" { + continue + } + + codeVal, err := strconv.Atoi(keyNode.Value) + if err != nil { + continue + } + invalidCodes = append(invalidCodes, codeVal) + } + + if len(invalidCodes) == 0 { + return false, nil + } + + return true, invalidCodes +} diff --git a/openapi/linter/rules/operation_success_response_test.go b/openapi/linter/rules/operation_success_response_test.go new file mode 100644 index 00000000..81beddaf --- /dev/null +++ b/openapi/linter/rules/operation_success_response_test.go @@ -0,0 +1,244 @@ +package rules_test + +import ( + "strings" + "testing" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/openapi/linter/rules" + "github.com/speakeasy-api/openapi/references" + "github.com/speakeasy-api/openapi/validation" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestOperationSuccessResponseRule_ValidCases(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + }{ + { + name: "operation with 2xx response", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + operationId: listUsers + responses: + '200': + description: ok +`, + }, + { + name: "operation with 3xx response", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + operationId: listUsers + responses: + '302': + description: redirect +`, + }, + { + name: "operation with mixed responses", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + operationId: listUsers + responses: + '204': + description: no content + '404': + description: missing +`, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + ctx := t.Context() + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err, "unmarshal should succeed") + + rule := &rules.OperationSuccessResponseRule{} + config := &linter.RuleConfig{} + + // Build index for the rule + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + assert.Empty(t, errs, "should have no lint errors") + }) + } +} + +func TestOperationSuccessResponseRule_Violations(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + expectedErrors []string + }{ + { + name: "missing success response", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + operationId: listUsers + responses: + '404': + description: missing +`, + expectedErrors: []string{ + "[10:7] warning style-operation-success-response operation `listUsers` must define at least a single `2xx` or `3xx` response", + }, + }, + { + name: "missing responses", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + operationId: listUsers +`, + expectedErrors: []string{ + "[9:7] warning style-operation-success-response operation `listUsers` must define at least a single `2xx` or `3xx` response", + }, + }, + { + name: "missing success response without operationId", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '500': + description: error +`, + 
expectedErrors: []string{ + "[9:7] warning style-operation-success-response operation `undefined operation (no operationId)` must define at least a single `2xx` or `3xx` response", + }, + }, + { + name: "integer response code in OAS3", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + operationId: listUsers + responses: + 200: + description: ok +`, + expectedErrors: []string{ + "[10:7] warning style-operation-success-response operation `listUsers` uses an `integer` instead of a `string` for response code `200`", + }, + }, + { + name: "missing success response and integer response codes", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + operationId: listUsers + responses: + 404: + description: missing +`, + expectedErrors: []string{ + "[10:7] warning style-operation-success-response operation `listUsers` uses an `integer` instead of a `string` for response code `404`", + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + ctx := t.Context() + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err, "unmarshal should succeed") + + rule := &rules.OperationSuccessResponseRule{} + config := &linter.RuleConfig{} + + // Build index for the rule + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + require.NotEmpty(t, errs, "should have lint errors") + + var errMsgs []string + for _, err := range errs { + errMsgs = append(errMsgs, err.Error()) + } + + assert.ElementsMatch(t, tt.expectedErrors, errMsgs) + }) + } +} + +func TestOperationSuccessResponseRule_RuleMetadata(t *testing.T) { + t.Parallel() + + rule := &rules.OperationSuccessResponseRule{} + + assert.Equal(t, "style-operation-success-response", rule.ID()) + assert.Equal(t, rules.CategoryStyle, rule.Category()) + assert.NotEmpty(t, rule.Description()) + assert.NotEmpty(t, rule.Link()) + assert.Equal(t, validation.SeverityWarning, rule.DefaultSeverity()) + assert.Nil(t, rule.Versions()) +} diff --git a/openapi/linter/rules/operation_tag_defined.go b/openapi/linter/rules/operation_tag_defined.go new file mode 100644 index 00000000..9862bef2 --- /dev/null +++ b/openapi/linter/rules/operation_tag_defined.go @@ -0,0 +1,87 @@ +package rules + +import ( + "context" + "fmt" + "strings" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/validation" +) + +const RuleStyleOperationTagDefined = "style-operation-tag-defined" + +type OperationTagDefinedRule struct{} + +func (r *OperationTagDefinedRule) ID() string { return RuleStyleOperationTagDefined } +func (r *OperationTagDefinedRule) Category() string { return CategoryStyle } +func (r *OperationTagDefinedRule) Description() string { + return "Operation tags should be declared in the global tags array at the specification root. Pre-defining tags ensures consistency, enables tag-level documentation, and helps maintain a well-organized API structure." +} +func (r *OperationTagDefinedRule) Summary() string { + return "Operation tags should be defined in the global tags list." +} +func (r *OperationTagDefinedRule) HowToFix() string { + return "Add each operation tag to the top-level tags array." 
+} +func (r *OperationTagDefinedRule) Link() string { + return "https://github.com/speakeasy-api/openapi/blob/main/openapi/linter/README.md#style-operation-tag-defined" +} +func (r *OperationTagDefinedRule) DefaultSeverity() validation.Severity { + return validation.SeverityWarning +} +func (r *OperationTagDefinedRule) Versions() []string { + return nil // Applies to all OpenAPI versions +} + +func (r *OperationTagDefinedRule) Run(ctx context.Context, docInfo *linter.DocumentInfo[*openapi.OpenAPI], config *linter.RuleConfig) []error { + if docInfo == nil || docInfo.Document == nil || docInfo.Index == nil { + return nil + } + + var errs []error + + // Build map of global tags + globalTags := make(map[string]bool) + for _, tagNode := range docInfo.Index.Tags { + tag := tagNode.Node + if tag.Name != "" { + globalTags[tag.Name] = true + } + } + + // Use index to iterate through all operations + for _, opNode := range docInfo.Index.Operations { + operation := opNode.Node + + // Get operation identifier (prefer operationId, fallback to method + path) + opIdentifier := operation.GetOperationID() + if opIdentifier == "" { + method, path := openapi.ExtractMethodAndPath(opNode.Location) + if method != "" { + opIdentifier = fmt.Sprintf("`%s` %s", strings.ToUpper(method), path) + } + } + if opIdentifier == "" { + continue + } + + // Check each tag in the operation + opTags := operation.GetTags() + for i, tagName := range opTags { + if tagName != "" && !globalTags[tagName] { + errs = append(errs, validation.NewSliceError( + config.GetSeverity(r.DefaultSeverity()), + RuleStyleOperationTagDefined, + fmt.Errorf("tag `%s` for %s operation is not defined as a global tag", tagName, opIdentifier), + operation.GetCore(), + operation.GetCore().Tags, + i, + )) + } + } + } + + return errs +} diff --git a/openapi/linter/rules/operation_tag_defined_test.go b/openapi/linter/rules/operation_tag_defined_test.go new file mode 100644 index 00000000..afe71b1e --- /dev/null +++ b/openapi/linter/rules/operation_tag_defined_test.go @@ -0,0 +1,254 @@ +package rules_test + +import ( + "strings" + "testing" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/openapi/linter/rules" + "github.com/speakeasy-api/openapi/references" + "github.com/speakeasy-api/openapi/validation" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestOperationTagDefinedRule_ValidCases(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + }{ + { + name: "all operation tags are defined globally", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +tags: + - name: users + - name: products +paths: + /users: + get: + tags: + - users + responses: + '200': + description: ok + /products: + get: + tags: + - products + responses: + '200': + description: ok +`, + }, + { + name: "operations without tags", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '200': + description: ok +`, + }, + { + name: "no global tags but no operation tags", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '200': + description: ok +`, + }, + { + name: "multiple tags all defined", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +tags: + - name: users + - name: admin +paths: + /users: + get: + tags: + - users + - admin + responses: + '200': + description: ok +`, + }, + } + + for _, tt := range tests { + 
t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.OperationTagDefinedRule{} + config := &linter.RuleConfig{} + + // Build index for the rule + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + assert.Empty(t, errs) + }) + } +} + +func TestOperationTagDefinedRule_Violations(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + expectedError string + }{ + { + name: "operation tag not defined globally", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +tags: + - name: users +paths: + /products: + get: + tags: + - products + responses: + '200': + description: ok +`, + expectedError: "[12:11] warning style-operation-tag-defined tag `products` for `GET` /products operation is not defined as a global tag", + }, + { + name: "one of multiple tags not defined", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +tags: + - name: users +paths: + /users: + get: + tags: + - users + - admin + responses: + '200': + description: ok +`, + expectedError: "[13:11] warning style-operation-tag-defined tag `admin` for `GET` /users operation is not defined as a global tag", + }, + { + name: "no global tags but operation has tag", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + tags: + - users + responses: + '200': + description: ok +`, + expectedError: "[10:11] warning style-operation-tag-defined tag `users` for `GET` /users operation is not defined as a global tag", + }, + { + name: "operation with operationId uses id in error message", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + operationId: listUsers + tags: + - admin + responses: + '200': + description: ok +`, + expectedError: "[11:11] warning style-operation-tag-defined tag `admin` for listUsers operation is not defined as a global tag", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.OperationTagDefinedRule{} + config := &linter.RuleConfig{} + + // Build index for the rule + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + + require.Len(t, errs, 1) + assert.Equal(t, tt.expectedError, errs[0].Error()) + }) + } +} + +func TestOperationTagDefinedRule_RuleMetadata(t *testing.T) { + t.Parallel() + + rule := &rules.OperationTagDefinedRule{} + + assert.Equal(t, "style-operation-tag-defined", rule.ID()) + assert.Equal(t, rules.CategoryStyle, rule.Category()) + assert.NotEmpty(t, rule.Description()) + assert.NotEmpty(t, rule.Link()) + assert.Equal(t, validation.SeverityWarning, rule.DefaultSeverity()) + assert.Nil(t, rule.Versions()) +} diff --git a/openapi/linter/rules/operation_tags.go b/openapi/linter/rules/operation_tags.go new file mode 100644 index 00000000..17be43b3 --- /dev/null +++ b/openapi/linter/rules/operation_tags.go @@ -0,0 +1,83 @@ +package rules + +import ( + "context" + "fmt" + 
"strings" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/validation" +) + +const RuleStyleOperationTags = "style-operation-tags" + +type OperationTagsRule struct{} + +func (r *OperationTagsRule) ID() string { + return RuleStyleOperationTags +} + +func (r *OperationTagsRule) Description() string { + return "Operations should have at least one tag to enable logical grouping and organization in documentation. Tags help developers navigate the API by categorizing related operations together." +} + +func (r *OperationTagsRule) Summary() string { + return "Operations should have at least one tag." +} + +func (r *OperationTagsRule) HowToFix() string { + return "Add at least one tag to each operation." +} + +func (r *OperationTagsRule) Category() string { + return CategoryStyle +} + +func (r *OperationTagsRule) DefaultSeverity() validation.Severity { + return validation.SeverityWarning +} + +func (r *OperationTagsRule) Link() string { + return "https://github.com/speakeasy-api/openapi/blob/main/openapi/linter/README.md#style-operation-tags" +} + +func (r *OperationTagsRule) Versions() []string { + return nil // applies to all versions +} + +func (r *OperationTagsRule) Run(ctx context.Context, docInfo *linter.DocumentInfo[*openapi.OpenAPI], config *linter.RuleConfig) []error { + if docInfo == nil || docInfo.Document == nil || docInfo.Index == nil { + return nil + } + + var errs []error + + for _, opNode := range docInfo.Index.Operations { + operation := opNode.Node + if operation == nil { + continue + } + + tags := operation.GetTags() + if len(tags) == 0 { + // Get operation identifier (prefer operationId, fallback to method + path) + opIdentifier := operation.GetOperationID() + if opIdentifier == "" { + method, path := openapi.ExtractMethodAndPath(opNode.Location) + if method != "" { + opIdentifier = fmt.Sprintf("`%s` %s", strings.ToUpper(method), path) + } + } + + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + RuleStyleOperationTags, + fmt.Errorf("the %s is missing tags", opIdentifier), + operation.GetRootNode(), + )) + } + } + + return errs +} diff --git a/openapi/linter/rules/operation_tags_test.go b/openapi/linter/rules/operation_tags_test.go new file mode 100644 index 00000000..801fc7b5 --- /dev/null +++ b/openapi/linter/rules/operation_tags_test.go @@ -0,0 +1,209 @@ +package rules_test + +import ( + "strings" + "testing" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/openapi/linter/rules" + "github.com/speakeasy-api/openapi/references" + "github.com/speakeasy-api/openapi/validation" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestOperationTagsRule_ValidCases(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + }{ + { + name: "operation with tags", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + tags: + - users + responses: + '200': + description: ok +`, + }, + { + name: "operation with multiple tags", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + tags: + - users + - public + responses: + '200': + description: ok +`, + }, + { + name: "all operations have tags", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + tags: + - users + responses: + '200': + description: ok + post: + tags: + - 
users + responses: + '201': + description: created +`, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + + rule := &rules.OperationTagsRule{} + config := &linter.RuleConfig{} + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + assert.Empty(t, errs) + }) + } +} + +func TestOperationTagsRule_Violations(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + expectedError string + }{ + { + name: "operation without tags", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '200': + description: ok +`, + expectedError: "[9:7] warning style-operation-tags the `GET` /users is missing tags", + }, + { + name: "operation with operationId but no tags", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + post: + operationId: createUser + responses: + '201': + description: created +`, + expectedError: "[9:7] warning style-operation-tags the createUser is missing tags", + }, + { + name: "one operation with tags, one without", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + tags: + - users + responses: + '200': + description: ok + post: + responses: + '201': + description: created +`, + expectedError: "[15:7] warning style-operation-tags the `POST` /users is missing tags", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + + rule := &rules.OperationTagsRule{} + config := &linter.RuleConfig{} + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + + require.NotEmpty(t, errs) + assert.Equal(t, tt.expectedError, errs[0].Error()) + }) + } +} + +func TestOperationTagsRule_RuleMetadata(t *testing.T) { + t.Parallel() + + rule := &rules.OperationTagsRule{} + + assert.Equal(t, "style-operation-tags", rule.ID()) + assert.Equal(t, rules.CategoryStyle, rule.Category()) + assert.NotEmpty(t, rule.Description()) + assert.NotEmpty(t, rule.Link()) + assert.Equal(t, validation.SeverityWarning, rule.DefaultSeverity()) + assert.Nil(t, rule.Versions()) +} diff --git a/openapi/linter/rules/owasp_additional_properties_constrained.go b/openapi/linter/rules/owasp_additional_properties_constrained.go new file mode 100644 index 00000000..cf538e18 --- /dev/null +++ b/openapi/linter/rules/owasp_additional_properties_constrained.go @@ -0,0 +1,110 @@ +package rules + +import ( + "context" + "errors" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/validation" +) + +const RuleOwaspAdditionalPropertiesConstrained = "owasp-additional-properties-constrained" + +type OwaspAdditionalPropertiesConstrainedRule struct{} + +func (r *OwaspAdditionalPropertiesConstrainedRule) ID() string { + return RuleOwaspAdditionalPropertiesConstrained +} +func (r *OwaspAdditionalPropertiesConstrainedRule) 
Category() string { + return CategorySecurity +} +func (r *OwaspAdditionalPropertiesConstrainedRule) Description() string { + return "Schemas with additionalProperties set to true or a schema should define maxProperties to limit object size. Without size limits, APIs are vulnerable to resource exhaustion attacks where clients send excessively large objects." +} +func (r *OwaspAdditionalPropertiesConstrainedRule) Summary() string { + return "Schemas with additionalProperties should define maxProperties." +} +func (r *OwaspAdditionalPropertiesConstrainedRule) HowToFix() string { + return "When additionalProperties is true or a schema, add a maxProperties limit to bound object size." +} +func (r *OwaspAdditionalPropertiesConstrainedRule) Link() string { + return "https://github.com/speakeasy-api/openapi/blob/main/openapi/linter/README.md#owasp-additional-properties-constrained" +} +func (r *OwaspAdditionalPropertiesConstrainedRule) DefaultSeverity() validation.Severity { + return validation.SeverityHint +} +func (r *OwaspAdditionalPropertiesConstrainedRule) Versions() []string { + return []string{"3.0", "3.1"} +} + +func (r *OwaspAdditionalPropertiesConstrainedRule) Run(ctx context.Context, docInfo *linter.DocumentInfo[*openapi.OpenAPI], config *linter.RuleConfig) []error { + if docInfo == nil || docInfo.Document == nil || docInfo.Index == nil { + return nil + } + + var errs []error + + // Check all schemas in the document + for _, schemaNode := range docInfo.Index.GetAllSchemas() { + refSchema := schemaNode.Node + schema := refSchema.GetSchema() + if schema == nil { + continue + } + + // Check if type contains "object" + types := schema.GetType() + hasObjectType := false + for _, typ := range types { + if typ == "object" { + hasObjectType = true + break + } + } + + if !hasObjectType { + continue + } + + // Check additionalProperties + additionalProps := schema.GetAdditionalProperties() + if additionalProps == nil { + // Not set - no constraint needed + continue + } + + // Check if additionalProperties allows additional properties + // (either as a schema object or as true) + allowsAdditional := false + + if additionalProps.IsBool() { + // It's a boolean value + boolVal := additionalProps.GetBool() + if boolVal != nil && *boolVal { + // additionalProperties: true + allowsAdditional = true + } + } else { + // It's a schema object - allows additional properties + allowsAdditional = true + } + + // If additional properties are allowed, maxProperties should be defined + if allowsAdditional { + maxProps := schema.GetMaxProperties() + if maxProps == nil { + if rootNode := refSchema.GetRootNode(); rootNode != nil { + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + RuleOwaspAdditionalPropertiesConstrained, + errors.New("schema should define maxProperties when additionalProperties is set to true or a schema"), + rootNode, + )) + } + } + } + } + + return errs +} diff --git a/openapi/linter/rules/owasp_additional_properties_constrained_test.go b/openapi/linter/rules/owasp_additional_properties_constrained_test.go new file mode 100644 index 00000000..b14c7b59 --- /dev/null +++ b/openapi/linter/rules/owasp_additional_properties_constrained_test.go @@ -0,0 +1,250 @@ +package rules_test + +import ( + "strings" + "testing" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/openapi/linter/rules" + "github.com/speakeasy-api/openapi/references" + 
"github.com/speakeasy-api/openapi/validation" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestOwaspAdditionalPropertiesConstrainedRule_ValidCases(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + }{ + { + name: "object without additionalProperties", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + User: + type: object + properties: + name: + type: string + maxLength: 100 +paths: {} +`, + }, + { + name: "object with additionalProperties false", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + User: + type: object + properties: + name: + type: string + maxLength: 100 + additionalProperties: false +paths: {} +`, + }, + { + name: "object with additionalProperties true and maxProperties", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + Config: + type: object + properties: + setting: + type: string + maxLength: 50 + additionalProperties: true + maxProperties: 10 +paths: {} +`, + }, + { + name: "object with additionalProperties schema and maxProperties", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + Config: + type: object + properties: + name: + type: string + maxLength: 100 + additionalProperties: + type: string + maxLength: 50 + maxProperties: 20 +paths: {} +`, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.OwaspAdditionalPropertiesConstrainedRule{} + config := &linter.RuleConfig{} + + // Build index for the rule + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + assert.Empty(t, errs) + }) + } +} + +func TestOwaspAdditionalPropertiesConstrainedRule_Violations(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + expectedCount int + }{ + { + name: "object with additionalProperties true without maxProperties", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + Config: + type: object + properties: + setting: + type: string + maxLength: 50 + additionalProperties: true +paths: {} +`, + expectedCount: 1, + }, + { + name: "object with additionalProperties schema without maxProperties", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + Config: + type: object + properties: + name: + type: string + maxLength: 100 + additionalProperties: + type: string + maxLength: 50 +paths: {} +`, + expectedCount: 1, + }, + { + name: "multiple objects with violations", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + Config1: + type: object + properties: + setting: + type: string + maxLength: 50 + additionalProperties: true + Config2: + type: object + properties: + value: + type: string + maxLength: 100 + additionalProperties: + type: integer + format: int32 +paths: {} +`, + expectedCount: 2, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := 
&rules.OwaspAdditionalPropertiesConstrainedRule{} + config := &linter.RuleConfig{} + + // Build index for the rule + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + + require.Len(t, errs, tt.expectedCount) + for _, err := range errs { + assert.Contains(t, err.Error(), "maxProperties") + } + }) + } +} + +func TestOwaspAdditionalPropertiesConstrainedRule_RuleMetadata(t *testing.T) { + t.Parallel() + + rule := &rules.OwaspAdditionalPropertiesConstrainedRule{} + + assert.Equal(t, "owasp-additional-properties-constrained", rule.ID()) + assert.Equal(t, rules.CategorySecurity, rule.Category()) + assert.NotEmpty(t, rule.Description()) + assert.NotEmpty(t, rule.Link()) + assert.Equal(t, validation.SeverityHint, rule.DefaultSeverity()) + assert.Equal(t, []string{"3.0", "3.1"}, rule.Versions()) +} diff --git a/openapi/linter/rules/owasp_array_limit.go b/openapi/linter/rules/owasp_array_limit.go new file mode 100644 index 00000000..340b18f9 --- /dev/null +++ b/openapi/linter/rules/owasp_array_limit.go @@ -0,0 +1,85 @@ +package rules + +import ( + "context" + "errors" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/validation" +) + +const RuleOwaspArrayLimit = "owasp-array-limit" + +type OwaspArrayLimitRule struct{} + +func (r *OwaspArrayLimitRule) ID() string { + return RuleOwaspArrayLimit +} +func (r *OwaspArrayLimitRule) Category() string { + return CategorySecurity +} +func (r *OwaspArrayLimitRule) Description() string { + return "Array schemas must specify `maxItems` to prevent resource exhaustion attacks. Without array size limits, malicious clients could send extremely large arrays that consume excessive memory or processing time." +} +func (r *OwaspArrayLimitRule) Summary() string { + return "Array schemas must specify `maxItems`." +} +func (r *OwaspArrayLimitRule) HowToFix() string { + return "Add a `maxItems` constraint to array schemas to cap the allowed number of items." 
+} +func (r *OwaspArrayLimitRule) Link() string { + return "https://github.com/speakeasy-api/openapi/blob/main/openapi/linter/README.md#owasp-array-limit" +} +func (r *OwaspArrayLimitRule) DefaultSeverity() validation.Severity { + return validation.SeverityError +} +func (r *OwaspArrayLimitRule) Versions() []string { + return []string{"3.0", "3.1"} // OAS3 only +} + +func (r *OwaspArrayLimitRule) Run(ctx context.Context, docInfo *linter.DocumentInfo[*openapi.OpenAPI], config *linter.RuleConfig) []error { + if docInfo == nil || docInfo.Document == nil || docInfo.Index == nil { + return nil + } + + var errs []error + + // Check all schemas + for _, schemaNode := range docInfo.Index.GetAllSchemas() { + refSchema := schemaNode.Node + schema := refSchema.GetSchema() + if schema == nil { + continue + } + + // Check if type contains "array" + types := schema.GetType() + hasArrayType := false + for _, typ := range types { + if typ == "array" { + hasArrayType = true + break + } + } + + if !hasArrayType { + continue + } + + // Check if maxItems is defined + maxItems := schema.GetMaxItems() + if maxItems == nil { + if rootNode := refSchema.GetRootNode(); rootNode != nil { + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + RuleOwaspArrayLimit, + errors.New("schema of type `array` must specify `maxItems`"), + rootNode, + )) + } + } + } + + return errs +} diff --git a/openapi/linter/rules/owasp_array_limit_test.go b/openapi/linter/rules/owasp_array_limit_test.go new file mode 100644 index 00000000..e2df7d60 --- /dev/null +++ b/openapi/linter/rules/owasp_array_limit_test.go @@ -0,0 +1,282 @@ +package rules_test + +import ( + "strings" + "testing" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/openapi/linter/rules" + "github.com/speakeasy-api/openapi/references" + "github.com/speakeasy-api/openapi/validation" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestOwaspArrayLimitRule_ValidCases(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + }{ + { + name: "array with maxItems", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + Items: + type: array + maxItems: 100 + items: + type: string +paths: {} +`, + }, + { + name: "non-array schema without maxItems", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + User: + type: object + properties: + name: + type: string +paths: {} +`, + }, + { + name: "string schema without maxItems", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + Name: + type: string +paths: {} +`, + }, + { + name: "array in response with maxItems", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '200': + description: Success + content: + application/json: + schema: + type: array + maxItems: 50 + items: + type: object +`, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.OwaspArrayLimitRule{} + config := &linter.RuleConfig{} + + // Build index for the rule + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, 
"test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + assert.Empty(t, errs) + }) + } +} + +func TestOwaspArrayLimitRule_Violations(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + expectedCount int + expectedText string + }{ + { + name: "array without maxItems in component", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + Items: + type: array + items: + type: string +paths: {} +`, + expectedCount: 1, + expectedText: "maxItems", + }, + { + name: "array without maxItems in response", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '200': + description: Success + content: + application/json: + schema: + type: array + items: + type: object +`, + expectedCount: 1, + expectedText: "maxItems", + }, + { + name: "multiple arrays without maxItems", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + Items: + type: array + items: + type: string + Tags: + type: array + items: + type: string +paths: {} +`, + expectedCount: 2, + expectedText: "", + }, + { + name: "array in request body without maxItems", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + post: + requestBody: + content: + application/json: + schema: + type: array + items: + type: object + responses: + '201': + description: Created +`, + expectedCount: 1, + expectedText: "maxItems", + }, + { + name: "nested array without maxItems", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + User: + type: object + properties: + tags: + type: array + items: + type: string +paths: {} +`, + expectedCount: 1, + expectedText: "maxItems", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.OwaspArrayLimitRule{} + config := &linter.RuleConfig{} + + // Build index for the rule + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + + require.Len(t, errs, tt.expectedCount) + for _, err := range errs { + assert.Contains(t, err.Error(), "array") + if tt.expectedText != "" { + assert.Contains(t, err.Error(), tt.expectedText) + } + } + }) + } +} + +func TestOwaspArrayLimitRule_RuleMetadata(t *testing.T) { + t.Parallel() + + rule := &rules.OwaspArrayLimitRule{} + + assert.Equal(t, "owasp-array-limit", rule.ID()) + assert.Equal(t, rules.CategorySecurity, rule.Category()) + assert.NotEmpty(t, rule.Description()) + assert.NotEmpty(t, rule.Link()) + assert.Equal(t, validation.SeverityError, rule.DefaultSeverity()) + assert.Equal(t, []string{"3.0", "3.1"}, rule.Versions()) +} diff --git a/openapi/linter/rules/owasp_auth_insecure_schemes.go b/openapi/linter/rules/owasp_auth_insecure_schemes.go new file mode 100644 index 00000000..0bfb8188 --- /dev/null +++ b/openapi/linter/rules/owasp_auth_insecure_schemes.go @@ -0,0 +1,97 @@ +package rules + +import ( + "context" + "fmt" + "strings" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/validation" + "github.com/speakeasy-api/openapi/yml" +) + +const RuleOwaspAuthInsecureSchemes = "owasp-auth-insecure-schemes" + +type 
OwaspAuthInsecureSchemesRule struct{} + +func (r *OwaspAuthInsecureSchemesRule) ID() string { return RuleOwaspAuthInsecureSchemes } +func (r *OwaspAuthInsecureSchemesRule) Category() string { return CategorySecurity } +func (r *OwaspAuthInsecureSchemesRule) Description() string { + return "Authentication schemes using outdated or insecure methods must be avoided or upgraded. Insecure authentication schemes like API keys in query parameters or `HTTP Basic` over `HTTP` expose credentials and create security vulnerabilities." +} +func (r *OwaspAuthInsecureSchemesRule) Summary() string { + return "Security schemes must not use outdated or insecure HTTP schemes." +} +func (r *OwaspAuthInsecureSchemesRule) HowToFix() string { + return "Replace insecure `HTTP` schemes (`negotiate`/`oauth`) with modern authentication like `OAuth 2.0` or `bearer` tokens." +} +func (r *OwaspAuthInsecureSchemesRule) Link() string { + return "https://github.com/speakeasy-api/openapi/blob/main/openapi/linter/README.md#owasp-auth-insecure-schemes" +} +func (r *OwaspAuthInsecureSchemesRule) DefaultSeverity() validation.Severity { + return validation.SeverityError +} +func (r *OwaspAuthInsecureSchemesRule) Versions() []string { + return nil // Applies to all OpenAPI versions +} + +func (r *OwaspAuthInsecureSchemesRule) Run(ctx context.Context, docInfo *linter.DocumentInfo[*openapi.OpenAPI], config *linter.RuleConfig) []error { + if docInfo == nil || docInfo.Document == nil { + return nil + } + + doc := docInfo.Document + components := doc.GetComponents() + if components == nil { + return nil + } + + securitySchemes := components.GetSecuritySchemes() + if securitySchemes == nil { + return nil + } + + var errs []error + + // Iterate through all security schemes + for name, scheme := range securitySchemes.All() { + if scheme == nil { + continue + } + + // Get the security scheme object + secScheme := scheme.GetObject() + if secScheme == nil { + continue + } + + // Check if this is an HTTP type security scheme + schemeType := secScheme.GetType() + if schemeType != "http" { + continue + } + + // Get the scheme value (basic, bearer, negotiate, oauth, etc.) 
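+		// The comparison below lowercases the scheme first, so variants such as
+		// "NEGOTIATE" or "OAuth" are flagged as well (the rule's tests cover this).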
+ httpScheme := secScheme.GetScheme() + httpSchemeLower := strings.ToLower(httpScheme) + + // Check if it's negotiate or oauth (both insecure/outdated) + if httpSchemeLower == "negotiate" || httpSchemeLower == "oauth" { + // Get the root node to find the scheme key + if rootNode := secScheme.GetRootNode(); rootNode != nil { + _, schemeValueNode, found := yml.GetMapElementNodes(ctx, rootNode, "scheme") + if found && schemeValueNode != nil { + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + RuleOwaspAuthInsecureSchemes, + fmt.Errorf("security scheme `%s` uses `%s` which is outdated or insecure - use modern authentication like `OAuth 2.0` or `bearer` tokens", name, httpScheme), + schemeValueNode, + )) + } + } + } + } + + return errs +} diff --git a/openapi/linter/rules/owasp_auth_insecure_schemes_test.go b/openapi/linter/rules/owasp_auth_insecure_schemes_test.go new file mode 100644 index 00000000..da282b7e --- /dev/null +++ b/openapi/linter/rules/owasp_auth_insecure_schemes_test.go @@ -0,0 +1,268 @@ +package rules_test + +import ( + "strings" + "testing" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/openapi/linter/rules" + "github.com/speakeasy-api/openapi/references" + "github.com/speakeasy-api/openapi/validation" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestOwaspAuthInsecureSchemesRule_ValidCases(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + }{ + { + name: "bearer authentication", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + securitySchemes: + bearerAuth: + type: http + scheme: bearer + bearerFormat: JWT +paths: {} +`, + }, + { + name: "digest authentication", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + securitySchemes: + digestAuth: + type: http + scheme: digest +paths: {} +`, + }, + { + name: "oauth2 authentication (not http oauth)", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + securitySchemes: + oauth: + type: oauth2 + flows: + implicit: + authorizationUrl: https://example.com/oauth/authorize + scopes: + read: Read access +paths: {} +`, + }, + { + name: "api key authentication", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + securitySchemes: + apiKey: + type: apiKey + in: header + name: X-API-Key +paths: {} +`, + }, + { + name: "no security schemes", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: {} +`, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.OwaspAuthInsecureSchemesRule{} + config := &linter.RuleConfig{} + + // Build index for the rule + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + assert.Empty(t, errs) + }) + } +} + +func TestOwaspAuthInsecureSchemesRule_Violations(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + expectedCount int + expectedText string + }{ + { + name: "negotiate authentication", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + 
securitySchemes: + negotiateAuth: + type: http + scheme: negotiate +paths: {} +`, + expectedCount: 1, + expectedText: "negotiate", + }, + { + name: "oauth authentication (http type)", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + securitySchemes: + oauthAuth: + type: http + scheme: oauth +paths: {} +`, + expectedCount: 1, + expectedText: "oauth", + }, + { + name: "multiple insecure schemes", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + securitySchemes: + negotiateAuth: + type: http + scheme: negotiate + oauthAuth: + type: http + scheme: oauth +paths: {} +`, + expectedCount: 2, + expectedText: "", + }, + { + name: "case insensitive - NEGOTIATE", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + securitySchemes: + negotiateAuth: + type: http + scheme: NEGOTIATE +paths: {} +`, + expectedCount: 1, + expectedText: "NEGOTIATE", + }, + { + name: "case insensitive - OAuth", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + securitySchemes: + oauthAuth: + type: http + scheme: OAuth +paths: {} +`, + expectedCount: 1, + expectedText: "OAuth", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.OwaspAuthInsecureSchemesRule{} + config := &linter.RuleConfig{} + + // Build index for the rule + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + + require.Len(t, errs, tt.expectedCount) + for _, err := range errs { + assert.Contains(t, err.Error(), "outdated or insecure") + if tt.expectedText != "" { + assert.Contains(t, err.Error(), tt.expectedText) + } + } + }) + } +} + +func TestOwaspAuthInsecureSchemesRule_RuleMetadata(t *testing.T) { + t.Parallel() + + rule := &rules.OwaspAuthInsecureSchemesRule{} + + assert.Equal(t, "owasp-auth-insecure-schemes", rule.ID()) + assert.Equal(t, rules.CategorySecurity, rule.Category()) + assert.NotEmpty(t, rule.Description()) + assert.NotEmpty(t, rule.Link()) + assert.Equal(t, validation.SeverityError, rule.DefaultSeverity()) + assert.Nil(t, rule.Versions()) +} diff --git a/openapi/linter/rules/owasp_define_error_responses_401.go b/openapi/linter/rules/owasp_define_error_responses_401.go new file mode 100644 index 00000000..dec24447 --- /dev/null +++ b/openapi/linter/rules/owasp_define_error_responses_401.go @@ -0,0 +1,124 @@ +package rules + +import ( + "context" + "fmt" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/validation" + "github.com/speakeasy-api/openapi/yml" +) + +const RuleOwaspDefineErrorResponses401 = "owasp-define-error-responses-401" + +type OwaspDefineErrorResponses401Rule struct{} + +func (r *OwaspDefineErrorResponses401Rule) ID() string { return RuleOwaspDefineErrorResponses401 } +func (r *OwaspDefineErrorResponses401Rule) Category() string { + return CategorySecurity +} +func (r *OwaspDefineErrorResponses401Rule) Description() string { + return "Operations should define a 401 Unauthorized response with a proper schema to handle authentication failures. 
Documenting authentication error responses helps clients implement proper error handling and understand when credentials are invalid or missing." +} +func (r *OwaspDefineErrorResponses401Rule) Summary() string { + return "Operations should define a 401 Unauthorized response with a schema." +} +func (r *OwaspDefineErrorResponses401Rule) HowToFix() string { + return "Add a 401 response with a response body schema to each secured operation." +} +func (r *OwaspDefineErrorResponses401Rule) Link() string { + return "https://github.com/speakeasy-api/openapi/blob/main/openapi/linter/README.md#owasp-define-error-responses-401" +} +func (r *OwaspDefineErrorResponses401Rule) DefaultSeverity() validation.Severity { + return validation.SeverityWarning +} +func (r *OwaspDefineErrorResponses401Rule) Versions() []string { + return nil // Applies to all OpenAPI versions +} + +func (r *OwaspDefineErrorResponses401Rule) Run(ctx context.Context, docInfo *linter.DocumentInfo[*openapi.OpenAPI], config *linter.RuleConfig) []error { + if docInfo == nil || docInfo.Document == nil || docInfo.Index == nil { + return nil + } + + var errs []error + + // Check all operations + for _, opNode := range docInfo.Index.Operations { + op := opNode.Node + if op == nil { + continue + } + + // Get operation details for error messages + method := "" + path := "" + for _, loc := range opNode.Location { + switch openapi.GetParentType(loc) { + case "Paths": + if loc.ParentKey != nil { + path = *loc.ParentKey + } + case "PathItem": + if loc.ParentKey != nil { + method = *loc.ParentKey + } + } + } + + responses := op.GetResponses() + if responses == nil { + // No responses at all - report missing 401 + if rootNode := op.GetRootNode(); rootNode != nil { + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + RuleOwaspDefineErrorResponses401, + fmt.Errorf("operation %s %s is missing 401 Unauthorized error response", method, path), + rootNode, + )) + } + continue + } + + // Check if 401 response exists + response401, has401 := responses.Get("401") + if !has401 { + // Missing 401 response + if rootNode := responses.GetRootNode(); rootNode != nil { + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + RuleOwaspDefineErrorResponses401, + fmt.Errorf("operation %s %s is missing 401 Unauthorized error response", method, path), + rootNode, + )) + } + continue + } + + // 401 exists, check if it has content with schema + if response401 != nil { + responseObj := response401.GetObject() + if responseObj != nil { + content := responseObj.GetContent() + if content == nil || content.Len() == 0 { + // 401 exists but has no content/schema + if rootNode := responseObj.GetRootNode(); rootNode != nil { + _, responseValueNode, found := yml.GetMapElementNodes(ctx, rootNode, "description") + if !found { + responseValueNode = rootNode + } + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + RuleOwaspDefineErrorResponses401, + fmt.Errorf("operation %s %s has 401 response but missing content schema", method, path), + responseValueNode, + )) + } + } + } + } + } + + return errs +} diff --git a/openapi/linter/rules/owasp_define_error_responses_401_test.go b/openapi/linter/rules/owasp_define_error_responses_401_test.go new file mode 100644 index 00000000..5e8ab6b1 --- /dev/null +++ b/openapi/linter/rules/owasp_define_error_responses_401_test.go @@ -0,0 +1,289 @@ +package rules_test + +import ( + "strings" + "testing" + + 
"github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/openapi/linter/rules" + "github.com/speakeasy-api/openapi/references" + "github.com/speakeasy-api/openapi/validation" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestOwaspDefineErrorResponses401Rule_ValidCases(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + }{ + { + name: "operation with 401 response and schema", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '200': + description: Success + '401': + description: Unauthorized + content: + application/json: + schema: + type: object + properties: + error: + type: string +`, + }, + { + name: "multiple operations all with 401", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '200': + description: Success + '401': + description: Unauthorized + content: + application/json: + schema: + type: object + post: + responses: + '201': + description: Created + '401': + description: Unauthorized + content: + application/json: + schema: + type: object +`, + }, + { + name: "401 with multiple content types", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '200': + description: Success + '401': + description: Unauthorized + content: + application/json: + schema: + type: object + application/xml: + schema: + type: object +`, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.OwaspDefineErrorResponses401Rule{} + config := &linter.RuleConfig{} + + // Build index for the rule + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + assert.Empty(t, errs) + }) + } +} + +func TestOwaspDefineErrorResponses401Rule_Violations(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + expectedCount int + expectedText string + }{ + { + name: "missing 401 response", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '200': + description: Success + '500': + description: Server Error +`, + expectedCount: 1, + expectedText: "missing 401", + }, + { + name: "401 exists but no content", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '200': + description: Success + '401': + description: Unauthorized +`, + expectedCount: 1, + expectedText: "missing content schema", + }, + { + name: "multiple operations missing 401", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '200': + description: Success + post: + responses: + '201': + description: Created +`, + expectedCount: 2, + expectedText: "", + }, + { + name: "one operation with 401 one without", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '200': + description: Success + '401': + description: Unauthorized + content: + application/json: + schema: + type: object + post: + responses: + '201': + description: Created +`, + expectedCount: 1, + 
expectedText: "post", + }, + { + name: "401 with empty content object", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '200': + description: Success + '401': + description: Unauthorized + content: {} +`, + expectedCount: 1, + expectedText: "missing content schema", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.OwaspDefineErrorResponses401Rule{} + config := &linter.RuleConfig{} + + // Build index for the rule + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + + require.Len(t, errs, tt.expectedCount) + for _, err := range errs { + assert.Contains(t, err.Error(), "401") + if tt.expectedText != "" { + assert.Contains(t, err.Error(), tt.expectedText) + } + } + }) + } +} + +func TestOwaspDefineErrorResponses401Rule_RuleMetadata(t *testing.T) { + t.Parallel() + + rule := &rules.OwaspDefineErrorResponses401Rule{} + + assert.Equal(t, "owasp-define-error-responses-401", rule.ID()) + assert.Equal(t, rules.CategorySecurity, rule.Category()) + assert.NotEmpty(t, rule.Description()) + assert.NotEmpty(t, rule.Link()) + assert.Equal(t, validation.SeverityWarning, rule.DefaultSeverity()) + assert.Nil(t, rule.Versions()) +} diff --git a/openapi/linter/rules/owasp_define_error_responses_429.go b/openapi/linter/rules/owasp_define_error_responses_429.go new file mode 100644 index 00000000..0c49d8f4 --- /dev/null +++ b/openapi/linter/rules/owasp_define_error_responses_429.go @@ -0,0 +1,124 @@ +package rules + +import ( + "context" + "fmt" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/validation" + "github.com/speakeasy-api/openapi/yml" +) + +const RuleOwaspDefineErrorResponses429 = "owasp-define-error-responses-429" + +type OwaspDefineErrorResponses429Rule struct{} + +func (r *OwaspDefineErrorResponses429Rule) ID() string { return RuleOwaspDefineErrorResponses429 } +func (r *OwaspDefineErrorResponses429Rule) Category() string { + return CategorySecurity +} +func (r *OwaspDefineErrorResponses429Rule) Description() string { + return "Operations should define a 429 Too Many Requests response with a proper schema to indicate rate limiting. Rate limit responses help clients understand when they've exceeded usage thresholds and need to slow down requests." +} +func (r *OwaspDefineErrorResponses429Rule) Summary() string { + return "Operations should define a 429 Too Many Requests response with a schema." +} +func (r *OwaspDefineErrorResponses429Rule) HowToFix() string { + return "Add a 429 response with a response body schema to operations that may be rate limited." 
+} +func (r *OwaspDefineErrorResponses429Rule) Link() string { + return "https://github.com/speakeasy-api/openapi/blob/main/openapi/linter/README.md#owasp-define-error-responses-429" +} +func (r *OwaspDefineErrorResponses429Rule) DefaultSeverity() validation.Severity { + return validation.SeverityWarning +} +func (r *OwaspDefineErrorResponses429Rule) Versions() []string { + return nil // Applies to all OpenAPI versions +} + +func (r *OwaspDefineErrorResponses429Rule) Run(ctx context.Context, docInfo *linter.DocumentInfo[*openapi.OpenAPI], config *linter.RuleConfig) []error { + if docInfo == nil || docInfo.Document == nil || docInfo.Index == nil { + return nil + } + + var errs []error + + // Check all operations + for _, opNode := range docInfo.Index.Operations { + op := opNode.Node + if op == nil { + continue + } + + // Get operation details for error messages + method := "" + path := "" + for _, loc := range opNode.Location { + switch openapi.GetParentType(loc) { + case "Paths": + if loc.ParentKey != nil { + path = *loc.ParentKey + } + case "PathItem": + if loc.ParentKey != nil { + method = *loc.ParentKey + } + } + } + + responses := op.GetResponses() + if responses == nil { + // No responses at all - report missing 429 + if rootNode := op.GetRootNode(); rootNode != nil { + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + RuleOwaspDefineErrorResponses429, + fmt.Errorf("operation %s %s is missing 429 Too Many Requests response", method, path), + rootNode, + )) + } + continue + } + + // Check if 429 response exists + response429, has429 := responses.Get("429") + if !has429 { + // Missing 429 response + if rootNode := responses.GetRootNode(); rootNode != nil { + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + RuleOwaspDefineErrorResponses429, + fmt.Errorf("operation %s %s is missing 429 Too Many Requests response", method, path), + rootNode, + )) + } + continue + } + + // 429 exists, check if it has content with schema + if response429 != nil { + responseObj := response429.GetObject() + if responseObj != nil { + content := responseObj.GetContent() + if content == nil || content.Len() == 0 { + // 429 exists but has no content/schema + if rootNode := responseObj.GetRootNode(); rootNode != nil { + _, responseValueNode, found := yml.GetMapElementNodes(ctx, rootNode, "description") + if !found { + responseValueNode = rootNode + } + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + RuleOwaspDefineErrorResponses429, + fmt.Errorf("operation %s %s has 429 response but missing content schema", method, path), + responseValueNode, + )) + } + } + } + } + } + + return errs +} diff --git a/openapi/linter/rules/owasp_define_error_responses_429_test.go b/openapi/linter/rules/owasp_define_error_responses_429_test.go new file mode 100644 index 00000000..6e538c9f --- /dev/null +++ b/openapi/linter/rules/owasp_define_error_responses_429_test.go @@ -0,0 +1,213 @@ +package rules_test + +import ( + "strings" + "testing" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/openapi/linter/rules" + "github.com/speakeasy-api/openapi/references" + "github.com/speakeasy-api/openapi/validation" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestOwaspDefineErrorResponses429Rule_ValidCases(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string 
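+		// yaml holds a complete OpenAPI document expected to produce no violations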
+ }{ + { + name: "operation with 429 response and schema", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '200': + description: Success + '429': + description: Too Many Requests + content: + application/json: + schema: + type: object + properties: + error: + type: string +`, + }, + { + name: "429 with retry-after header", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '200': + description: Success + '429': + description: Too Many Requests + headers: + Retry-After: + description: Number of seconds to wait + schema: + type: integer + content: + application/json: + schema: + type: object +`, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.OwaspDefineErrorResponses429Rule{} + config := &linter.RuleConfig{} + + // Build index for the rule + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + assert.Empty(t, errs) + }) + } +} + +func TestOwaspDefineErrorResponses429Rule_Violations(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + expectedCount int + expectedText string + }{ + { + name: "missing 429 response", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '200': + description: Success + '401': + description: Unauthorized +`, + expectedCount: 1, + expectedText: "missing 429", + }, + { + name: "429 exists but no content", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '200': + description: Success + '429': + description: Too Many Requests +`, + expectedCount: 1, + expectedText: "missing content schema", + }, + { + name: "multiple operations missing 429", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '200': + description: Success + post: + responses: + '201': + description: Created +`, + expectedCount: 2, + expectedText: "", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.OwaspDefineErrorResponses429Rule{} + config := &linter.RuleConfig{} + + // Build index for the rule + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + + require.Len(t, errs, tt.expectedCount) + for _, err := range errs { + assert.Contains(t, err.Error(), "429") + if tt.expectedText != "" { + assert.Contains(t, err.Error(), tt.expectedText) + } + } + }) + } +} + +func TestOwaspDefineErrorResponses429Rule_RuleMetadata(t *testing.T) { + t.Parallel() + + rule := &rules.OwaspDefineErrorResponses429Rule{} + + assert.Equal(t, "owasp-define-error-responses-429", rule.ID()) + assert.Equal(t, rules.CategorySecurity, rule.Category()) + assert.NotEmpty(t, rule.Description()) + assert.NotEmpty(t, rule.Link()) + assert.Equal(t, validation.SeverityWarning, 
rule.DefaultSeverity()) + assert.Nil(t, rule.Versions()) +} diff --git a/openapi/linter/rules/owasp_define_error_responses_500.go b/openapi/linter/rules/owasp_define_error_responses_500.go new file mode 100644 index 00000000..d807c958 --- /dev/null +++ b/openapi/linter/rules/owasp_define_error_responses_500.go @@ -0,0 +1,124 @@ +package rules + +import ( + "context" + "fmt" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/validation" + "github.com/speakeasy-api/openapi/yml" +) + +const RuleOwaspDefineErrorResponses500 = "owasp-define-error-responses-500" + +type OwaspDefineErrorResponses500Rule struct{} + +func (r *OwaspDefineErrorResponses500Rule) ID() string { return RuleOwaspDefineErrorResponses500 } +func (r *OwaspDefineErrorResponses500Rule) Category() string { + return CategorySecurity +} +func (r *OwaspDefineErrorResponses500Rule) Description() string { + return "Operations should define a 500 Internal Server Error response with a proper schema to handle unexpected failures. Documenting server error responses helps clients distinguish between client-side and server-side problems." +} +func (r *OwaspDefineErrorResponses500Rule) Summary() string { + return "Operations should define a 500 Internal Server Error response with a schema." +} +func (r *OwaspDefineErrorResponses500Rule) HowToFix() string { + return "Add a 500 response with a response body schema to each operation." +} +func (r *OwaspDefineErrorResponses500Rule) Link() string { + return "https://github.com/speakeasy-api/openapi/blob/main/openapi/linter/README.md#owasp-define-error-responses-500" +} +func (r *OwaspDefineErrorResponses500Rule) DefaultSeverity() validation.Severity { + return validation.SeverityWarning +} +func (r *OwaspDefineErrorResponses500Rule) Versions() []string { + return nil // Applies to all OpenAPI versions +} + +func (r *OwaspDefineErrorResponses500Rule) Run(ctx context.Context, docInfo *linter.DocumentInfo[*openapi.OpenAPI], config *linter.RuleConfig) []error { + if docInfo == nil || docInfo.Document == nil || docInfo.Index == nil { + return nil + } + + var errs []error + + // Check all operations + for _, opNode := range docInfo.Index.Operations { + op := opNode.Node + if op == nil { + continue + } + + // Get operation details for error messages + method := "" + path := "" + for _, loc := range opNode.Location { + switch openapi.GetParentType(loc) { + case "Paths": + if loc.ParentKey != nil { + path = *loc.ParentKey + } + case "PathItem": + if loc.ParentKey != nil { + method = *loc.ParentKey + } + } + } + + responses := op.GetResponses() + if responses == nil { + // No responses at all - report missing 500 + if rootNode := op.GetRootNode(); rootNode != nil { + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + RuleOwaspDefineErrorResponses500, + fmt.Errorf("operation %s %s is missing 500 Internal Server Error response", method, path), + rootNode, + )) + } + continue + } + + // Check if 500 response exists + response500, has500 := responses.Get("500") + if !has500 { + // Missing 500 response + if rootNode := responses.GetRootNode(); rootNode != nil { + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + RuleOwaspDefineErrorResponses500, + fmt.Errorf("operation %s %s is missing 500 Internal Server Error response", method, path), + rootNode, + )) + } + continue + } + + // 500 exists, check if it has content with schema + if response500 != 
nil { + responseObj := response500.GetObject() + if responseObj != nil { + content := responseObj.GetContent() + if content == nil || content.Len() == 0 { + // 500 exists but has no content/schema + if rootNode := responseObj.GetRootNode(); rootNode != nil { + _, responseValueNode, found := yml.GetMapElementNodes(ctx, rootNode, "description") + if !found { + responseValueNode = rootNode + } + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + RuleOwaspDefineErrorResponses500, + fmt.Errorf("operation %s %s has 500 response but missing content schema", method, path), + responseValueNode, + )) + } + } + } + } + } + + return errs +} diff --git a/openapi/linter/rules/owasp_define_error_responses_500_test.go b/openapi/linter/rules/owasp_define_error_responses_500_test.go new file mode 100644 index 00000000..49556098 --- /dev/null +++ b/openapi/linter/rules/owasp_define_error_responses_500_test.go @@ -0,0 +1,218 @@ +package rules_test + +import ( + "strings" + "testing" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/openapi/linter/rules" + "github.com/speakeasy-api/openapi/references" + "github.com/speakeasy-api/openapi/validation" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestOwaspDefineErrorResponses500Rule_ValidCases(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + }{ + { + name: "operation with 500 response and schema", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '200': + description: Success + '500': + description: Internal Server Error + content: + application/json: + schema: + type: object + properties: + error: + type: string +`, + }, + { + name: "multiple operations all with 500", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '200': + description: Success + '500': + description: Internal Server Error + content: + application/json: + schema: + type: object + post: + responses: + '201': + description: Created + '500': + description: Internal Server Error + content: + application/json: + schema: + type: object +`, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.OwaspDefineErrorResponses500Rule{} + config := &linter.RuleConfig{} + + // Build index for the rule + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + assert.Empty(t, errs) + }) + } +} + +func TestOwaspDefineErrorResponses500Rule_Violations(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + expectedCount int + expectedText string + }{ + { + name: "missing 500 response", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '200': + description: Success + '401': + description: Unauthorized +`, + expectedCount: 1, + expectedText: "missing 500", + }, + { + name: "500 exists but no content", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '200': + description: Success + '500': + description: 
Internal Server Error +`, + expectedCount: 1, + expectedText: "missing content schema", + }, + { + name: "multiple operations missing 500", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '200': + description: Success + post: + responses: + '201': + description: Created +`, + expectedCount: 2, + expectedText: "", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.OwaspDefineErrorResponses500Rule{} + config := &linter.RuleConfig{} + + // Build index for the rule + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + + require.Len(t, errs, tt.expectedCount) + for _, err := range errs { + assert.Contains(t, err.Error(), "500") + if tt.expectedText != "" { + assert.Contains(t, err.Error(), tt.expectedText) + } + } + }) + } +} + +func TestOwaspDefineErrorResponses500Rule_RuleMetadata(t *testing.T) { + t.Parallel() + + rule := &rules.OwaspDefineErrorResponses500Rule{} + + assert.Equal(t, "owasp-define-error-responses-500", rule.ID()) + assert.Equal(t, rules.CategorySecurity, rule.Category()) + assert.NotEmpty(t, rule.Description()) + assert.NotEmpty(t, rule.Link()) + assert.Equal(t, validation.SeverityWarning, rule.DefaultSeverity()) + assert.Nil(t, rule.Versions()) +} diff --git a/openapi/linter/rules/owasp_define_error_validation.go b/openapi/linter/rules/owasp_define_error_validation.go new file mode 100644 index 00000000..95a71637 --- /dev/null +++ b/openapi/linter/rules/owasp_define_error_validation.go @@ -0,0 +1,102 @@ +package rules + +import ( + "context" + "fmt" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/validation" +) + +const RuleOwaspDefineErrorValidation = "owasp-define-error-validation" + +type OwaspDefineErrorValidationRule struct{} + +func (r *OwaspDefineErrorValidationRule) ID() string { return RuleOwaspDefineErrorValidation } +func (r *OwaspDefineErrorValidationRule) Category() string { + return CategorySecurity +} +func (r *OwaspDefineErrorValidationRule) Description() string { + return "Operations should define validation error responses (400, 422, or 4XX) to indicate request data problems. Validation error responses help clients understand when and why their request data is invalid or malformed." +} +func (r *OwaspDefineErrorValidationRule) Summary() string { + return "Operations should define validation error responses (400, 422, or 4XX)." +} +func (r *OwaspDefineErrorValidationRule) HowToFix() string { + return "Add a 400, 422, or 4XX response with a schema to each operation to describe validation errors." 
+} +func (r *OwaspDefineErrorValidationRule) Link() string { + return "https://github.com/speakeasy-api/openapi/blob/main/openapi/linter/README.md#owasp-define-error-validation" +} +func (r *OwaspDefineErrorValidationRule) DefaultSeverity() validation.Severity { + return validation.SeverityWarning +} +func (r *OwaspDefineErrorValidationRule) Versions() []string { + return nil // Applies to all OpenAPI versions +} + +func (r *OwaspDefineErrorValidationRule) Run(ctx context.Context, docInfo *linter.DocumentInfo[*openapi.OpenAPI], config *linter.RuleConfig) []error { + if docInfo == nil || docInfo.Document == nil || docInfo.Index == nil { + return nil + } + + var errs []error + + // Check all operations + for _, opNode := range docInfo.Index.Operations { + op := opNode.Node + if op == nil { + continue + } + + // Get operation details for error messages + method := "" + path := "" + for _, loc := range opNode.Location { + switch openapi.GetParentType(loc) { + case "Paths": + if loc.ParentKey != nil { + path = *loc.ParentKey + } + case "PathItem": + if loc.ParentKey != nil { + method = *loc.ParentKey + } + } + } + + responses := op.GetResponses() + if responses == nil { + // No responses at all - report missing validation error response + if rootNode := op.GetRootNode(); rootNode != nil { + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + RuleOwaspDefineErrorValidation, + fmt.Errorf("operation %s %s is missing validation error response (400, 422, or 4XX)", method, path), + rootNode, + )) + } + continue + } + + // Check if any of the validation error codes exist + has400, _ := responses.Get("400") + has422, _ := responses.Get("422") + has4XX, _ := responses.Get("4XX") + + if has400 == nil && has422 == nil && has4XX == nil { + // Missing all validation error responses + if rootNode := responses.GetRootNode(); rootNode != nil { + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + RuleOwaspDefineErrorValidation, + fmt.Errorf("operation %s %s is missing validation error response (should have 400, 422, or 4XX)", method, path), + rootNode, + )) + } + } + } + + return errs +} diff --git a/openapi/linter/rules/owasp_define_error_validation_test.go b/openapi/linter/rules/owasp_define_error_validation_test.go new file mode 100644 index 00000000..8ab42439 --- /dev/null +++ b/openapi/linter/rules/owasp_define_error_validation_test.go @@ -0,0 +1,268 @@ +package rules_test + +import ( + "strings" + "testing" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/openapi/linter/rules" + "github.com/speakeasy-api/openapi/references" + "github.com/speakeasy-api/openapi/validation" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestOwaspDefineErrorValidationRule_ValidCases(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + }{ + { + name: "operation with 400 response", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '200': + description: Success + '400': + description: Bad Request + content: + application/json: + schema: + type: object +`, + }, + { + name: "operation with 422 response", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + post: + responses: + '201': + description: Created + '422': + description: Unprocessable Entity + content: + application/json: + schema: + type: object +`, 
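+			// the rule also accepts a `4XX` range response, covered by the next case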
+ }, + { + name: "operation with 4XX wildcard", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '200': + description: Success + '4XX': + description: Client Error + content: + application/json: + schema: + type: object +`, + }, + { + name: "operation with multiple validation error codes", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + post: + responses: + '201': + description: Created + '400': + description: Bad Request + '422': + description: Unprocessable Entity +`, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.OwaspDefineErrorValidationRule{} + config := &linter.RuleConfig{} + + // Build index for the rule + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + assert.Empty(t, errs) + }) + } +} + +func TestOwaspDefineErrorValidationRule_Violations(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + expectedCount int + expectedText string + }{ + { + name: "missing validation error response", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '200': + description: Success + '401': + description: Unauthorized + '500': + description: Server Error +`, + expectedCount: 1, + expectedText: "missing validation error", + }, + { + name: "only success response", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '200': + description: Success +`, + expectedCount: 1, + expectedText: "missing validation error", + }, + { + name: "multiple operations missing validation errors", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '200': + description: Success + post: + responses: + '201': + description: Created +`, + expectedCount: 2, + expectedText: "", + }, + { + name: "one operation with validation error one without", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '200': + description: Success + '400': + description: Bad Request + post: + responses: + '201': + description: Created +`, + expectedCount: 1, + expectedText: "post", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.OwaspDefineErrorValidationRule{} + config := &linter.RuleConfig{} + + // Build index for the rule + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + + require.Len(t, errs, tt.expectedCount) + for _, err := range errs { + assert.Contains(t, err.Error(), "validation error") + if tt.expectedText != "" { + assert.Contains(t, err.Error(), tt.expectedText) + } + } + }) + } +} + +func TestOwaspDefineErrorValidationRule_RuleMetadata(t *testing.T) { + t.Parallel() + + rule := &rules.OwaspDefineErrorValidationRule{} + + assert.Equal(t, 
"owasp-define-error-validation", rule.ID()) + assert.Equal(t, rules.CategorySecurity, rule.Category()) + assert.NotEmpty(t, rule.Description()) + assert.NotEmpty(t, rule.Link()) + assert.Equal(t, validation.SeverityWarning, rule.DefaultSeverity()) + assert.Nil(t, rule.Versions()) +} diff --git a/openapi/linter/rules/owasp_integer_format.go b/openapi/linter/rules/owasp_integer_format.go new file mode 100644 index 00000000..e907be4b --- /dev/null +++ b/openapi/linter/rules/owasp_integer_format.go @@ -0,0 +1,85 @@ +package rules + +import ( + "context" + "errors" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/validation" +) + +const RuleOwaspIntegerFormat = "owasp-integer-format" + +type OwaspIntegerFormatRule struct{} + +func (r *OwaspIntegerFormatRule) ID() string { + return RuleOwaspIntegerFormat +} +func (r *OwaspIntegerFormatRule) Category() string { + return CategorySecurity +} +func (r *OwaspIntegerFormatRule) Description() string { + return "Integer schemas must specify a `format` of `int32` or `int64` to define the expected size and range. Explicit integer formats prevent overflow vulnerabilities and ensure clients and servers agree on numeric boundaries." +} +func (r *OwaspIntegerFormatRule) Summary() string { + return "Integer schemas must specify `int32` or `int64` format." +} +func (r *OwaspIntegerFormatRule) HowToFix() string { + return "Set integer schema `format` to `int32` or `int64` based on the expected range." +} +func (r *OwaspIntegerFormatRule) Link() string { + return "https://github.com/speakeasy-api/openapi/blob/main/openapi/linter/README.md#owasp-integer-format" +} +func (r *OwaspIntegerFormatRule) DefaultSeverity() validation.Severity { + return validation.SeverityError +} +func (r *OwaspIntegerFormatRule) Versions() []string { + return []string{"3.0", "3.1"} +} + +func (r *OwaspIntegerFormatRule) Run(ctx context.Context, docInfo *linter.DocumentInfo[*openapi.OpenAPI], config *linter.RuleConfig) []error { + if docInfo == nil || docInfo.Document == nil || docInfo.Index == nil { + return nil + } + + var errs []error + + // Check all schemas in the document + for _, schemaNode := range docInfo.Index.GetAllSchemas() { + refSchema := schemaNode.Node + schema := refSchema.GetSchema() + if schema == nil { + continue + } + + // Check if type contains "integer" + types := schema.GetType() + hasIntegerType := false + for _, typ := range types { + if typ == "integer" { + hasIntegerType = true + break + } + } + + if !hasIntegerType { + continue + } + + // Check if format is int32 or int64 + format := schema.GetFormat() + if format != "int32" && format != "int64" { + if rootNode := refSchema.GetRootNode(); rootNode != nil { + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + RuleOwaspIntegerFormat, + errors.New("schema of type `integer` must specify `format` as `int32` or `int64`"), + rootNode, + )) + } + } + } + + return errs +} diff --git a/openapi/linter/rules/owasp_integer_format_test.go b/openapi/linter/rules/owasp_integer_format_test.go new file mode 100644 index 00000000..24fd2be6 --- /dev/null +++ b/openapi/linter/rules/owasp_integer_format_test.go @@ -0,0 +1,217 @@ +package rules_test + +import ( + "strings" + "testing" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/openapi/linter/rules" + "github.com/speakeasy-api/openapi/references" + 
"github.com/speakeasy-api/openapi/validation" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestOwaspIntegerFormatRule_ValidCases(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + }{ + { + name: "integer with int32 format", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + Counter: + type: integer + format: int32 +paths: {} +`, + }, + { + name: "integer with int64 format", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + BigCounter: + type: integer + format: int64 +paths: {} +`, + }, + { + name: "non-integer type without format is ok", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + Name: + type: string +paths: {} +`, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.OwaspIntegerFormatRule{} + config := &linter.RuleConfig{} + + // Build index for the rule + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + assert.Empty(t, errs) + }) + } +} + +func TestOwaspIntegerFormatRule_Violations(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + expectedCount int + }{ + { + name: "integer without format", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + Counter: + type: integer +paths: {} +`, + expectedCount: 1, + }, + { + name: "integer with invalid format", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + Counter: + type: integer + format: int16 +paths: {} +`, + expectedCount: 1, + }, + { + name: "multiple integers without proper format", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + User: + type: object + properties: + age: + type: integer + count: + type: integer + format: uint32 +paths: {} +`, + expectedCount: 2, + }, + { + name: "inline integer parameter without format", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + parameters: + - name: limit + in: query + schema: + type: integer + responses: + '200': + description: Success +`, + expectedCount: 1, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.OwaspIntegerFormatRule{} + config := &linter.RuleConfig{} + + // Build index for the rule + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + + require.Len(t, errs, tt.expectedCount) + for _, err := range errs { + assert.Contains(t, err.Error(), "must specify `format` as `int32` or `int64`") + } + }) + } +} + +func TestOwaspIntegerFormatRule_RuleMetadata(t *testing.T) { + t.Parallel() + + rule := &rules.OwaspIntegerFormatRule{} + + assert.Equal(t, "owasp-integer-format", rule.ID()) + assert.Equal(t, rules.CategorySecurity, rule.Category()) + 
assert.NotEmpty(t, rule.Description()) + assert.NotEmpty(t, rule.Link()) + assert.Equal(t, validation.SeverityError, rule.DefaultSeverity()) + assert.Equal(t, []string{"3.0", "3.1"}, rule.Versions()) +} diff --git a/openapi/linter/rules/owasp_integer_limit.go b/openapi/linter/rules/owasp_integer_limit.go new file mode 100644 index 00000000..331304db --- /dev/null +++ b/openapi/linter/rules/owasp_integer_limit.go @@ -0,0 +1,98 @@ +package rules + +import ( + "context" + "errors" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/validation" +) + +const RuleOwaspIntegerLimit = "owasp-integer-limit" + +type OwaspIntegerLimitRule struct{} + +func (r *OwaspIntegerLimitRule) ID() string { + return RuleOwaspIntegerLimit +} +func (r *OwaspIntegerLimitRule) Category() string { + return CategorySecurity +} +func (r *OwaspIntegerLimitRule) Description() string { + return "Integer schemas must specify `minimum` and `maximum` values (or exclusive variants) to prevent unbounded inputs. Without numeric limits, APIs are vulnerable to overflow attacks and unexpected behavior from extreme values." +} +func (r *OwaspIntegerLimitRule) Summary() string { + return "Integer schemas must specify `minimum` and `maximum` values." +} +func (r *OwaspIntegerLimitRule) HowToFix() string { + return "Add `minimum` and `maximum` (or `exclusiveMinimum`/`exclusiveMaximum`) values to integer schemas." +} +func (r *OwaspIntegerLimitRule) Link() string { + return "https://github.com/speakeasy-api/openapi/blob/main/openapi/linter/README.md#owasp-integer-limit" +} +func (r *OwaspIntegerLimitRule) DefaultSeverity() validation.Severity { + return validation.SeverityError +} +func (r *OwaspIntegerLimitRule) Versions() []string { + return []string{"3.0", "3.1"} +} + +func (r *OwaspIntegerLimitRule) Run(ctx context.Context, docInfo *linter.DocumentInfo[*openapi.OpenAPI], config *linter.RuleConfig) []error { + if docInfo == nil || docInfo.Document == nil || docInfo.Index == nil { + return nil + } + + var errs []error + + // Check all schemas in the document + for _, schemaNode := range docInfo.Index.GetAllSchemas() { + refSchema := schemaNode.Node + schema := refSchema.GetSchema() + if schema == nil { + continue + } + + // Check if type contains "integer" + types := schema.GetType() + hasIntegerType := false + for _, typ := range types { + if typ == "integer" { + hasIntegerType = true + break + } + } + + if !hasIntegerType { + continue + } + + // Check if schema has appropriate minimum and maximum constraints + minimum := schema.GetMinimum() + maximum := schema.GetMaximum() + exclusiveMinimum := schema.GetExclusiveMinimum() + exclusiveMaximum := schema.GetExclusiveMaximum() + + // Valid combinations: + // 1. minimum AND maximum + // 2. minimum AND exclusiveMaximum + // 3. exclusiveMinimum AND maximum + // 4. 
exclusiveMinimum AND exclusiveMaximum + + hasMin := minimum != nil || exclusiveMinimum != nil + hasMax := maximum != nil || exclusiveMaximum != nil + + if !hasMin || !hasMax { + if rootNode := refSchema.GetRootNode(); rootNode != nil { + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + RuleOwaspIntegerLimit, + errors.New("schema of type `integer` must specify `minimum` and `maximum` (or `exclusiveMinimum` and `exclusiveMaximum`)"), + rootNode, + )) + } + } + } + + return errs +} diff --git a/openapi/linter/rules/owasp_integer_limit_test.go b/openapi/linter/rules/owasp_integer_limit_test.go new file mode 100644 index 00000000..cde65544 --- /dev/null +++ b/openapi/linter/rules/owasp_integer_limit_test.go @@ -0,0 +1,289 @@ +package rules_test + +import ( + "strings" + "testing" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/openapi/linter/rules" + "github.com/speakeasy-api/openapi/references" + "github.com/speakeasy-api/openapi/validation" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestOwaspIntegerLimitRule_ValidCases(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + }{ + { + name: "integer with minimum and maximum", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + Age: + type: integer + format: int32 + minimum: 0 + maximum: 120 +paths: {} +`, + }, + { + name: "integer with minimum and exclusiveMaximum", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + Counter: + type: integer + format: int32 + minimum: 0 + exclusiveMaximum: 100 +paths: {} +`, + }, + { + name: "integer with exclusiveMinimum and maximum", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + Counter: + type: integer + format: int32 + exclusiveMinimum: 0 + maximum: 100 +paths: {} +`, + }, + { + name: "integer with exclusiveMinimum and exclusiveMaximum", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + Counter: + type: integer + format: int32 + exclusiveMinimum: 0 + exclusiveMaximum: 100 +paths: {} +`, + }, + { + name: "non-integer type without limits is ok", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + Name: + type: string +paths: {} +`, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.OwaspIntegerLimitRule{} + config := &linter.RuleConfig{} + + // Build index for the rule + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + assert.Empty(t, errs) + }) + } +} + +func TestOwaspIntegerLimitRule_Violations(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + expectedCount int + }{ + { + name: "integer without any limits", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + Counter: + type: integer + format: int32 +paths: {} +`, + expectedCount: 1, + }, + { + name: "integer with only minimum", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + 
Counter: + type: integer + format: int32 + minimum: 0 +paths: {} +`, + expectedCount: 1, + }, + { + name: "integer with only maximum", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + Counter: + type: integer + format: int32 + maximum: 100 +paths: {} +`, + expectedCount: 1, + }, + { + name: "integer with only exclusiveMinimum", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + Counter: + type: integer + format: int32 + exclusiveMinimum: 0 +paths: {} +`, + expectedCount: 1, + }, + { + name: "integer with only exclusiveMaximum", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + Counter: + type: integer + format: int32 + exclusiveMaximum: 100 +paths: {} +`, + expectedCount: 1, + }, + { + name: "multiple integers without proper limits", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + User: + type: object + properties: + age: + type: integer + format: int32 + count: + type: integer + format: int32 + minimum: 0 +paths: {} +`, + expectedCount: 2, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.OwaspIntegerLimitRule{} + config := &linter.RuleConfig{} + + // Build index for the rule + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + + require.Len(t, errs, tt.expectedCount) + for _, err := range errs { + assert.Contains(t, err.Error(), "must specify `minimum` and `maximum`") + } + }) + } +} + +func TestOwaspIntegerLimitRule_RuleMetadata(t *testing.T) { + t.Parallel() + + rule := &rules.OwaspIntegerLimitRule{} + + assert.Equal(t, "owasp-integer-limit", rule.ID()) + assert.Equal(t, rules.CategorySecurity, rule.Category()) + assert.NotEmpty(t, rule.Description()) + assert.NotEmpty(t, rule.Link()) + assert.Equal(t, validation.SeverityError, rule.DefaultSeverity()) + assert.Equal(t, []string{"3.0", "3.1"}, rule.Versions()) +} diff --git a/openapi/linter/rules/owasp_jwt_best_practices.go b/openapi/linter/rules/owasp_jwt_best_practices.go new file mode 100644 index 00000000..929b6c5b --- /dev/null +++ b/openapi/linter/rules/owasp_jwt_best_practices.go @@ -0,0 +1,107 @@ +package rules + +import ( + "context" + "fmt" + "strings" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/validation" + "github.com/speakeasy-api/openapi/yml" +) + +const RuleOwaspJWTBestPractices = "owasp-jwt-best-practices" + +type OwaspJWTBestPracticesRule struct{} + +func (r *OwaspJWTBestPracticesRule) ID() string { + return RuleOwaspJWTBestPractices +} +func (r *OwaspJWTBestPracticesRule) Category() string { + return CategorySecurity +} +func (r *OwaspJWTBestPracticesRule) Description() string { + return "Security schemes using OAuth2 or JWT must explicitly declare support for RFC8725 (JWT Best Current Practices) in the description. RFC8725 compliance ensures JWTs are validated properly and protected against common attacks like algorithm confusion." +} +func (r *OwaspJWTBestPracticesRule) Summary() string { + return "OAuth2/JWT schemes must mention RFC8725 in their description." 
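+	// Illustrative, non-normative example (assumed scheme name "bearerAuth"):
+	// the check in Run below only requires the literal string "RFC8725"
+	// somewhere in the scheme description, e.g.
+	//
+	//	bearerAuth:
+	//	  type: http
+	//	  scheme: bearer
+	//	  bearerFormat: JWT
+	//	  description: JWT bearer tokens validated per RFC8725 (JWT Best Current Practices)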
+} +func (r *OwaspJWTBestPracticesRule) HowToFix() string { + return "Update OAuth2/JWT security scheme descriptions to mention RFC8725 compliance." +} +func (r *OwaspJWTBestPracticesRule) Link() string { + return "https://github.com/speakeasy-api/openapi/blob/main/openapi/linter/README.md#owasp-jwt-best-practices" +} +func (r *OwaspJWTBestPracticesRule) DefaultSeverity() validation.Severity { + return validation.SeverityError +} +func (r *OwaspJWTBestPracticesRule) Versions() []string { + return []string{"3.0", "3.1"} // OAS3 only +} + +func (r *OwaspJWTBestPracticesRule) Run(ctx context.Context, docInfo *linter.DocumentInfo[*openapi.OpenAPI], config *linter.RuleConfig) []error { + if docInfo == nil || docInfo.Document == nil { + return nil + } + + var errs []error + + doc := docInfo.Document + components := doc.GetComponents() + if components == nil { + return nil + } + + securitySchemes := components.GetSecuritySchemes() + if securitySchemes == nil || securitySchemes.Len() == 0 { + return nil + } + + // Check each security scheme + for name, scheme := range securitySchemes.All() { + schemeObj := scheme.GetObject() + if schemeObj == nil { + continue + } + + schemeType := schemeObj.GetType() + bearerFormat := schemeObj.GetBearerFormat() + + // Check if this is OAuth2 or JWT bearer + isOAuth2 := schemeType == "oauth2" + isJWT := strings.ToLower(bearerFormat) == "jwt" + + if !isOAuth2 && !isJWT { + continue + } + + // Check if description contains RFC8725 + description := schemeObj.GetDescription() + if !strings.Contains(description, "RFC8725") { + // Try to get the description node for better error location + rootNode := scheme.GetRootNode() + if rootNode != nil { + _, descNode, found := yml.GetMapElementNodes(ctx, rootNode, "description") + if found && descNode != nil { + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + RuleOwaspJWTBestPractices, + fmt.Errorf("security scheme `%s` must explicitly declare support for RFC8725 in the description", name), + descNode, + )) + } else { + // No description field - report on the scheme itself + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + RuleOwaspJWTBestPractices, + fmt.Errorf("security scheme `%s` must explicitly declare support for RFC8725 in the description", name), + rootNode, + )) + } + } + } + } + + return errs +} diff --git a/openapi/linter/rules/owasp_jwt_best_practices_test.go b/openapi/linter/rules/owasp_jwt_best_practices_test.go new file mode 100644 index 00000000..3bc7b565 --- /dev/null +++ b/openapi/linter/rules/owasp_jwt_best_practices_test.go @@ -0,0 +1,300 @@ +package rules_test + +import ( + "strings" + "testing" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/openapi/linter/rules" + "github.com/speakeasy-api/openapi/references" + "github.com/speakeasy-api/openapi/validation" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestOwaspJWTBestPracticesRule_ValidCases(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + }{ + { + name: "oauth2 with RFC8725 in description", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + securitySchemes: + oauth: + type: oauth2 + description: OAuth2 authentication supporting RFC8725 + flows: + implicit: + authorizationUrl: https://example.com/oauth + scopes: + read: Read access +paths: {} +`, + }, + { + name: "jwt bearer with 
RFC8725 in description", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + securitySchemes: + bearer: + type: http + scheme: bearer + bearerFormat: JWT + description: JWT bearer token supporting RFC8725 +paths: {} +`, + }, + { + name: "non-jwt bearer without RFC8725 is ok", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + securitySchemes: + bearer: + type: http + scheme: bearer + description: Bearer token authentication +paths: {} +`, + }, + { + name: "api key without RFC8725 is ok", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + securitySchemes: + apiKey: + type: apiKey + in: header + name: X-API-Key + description: API key authentication +paths: {} +`, + }, + { + name: "no security schemes is ok", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: {} +`, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.OwaspJWTBestPracticesRule{} + config := &linter.RuleConfig{} + + // Build index for the rule + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + assert.Empty(t, errs) + }) + } +} + +func TestOwaspJWTBestPracticesRule_Violations(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + expectedCount int + expectedText string + }{ + { + name: "oauth2 without RFC8725", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + securitySchemes: + oauth: + type: oauth2 + description: OAuth2 authentication + flows: + implicit: + authorizationUrl: https://example.com/oauth + scopes: + read: Read access +paths: {} +`, + expectedCount: 1, + expectedText: "oauth", + }, + { + name: "jwt bearer without RFC8725", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + securitySchemes: + bearer: + type: http + scheme: bearer + bearerFormat: JWT + description: JWT bearer token +paths: {} +`, + expectedCount: 1, + expectedText: "bearer", + }, + { + name: "oauth2 with no description", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + securitySchemes: + oauth: + type: oauth2 + flows: + implicit: + authorizationUrl: https://example.com/oauth + scopes: + read: Read access +paths: {} +`, + expectedCount: 1, + expectedText: "RFC8725", + }, + { + name: "multiple jwt schemes without RFC8725", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + securitySchemes: + oauth: + type: oauth2 + description: OAuth2 authentication + flows: + implicit: + authorizationUrl: https://example.com/oauth + scopes: + read: Read access + bearer: + type: http + scheme: bearer + bearerFormat: JWT + description: JWT bearer token +paths: {} +`, + expectedCount: 2, + expectedText: "", + }, + { + name: "mixed schemes one with one without RFC8725", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + securitySchemes: + oauth: + type: oauth2 + description: OAuth2 authentication supporting RFC8725 + flows: + implicit: + authorizationUrl: https://example.com/oauth + scopes: + read: Read access + bearer: + type: http + scheme: bearer + bearerFormat: JWT + description: JWT bearer token +paths: {} +`, + 
expectedCount: 1, + expectedText: "bearer", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.OwaspJWTBestPracticesRule{} + config := &linter.RuleConfig{} + + // Build index for the rule + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + + require.Len(t, errs, tt.expectedCount) + for _, err := range errs { + assert.Contains(t, err.Error(), "RFC8725") + if tt.expectedText != "" { + assert.Contains(t, err.Error(), tt.expectedText) + } + } + }) + } +} + +func TestOwaspJWTBestPracticesRule_RuleMetadata(t *testing.T) { + t.Parallel() + + rule := &rules.OwaspJWTBestPracticesRule{} + + assert.Equal(t, "owasp-jwt-best-practices", rule.ID()) + assert.Equal(t, rules.CategorySecurity, rule.Category()) + assert.NotEmpty(t, rule.Description()) + assert.NotEmpty(t, rule.Link()) + assert.Equal(t, validation.SeverityError, rule.DefaultSeverity()) + assert.Equal(t, []string{"3.0", "3.1"}, rule.Versions()) +} diff --git a/openapi/linter/rules/owasp_no_additional_properties.go b/openapi/linter/rules/owasp_no_additional_properties.go new file mode 100644 index 00000000..cfa972e8 --- /dev/null +++ b/openapi/linter/rules/owasp_no_additional_properties.go @@ -0,0 +1,107 @@ +package rules + +import ( + "context" + "errors" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/validation" +) + +const RuleOwaspNoAdditionalProperties = "owasp-no-additional-properties" + +type OwaspNoAdditionalPropertiesRule struct{} + +func (r *OwaspNoAdditionalPropertiesRule) ID() string { + return RuleOwaspNoAdditionalProperties +} +func (r *OwaspNoAdditionalPropertiesRule) Category() string { + return CategorySecurity +} +func (r *OwaspNoAdditionalPropertiesRule) Description() string { + return "Object schemas must not allow arbitrary additional properties (set additionalProperties to false or omit it). Allowing unexpected properties can lead to mass assignment vulnerabilities where attackers inject unintended fields." +} +func (r *OwaspNoAdditionalPropertiesRule) Summary() string { + return "Object schemas should not allow additional properties." +} +func (r *OwaspNoAdditionalPropertiesRule) HowToFix() string { + return "Set additionalProperties to false or remove it from object schemas." 
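+	// Illustrative, non-normative sketch of the fix (assumed schema name "User"):
+	//
+	//	User:
+	//	  type: object
+	//	  properties:
+	//	    name:
+	//	      type: string
+	//	  additionalProperties: false   # or omit the keyword entirely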
+} +func (r *OwaspNoAdditionalPropertiesRule) Link() string { + return "https://github.com/speakeasy-api/openapi/blob/main/openapi/linter/README.md#owasp-no-additional-properties" +} +func (r *OwaspNoAdditionalPropertiesRule) DefaultSeverity() validation.Severity { + return validation.SeverityError +} +func (r *OwaspNoAdditionalPropertiesRule) Versions() []string { + return []string{"3.0", "3.1"} +} + +func (r *OwaspNoAdditionalPropertiesRule) Run(ctx context.Context, docInfo *linter.DocumentInfo[*openapi.OpenAPI], config *linter.RuleConfig) []error { + if docInfo == nil || docInfo.Document == nil || docInfo.Index == nil { + return nil + } + + var errs []error + + // Check all schemas in the document + for _, schemaNode := range docInfo.Index.GetAllSchemas() { + refSchema := schemaNode.Node + schema := refSchema.GetSchema() + if schema == nil { + continue + } + + // Check if type contains "object" + types := schema.GetType() + hasObjectType := false + for _, typ := range types { + if typ == "object" { + hasObjectType = true + break + } + } + + if !hasObjectType { + continue + } + + // Check additionalProperties + additionalProps := schema.GetAdditionalProperties() + if additionalProps == nil { + // Not set - this is OK + continue + } + + // additionalProperties can be either a boolean or a schema + // If it's a boolean, check if it's true (violation) + // If it's a schema, that's also a violation + isViolation := false + + if additionalProps.IsBool() { + // It's a boolean value + boolVal := additionalProps.GetBool() + if boolVal != nil && *boolVal { + // additionalProperties: true is a violation + isViolation = true + } + } else { + // It's a schema object - this is a violation + isViolation = true + } + + if isViolation { + if rootNode := refSchema.GetRootNode(); rootNode != nil { + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + RuleOwaspNoAdditionalProperties, + errors.New("additionalProperties should not be set to true or define a schema - set to false or omit it"), + rootNode, + )) + } + } + } + + return errs +} diff --git a/openapi/linter/rules/owasp_no_additional_properties_test.go b/openapi/linter/rules/owasp_no_additional_properties_test.go new file mode 100644 index 00000000..651ec773 --- /dev/null +++ b/openapi/linter/rules/owasp_no_additional_properties_test.go @@ -0,0 +1,223 @@ +package rules_test + +import ( + "strings" + "testing" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/openapi/linter/rules" + "github.com/speakeasy-api/openapi/references" + "github.com/speakeasy-api/openapi/validation" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestOwaspNoAdditionalPropertiesRule_ValidCases(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + }{ + { + name: "object without additionalProperties set", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + User: + type: object + properties: + name: + type: string + maxLength: 100 +paths: {} +`, + }, + { + name: "object with additionalProperties false", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + User: + type: object + properties: + name: + type: string + maxLength: 100 + additionalProperties: false +paths: {} +`, + }, + { + name: "non-object type with additionalProperties is ok", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: 
+ schemas: + Name: + type: string + maxLength: 100 +paths: {} +`, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.OwaspNoAdditionalPropertiesRule{} + config := &linter.RuleConfig{} + + // Build index for the rule + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + assert.Empty(t, errs) + }) + } +} + +func TestOwaspNoAdditionalPropertiesRule_Violations(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + expectedCount int + }{ + { + name: "object with additionalProperties true", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + User: + type: object + properties: + name: + type: string + maxLength: 100 + additionalProperties: true +paths: {} +`, + expectedCount: 1, + }, + { + name: "object with additionalProperties schema", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + User: + type: object + properties: + name: + type: string + maxLength: 100 + additionalProperties: + type: string + maxLength: 50 +paths: {} +`, + expectedCount: 1, + }, + { + name: "multiple objects with additionalProperties violations", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + User: + type: object + properties: + name: + type: string + maxLength: 100 + additionalProperties: true + Config: + type: object + properties: + setting: + type: string + maxLength: 50 + additionalProperties: + type: string + maxLength: 100 +paths: {} +`, + expectedCount: 2, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.OwaspNoAdditionalPropertiesRule{} + config := &linter.RuleConfig{} + + // Build index for the rule + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + + require.Len(t, errs, tt.expectedCount) + for _, err := range errs { + assert.Contains(t, err.Error(), "additionalProperties") + } + }) + } +} + +func TestOwaspNoAdditionalPropertiesRule_RuleMetadata(t *testing.T) { + t.Parallel() + + rule := &rules.OwaspNoAdditionalPropertiesRule{} + + assert.Equal(t, "owasp-no-additional-properties", rule.ID()) + assert.Equal(t, rules.CategorySecurity, rule.Category()) + assert.NotEmpty(t, rule.Description()) + assert.NotEmpty(t, rule.Link()) + assert.Equal(t, validation.SeverityError, rule.DefaultSeverity()) + assert.Equal(t, []string{"3.0", "3.1"}, rule.Versions()) +} diff --git a/openapi/linter/rules/owasp_no_api_keys_in_url.go b/openapi/linter/rules/owasp_no_api_keys_in_url.go new file mode 100644 index 00000000..1c170ba6 --- /dev/null +++ b/openapi/linter/rules/owasp_no_api_keys_in_url.go @@ -0,0 +1,96 @@ +package rules + +import ( + "context" + "fmt" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/validation" + 
"github.com/speakeasy-api/openapi/yml" +) + +//nolint:gosec +const RuleOwaspNoAPIKeysInURL = "owasp-no-api-keys-in-url" + +type OwaspNoAPIKeysInURLRule struct{} + +func (r *OwaspNoAPIKeysInURLRule) ID() string { return RuleOwaspNoAPIKeysInURL } +func (r *OwaspNoAPIKeysInURLRule) Category() string { return CategorySecurity } +func (r *OwaspNoAPIKeysInURLRule) Description() string { + return "API keys must not be passed via URL parameters (query or path) as they are logged and cached. URL-based API keys appear in browser history, server logs, and proxy caches, creating security exposure." +} +func (r *OwaspNoAPIKeysInURLRule) Summary() string { + return "API keys must not be passed via URL parameters." +} +func (r *OwaspNoAPIKeysInURLRule) HowToFix() string { + return "Move API keys to header-based authentication instead of query or path parameters." +} +func (r *OwaspNoAPIKeysInURLRule) Link() string { + return "https://github.com/speakeasy-api/openapi/blob/main/openapi/linter/README.md#owasp-no-api-keys-in-url" +} +func (r *OwaspNoAPIKeysInURLRule) DefaultSeverity() validation.Severity { + return validation.SeverityError +} +func (r *OwaspNoAPIKeysInURLRule) Versions() []string { + return nil // Applies to all OpenAPI versions +} + +func (r *OwaspNoAPIKeysInURLRule) Run(ctx context.Context, docInfo *linter.DocumentInfo[*openapi.OpenAPI], config *linter.RuleConfig) []error { + if docInfo == nil || docInfo.Document == nil { + return nil + } + + doc := docInfo.Document + components := doc.GetComponents() + if components == nil { + return nil + } + + securitySchemes := components.GetSecuritySchemes() + if securitySchemes == nil { + return nil + } + + var errs []error + + // Iterate through all security schemes + for name, scheme := range securitySchemes.All() { + if scheme == nil { + continue + } + + // Get the security scheme object + secScheme := scheme.GetObject() + if secScheme == nil { + continue + } + + // Check if this is an API key type security scheme + schemeType := secScheme.GetType() + if schemeType != "apiKey" { + continue + } + + // Get the location where the API key is passed + location := secScheme.GetIn() + + // Check if it's in query or path (both are insecure for API keys) + if location == "query" || location == "path" { + // Get the root node to find the "in" key + if rootNode := secScheme.GetRootNode(); rootNode != nil { + _, inValueNode, found := yml.GetMapElementNodes(ctx, rootNode, "in") + if found && inValueNode != nil { + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + RuleOwaspNoAPIKeysInURL, + fmt.Errorf("security scheme `%s` passes API key via URL `%s` parameter - use header instead for security", name, location), + inValueNode, + )) + } + } + } + } + + return errs +} diff --git a/openapi/linter/rules/owasp_no_api_keys_in_url_test.go b/openapi/linter/rules/owasp_no_api_keys_in_url_test.go new file mode 100644 index 00000000..ffcc8b8d --- /dev/null +++ b/openapi/linter/rules/owasp_no_api_keys_in_url_test.go @@ -0,0 +1,242 @@ +package rules_test + +import ( + "strings" + "testing" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/openapi/linter/rules" + "github.com/speakeasy-api/openapi/references" + "github.com/speakeasy-api/openapi/validation" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestOwaspNoAPIKeysInURLRule_ValidCases(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + 
yaml string + }{ + { + name: "api key in header", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + securitySchemes: + apiKey: + type: apiKey + in: header + name: X-API-Key +paths: {} +`, + }, + { + name: "api key in cookie", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + securitySchemes: + apiKey: + type: apiKey + in: cookie + name: api_key +paths: {} +`, + }, + { + name: "bearer authentication", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + securitySchemes: + bearerAuth: + type: http + scheme: bearer + bearerFormat: JWT +paths: {} +`, + }, + { + name: "no security schemes", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: {} +`, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.OwaspNoAPIKeysInURLRule{} + config := &linter.RuleConfig{} + + // Build index for the rule + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + assert.Empty(t, errs) + }) + } +} + +func TestOwaspNoAPIKeysInURLRule_Violations(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + expectedCount int + expectedText string + }{ + { + name: "api key in query parameter", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + securitySchemes: + apiKey: + type: apiKey + in: query + name: api_key +paths: {} +`, + expectedCount: 1, + expectedText: "query", + }, + { + name: "api key in path parameter", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + securitySchemes: + apiKey: + type: apiKey + in: path + name: api_key +paths: {} +`, + expectedCount: 1, + expectedText: "path", + }, + { + name: "multiple api keys in url", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + securitySchemes: + apiKeyQuery: + type: apiKey + in: query + name: api_key + apiKeyPath: + type: apiKey + in: path + name: key +paths: {} +`, + expectedCount: 2, + expectedText: "", + }, + { + name: "mixed secure and insecure api keys", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + securitySchemes: + apiKeyHeader: + type: apiKey + in: header + name: X-API-Key + apiKeyQuery: + type: apiKey + in: query + name: api_key +paths: {} +`, + expectedCount: 1, + expectedText: "query", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.OwaspNoAPIKeysInURLRule{} + config := &linter.RuleConfig{} + + // Build index for the rule + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + + require.Len(t, errs, tt.expectedCount) + for _, err := range errs { + assert.Contains(t, err.Error(), "API key via URL") + if tt.expectedText != "" { + assert.Contains(t, err.Error(), tt.expectedText) + } + } + }) + } +} + +func TestOwaspNoAPIKeysInURLRule_RuleMetadata(t *testing.T) { + 
t.Parallel() + + rule := &rules.OwaspNoAPIKeysInURLRule{} + + assert.Equal(t, "owasp-no-api-keys-in-url", rule.ID()) + assert.Equal(t, rules.CategorySecurity, rule.Category()) + assert.NotEmpty(t, rule.Description()) + assert.NotEmpty(t, rule.Link()) + assert.Equal(t, validation.SeverityError, rule.DefaultSeverity()) + assert.Nil(t, rule.Versions()) +} diff --git a/openapi/linter/rules/owasp_no_credentials_in_url.go b/openapi/linter/rules/owasp_no_credentials_in_url.go new file mode 100644 index 00000000..3c12f071 --- /dev/null +++ b/openapi/linter/rules/owasp_no_credentials_in_url.go @@ -0,0 +1,95 @@ +package rules + +import ( + "context" + "fmt" + "regexp" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/validation" + "github.com/speakeasy-api/openapi/yml" +) + +//nolint:gosec +const RuleOwaspNoCredentialsInURL = "owasp-no-credentials-in-url" + +// credentialPattern matches parameter names that look like credentials +// Matches: client_secret, clientsecret, token, access_token, accesstoken, refresh_token, refreshtoken, +// id_token, idtoken, password, secret, api-key, apikey (case insensitive) +var credentialPattern = regexp.MustCompile(`(?i)^.*(client_?secret|token|access_?token|refresh_?token|id_?token|password|secret|api-?key).*$`) + +type OwaspNoCredentialsInURLRule struct{} + +func (r *OwaspNoCredentialsInURLRule) ID() string { return RuleOwaspNoCredentialsInURL } +func (r *OwaspNoCredentialsInURLRule) Category() string { return CategorySecurity } +func (r *OwaspNoCredentialsInURLRule) Description() string { + return "URL parameters must not contain credentials like API keys, passwords, or secrets. Credentials in URLs are logged by servers, proxies, and browsers, creating significant security risks." +} +func (r *OwaspNoCredentialsInURLRule) Summary() string { + return "URL parameters must not contain credentials." +} +func (r *OwaspNoCredentialsInURLRule) HowToFix() string { + return "Remove credentials from URL parameters; use headers or request bodies instead." 
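+	// Illustrative, non-normative examples of how credentialPattern (declared
+	// above) treats query/path parameter names:
+	//
+	//	access_token, refresh-token, client_secret, apikey, API-KEY -> flagged
+	//	userId, filter, limit, offset                                -> not flagged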
+} +func (r *OwaspNoCredentialsInURLRule) Link() string { + return "https://github.com/speakeasy-api/openapi/blob/main/openapi/linter/README.md#owasp-no-credentials-in-url" +} +func (r *OwaspNoCredentialsInURLRule) DefaultSeverity() validation.Severity { + return validation.SeverityError +} +func (r *OwaspNoCredentialsInURLRule) Versions() []string { + return nil // Applies to all OpenAPI versions +} + +func (r *OwaspNoCredentialsInURLRule) Run(ctx context.Context, docInfo *linter.DocumentInfo[*openapi.OpenAPI], config *linter.RuleConfig) []error { + if docInfo == nil || docInfo.Document == nil || docInfo.Index == nil { + return nil + } + + var errs []error + + // Helper function to check a parameter + checkParameter := func(paramNode *openapi.IndexNode[*openapi.ReferencedParameter]) { + param := paramNode.Node + if param == nil { + return + } + + // Get the parameter object + paramObj := param.GetObject() + if paramObj == nil { + return + } + + // Only check query and path parameters (header and cookie are OK) + location := paramObj.GetIn() + if location != "query" && location != "path" { + return + } + + // Check if the parameter name matches the credential pattern + paramName := paramObj.GetName() + if credentialPattern.MatchString(paramName) { + // Get the root node to find the name key + if rootNode := paramObj.GetRootNode(); rootNode != nil { + _, nameValueNode, found := yml.GetMapElementNodes(ctx, rootNode, "name") + if found && nameValueNode != nil { + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + RuleOwaspNoCredentialsInURL, + fmt.Errorf("URL parameter `%s` appears to contain credentials - avoid passing sensitive data in URLs", paramName), + nameValueNode, + )) + } + } + } + } + + // Check all parameters (inline, component, external, and references) + for _, paramNode := range docInfo.Index.GetAllParameters() { + checkParameter(paramNode) + } + + return errs +} diff --git a/openapi/linter/rules/owasp_no_credentials_in_url_test.go b/openapi/linter/rules/owasp_no_credentials_in_url_test.go new file mode 100644 index 00000000..4251a149 --- /dev/null +++ b/openapi/linter/rules/owasp_no_credentials_in_url_test.go @@ -0,0 +1,372 @@ +package rules_test + +import ( + "strings" + "testing" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/openapi/linter/rules" + "github.com/speakeasy-api/openapi/references" + "github.com/speakeasy-api/openapi/validation" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestOwaspNoCredentialsInURLRule_ValidCases(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + }{ + { + name: "safe query parameter names", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + parameters: + - name: userId + in: query + schema: + type: string + - name: filter + in: query + schema: + type: string + responses: + '200': + description: Success +`, + }, + { + name: "credentials in header parameters are allowed", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + parameters: + - name: api-key + in: header + schema: + type: string + - name: password + in: header + schema: + type: string + responses: + '200': + description: Success +`, + }, + { + name: "credentials in cookie parameters are allowed", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + parameters: + - name: 
token + in: cookie + schema: + type: string + responses: + '200': + description: Success +`, + }, + { + name: "no parameters", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '200': + description: Success +`, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.OwaspNoCredentialsInURLRule{} + config := &linter.RuleConfig{} + + // Build index for the rule + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + assert.Empty(t, errs) + }) + } +} + +func TestOwaspNoCredentialsInURLRule_Violations(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + expectedCount int + expectedParam string + }{ + { + name: "token in query parameter", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + parameters: + - name: token + in: query + schema: + type: string + responses: + '200': + description: Success +`, + expectedCount: 1, + expectedParam: "token", + }, + { + name: "api-key in query parameter", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + parameters: + - name: api-key + in: query + schema: + type: string + responses: + '200': + description: Success +`, + expectedCount: 1, + expectedParam: "api-key", + }, + { + name: "password in path parameter", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /auth/{password}: + get: + parameters: + - name: password + in: path + required: true + schema: + type: string + responses: + '200': + description: Success +`, + expectedCount: 1, + expectedParam: "password", + }, + { + name: "client_secret in query parameter", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /oauth: + get: + parameters: + - name: client_secret + in: query + schema: + type: string + responses: + '200': + description: Success +`, + expectedCount: 1, + expectedParam: "client_secret", + }, + { + name: "access_token in query parameter", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /api: + get: + parameters: + - name: access_token + in: query + schema: + type: string + responses: + '200': + description: Success +`, + expectedCount: 1, + expectedParam: "access_token", + }, + { + name: "multiple credential parameters", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /api: + get: + parameters: + - name: token + in: query + schema: + type: string + - name: api-key + in: query + schema: + type: string + - name: userId + in: query + schema: + type: string + responses: + '200': + description: Success +`, + expectedCount: 2, + expectedParam: "", + }, + { + name: "case insensitive match - TOKEN", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /api: + get: + parameters: + - name: TOKEN + in: query + schema: + type: string + responses: + '200': + description: Success +`, + expectedCount: 1, + expectedParam: "TOKEN", + }, + { + name: "apikey without dash", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /api: + get: + parameters: + - name: apikey + in: query + schema: + type: string + responses: + '200': + 
description: Success +`, + expectedCount: 1, + expectedParam: "apikey", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.OwaspNoCredentialsInURLRule{} + config := &linter.RuleConfig{} + + // Build index for the rule + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + + require.Len(t, errs, tt.expectedCount) + for _, err := range errs { + assert.Contains(t, err.Error(), "credentials") + if tt.expectedParam != "" { + assert.Contains(t, err.Error(), tt.expectedParam) + } + } + }) + } +} + +func TestOwaspNoCredentialsInURLRule_RuleMetadata(t *testing.T) { + t.Parallel() + + rule := &rules.OwaspNoCredentialsInURLRule{} + + assert.Equal(t, "owasp-no-credentials-in-url", rule.ID()) + assert.Equal(t, rules.CategorySecurity, rule.Category()) + assert.NotEmpty(t, rule.Description()) + assert.NotEmpty(t, rule.Link()) + assert.Equal(t, validation.SeverityError, rule.DefaultSeverity()) + assert.Nil(t, rule.Versions()) +} diff --git a/openapi/linter/rules/owasp_no_http_basic.go b/openapi/linter/rules/owasp_no_http_basic.go new file mode 100644 index 00000000..680c4ebb --- /dev/null +++ b/openapi/linter/rules/owasp_no_http_basic.go @@ -0,0 +1,97 @@ +package rules + +import ( + "context" + "fmt" + "strings" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/validation" + "github.com/speakeasy-api/openapi/yml" +) + +const RuleOwaspNoHttpBasic = "owasp-no-http-basic" + +type OwaspNoHttpBasicRule struct{} + +func (r *OwaspNoHttpBasicRule) ID() string { return RuleOwaspNoHttpBasic } +func (r *OwaspNoHttpBasicRule) Category() string { return CategorySecurity } +func (r *OwaspNoHttpBasicRule) Description() string { + return "Security schemes must not use `HTTP Basic` authentication without additional security layers. `HTTP Basic` sends credentials in easily-decoded base64 encoding, making it vulnerable to interception without `HTTPS`." +} +func (r *OwaspNoHttpBasicRule) Summary() string { + return "Security schemes must not use `HTTP Basic` authentication." +} +func (r *OwaspNoHttpBasicRule) HowToFix() string { + return "Replace `HTTP Basic` schemes with more secure authentication (e.g., `OAuth 2.0` or `bearer` tokens)." 
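+	// Illustrative, non-normative fix (assumed scheme name "auth"): replace
+	//
+	//	auth:
+	//	  type: http
+	//	  scheme: basic
+	//
+	// with a token-based scheme such as
+	//
+	//	auth:
+	//	  type: http
+	//	  scheme: bearer
+	//	  bearerFormat: JWT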
+} +func (r *OwaspNoHttpBasicRule) Link() string { + return "https://github.com/speakeasy-api/openapi/blob/main/openapi/linter/README.md#owasp-no-http-basic" +} +func (r *OwaspNoHttpBasicRule) DefaultSeverity() validation.Severity { + return validation.SeverityError +} +func (r *OwaspNoHttpBasicRule) Versions() []string { + return nil // Applies to all OpenAPI versions +} + +func (r *OwaspNoHttpBasicRule) Run(ctx context.Context, docInfo *linter.DocumentInfo[*openapi.OpenAPI], config *linter.RuleConfig) []error { + if docInfo == nil || docInfo.Document == nil { + return nil + } + + doc := docInfo.Document + components := doc.GetComponents() + if components == nil { + return nil + } + + securitySchemes := components.GetSecuritySchemes() + if securitySchemes == nil { + return nil + } + + var errs []error + + // Iterate through all security schemes + for name, scheme := range securitySchemes.All() { + if scheme == nil { + continue + } + + // Get the security scheme object + secScheme := scheme.GetObject() + if secScheme == nil { + continue + } + + // Check if this is an HTTP type security scheme + schemeType := secScheme.GetType() + if schemeType != "http" { + continue + } + + // Get the scheme value (basic, bearer, etc.) + httpScheme := secScheme.GetScheme() + httpSchemeLower := strings.ToLower(httpScheme) + + // Check if it's basic or negotiate (both insecure) + if httpSchemeLower == "basic" || httpSchemeLower == "negotiate" { + // Get the root node to find the scheme key + if rootNode := secScheme.GetRootNode(); rootNode != nil { + _, schemeValueNode, found := yml.GetMapElementNodes(ctx, rootNode, "scheme") + if found && schemeValueNode != nil { + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + RuleOwaspNoHttpBasic, + fmt.Errorf("security scheme `%s` uses `HTTP` `%s` authentication, which is insecure - use `OAuth 2.0` or another secure method", name, httpScheme), + schemeValueNode, + )) + } + } + } + } + + return errs +} diff --git a/openapi/linter/rules/owasp_no_http_basic_test.go b/openapi/linter/rules/owasp_no_http_basic_test.go new file mode 100644 index 00000000..fbfa8d7a --- /dev/null +++ b/openapi/linter/rules/owasp_no_http_basic_test.go @@ -0,0 +1,236 @@ +package rules_test + +import ( + "strings" + "testing" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/openapi/linter/rules" + "github.com/speakeasy-api/openapi/references" + "github.com/speakeasy-api/openapi/validation" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestOwaspNoHttpBasicRule_ValidCases(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + }{ + { + name: "bearer authentication", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + securitySchemes: + bearerAuth: + type: http + scheme: bearer + bearerFormat: JWT +paths: {} +`, + }, + { + name: "oauth2 authentication", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + securitySchemes: + oauth: + type: oauth2 + flows: + implicit: + authorizationUrl: https://example.com/oauth/authorize + scopes: + read: Read access +paths: {} +`, + }, + { + name: "api key authentication", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + securitySchemes: + apiKey: + type: apiKey + in: header + name: X-API-Key +paths: {} +`, + }, + { + name: "no security schemes", + yaml: ` +openapi: 3.1.0 +info: + title: 
Test + version: 1.0.0 +paths: {} +`, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.OwaspNoHttpBasicRule{} + config := &linter.RuleConfig{} + + // Build index for the rule + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + assert.Empty(t, errs) + }) + } +} + +func TestOwaspNoHttpBasicRule_Violations(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + expectedCount int + expectedText string + }{ + { + name: "basic authentication", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + securitySchemes: + basicAuth: + type: http + scheme: basic +paths: {} +`, + expectedCount: 1, + expectedText: "basic", + }, + { + name: "negotiate authentication", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + securitySchemes: + negotiateAuth: + type: http + scheme: negotiate +paths: {} +`, + expectedCount: 1, + expectedText: "negotiate", + }, + { + name: "multiple insecure schemes", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + securitySchemes: + basicAuth: + type: http + scheme: basic + negotiateAuth: + type: http + scheme: negotiate +paths: {} +`, + expectedCount: 2, + expectedText: "", + }, + { + name: "basic with uppercase", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + securitySchemes: + basicAuth: + type: http + scheme: Basic +paths: {} +`, + expectedCount: 1, + expectedText: "Basic", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.OwaspNoHttpBasicRule{} + config := &linter.RuleConfig{} + + // Build index for the rule + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + + require.Len(t, errs, tt.expectedCount) + for _, err := range errs { + assert.Contains(t, err.Error(), "insecure") + if tt.expectedText != "" { + assert.Contains(t, err.Error(), tt.expectedText) + } + } + }) + } +} + +func TestOwaspNoHttpBasicRule_RuleMetadata(t *testing.T) { + t.Parallel() + + rule := &rules.OwaspNoHttpBasicRule{} + + assert.Equal(t, "owasp-no-http-basic", rule.ID()) + assert.Equal(t, rules.CategorySecurity, rule.Category()) + assert.NotEmpty(t, rule.Description()) + assert.NotEmpty(t, rule.Link()) + assert.Equal(t, validation.SeverityError, rule.DefaultSeverity()) + assert.Nil(t, rule.Versions()) +} diff --git a/openapi/linter/rules/owasp_no_numeric_ids.go b/openapi/linter/rules/owasp_no_numeric_ids.go new file mode 100644 index 00000000..6f562d47 --- /dev/null +++ b/openapi/linter/rules/owasp_no_numeric_ids.go @@ -0,0 +1,108 @@ +package rules + +import ( + "context" + "fmt" + "strings" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/validation" +) + +const RuleOwaspNoNumericIDs = "owasp-no-numeric-ids" + +type 
OwaspNoNumericIDsRule struct{} + +func (r *OwaspNoNumericIDsRule) ID() string { + return RuleOwaspNoNumericIDs +} +func (r *OwaspNoNumericIDsRule) Category() string { + return CategorySecurity +} +func (r *OwaspNoNumericIDsRule) Description() string { + return "Resource identifiers must use random values like UUIDs instead of sequential numeric IDs. Sequential IDs enable enumeration attacks where attackers can guess valid IDs and access unauthorized resources." +} +func (r *OwaspNoNumericIDsRule) Summary() string { + return "Resource identifiers should not use sequential numeric IDs." +} +func (r *OwaspNoNumericIDsRule) HowToFix() string { + return "Use non-sequential identifiers (e.g., UUIDs) for ID parameters." +} +func (r *OwaspNoNumericIDsRule) Link() string { + return "https://github.com/speakeasy-api/openapi/blob/main/openapi/linter/README.md#owasp-no-numeric-ids" +} +func (r *OwaspNoNumericIDsRule) DefaultSeverity() validation.Severity { + return validation.SeverityError +} +func (r *OwaspNoNumericIDsRule) Versions() []string { + return []string{"3.0", "3.1"} // OAS3 only +} + +// isIDParameter checks if a parameter name is an ID field +func isIDParameter(name string) bool { + lowerName := strings.ToLower(name) + return lowerName == "id" || + strings.HasSuffix(lowerName, "_id") || + strings.HasSuffix(lowerName, "-id") || + strings.HasSuffix(lowerName, "id") +} + +func (r *OwaspNoNumericIDsRule) Run(ctx context.Context, docInfo *linter.DocumentInfo[*openapi.OpenAPI], config *linter.RuleConfig) []error { + if docInfo == nil || docInfo.Document == nil || docInfo.Index == nil { + return nil + } + + var errs []error + + // Check all parameters (inline, component, external, and references) + for _, paramNode := range docInfo.Index.GetAllParameters() { + param := paramNode.Node + if param == nil { + continue + } + + paramObj := param.GetObject() + if paramObj == nil { + continue + } + + name := paramObj.GetName() + if !isIDParameter(name) { + continue + } + + // Check if schema type is integer + jsonSchema := paramObj.GetSchema() + if jsonSchema == nil { + continue + } + + schema := jsonSchema.GetSchema() + if schema == nil { + continue + } + + types := schema.GetType() + if len(types) == 0 { + continue + } + + // Check if type contains "integer" + for _, typ := range types { + if typ == "integer" { + if rootNode := jsonSchema.GetRootNode(); rootNode != nil { + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + RuleOwaspNoNumericIDs, + fmt.Errorf("parameter `%s` uses integer type for ID - use random IDs like UUIDs instead of numeric IDs", name), + rootNode, + )) + } + break + } + } + } + + return errs +} diff --git a/openapi/linter/rules/owasp_no_numeric_ids_test.go b/openapi/linter/rules/owasp_no_numeric_ids_test.go new file mode 100644 index 00000000..efb8a7a7 --- /dev/null +++ b/openapi/linter/rules/owasp_no_numeric_ids_test.go @@ -0,0 +1,430 @@ +package rules_test + +import ( + "strings" + "testing" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/openapi/linter/rules" + "github.com/speakeasy-api/openapi/references" + "github.com/speakeasy-api/openapi/validation" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestOwaspNoNumericIDsRule_ValidCases(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + }{ + { + name: "id parameter with string type", + yaml: ` +openapi: 3.1.0 +info: + title: Test 
+ version: 1.0.0 +paths: + /users/{id}: + get: + parameters: + - name: id + in: path + schema: + type: string + format: uuid + responses: + '200': + description: Success +`, + }, + { + name: "user_id parameter with string type", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users/{user_id}: + get: + parameters: + - name: user_id + in: path + schema: + type: string + responses: + '200': + description: Success +`, + }, + { + name: "non-id parameter with integer type is ok", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + parameters: + - name: limit + in: query + schema: + type: integer + - name: offset + in: query + schema: + type: integer + responses: + '200': + description: Success +`, + }, + { + name: "referenced id parameter with string type", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + parameters: + UserId: + name: user_id + in: path + schema: + type: string + format: uuid +paths: + /users/{user_id}: + get: + parameters: + - $ref: '#/components/parameters/UserId' + responses: + '200': + description: Success +`, + }, + { + name: "id with object type", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users/{id}: + get: + parameters: + - name: id + in: path + schema: + type: object + responses: + '200': + description: Success +`, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.OwaspNoNumericIDsRule{} + config := &linter.RuleConfig{} + + // Build index for the rule + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + assert.Empty(t, errs) + }) + } +} + +func TestOwaspNoNumericIDsRule_Violations(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + expectedCount int + expectedText string + }{ + { + name: "id parameter with integer type", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users/{id}: + get: + parameters: + - name: id + in: path + schema: + type: integer + responses: + '200': + description: Success +`, + expectedCount: 1, + expectedText: "id", + }, + { + name: "user_id parameter with integer type", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users/{user_id}: + get: + parameters: + - name: user_id + in: path + schema: + type: integer + responses: + '200': + description: Success +`, + expectedCount: 1, + expectedText: "user_id", + }, + { + name: "post-id parameter with integer type", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /posts/{post-id}: + get: + parameters: + - name: post-id + in: path + schema: + type: integer + responses: + '200': + description: Success +`, + expectedCount: 1, + expectedText: "post-id", + }, + { + name: "multiple id parameters with integer type", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users/{user_id}/posts/{post_id}: + get: + parameters: + - name: user_id + in: path + schema: + type: integer + - name: post_id + in: path + schema: + type: integer + responses: + '200': + description: Success +`, + expectedCount: 2, + expectedText: "", + }, + { + name: "component parameter id with integer type", + 
yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + parameters: + UserId: + name: id + in: path + schema: + type: integer +paths: + /users/{id}: + get: + parameters: + - $ref: '#/components/parameters/UserId' + responses: + '200': + description: Success +`, + expectedCount: 1, + expectedText: "id", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.OwaspNoNumericIDsRule{} + config := &linter.RuleConfig{} + + // Build index for the rule + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + + require.Len(t, errs, tt.expectedCount) + for _, err := range errs { + assert.Contains(t, err.Error(), "integer type for ID") + assert.Contains(t, err.Error(), "UUID") + if tt.expectedText != "" { + assert.Contains(t, err.Error(), tt.expectedText) + } + } + }) + } +} + +func TestOwaspNoNumericIDsRule_EdgeCases(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + }{ + { + name: "productId ending with lowercase id", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /products/{productid}: + get: + parameters: + - name: productid + in: path + schema: + type: integer + responses: + '200': + description: Success +`, + }, + { + name: "parameter with no schema", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users/{id}: + get: + parameters: + - name: id + in: path + responses: + '200': + description: Success +`, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.OwaspNoNumericIDsRule{} + config := &linter.RuleConfig{} + + // Build index for the rule + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + // Should not panic + errs := rule.Run(ctx, docInfo, config) + // productid ends with "id" so it should trigger the rule + if strings.Contains(tt.name, "productId") { + assert.Len(t, errs, 1) + assert.Contains(t, errs[0].Error(), "productid") + } else { + // parameter with no schema should not error + assert.Empty(t, errs) + } + }) + } +} + +func TestOwaspNoNumericIDsRule_NilInputs(t *testing.T) { + t.Parallel() + + rule := &rules.OwaspNoNumericIDsRule{} + config := &linter.RuleConfig{} + ctx := t.Context() + + // Test with nil docInfo + errs := rule.Run(ctx, nil, config) + assert.Empty(t, errs) + + // Test with nil document + var nilDoc *openapi.OpenAPI + errs = rule.Run(ctx, linter.NewDocumentInfoWithIndex(nilDoc, "test.yaml", nil), config) + assert.Empty(t, errs) +} + +func TestOwaspNoNumericIDsRule_RuleMetadata(t *testing.T) { + t.Parallel() + + rule := &rules.OwaspNoNumericIDsRule{} + + assert.Equal(t, "owasp-no-numeric-ids", rule.ID()) + assert.Equal(t, rules.CategorySecurity, rule.Category()) + assert.NotEmpty(t, rule.Description()) + assert.NotEmpty(t, rule.Link()) + assert.Equal(t, validation.SeverityError, rule.DefaultSeverity()) + assert.Equal(t, []string{"3.0", "3.1"}, rule.Versions()) 
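+	// Illustrative, non-normative note: with the suffix-based matching in
+	// isIDParameter, names such as "id", "user_id", "post-id" and "productid"
+	// all count as identifiers, so each needs a non-integer schema (for
+	// example `type: string` with `format: uuid`) to satisfy this rule.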
+} diff --git a/openapi/linter/rules/owasp_protection_global_safe.go b/openapi/linter/rules/owasp_protection_global_safe.go new file mode 100644 index 00000000..64faeac3 --- /dev/null +++ b/openapi/linter/rules/owasp_protection_global_safe.go @@ -0,0 +1,115 @@ +package rules + +import ( + "context" + "fmt" + "strings" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/validation" + "github.com/speakeasy-api/openapi/yml" +) + +const RuleOwaspProtectionGlobalSafe = "owasp-protection-global-safe" + +// Safe HTTP methods that don't modify state +var safeMethods = map[string]bool{ + "get": true, + "head": true, +} + +type OwaspProtectionGlobalSafeRule struct{} + +func (r *OwaspProtectionGlobalSafeRule) ID() string { + return RuleOwaspProtectionGlobalSafe +} +func (r *OwaspProtectionGlobalSafeRule) Category() string { + return CategorySecurity +} +func (r *OwaspProtectionGlobalSafeRule) Description() string { + return "Safe operations (GET, HEAD) should be protected by security schemes or explicitly marked as public. Unprotected read operations may expose sensitive data to unauthorized users." +} +func (r *OwaspProtectionGlobalSafeRule) Summary() string { + return "Safe operations should be protected or explicitly marked public." +} +func (r *OwaspProtectionGlobalSafeRule) HowToFix() string { + return "Add global security requirements or set operation-level security (empty array for public endpoints)." +} +func (r *OwaspProtectionGlobalSafeRule) Link() string { + return "https://github.com/speakeasy-api/openapi/blob/main/openapi/linter/README.md#owasp-protection-global-safe" +} +func (r *OwaspProtectionGlobalSafeRule) DefaultSeverity() validation.Severity { + return validation.SeverityHint +} +func (r *OwaspProtectionGlobalSafeRule) Versions() []string { + return []string{"3.0", "3.1"} // OAS3 only +} + +func (r *OwaspProtectionGlobalSafeRule) Run(ctx context.Context, docInfo *linter.DocumentInfo[*openapi.OpenAPI], config *linter.RuleConfig) []error { + if docInfo == nil || docInfo.Document == nil || docInfo.Index == nil { + return nil + } + + var errs []error + + doc := docInfo.Document + + // Check if there's a global security requirement + globalSecurity := doc.GetSecurity() + hasGlobalSecurity := len(globalSecurity) > 0 + + // Check all operations + for _, opNode := range docInfo.Index.Operations { + op := opNode.Node + if op == nil { + continue + } + + // Get operation details + method := "" + path := "" + for _, loc := range opNode.Location { + switch openapi.GetParentType(loc) { + case "Paths": + if loc.ParentKey != nil { + path = *loc.ParentKey + } + case "PathItem": + if loc.ParentKey != nil { + method = *loc.ParentKey + } + } + } + + // Only check safe methods + if !safeMethods[strings.ToLower(method)] { + continue + } + + // Check if operation has explicit security field (even if empty array) + // security: [] means explicitly public and is allowed + rootNode := op.GetRootNode() + hasExplicitSecurity := false + if rootNode != nil { + _, _, found := yml.GetMapElementNodes(ctx, rootNode, "security") + hasExplicitSecurity = found + } + + // Operation is protected if: + // 1. Has global security, OR + // 2. 
Has explicit operation-level security field (even if empty) + if !hasGlobalSecurity && !hasExplicitSecurity { + if rootNode != nil { + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + RuleOwaspProtectionGlobalSafe, + fmt.Errorf("operation %s %s is not protected by any security scheme", method, path), + rootNode, + )) + } + } + } + + return errs +} diff --git a/openapi/linter/rules/owasp_protection_global_safe_test.go b/openapi/linter/rules/owasp_protection_global_safe_test.go new file mode 100644 index 00000000..2f3186f5 --- /dev/null +++ b/openapi/linter/rules/owasp_protection_global_safe_test.go @@ -0,0 +1,302 @@ +package rules_test + +import ( + "strings" + "testing" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/openapi/linter/rules" + "github.com/speakeasy-api/openapi/references" + "github.com/speakeasy-api/openapi/validation" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestOwaspProtectionGlobalSafeRule_ValidCases(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + }{ + { + name: "global security protects all operations", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +security: + - apiKey: [] +components: + securitySchemes: + apiKey: + type: apiKey + in: header + name: X-API-Key +paths: + /users: + get: + responses: + '200': + description: Success + head: + responses: + '200': + description: Success +`, + }, + { + name: "operation-level security", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + securitySchemes: + apiKey: + type: apiKey + in: header + name: X-API-Key +paths: + /users: + get: + security: + - apiKey: [] + responses: + '200': + description: Success +`, + }, + { + name: "empty security array allowed for safe methods", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /public: + get: + security: [] + responses: + '200': + description: Success +`, + }, + { + name: "unsafe methods not checked by this rule", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + post: + responses: + '201': + description: Created +`, + }, + { + name: "mixed global and operation security", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +security: + - apiKey: [] +components: + securitySchemes: + apiKey: + type: apiKey + in: header + name: X-API-Key + oauth: + type: oauth2 + flows: + implicit: + authorizationUrl: https://example.com/oauth + scopes: + read: Read access +paths: + /users: + get: + responses: + '200': + description: Success + head: + security: + - oauth: [read] + responses: + '200': + description: Success +`, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.OwaspProtectionGlobalSafeRule{} + config := &linter.RuleConfig{} + + // Build index for the rule + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + assert.Empty(t, errs) + }) + } +} + +func TestOwaspProtectionGlobalSafeRule_Violations(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + expectedCount 
int + expectedText string + }{ + { + name: "get without security", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '200': + description: Success +`, + expectedCount: 1, + expectedText: "get", + }, + { + name: "head without security", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + head: + responses: + '200': + description: Success +`, + expectedCount: 1, + expectedText: "head", + }, + { + name: "multiple safe operations without security", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '200': + description: Success + /posts: + get: + responses: + '200': + description: Success + head: + responses: + '200': + description: Success +`, + expectedCount: 3, + expectedText: "", + }, + { + name: "post is unsafe but get is not protected", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '200': + description: Success + post: + responses: + '201': + description: Created +`, + expectedCount: 1, + expectedText: "get", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.OwaspProtectionGlobalSafeRule{} + config := &linter.RuleConfig{} + + // Build index for the rule + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + + require.Len(t, errs, tt.expectedCount) + for _, err := range errs { + assert.Contains(t, err.Error(), "not protected") + if tt.expectedText != "" { + assert.Contains(t, err.Error(), tt.expectedText) + } + } + }) + } +} + +func TestOwaspProtectionGlobalSafeRule_RuleMetadata(t *testing.T) { + t.Parallel() + + rule := &rules.OwaspProtectionGlobalSafeRule{} + + assert.Equal(t, "owasp-protection-global-safe", rule.ID()) + assert.Equal(t, rules.CategorySecurity, rule.Category()) + assert.NotEmpty(t, rule.Description()) + assert.NotEmpty(t, rule.Link()) + assert.Equal(t, validation.SeverityHint, rule.DefaultSeverity()) + assert.Equal(t, []string{"3.0", "3.1"}, rule.Versions()) +} diff --git a/openapi/linter/rules/owasp_protection_global_unsafe.go b/openapi/linter/rules/owasp_protection_global_unsafe.go new file mode 100644 index 00000000..52b906fc --- /dev/null +++ b/openapi/linter/rules/owasp_protection_global_unsafe.go @@ -0,0 +1,115 @@ +package rules + +import ( + "context" + "fmt" + "strings" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/validation" + "github.com/speakeasy-api/openapi/yml" +) + +const RuleOwaspProtectionGlobalUnsafe = "owasp-protection-global-unsafe" + +// Unsafe HTTP methods that modify state and should be protected +var unsafeMethods = map[string]bool{ + "post": true, + "put": true, + "patch": true, + "delete": true, +} + +type OwaspProtectionGlobalUnsafeRule struct{} + +func (r *OwaspProtectionGlobalUnsafeRule) ID() string { return RuleOwaspProtectionGlobalUnsafe } +func (r *OwaspProtectionGlobalUnsafeRule) Category() string { + return CategorySecurity +} +func (r *OwaspProtectionGlobalUnsafeRule) Description() string { + return "Unsafe operations (POST, PUT, PATCH, DELETE) must be protected by security schemes to 
prevent unauthorized modifications. Write operations without authentication create serious security vulnerabilities allowing data tampering." +} +func (r *OwaspProtectionGlobalUnsafeRule) Summary() string { + return "Unsafe operations must be protected by security schemes." +} +func (r *OwaspProtectionGlobalUnsafeRule) HowToFix() string { + return "Define security requirements globally or per operation for POST/PUT/PATCH/DELETE endpoints." +} +func (r *OwaspProtectionGlobalUnsafeRule) Link() string { + return "https://github.com/speakeasy-api/openapi/blob/main/openapi/linter/README.md#owasp-protection-global-unsafe" +} +func (r *OwaspProtectionGlobalUnsafeRule) DefaultSeverity() validation.Severity { + return validation.SeverityError +} +func (r *OwaspProtectionGlobalUnsafeRule) Versions() []string { + return nil // Applies to all OpenAPI versions +} + +func (r *OwaspProtectionGlobalUnsafeRule) Run(ctx context.Context, docInfo *linter.DocumentInfo[*openapi.OpenAPI], config *linter.RuleConfig) []error { + if docInfo == nil || docInfo.Document == nil || docInfo.Index == nil { + return nil + } + + var errs []error + + doc := docInfo.Document + + // Check if there's a global security requirement + globalSecurity := doc.GetSecurity() + hasGlobalSecurity := len(globalSecurity) > 0 + + // Check all operations + for _, opNode := range docInfo.Index.Operations { + op := opNode.Node + if op == nil { + continue + } + + // Get operation details + method := "" + path := "" + for _, loc := range opNode.Location { + switch openapi.GetParentType(loc) { + case "Paths": + if loc.ParentKey != nil { + path = *loc.ParentKey + } + case "PathItem": + if loc.ParentKey != nil { + method = *loc.ParentKey + } + } + } + + // Only check unsafe methods + if !unsafeMethods[strings.ToLower(method)] { + continue + } + + // Check if operation has explicit security field (even if empty array) + // security: [] means explicitly public and is allowed + rootNode := op.GetRootNode() + hasExplicitSecurity := false + if rootNode != nil { + _, _, found := yml.GetMapElementNodes(ctx, rootNode, "security") + hasExplicitSecurity = found + } + + // Operation is protected if: + // 1. Has global security, OR + // 2. 
Has explicit operation-level security field (even if empty) + if !hasGlobalSecurity && !hasExplicitSecurity { + if rootNode != nil { + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + RuleOwaspProtectionGlobalUnsafe, + fmt.Errorf("operation %s %s is not protected by any security scheme", method, path), + rootNode, + )) + } + } + } + + return errs +} diff --git a/openapi/linter/rules/owasp_protection_global_unsafe_strict.go b/openapi/linter/rules/owasp_protection_global_unsafe_strict.go new file mode 100644 index 00000000..3570e96a --- /dev/null +++ b/openapi/linter/rules/owasp_protection_global_unsafe_strict.go @@ -0,0 +1,101 @@ +package rules + +import ( + "context" + "fmt" + "strings" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/validation" +) + +const RuleOwaspProtectionGlobalUnsafeStrict = "owasp-protection-global-unsafe-strict" + +type OwaspProtectionGlobalUnsafeStrictRule struct{} + +func (r *OwaspProtectionGlobalUnsafeStrictRule) ID() string { + return RuleOwaspProtectionGlobalUnsafeStrict +} +func (r *OwaspProtectionGlobalUnsafeStrictRule) Category() string { + return CategorySecurity +} +func (r *OwaspProtectionGlobalUnsafeStrictRule) Description() string { + return "Unsafe operations (POST, PUT, PATCH, DELETE) must be protected by non-empty security schemes without explicit opt-outs. Strict authentication requirements ensure write operations cannot bypass security even with empty security arrays." +} +func (r *OwaspProtectionGlobalUnsafeStrictRule) Summary() string { + return "Unsafe operations must have non-empty security schemes (no opt-outs)." +} +func (r *OwaspProtectionGlobalUnsafeStrictRule) HowToFix() string { + return "Define non-empty security requirements globally or per unsafe operation (no empty security arrays)." 
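+	// Illustrative only (the `apiKey` scheme name is a placeholder, not part of this rule):
+	//
+	//   security:
+	//     - apiKey: []
+	//   components:
+	//     securitySchemes:
+	//       apiKey:
+	//         type: apiKey
+	//         in: header
+	//         name: X-API-Key
+	//
+	// Unlike the non-strict variant, `security: []` on a POST/PUT/PATCH/DELETE
+	// operation does not satisfy this rule.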
+} +func (r *OwaspProtectionGlobalUnsafeStrictRule) Link() string { + return "https://github.com/speakeasy-api/openapi/blob/main/openapi/linter/README.md#owasp-protection-global-unsafe-strict" +} +func (r *OwaspProtectionGlobalUnsafeStrictRule) DefaultSeverity() validation.Severity { + return validation.SeverityHint +} +func (r *OwaspProtectionGlobalUnsafeStrictRule) Versions() []string { + return []string{"3.0", "3.1"} // OAS3 only +} + +func (r *OwaspProtectionGlobalUnsafeStrictRule) Run(ctx context.Context, docInfo *linter.DocumentInfo[*openapi.OpenAPI], config *linter.RuleConfig) []error { + if docInfo == nil || docInfo.Document == nil || docInfo.Index == nil { + return nil + } + + var errs []error + + doc := docInfo.Document + + // Check if there's a global security requirement with actual schemes + globalSecurity := doc.GetSecurity() + hasGlobalSecurity := len(globalSecurity) > 0 + + // Check all operations + for _, opNode := range docInfo.Index.Operations { + op := opNode.Node + if op == nil { + continue + } + + // Get operation details + method := "" + path := "" + for _, loc := range opNode.Location { + switch openapi.GetParentType(loc) { + case "Paths": + if loc.ParentKey != nil { + path = *loc.ParentKey + } + case "PathItem": + if loc.ParentKey != nil { + method = *loc.ParentKey + } + } + } + + // Only check unsafe methods + if !unsafeMethods[strings.ToLower(method)] { + continue + } + + // Check if operation has security with actual schemes + opSecurity := op.GetSecurity() + hasOpSecurity := len(opSecurity) > 0 + + // Strict mode: operation must have actual security schemes (no empty arrays allowed) + if !hasGlobalSecurity && !hasOpSecurity { + if rootNode := op.GetRootNode(); rootNode != nil { + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + RuleOwaspProtectionGlobalUnsafeStrict, + fmt.Errorf("operation %s %s must be protected by a security scheme (empty security array not allowed)", method, path), + rootNode, + )) + } + } + } + + return errs +} diff --git a/openapi/linter/rules/owasp_protection_global_unsafe_strict_test.go b/openapi/linter/rules/owasp_protection_global_unsafe_strict_test.go new file mode 100644 index 00000000..86615535 --- /dev/null +++ b/openapi/linter/rules/owasp_protection_global_unsafe_strict_test.go @@ -0,0 +1,307 @@ +package rules_test + +import ( + "strings" + "testing" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/openapi/linter/rules" + "github.com/speakeasy-api/openapi/references" + "github.com/speakeasy-api/openapi/validation" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestOwaspProtectionGlobalUnsafeStrictRule_ValidCases(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + }{ + { + name: "global security protects all operations", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +security: + - apiKey: [] +components: + securitySchemes: + apiKey: + type: apiKey + in: header + name: X-API-Key +paths: + /users: + post: + responses: + '201': + description: Created + delete: + responses: + '204': + description: Deleted +`, + }, + { + name: "operation-level security", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + securitySchemes: + apiKey: + type: apiKey + in: header + name: X-API-Key +paths: + /users: + post: + security: + - apiKey: [] + responses: + '201': + description: Created +`, + }, + { + name: 
"safe methods dont require security", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '200': + description: Success +`, + }, + { + name: "mixed global and operation security", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +security: + - apiKey: [] +components: + securitySchemes: + apiKey: + type: apiKey + in: header + name: X-API-Key + oauth: + type: oauth2 + flows: + implicit: + authorizationUrl: https://example.com/oauth + scopes: + write: Write access +paths: + /users: + post: + responses: + '201': + description: Created + put: + security: + - oauth: [write] + responses: + '200': + description: Updated +`, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.OwaspProtectionGlobalUnsafeStrictRule{} + config := &linter.RuleConfig{} + + // Build index for the rule + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + assert.Empty(t, errs) + }) + } +} + +func TestOwaspProtectionGlobalUnsafeStrictRule_Violations(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + expectedCount int + expectedText string + }{ + { + name: "post without security", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + post: + responses: + '201': + description: Created +`, + expectedCount: 1, + expectedText: "post", + }, + { + name: "empty security array not allowed in strict mode", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /public: + post: + security: [] + responses: + '201': + description: Created +`, + expectedCount: 1, + expectedText: "post", + }, + { + name: "delete without security", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users/{id}: + delete: + responses: + '204': + description: Deleted +`, + expectedCount: 1, + expectedText: "delete", + }, + { + name: "multiple unsafe operations without security", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + post: + responses: + '201': + description: Created + put: + responses: + '200': + description: Updated + patch: + responses: + '200': + description: Patched + delete: + responses: + '204': + description: Deleted +`, + expectedCount: 4, + expectedText: "", + }, + { + name: "get is safe but post is not protected", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '200': + description: Success + post: + responses: + '201': + description: Created +`, + expectedCount: 1, + expectedText: "post", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.OwaspProtectionGlobalUnsafeStrictRule{} + config := &linter.RuleConfig{} + + // Build index for the rule + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + + require.Len(t, errs, 
tt.expectedCount) + for _, err := range errs { + assert.Contains(t, err.Error(), "must be protected") + if tt.expectedText != "" { + assert.Contains(t, err.Error(), tt.expectedText) + } + } + }) + } +} + +func TestOwaspProtectionGlobalUnsafeStrictRule_RuleMetadata(t *testing.T) { + t.Parallel() + + rule := &rules.OwaspProtectionGlobalUnsafeStrictRule{} + + assert.Equal(t, "owasp-protection-global-unsafe-strict", rule.ID()) + assert.Equal(t, rules.CategorySecurity, rule.Category()) + assert.NotEmpty(t, rule.Description()) + assert.NotEmpty(t, rule.Link()) + assert.Equal(t, validation.SeverityHint, rule.DefaultSeverity()) + assert.Equal(t, []string{"3.0", "3.1"}, rule.Versions()) +} diff --git a/openapi/linter/rules/owasp_protection_global_unsafe_test.go b/openapi/linter/rules/owasp_protection_global_unsafe_test.go new file mode 100644 index 00000000..5e19f5d4 --- /dev/null +++ b/openapi/linter/rules/owasp_protection_global_unsafe_test.go @@ -0,0 +1,305 @@ +package rules_test + +import ( + "strings" + "testing" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/openapi/linter/rules" + "github.com/speakeasy-api/openapi/references" + "github.com/speakeasy-api/openapi/validation" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestOwaspProtectionGlobalUnsafeRule_ValidCases(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + }{ + { + name: "global security protects all operations", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +security: + - apiKey: [] +components: + securitySchemes: + apiKey: + type: apiKey + in: header + name: X-API-Key +paths: + /users: + post: + responses: + '201': + description: Created + delete: + responses: + '204': + description: Deleted +`, + }, + { + name: "operation-level security", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + securitySchemes: + apiKey: + type: apiKey + in: header + name: X-API-Key +paths: + /users: + post: + security: + - apiKey: [] + responses: + '201': + description: Created +`, + }, + { + name: "safe methods dont require security", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '200': + description: Success +`, + }, + { + name: "empty security array allowed", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /public: + post: + security: [] + responses: + '201': + description: Created +`, + }, + { + name: "mixed global and operation security", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +security: + - apiKey: [] +components: + securitySchemes: + apiKey: + type: apiKey + in: header + name: X-API-Key + oauth: + type: oauth2 + flows: + implicit: + authorizationUrl: https://example.com/oauth + scopes: + write: Write access +paths: + /users: + post: + responses: + '201': + description: Created + put: + security: + - oauth: [write] + responses: + '200': + description: Updated +`, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.OwaspProtectionGlobalUnsafeRule{} + config := &linter.RuleConfig{} + + // Build index for the rule + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := 
linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + assert.Empty(t, errs) + }) + } +} + +func TestOwaspProtectionGlobalUnsafeRule_Violations(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + expectedCount int + expectedText string + }{ + { + name: "post without security", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + post: + responses: + '201': + description: Created +`, + expectedCount: 1, + expectedText: "post", + }, + { + name: "delete without security", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users/{id}: + delete: + responses: + '204': + description: Deleted +`, + expectedCount: 1, + expectedText: "delete", + }, + { + name: "multiple unsafe operations without security", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + post: + responses: + '201': + description: Created + put: + responses: + '200': + description: Updated + patch: + responses: + '200': + description: Patched + delete: + responses: + '204': + description: Deleted +`, + expectedCount: 4, + expectedText: "", + }, + { + name: "get is safe but post is not protected", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '200': + description: Success + post: + responses: + '201': + description: Created +`, + expectedCount: 1, + expectedText: "post", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.OwaspProtectionGlobalUnsafeRule{} + config := &linter.RuleConfig{} + + // Build index for the rule + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + + require.Len(t, errs, tt.expectedCount) + for _, err := range errs { + assert.Contains(t, err.Error(), "not protected") + if tt.expectedText != "" { + assert.Contains(t, err.Error(), tt.expectedText) + } + } + }) + } +} + +func TestOwaspProtectionGlobalUnsafeRule_RuleMetadata(t *testing.T) { + t.Parallel() + + rule := &rules.OwaspProtectionGlobalUnsafeRule{} + + assert.Equal(t, "owasp-protection-global-unsafe", rule.ID()) + assert.Equal(t, rules.CategorySecurity, rule.Category()) + assert.NotEmpty(t, rule.Description()) + assert.NotEmpty(t, rule.Link()) + assert.Equal(t, validation.SeverityError, rule.DefaultSeverity()) + assert.Nil(t, rule.Versions()) +} diff --git a/openapi/linter/rules/owasp_rate_limit.go b/openapi/linter/rules/owasp_rate_limit.go new file mode 100644 index 00000000..6d1dfa61 --- /dev/null +++ b/openapi/linter/rules/owasp_rate_limit.go @@ -0,0 +1,137 @@ +package rules + +import ( + "context" + "fmt" + "strings" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/validation" +) + +const RuleOwaspRateLimit = "owasp-rate-limit" + +// Rate limiting headers to check for +var rateLimitHeaders = []string{ + "X-RateLimit-Limit", + "X-Rate-Limit-Limit", + "RateLimit-Limit", + "RateLimit-Reset", + "RateLimit", +} + +type OwaspRateLimitRule struct{} + +func (r *OwaspRateLimitRule) ID() string { + return RuleOwaspRateLimit +} +func (r *OwaspRateLimitRule) Category() string { + return 
CategorySecurity +} +func (r *OwaspRateLimitRule) Description() string { + return "2XX and 4XX responses must define rate limiting headers (X-RateLimit-Limit, X-RateLimit-Remaining) to prevent API overload. Rate limit headers help clients manage their usage and avoid hitting limits." +} +func (r *OwaspRateLimitRule) Summary() string { + return "2XX and 4XX responses should include rate limit headers." +} +func (r *OwaspRateLimitRule) HowToFix() string { + return "Add rate limit headers (e.g., X-RateLimit-Limit, X-RateLimit-Remaining) to 2XX and 4XX responses." +} +func (r *OwaspRateLimitRule) Link() string { + return "https://github.com/speakeasy-api/openapi/blob/main/openapi/linter/README.md#owasp-rate-limit" +} +func (r *OwaspRateLimitRule) DefaultSeverity() validation.Severity { + return validation.SeverityError +} +func (r *OwaspRateLimitRule) Versions() []string { + return []string{"3.0", "3.1"} // OAS3 only +} + +func (r *OwaspRateLimitRule) Run(ctx context.Context, docInfo *linter.DocumentInfo[*openapi.OpenAPI], config *linter.RuleConfig) []error { + if docInfo == nil || docInfo.Document == nil || docInfo.Index == nil { + return nil + } + + var errs []error + + // Check all operations + for _, opNode := range docInfo.Index.Operations { + op := opNode.Node + if op == nil { + continue + } + + // Get operation details for error messages + method := "" + path := "" + for _, loc := range opNode.Location { + switch openapi.GetParentType(loc) { + case "Paths": + if loc.ParentKey != nil { + path = *loc.ParentKey + } + case "PathItem": + if loc.ParentKey != nil { + method = *loc.ParentKey + } + } + } + + responses := op.GetResponses() + if responses == nil { + continue + } + + // Check all response codes + for statusCode, response := range responses.All() { + // Only check 2XX and 4XX responses + if !strings.HasPrefix(statusCode, "2") && !strings.HasPrefix(statusCode, "4") { + continue + } + + responseObj := response.GetObject() + if responseObj == nil { + continue + } + + headers := responseObj.GetHeaders() + if headers == nil || headers.Len() == 0 { + // No headers defined - report missing rate limit headers + if rootNode := response.GetRootNode(); rootNode != nil { + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + RuleOwaspRateLimit, + fmt.Errorf("response %s for operation %s %s is missing rate limiting headers", statusCode, method, path), + rootNode, + )) + } + continue + } + + // Check if any rate limit header is present + hasRateLimitHeader := false + for _, headerName := range rateLimitHeaders { + if _, exists := headers.Get(headerName); exists { + hasRateLimitHeader = true + break + } + } + + if !hasRateLimitHeader { + // No rate limit header found + if rootNode := responseObj.GetRootNode(); rootNode != nil { + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + RuleOwaspRateLimit, + fmt.Errorf("response %s for operation %s %s is missing rate limiting headers (expected one of: %s)", + statusCode, method, path, strings.Join(rateLimitHeaders, ", ")), + rootNode, + )) + } + } + } + } + + return errs +} diff --git a/openapi/linter/rules/owasp_rate_limit_retry_after.go b/openapi/linter/rules/owasp_rate_limit_retry_after.go new file mode 100644 index 00000000..c63b2627 --- /dev/null +++ b/openapi/linter/rules/owasp_rate_limit_retry_after.go @@ -0,0 +1,122 @@ +package rules + +import ( + "context" + "fmt" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + 
"github.com/speakeasy-api/openapi/validation" +) + +const RuleOwaspRateLimitRetryAfter = "owasp-rate-limit-retry-after" + +type OwaspRateLimitRetryAfterRule struct{} + +func (r *OwaspRateLimitRetryAfterRule) ID() string { + return RuleOwaspRateLimitRetryAfter +} +func (r *OwaspRateLimitRetryAfterRule) Category() string { + return CategorySecurity +} +func (r *OwaspRateLimitRetryAfterRule) Description() string { + return "429 Too Many Requests responses must include a Retry-After header indicating when clients can retry. Retry-After headers prevent thundering herd problems by telling clients exactly when to resume requests." +} +func (r *OwaspRateLimitRetryAfterRule) Summary() string { + return "429 responses must include a Retry-After header." +} +func (r *OwaspRateLimitRetryAfterRule) HowToFix() string { + return "Add a Retry-After header to 429 responses to indicate when clients can retry." +} +func (r *OwaspRateLimitRetryAfterRule) Link() string { + return "https://github.com/speakeasy-api/openapi/blob/main/openapi/linter/README.md#owasp-rate-limit-retry-after" +} +func (r *OwaspRateLimitRetryAfterRule) DefaultSeverity() validation.Severity { + return validation.SeverityError +} +func (r *OwaspRateLimitRetryAfterRule) Versions() []string { + return []string{"3.0", "3.1"} // OAS3 only +} + +func (r *OwaspRateLimitRetryAfterRule) Run(ctx context.Context, docInfo *linter.DocumentInfo[*openapi.OpenAPI], config *linter.RuleConfig) []error { + if docInfo == nil || docInfo.Document == nil || docInfo.Index == nil { + return nil + } + + var errs []error + + // Check all operations + for _, opNode := range docInfo.Index.Operations { + op := opNode.Node + if op == nil { + continue + } + + // Get operation details for error messages + method := "" + path := "" + for _, loc := range opNode.Location { + switch openapi.GetParentType(loc) { + case "Paths": + if loc.ParentKey != nil { + path = *loc.ParentKey + } + case "PathItem": + if loc.ParentKey != nil { + method = *loc.ParentKey + } + } + } + + responses := op.GetResponses() + if responses == nil { + continue + } + + // Check for 429 response + response429, exists := responses.Get("429") + if !exists || response429 == nil { + continue + } + + responseObj := response429.GetObject() + if responseObj == nil { + continue + } + + // Check if Retry-After header exists + headers := responseObj.GetHeaders() + if headers == nil { + // No headers at all + if rootNode := response429.GetRootNode(); rootNode != nil { + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + RuleOwaspRateLimitRetryAfter, + fmt.Errorf("429 response for operation %s %s is missing Retry-After header", method, path), + rootNode, + )) + } + continue + } + + // Check for Retry-After header (case-insensitive check) + retryAfter, hasRetryAfter := headers.Get("Retry-After") + if !hasRetryAfter || retryAfter == nil { + // Try alternate casing + retryAfter, hasRetryAfter = headers.Get("retry-after") + } + + if !hasRetryAfter || retryAfter == nil { + if rootNode := responseObj.GetRootNode(); rootNode != nil { + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + RuleOwaspRateLimitRetryAfter, + fmt.Errorf("429 response for operation %s %s is missing Retry-After header", method, path), + rootNode, + )) + } + } + } + + return errs +} diff --git a/openapi/linter/rules/owasp_rate_limit_retry_after_test.go b/openapi/linter/rules/owasp_rate_limit_retry_after_test.go new file mode 100644 index 00000000..50526e55 --- 
/dev/null +++ b/openapi/linter/rules/owasp_rate_limit_retry_after_test.go @@ -0,0 +1,301 @@ +package rules_test + +import ( + "strings" + "testing" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/openapi/linter/rules" + "github.com/speakeasy-api/openapi/references" + "github.com/speakeasy-api/openapi/validation" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestOwaspRateLimitRetryAfterRule_ValidCases(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + }{ + { + name: "429 response with Retry-After header", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '429': + description: Too Many Requests + headers: + Retry-After: + description: Number of seconds to wait + schema: + type: integer +`, + }, + { + name: "429 with lowercase retry-after header", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '429': + description: Too Many Requests + headers: + retry-after: + schema: + type: integer +`, + }, + { + name: "no 429 response is ok", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '200': + description: Success + '400': + description: Bad Request +`, + }, + { + name: "429 with Retry-After and other headers", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '429': + description: Too Many Requests + headers: + Retry-After: + schema: + type: integer + X-RateLimit-Limit: + schema: + type: integer + X-RateLimit-Remaining: + schema: + type: integer +`, + }, + { + name: "multiple operations with 429 and Retry-After", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '429': + description: Too Many Requests + headers: + Retry-After: + schema: + type: integer + post: + responses: + '429': + description: Too Many Requests + headers: + Retry-After: + schema: + type: integer +`, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.OwaspRateLimitRetryAfterRule{} + config := &linter.RuleConfig{} + + // Build index for the rule + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + assert.Empty(t, errs) + }) + } +} + +func TestOwaspRateLimitRetryAfterRule_Violations(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + expectedCount int + expectedText string + }{ + { + name: "429 response missing Retry-After header", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '429': + description: Too Many Requests +`, + expectedCount: 1, + expectedText: "Retry-After", + }, + { + name: "429 has headers but no Retry-After", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '429': + description: Too Many Requests + headers: + X-RateLimit-Limit: + schema: + type: integer +`, + expectedCount: 1, + expectedText: "Retry-After", + }, + { + name: "multiple operations with 429 missing Retry-After", + 
yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '429': + description: Too Many Requests + post: + responses: + '429': + description: Too Many Requests + /posts: + get: + responses: + '429': + description: Too Many Requests +`, + expectedCount: 3, + expectedText: "", + }, + { + name: "mixed operations some with some without Retry-After", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '429': + description: Too Many Requests + headers: + Retry-After: + schema: + type: integer + post: + responses: + '429': + description: Too Many Requests +`, + expectedCount: 1, + expectedText: "post", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.OwaspRateLimitRetryAfterRule{} + config := &linter.RuleConfig{} + + // Build index for the rule + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + + require.Len(t, errs, tt.expectedCount) + for _, err := range errs { + assert.Contains(t, err.Error(), "429") + if tt.expectedText != "" { + assert.Contains(t, err.Error(), tt.expectedText) + } + } + }) + } +} + +func TestOwaspRateLimitRetryAfterRule_RuleMetadata(t *testing.T) { + t.Parallel() + + rule := &rules.OwaspRateLimitRetryAfterRule{} + + assert.Equal(t, "owasp-rate-limit-retry-after", rule.ID()) + assert.Equal(t, rules.CategorySecurity, rule.Category()) + assert.NotEmpty(t, rule.Description()) + assert.NotEmpty(t, rule.Link()) + assert.Equal(t, validation.SeverityError, rule.DefaultSeverity()) + assert.Equal(t, []string{"3.0", "3.1"}, rule.Versions()) +} diff --git a/openapi/linter/rules/owasp_rate_limit_test.go b/openapi/linter/rules/owasp_rate_limit_test.go new file mode 100644 index 00000000..b18eaa67 --- /dev/null +++ b/openapi/linter/rules/owasp_rate_limit_test.go @@ -0,0 +1,319 @@ +package rules_test + +import ( + "strings" + "testing" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/openapi/linter/rules" + "github.com/speakeasy-api/openapi/references" + "github.com/speakeasy-api/openapi/validation" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestOwaspRateLimitRule_ValidCases(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + }{ + { + name: "2xx response with X-RateLimit-Limit header", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '200': + description: Success + headers: + X-RateLimit-Limit: + description: Request limit per hour + schema: + type: integer +`, + }, + { + name: "4xx response with RateLimit header", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '429': + description: Too Many Requests + headers: + RateLimit: + description: Rate limit info + schema: + type: string +`, + }, + { + name: "response with RateLimit-Limit header", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '200': + description: Success + headers: + RateLimit-Limit: + schema: + type: integer +`, + }, + { + name: "3xx 
response without rate limit headers is ok", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '301': + description: Moved Permanently +`, + }, + { + name: "5xx response without rate limit headers is ok", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '500': + description: Server Error +`, + }, + { + name: "multiple responses with rate limit headers", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '200': + description: Success + headers: + X-Rate-Limit-Limit: + schema: + type: integer + '400': + description: Bad Request + headers: + RateLimit-Reset: + schema: + type: integer +`, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.OwaspRateLimitRule{} + config := &linter.RuleConfig{} + + // Build index for the rule + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + assert.Empty(t, errs) + }) + } +} + +func TestOwaspRateLimitRule_Violations(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + expectedCount int + expectedText string + }{ + { + name: "2xx response missing rate limit headers", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '200': + description: Success +`, + expectedCount: 1, + expectedText: "200", + }, + { + name: "4xx response missing rate limit headers", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '400': + description: Bad Request +`, + expectedCount: 1, + expectedText: "400", + }, + { + name: "2xx response has headers but no rate limit header", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '200': + description: Success + headers: + X-Request-ID: + schema: + type: string +`, + expectedCount: 1, + expectedText: "missing rate limiting headers", + }, + { + name: "multiple responses missing rate limit headers", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '200': + description: Success + '201': + description: Created + '400': + description: Bad Request + '404': + description: Not Found +`, + expectedCount: 4, + expectedText: "", + }, + { + name: "mixed responses some with some without rate limit headers", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '200': + description: Success + headers: + RateLimit: + schema: + type: string + '400': + description: Bad Request +`, + expectedCount: 1, + expectedText: "400", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.OwaspRateLimitRule{} + config := &linter.RuleConfig{} + + // Build index for the rule + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", 
idx) + + errs := rule.Run(ctx, docInfo, config) + + require.Len(t, errs, tt.expectedCount) + for _, err := range errs { + assert.Contains(t, err.Error(), "rate limiting") + if tt.expectedText != "" { + assert.Contains(t, err.Error(), tt.expectedText) + } + } + }) + } +} + +func TestOwaspRateLimitRule_RuleMetadata(t *testing.T) { + t.Parallel() + + rule := &rules.OwaspRateLimitRule{} + + assert.Equal(t, "owasp-rate-limit", rule.ID()) + assert.Equal(t, rules.CategorySecurity, rule.Category()) + assert.NotEmpty(t, rule.Description()) + assert.NotEmpty(t, rule.Link()) + assert.Equal(t, validation.SeverityError, rule.DefaultSeverity()) + assert.Equal(t, []string{"3.0", "3.1"}, rule.Versions()) +} diff --git a/openapi/linter/rules/owasp_security_hosts_https_oas3.go b/openapi/linter/rules/owasp_security_hosts_https_oas3.go new file mode 100644 index 00000000..b86d9ece --- /dev/null +++ b/openapi/linter/rules/owasp_security_hosts_https_oas3.go @@ -0,0 +1,85 @@ +package rules + +import ( + "context" + "fmt" + "strings" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/validation" + "github.com/speakeasy-api/openapi/yml" +) + +const RuleOwaspSecurityHostsHttpsOAS3 = "owasp-security-hosts-https-oas3" + +type OwaspSecurityHostsHttpsOAS3Rule struct{} + +func (r *OwaspSecurityHostsHttpsOAS3Rule) ID() string { + return RuleOwaspSecurityHostsHttpsOAS3 +} +func (r *OwaspSecurityHostsHttpsOAS3Rule) Category() string { + return CategorySecurity +} +func (r *OwaspSecurityHostsHttpsOAS3Rule) Description() string { + return "Server URLs must begin with https:// as the only permitted protocol. Using HTTPS is essential for protecting API traffic from interception, tampering, and eavesdropping attacks." +} +func (r *OwaspSecurityHostsHttpsOAS3Rule) Summary() string { + return "Server URLs must use HTTPS." +} +func (r *OwaspSecurityHostsHttpsOAS3Rule) HowToFix() string { + return "Update server URLs to use https:// instead of http://." 
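+	// Illustrative only (host names are placeholders):
+	//
+	//   servers:
+	//     - url: https://api.example.com
+	//       description: Production
+	//
+	// Plain `http://` URLs and relative URLs such as `/api/v1` are flagged,
+	// since the check requires the URL to start with `https`.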
+} +func (r *OwaspSecurityHostsHttpsOAS3Rule) Link() string { + return "https://github.com/speakeasy-api/openapi/blob/main/openapi/linter/README.md#owasp-security-hosts-https-oas3" +} +func (r *OwaspSecurityHostsHttpsOAS3Rule) DefaultSeverity() validation.Severity { + return validation.SeverityError +} +func (r *OwaspSecurityHostsHttpsOAS3Rule) Versions() []string { + return []string{"3.0", "3.1"} // Only applies to OpenAPI 3.x +} + +func (r *OwaspSecurityHostsHttpsOAS3Rule) Run(ctx context.Context, docInfo *linter.DocumentInfo[*openapi.OpenAPI], config *linter.RuleConfig) []error { + if docInfo == nil || docInfo.Document == nil { + return nil + } + + var errs []error + + doc := docInfo.Document + servers := doc.GetServers() + if len(servers) == 0 { + return nil + } + + // Check each server URL + for _, server := range servers { + if server == nil { + continue + } + + url := server.GetURL() + if url == "" { + continue + } + + // Check if URL starts with https + if !strings.HasPrefix(url, "https") { + // Get the root node to find the url key + if rootNode := server.GetRootNode(); rootNode != nil { + _, urlValueNode, found := yml.GetMapElementNodes(ctx, rootNode, "url") + if found && urlValueNode != nil { + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + RuleOwaspSecurityHostsHttpsOAS3, + fmt.Errorf("server URL `%s` must use HTTPS protocol for security", url), + urlValueNode, + )) + } + } + } + } + + return errs +} diff --git a/openapi/linter/rules/owasp_security_hosts_https_oas3_test.go b/openapi/linter/rules/owasp_security_hosts_https_oas3_test.go new file mode 100644 index 00000000..66a73708 --- /dev/null +++ b/openapi/linter/rules/owasp_security_hosts_https_oas3_test.go @@ -0,0 +1,309 @@ +package rules_test + +import ( + "strings" + "testing" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/openapi/linter/rules" + "github.com/speakeasy-api/openapi/references" + "github.com/speakeasy-api/openapi/validation" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestOwaspSecurityHostsHttpsOAS3Rule_ValidCases(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + }{ + { + name: "https server url", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +servers: + - url: https://api.example.com +paths: {} +`, + }, + { + name: "https with path", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +servers: + - url: https://api.example.com/v1 +paths: {} +`, + }, + { + name: "multiple https servers", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +servers: + - url: https://api.example.com + description: Production + - url: https://staging.example.com + description: Staging +paths: {} +`, + }, + { + name: "no servers defined", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: {} +`, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.OwaspSecurityHostsHttpsOAS3Rule{} + config := &linter.RuleConfig{} + + // Build index for the rule + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, 
config) + assert.Empty(t, errs) + }) + } +} + +func TestOwaspSecurityHostsHttpsOAS3Rule_Violations(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + expectedCount int + expectedText string + }{ + { + name: "http server url", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +servers: + - url: http://api.example.com +paths: {} +`, + expectedCount: 1, + expectedText: "http://api.example.com", + }, + { + name: "ftp server url", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +servers: + - url: ftp://api.example.com +paths: {} +`, + expectedCount: 1, + expectedText: "ftp://api.example.com", + }, + { + name: "mixed https and http servers", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +servers: + - url: https://api.example.com + description: Production + - url: http://staging.example.com + description: Staging (insecure) +paths: {} +`, + expectedCount: 1, + expectedText: "http://staging.example.com", + }, + { + name: "multiple non-https servers", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +servers: + - url: http://api.example.com + - url: ws://websocket.example.com +paths: {} +`, + expectedCount: 2, + expectedText: "", + }, + { + name: "relative url not starting with https", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +servers: + - url: /api/v1 +paths: {} +`, + expectedCount: 1, + expectedText: "/api/v1", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.OwaspSecurityHostsHttpsOAS3Rule{} + config := &linter.RuleConfig{} + + // Build index for the rule + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + + require.Len(t, errs, tt.expectedCount) + for _, err := range errs { + assert.Contains(t, err.Error(), "HTTPS") + if tt.expectedText != "" { + assert.Contains(t, err.Error(), tt.expectedText) + } + } + }) + } +} + +func TestOwaspSecurityHostsHttpsOAS3Rule_EdgeCases(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + }{ + { + name: "server with empty url", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +servers: + - url: "" + description: Empty URL +paths: {} +`, + }, + { + name: "server with variables in https url", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +servers: + - url: https://{environment}.example.com + variables: + environment: + default: api +paths: {} +`, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.OwaspSecurityHostsHttpsOAS3Rule{} + config := &linter.RuleConfig{} + + // Build index for the rule + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + // Should not panic + errs := rule.Run(ctx, docInfo, config) + + // Empty URL should be skipped, variables in https URL should be valid + assert.Empty(t, errs) + }) + } +} + +func TestOwaspSecurityHostsHttpsOAS3Rule_NilInputs(t 
*testing.T) { + t.Parallel() + + rule := &rules.OwaspSecurityHostsHttpsOAS3Rule{} + config := &linter.RuleConfig{} + ctx := t.Context() + + // Test with nil docInfo + errs := rule.Run(ctx, nil, config) + assert.Empty(t, errs) + + // Test with nil document + var nilDoc *openapi.OpenAPI + errs = rule.Run(ctx, linter.NewDocumentInfoWithIndex(nilDoc, "test.yaml", nil), config) + assert.Empty(t, errs) +} + +func TestOwaspSecurityHostsHttpsOAS3Rule_RuleMetadata(t *testing.T) { + t.Parallel() + + rule := &rules.OwaspSecurityHostsHttpsOAS3Rule{} + + assert.Equal(t, "owasp-security-hosts-https-oas3", rule.ID()) + assert.Equal(t, rules.CategorySecurity, rule.Category()) + assert.NotEmpty(t, rule.Description()) + assert.NotEmpty(t, rule.Link()) + assert.Equal(t, validation.SeverityError, rule.DefaultSeverity()) + assert.Equal(t, []string{"3.0", "3.1"}, rule.Versions()) +} diff --git a/openapi/linter/rules/owasp_string_limit.go b/openapi/linter/rules/owasp_string_limit.go new file mode 100644 index 00000000..198037ee --- /dev/null +++ b/openapi/linter/rules/owasp_string_limit.go @@ -0,0 +1,89 @@ +package rules + +import ( + "context" + "errors" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/validation" +) + +const RuleOwaspStringLimit = "owasp-string-limit" + +type OwaspStringLimitRule struct{} + +func (r *OwaspStringLimitRule) ID() string { + return RuleOwaspStringLimit +} +func (r *OwaspStringLimitRule) Category() string { + return CategorySecurity +} +func (r *OwaspStringLimitRule) Description() string { + return "String schemas must specify maxLength, const, or enum to prevent unbounded data. Without string length limits, APIs are vulnerable to resource exhaustion from extremely long inputs." +} +func (r *OwaspStringLimitRule) Summary() string { + return "String schemas must specify maxLength, const, or enum." +} +func (r *OwaspStringLimitRule) HowToFix() string { + return "Add maxLength, const, or enum constraints to string schemas." 
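+	// Illustrative only (property names are placeholders):
+	//
+	//   name:
+	//     type: string
+	//     maxLength: 100
+	//   status:
+	//     type: string
+	//     enum: [active, inactive]
+	//
+	// Any one of maxLength, const, or enum is sufficient to satisfy this rule.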
+} +func (r *OwaspStringLimitRule) Link() string { + return "https://github.com/speakeasy-api/openapi/blob/main/openapi/linter/README.md#owasp-string-limit" +} +func (r *OwaspStringLimitRule) DefaultSeverity() validation.Severity { + return validation.SeverityError +} +func (r *OwaspStringLimitRule) Versions() []string { + return []string{"3.0", "3.1"} +} + +func (r *OwaspStringLimitRule) Run(ctx context.Context, docInfo *linter.DocumentInfo[*openapi.OpenAPI], config *linter.RuleConfig) []error { + if docInfo == nil || docInfo.Document == nil || docInfo.Index == nil { + return nil + } + + var errs []error + + // Check all schemas in the document + for _, schemaNode := range docInfo.Index.GetAllSchemas() { + refSchema := schemaNode.Node + schema := refSchema.GetSchema() + if schema == nil { + continue + } + + // Check if type contains "string" + types := schema.GetType() + hasStringType := false + for _, typ := range types { + if typ == "string" { + hasStringType = true + break + } + } + + if !hasStringType { + continue + } + + // Check if schema has maxLength, const, or enum defined + maxLength := schema.GetMaxLength() + constValue := schema.GetConst() + enumValues := schema.GetEnum() + + // If none of these are defined, report error + if maxLength == nil && constValue == nil && len(enumValues) == 0 { + if rootNode := refSchema.GetRootNode(); rootNode != nil { + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + RuleOwaspStringLimit, + errors.New("schema of type 'string' must specify maxLength, const, or enum to prevent unbounded data"), + rootNode, + )) + } + } + } + + return errs +} diff --git a/openapi/linter/rules/owasp_string_limit_test.go b/openapi/linter/rules/owasp_string_limit_test.go new file mode 100644 index 00000000..1deaabaa --- /dev/null +++ b/openapi/linter/rules/owasp_string_limit_test.go @@ -0,0 +1,267 @@ +package rules_test + +import ( + "strings" + "testing" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/openapi/linter/rules" + "github.com/speakeasy-api/openapi/references" + "github.com/speakeasy-api/openapi/validation" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestOwaspStringLimitRule_ValidCases(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + }{ + { + name: "string with maxLength", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + User: + type: object + properties: + name: + type: string + maxLength: 100 +paths: {} +`, + }, + { + name: "string with const", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + Status: + type: string + const: active +paths: {} +`, + }, + { + name: "string with enum", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + Status: + type: string + enum: + - active + - inactive + - pending +paths: {} +`, + }, + { + name: "non-string type without maxLength is ok", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + Counter: + type: integer +paths: {} +`, + }, + { + name: "object type is ok", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + User: + type: object + properties: + id: + type: string + maxLength: 36 +paths: {} +`, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, 
err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.OwaspStringLimitRule{} + config := &linter.RuleConfig{} + + // Build index for the rule + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + assert.Empty(t, errs) + }) + } +} + +func TestOwaspStringLimitRule_Violations(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + expectedCount int + }{ + { + name: "string without maxLength, const, or enum", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + User: + type: object + properties: + name: + type: string +paths: {} +`, + expectedCount: 1, + }, + { + name: "multiple strings without constraints", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + User: + type: object + properties: + name: + type: string + email: + type: string + description: + type: string +paths: {} +`, + expectedCount: 3, + }, + { + name: "inline string parameter without maxLength", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + parameters: + - name: search + in: query + schema: + type: string + responses: + '200': + description: Success +`, + expectedCount: 1, + }, + { + name: "response with string without constraint", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '200': + description: Success + content: + application/json: + schema: + type: object + properties: + message: + type: string +`, + expectedCount: 1, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.OwaspStringLimitRule{} + config := &linter.RuleConfig{} + + // Build index for the rule + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + + require.Len(t, errs, tt.expectedCount) + for _, err := range errs { + assert.Contains(t, err.Error(), "must specify maxLength, const, or enum") + } + }) + } +} + +func TestOwaspStringLimitRule_RuleMetadata(t *testing.T) { + t.Parallel() + + rule := &rules.OwaspStringLimitRule{} + + assert.Equal(t, "owasp-string-limit", rule.ID()) + assert.Equal(t, rules.CategorySecurity, rule.Category()) + assert.NotEmpty(t, rule.Description()) + assert.NotEmpty(t, rule.Link()) + assert.Equal(t, validation.SeverityError, rule.DefaultSeverity()) + assert.Equal(t, []string{"3.0", "3.1"}, rule.Versions()) +} diff --git a/openapi/linter/rules/owasp_string_restricted.go b/openapi/linter/rules/owasp_string_restricted.go new file mode 100644 index 00000000..1b9418de --- /dev/null +++ b/openapi/linter/rules/owasp_string_restricted.go @@ -0,0 +1,90 @@ +package rules + +import ( + "context" + "errors" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/validation" +) + +const RuleOwaspStringRestricted = "owasp-string-restricted" + +type OwaspStringRestrictedRule struct{} + +func (r *OwaspStringRestrictedRule) ID() string { + return 
RuleOwaspStringRestricted +} +func (r *OwaspStringRestrictedRule) Category() string { + return CategorySecurity +} +func (r *OwaspStringRestrictedRule) Description() string { + return "String schemas must specify `format`, `const`, `enum`, or `pattern` to restrict content. String restrictions prevent injection attacks and ensure data conforms to expected formats." +} +func (r *OwaspStringRestrictedRule) Summary() string { + return "String schemas must specify `format`, `const`, `enum`, or `pattern`." +} +func (r *OwaspStringRestrictedRule) HowToFix() string { + return "Add `format`, `const`, `enum`, or `pattern` constraints to string schemas." +} +func (r *OwaspStringRestrictedRule) Link() string { + return "https://github.com/speakeasy-api/openapi/blob/main/openapi/linter/README.md#owasp-string-restricted" +} +func (r *OwaspStringRestrictedRule) DefaultSeverity() validation.Severity { + return validation.SeverityError +} +func (r *OwaspStringRestrictedRule) Versions() []string { + return []string{"3.0", "3.1"} +} + +func (r *OwaspStringRestrictedRule) Run(ctx context.Context, docInfo *linter.DocumentInfo[*openapi.OpenAPI], config *linter.RuleConfig) []error { + if docInfo == nil || docInfo.Document == nil || docInfo.Index == nil { + return nil + } + + var errs []error + + // Check all schemas in the document + for _, schemaNode := range docInfo.Index.GetAllSchemas() { + refSchema := schemaNode.Node + schema := refSchema.GetSchema() + if schema == nil { + continue + } + + // Check if type contains "string" + types := schema.GetType() + hasStringType := false + for _, typ := range types { + if typ == "string" { + hasStringType = true + break + } + } + + if !hasStringType { + continue + } + + // Check if schema has format, const, enum, or pattern defined + format := schema.GetFormat() + constValue := schema.GetConst() + enumValues := schema.GetEnum() + pattern := schema.GetPattern() + + // If none of these are defined, report error + if format == "" && constValue == nil && len(enumValues) == 0 && pattern == "" { + if rootNode := refSchema.GetRootNode(); rootNode != nil { + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + RuleOwaspStringRestricted, + errors.New("schema of type `string` must specify `format`, `const`, `enum`, or `pattern` to restrict content"), + rootNode, + )) + } + } + } + + return errs +} diff --git a/openapi/linter/rules/owasp_string_restricted_test.go b/openapi/linter/rules/owasp_string_restricted_test.go new file mode 100644 index 00000000..bc1ec390 --- /dev/null +++ b/openapi/linter/rules/owasp_string_restricted_test.go @@ -0,0 +1,237 @@ +package rules_test + +import ( + "strings" + "testing" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/openapi/linter/rules" + "github.com/speakeasy-api/openapi/references" + "github.com/speakeasy-api/openapi/validation" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestOwaspStringRestrictedRule_ValidCases(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + }{ + { + name: "string with format", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + Email: + type: string + format: email +paths: {} +`, + }, + { + name: "string with const", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + Status: + type: string + const: active +paths: {} +`, + }, + { + name: "string 
with enum", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + Status: + type: string + enum: + - active + - inactive +paths: {} +`, + }, + { + name: "string with pattern", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + PhoneNumber: + type: string + pattern: '^\d{3}-\d{3}-\d{4}$' +paths: {} +`, + }, + { + name: "non-string type without restrictions is ok", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + Counter: + type: integer +paths: {} +`, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.OwaspStringRestrictedRule{} + config := &linter.RuleConfig{} + + // Build index for the rule + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + assert.Empty(t, errs) + }) + } +} + +func TestOwaspStringRestrictedRule_Violations(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + expectedCount int + }{ + { + name: "string without any restrictions", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + User: + type: object + properties: + name: + type: string +paths: {} +`, + expectedCount: 1, + }, + { + name: "multiple strings without restrictions", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + User: + type: object + properties: + name: + type: string + email: + type: string + description: + type: string +paths: {} +`, + expectedCount: 3, + }, + { + name: "inline string parameter without restrictions", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + parameters: + - name: search + in: query + schema: + type: string + responses: + '200': + description: Success +`, + expectedCount: 1, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.OwaspStringRestrictedRule{} + config := &linter.RuleConfig{} + + // Build index for the rule + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + + require.Len(t, errs, tt.expectedCount) + for _, err := range errs { + assert.Contains(t, err.Error(), "must specify `format`, `const`, `enum`, or `pattern`") + } + }) + } +} + +func TestOwaspStringRestrictedRule_RuleMetadata(t *testing.T) { + t.Parallel() + + rule := &rules.OwaspStringRestrictedRule{} + + assert.Equal(t, "owasp-string-restricted", rule.ID()) + assert.Equal(t, rules.CategorySecurity, rule.Category()) + assert.NotEmpty(t, rule.Description()) + assert.NotEmpty(t, rule.Link()) + assert.Equal(t, validation.SeverityError, rule.DefaultSeverity()) + assert.Equal(t, []string{"3.0", "3.1"}, rule.Versions()) +} diff --git a/openapi/linter/rules/parameter_description.go b/openapi/linter/rules/parameter_description.go new file mode 100644 index 00000000..386535ad --- /dev/null +++ 
b/openapi/linter/rules/parameter_description.go @@ -0,0 +1,109 @@ +package rules + +import ( + "context" + "fmt" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/validation" +) + +const RuleStyleOAS3ParameterDescription = "style-oas3-parameter-description" + +type OAS3ParameterDescriptionRule struct{} + +func (r *OAS3ParameterDescriptionRule) ID() string { + return RuleStyleOAS3ParameterDescription +} + +func (r *OAS3ParameterDescriptionRule) Description() string { + return "Parameters should include descriptions that explain their purpose and expected values. Clear parameter documentation helps developers understand how to construct valid requests and what each parameter controls." +} + +func (r *OAS3ParameterDescriptionRule) Summary() string { + return "Parameters should include descriptions." +} + +func (r *OAS3ParameterDescriptionRule) HowToFix() string { + return "Add descriptions to parameters that explain their purpose and expected values." +} + +func (r *OAS3ParameterDescriptionRule) Category() string { + return CategoryStyle +} + +func (r *OAS3ParameterDescriptionRule) DefaultSeverity() validation.Severity { + return validation.SeverityWarning +} + +func (r *OAS3ParameterDescriptionRule) Link() string { + return "https://github.com/speakeasy-api/openapi/blob/main/openapi/linter/README.md#style-oas3-parameter-description" +} + +func (r *OAS3ParameterDescriptionRule) Versions() []string { + return []string{"3.0.0", "3.0.1", "3.0.2", "3.0.3", "3.1.0", "3.1.1", "3.2.0"} +} + +func (r *OAS3ParameterDescriptionRule) Run(ctx context.Context, docInfo *linter.DocumentInfo[*openapi.OpenAPI], config *linter.RuleConfig) []error { + if docInfo == nil || docInfo.Document == nil || docInfo.Index == nil { + return nil + } + + doc := docInfo.Document + var errs []error + + // Check only inline parameters (component parameters are checked by component_description rule) + for _, paramNode := range docInfo.Index.InlineParameters { + refParam := paramNode.Node + if refParam == nil { + continue + } + + param := refParam.GetObject() + if param == nil { + continue + } + + description := param.GetDescription() + if description == "" { + paramName := param.GetName() + + // Extract method and path manually from location + var method, path string + for _, loc := range paramNode.Location { + switch openapi.GetParentType(loc) { + case "Paths": + if loc.ParentKey != nil { + path = *loc.ParentKey + } + case "PathItem": + if loc.ParentKey != nil { + method = *loc.ParentKey + } + } + } + + errNode := GetFieldValueNode(param, "description", doc) + if errNode == nil { + errNode = param.GetRootNode() + } + + var msg string + if method != "" && path != "" { + msg = fmt.Sprintf("parameter `%s` in `%s %s` is missing a description", paramName, method, path) + } else { + msg = fmt.Sprintf("parameter `%s` is missing a description", paramName) + } + + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + RuleStyleOAS3ParameterDescription, + fmt.Errorf("%s", msg), + errNode, + )) + } + } + + return errs +} diff --git a/openapi/linter/rules/parameter_description_test.go b/openapi/linter/rules/parameter_description_test.go new file mode 100644 index 00000000..f118d6d7 --- /dev/null +++ b/openapi/linter/rules/parameter_description_test.go @@ -0,0 +1,181 @@ +package rules_test + +import ( + "strings" + "testing" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + 
"github.com/speakeasy-api/openapi/openapi/linter/rules" + "github.com/speakeasy-api/openapi/references" + "github.com/speakeasy-api/openapi/validation" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestOAS3ParameterDescriptionRule_ValidCases(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + }{ + { + name: "path parameter with description", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users/{id}: + get: + parameters: + - name: id + in: path + required: true + description: The user ID + schema: + type: string +`, + }, + { + name: "query parameter with description", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + parameters: + - name: limit + in: query + description: Maximum number of results + schema: + type: integer +`, + }, + { + name: "no parameters", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: {} +`, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err, "unmarshal should succeed") + + rule := &rules.OAS3ParameterDescriptionRule{} + config := &linter.RuleConfig{} + + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + assert.Empty(t, errs, "should have no lint errors") + }) + } +} + +func TestOAS3ParameterDescriptionRule_Violations(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + expectedError string + }{ + { + name: "path parameter missing description", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users/{id}: + get: + parameters: + - name: id + in: path + required: true + schema: + type: string +`, + expectedError: "parameter `id` in `get /users/{id}` is missing a description", + }, + { + name: "query parameter missing description", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + parameters: + - name: limit + in: query + schema: + type: integer +`, + expectedError: "parameter `limit` in `get /users` is missing a description", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err, "unmarshal should succeed") + + rule := &rules.OAS3ParameterDescriptionRule{} + config := &linter.RuleConfig{} + + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + require.NotEmpty(t, errs, "should have lint errors") + assert.Contains(t, errs[0].Error(), tt.expectedError, "error message should contain expected text") + }) + } +} + +func TestOAS3ParameterDescriptionRule_RuleMetadata(t *testing.T) { + t.Parallel() + + rule := &rules.OAS3ParameterDescriptionRule{} + + assert.Equal(t, "style-oas3-parameter-description", rule.ID()) + assert.Equal(t, rules.CategoryStyle, rule.Category()) + assert.NotEmpty(t, rule.Description()) + assert.NotEmpty(t, rule.Link()) + assert.Equal(t, validation.SeverityWarning, 
rule.DefaultSeverity()) + assert.NotNil(t, rule.Versions()) +} diff --git a/openapi/linter/rules/path_declarations.go b/openapi/linter/rules/path_declarations.go new file mode 100644 index 00000000..25b60693 --- /dev/null +++ b/openapi/linter/rules/path_declarations.go @@ -0,0 +1,58 @@ +package rules + +import ( + "context" + "fmt" + "strings" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/validation" +) + +const RuleSemanticPathDeclarations = "semantic-path-declarations" + +type PathDeclarationsRule struct{} + +func (r *PathDeclarationsRule) ID() string { return RuleSemanticPathDeclarations } +func (r *PathDeclarationsRule) Category() string { return CategorySemantic } +func (r *PathDeclarationsRule) Description() string { + return "Path parameter declarations must not be empty - declarations like /api/{} are invalid. Empty path parameters create ambiguous routes and will cause runtime errors in most API frameworks." +} +func (r *PathDeclarationsRule) Summary() string { + return "Path parameter declarations must not be empty." +} +func (r *PathDeclarationsRule) HowToFix() string { + return "Replace empty path parameters with named ones (e.g., /items/{itemId})." +} +func (r *PathDeclarationsRule) Link() string { + return "https://github.com/speakeasy-api/openapi/blob/main/openapi/linter/README.md#semantic-path-declarations" +} +func (r *PathDeclarationsRule) DefaultSeverity() validation.Severity { + return validation.SeverityError +} +func (r *PathDeclarationsRule) Versions() []string { + return nil // Applies to all OpenAPI versions +} + +func (r *PathDeclarationsRule) Run(ctx context.Context, docInfo *linter.DocumentInfo[*openapi.OpenAPI], config *linter.RuleConfig) []error { + var errs []error + + doc := docInfo.Document + + // Check each path for empty parameter declarations + for pathKey := range doc.GetPaths().All() { + if strings.Contains(pathKey, "{}") { + node := doc.GetPaths().GetCore().GetMapKeyNodeOrRoot(pathKey, doc.GetPaths().GetRootNode()) + + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + RuleSemanticPathDeclarations, + fmt.Errorf("path %q contains empty parameter declaration `{}`", pathKey), + node, + )) + } + } + + return errs +} diff --git a/openapi/linter/rules/path_declarations_test.go b/openapi/linter/rules/path_declarations_test.go new file mode 100644 index 00000000..b8a663ed --- /dev/null +++ b/openapi/linter/rules/path_declarations_test.go @@ -0,0 +1,229 @@ +package rules_test + +import ( + "strings" + "testing" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/openapi/linter/rules" + "github.com/speakeasy-api/openapi/validation" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestPathDeclarationsRule_ValidCases(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + }{ + { + name: "valid single parameter", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users/{userId}: + get: + responses: + '200': + description: ok +`, + }, + { + name: "valid multiple parameters", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users/{userId}/posts/{postId}: + get: + responses: + '200': + description: ok +`, + }, + { + name: "no parameters", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '200': + 
description: ok +`, + }, + { + name: "parameter with underscores", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users/{user_id}: + get: + responses: + '200': + description: ok +`, + }, + { + name: "parameter with hyphens", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users/{user-id}: + get: + responses: + '200': + description: ok +`, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err, "unmarshal should succeed") + + rule := &rules.PathDeclarationsRule{} + config := &linter.RuleConfig{} + + docInfo := &linter.DocumentInfo[*openapi.OpenAPI]{ + Document: doc, + } + + errs := rule.Run(ctx, docInfo, config) + + assert.Empty(t, errs, "should have no lint errors") + }) + } +} + +func TestPathDeclarationsRule_EmptyDeclarations(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + expectedError string + }{ + { + name: "single empty declaration", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /api/{}: + get: + responses: + '200': + description: ok +`, + expectedError: "[7:3] error semantic-path-declarations path \"/api/{}\" contains empty parameter declaration `{}`", + }, + { + name: "empty declaration in middle of path", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /api/{}/users: + get: + responses: + '200': + description: ok +`, + expectedError: "[7:3] error semantic-path-declarations path \"/api/{}/users\" contains empty parameter declaration `{}`", + }, + { + name: "multiple empty declarations", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /api/{}/{}/users: + get: + responses: + '200': + description: ok +`, + expectedError: "[7:3] error semantic-path-declarations path \"/api/{}/{}/users\" contains empty parameter declaration `{}`", + }, + { + name: "empty declaration with valid parameter", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /api/{userId}/{}: + get: + responses: + '200': + description: ok +`, + expectedError: "[7:3] error semantic-path-declarations path \"/api/{userId}/{}\" contains empty parameter declaration `{}`", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err, "unmarshal should succeed") + + rule := &rules.PathDeclarationsRule{} + config := &linter.RuleConfig{} + + docInfo := &linter.DocumentInfo[*openapi.OpenAPI]{ + Document: doc, + } + + errs := rule.Run(ctx, docInfo, config) + + require.Len(t, errs, 1, "should have one lint error") + assert.Equal(t, tt.expectedError, errs[0].Error(), "error message should match exactly") + }) + } +} + +func TestPathDeclarationsRule_RuleMetadata(t *testing.T) { + t.Parallel() + + rule := &rules.PathDeclarationsRule{} + + assert.Equal(t, "semantic-path-declarations", rule.ID()) + assert.Equal(t, rules.CategorySemantic, rule.Category()) + assert.NotEmpty(t, rule.Description()) + assert.NotEmpty(t, rule.Link()) + assert.Equal(t, validation.SeverityError, rule.DefaultSeverity()) + assert.Nil(t, rule.Versions()) +} diff --git a/openapi/linter/rules/path_params.go b/openapi/linter/rules/path_params.go new file mode 100644 index 00000000..459679e5 --- /dev/null +++ b/openapi/linter/rules/path_params.go @@ -0,0 +1,186 @@ +package rules + 
+import ( + "context" + "fmt" + "regexp" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/validation" +) + +const RuleSemanticPathParams = "semantic-path-params" + +type PathParamsRule struct{} + +func (r *PathParamsRule) ID() string { return RuleSemanticPathParams } +func (r *PathParamsRule) Category() string { return CategorySemantic } +func (r *PathParamsRule) Description() string { + return "Path template variables like {userId} must have corresponding parameter definitions with in='path', and declared path parameters must be used in the URL template. This ensures request routing works correctly and all path variables are properly documented. Parameters can be defined at PathItem level (inherited by all operations) or Operation level (can override PathItem parameters)." +} +func (r *PathParamsRule) Summary() string { + return "Path template variables must have matching path parameters and vice versa." +} +func (r *PathParamsRule) HowToFix() string { + return "Ensure every {param} in the path has an in: path parameter and remove any unused path parameters." +} +func (r *PathParamsRule) Link() string { + return "https://github.com/speakeasy-api/openapi/blob/main/openapi/linter/README.md#semantic-path-params" +} +func (r *PathParamsRule) DefaultSeverity() validation.Severity { + return validation.SeverityError +} +func (r *PathParamsRule) Versions() []string { + return nil // Applies to all OpenAPI versions +} + +var pathParamRegex = regexp.MustCompile(`\{([^}]+)\}`) + +func (r *PathParamsRule) Run(ctx context.Context, docInfo *linter.DocumentInfo[*openapi.OpenAPI], config *linter.RuleConfig) []error { + if docInfo == nil || docInfo.Document == nil || docInfo.Index == nil { + return nil + } + + var errs []error + + doc := docInfo.Document + + // Build resolve options from config + resolveOpts := openapi.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: docInfo.Location, + } + if config.ResolveOptions != nil { + resolveOpts.VirtualFS = config.ResolveOptions.VirtualFS + resolveOpts.HTTPClient = config.ResolveOptions.HTTPClient + resolveOpts.DisableExternalRefs = config.ResolveOptions.DisableExternalRefs + } + + // Use the pre-computed InlinePathItems index (path items from /paths) + // These are the only ones with path templates in their location parent key + for _, pathItemNode := range docInfo.Index.InlinePathItems { + refPathItem := pathItemNode.Node + + // Extract path from location (parent key of the path item) + path := pathItemNode.Location.ParentKey() + if path == "" { + continue + } + + pathItem := refPathItem.GetObject() + if pathItem == nil { + continue + } + + templateParams := extractParamsFromPath(path) + + // Get PathItem parameters (in: path) + pathItemParams, pathItemErrs := getPathParameters(ctx, pathItem.Parameters, resolveOpts, config) + errs = append(errs, pathItemErrs...) + + // Iterate operations + for _, op := range pathItem.All() { + // Merge parameters + opParams, opErrs := getPathParameters(ctx, op.Parameters, resolveOpts, config) + errs = append(errs, opErrs...) + effectiveParams := mergeParameters(pathItemParams, opParams) + + // Validate + // 1. 
All template params must be in effectiveParams + for _, tmplParam := range templateParams { + if _, ok := effectiveParams[tmplParam]; !ok { + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + RuleSemanticPathParams, + fmt.Errorf("path parameter `{%s}` is not defined in operation parameters", tmplParam), + op.GetRootNode(), + )) + } + } + + // 2. All effectiveParams must be in template params + for paramName := range effectiveParams { + found := false + for _, tmplParam := range templateParams { + if tmplParam == paramName { + found = true + break + } + } + if !found { + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + RuleSemanticPathParams, + fmt.Errorf("parameter `%s` is declared as path parameter but not used in path template `%s`", paramName, path), + op.GetRootNode(), + )) + } + } + } + } + + return errs +} + +func extractParamsFromPath(path string) []string { + matches := pathParamRegex.FindAllStringSubmatch(path, -1) + var params []string + for _, match := range matches { + if len(match) > 1 { + params = append(params, match[1]) + } + } + return params +} + +func getPathParameters(ctx context.Context, params []*openapi.ReferencedParameter, resolveOpts openapi.ResolveOptions, _ *linter.RuleConfig) (map[string]bool, []error) { + result := make(map[string]bool) + var resolutionErrs []error + + for _, refParam := range params { + if refParam == nil { + continue + } + + // Resolve reference if needed + if refParam.IsReference() && !refParam.IsResolved() { + validErrs, err := refParam.Resolve(ctx, resolveOpts) + if err != nil { + // Resolution failed - report as validation error + resolutionErrs = append(resolutionErrs, validation.NewValidationError( + validation.SeverityError, + RuleSemanticPathParams, + fmt.Errorf("failed to resolve parameter reference `%s`: %w", refParam.GetReference(), err), + refParam.GetRootNode(), + )) + continue + } + // Append any validation errors from resolution + resolutionErrs = append(resolutionErrs, validErrs...) 
+ } + + // GetObject() returns the resolved object for references, or inline object + param := refParam.GetObject() + if param == nil { + continue + } + + if param.In == "path" { + result[param.Name] = true + } + } + return result, resolutionErrs +} + +func mergeParameters(base, override map[string]bool) map[string]bool { + result := make(map[string]bool) + for k, v := range base { + result[k] = v + } + for k, v := range override { + result[k] = v + } + return result +} diff --git a/openapi/linter/rules/path_params_test.go b/openapi/linter/rules/path_params_test.go new file mode 100644 index 00000000..ac22483a --- /dev/null +++ b/openapi/linter/rules/path_params_test.go @@ -0,0 +1,1006 @@ +package rules_test + +import ( + "context" + "fmt" + "net/http" + "net/http/httptest" + "strings" + "testing" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/openapi/linter/rules" + "github.com/speakeasy-api/openapi/references" + "github.com/speakeasy-api/openapi/validation" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +// Helper function to create DocumentInfo with Index +func createDocInfoWithIndex(t *testing.T, ctx context.Context, doc *openapi.OpenAPI, location string) *linter.DocumentInfo[*openapi.OpenAPI] { + t.Helper() + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: location, + }) + return linter.NewDocumentInfoWithIndex(doc, location, idx) +} + +func TestPathParamsRule_ValidCases(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + }{ + { + name: "single path param in operation", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users/{userId}: + get: + parameters: + - name: userId + in: path + required: true + schema: + type: string + responses: + '200': + description: ok +`, + }, + { + name: "path param defined at path item level", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users/{userId}: + parameters: + - name: userId + in: path + required: true + schema: + type: string + get: + responses: + '200': + description: ok + post: + responses: + '201': + description: created +`, + }, + { + name: "multiple path params", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users/{userId}/posts/{postId}: + get: + parameters: + - name: userId + in: path + required: true + schema: + type: string + - name: postId + in: path + required: true + schema: + type: string + responses: + '200': + description: ok +`, + }, + { + name: "deeply nested path params", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /orgs/{orgId}/teams/{teamId}/members/{memberId}: + get: + parameters: + - name: orgId + in: path + required: true + schema: + type: string + - name: teamId + in: path + required: true + schema: + type: string + - name: memberId + in: path + required: true + schema: + type: string + responses: + '200': + description: ok +`, + }, + { + name: "path param override at operation level", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users/{userId}: + parameters: + - name: userId + in: path + required: true + schema: + type: string + get: + parameters: + - name: userId + in: path + required: true + schema: + type: integer + responses: + '200': + description: ok +`, + }, + { + name: "mixed path item and operation params", + yaml: ` +openapi: 3.1.0 +info: + 
title: Test + version: 1.0.0 +paths: + /users/{userId}/posts/{postId}: + parameters: + - name: userId + in: path + required: true + schema: + type: string + get: + parameters: + - name: postId + in: path + required: true + schema: + type: string + responses: + '200': + description: ok +`, + }, + { + name: "multiple operations sharing path item params", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users/{userId}: + parameters: + - name: userId + in: path + required: true + schema: + type: string + get: + responses: + '200': + description: ok + put: + responses: + '200': + description: ok + delete: + responses: + '204': + description: deleted +`, + }, + { + name: "path without template params", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '200': + description: ok +`, + }, + { + name: "query param ignored for path validation", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + parameters: + - name: filter + in: query + schema: + type: string + responses: + '200': + description: ok +`, + }, + { + name: "header param ignored for path validation", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + parameters: + - name: X-Request-Id + in: header + schema: + type: string + responses: + '200': + description: ok +`, + }, + { + name: "referenced parameter from components", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users/{userId}: + get: + parameters: + - $ref: '#/components/parameters/UserId' + responses: + '200': + description: ok +components: + parameters: + UserId: + name: userId + in: path + required: true + schema: + type: string +`, + }, + { + name: "referenced path item from components", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users/{userId}: + $ref: '#/components/pathItems/UserPath' +components: + pathItems: + UserPath: + parameters: + - name: userId + in: path + required: true + schema: + type: string + get: + responses: + '200': + description: ok +`, + }, + { + name: "path item ref with params defined in operations", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users/{userId}: + $ref: '#/components/pathItems/UserPath' +components: + pathItems: + UserPath: + get: + parameters: + - name: userId + in: path + required: true + schema: + type: string + responses: + '200': + description: ok +`, + }, + { + name: "mixed inline and referenced parameters", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users/{userId}/posts/{postId}: + get: + parameters: + - $ref: '#/components/parameters/UserId' + - name: postId + in: path + required: true + schema: + type: string + responses: + '200': + description: ok +components: + parameters: + UserId: + name: userId + in: path + required: true + schema: + type: string +`, + }, + { + name: "path item level ref param inherited by multiple ops", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users/{userId}: + parameters: + - $ref: '#/components/parameters/UserId' + get: + responses: + '200': + description: ok + delete: + responses: + '204': + description: deleted +components: + parameters: + UserId: + name: userId + in: path + required: true + schema: + type: string +`, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, 
strings.NewReader(tt.yaml)) + require.NoError(t, err, "unmarshal should succeed") + + rule := &rules.PathParamsRule{} + config := &linter.RuleConfig{} + + docInfo := createDocInfoWithIndex(t, ctx, doc, "test.yaml") + + errs := rule.Run(ctx, docInfo, config) + + assert.Empty(t, errs, "should have no lint errors") + }) + } +} + +func TestPathParamsRule_MissingPathParam(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + expectedErrors []string + }{ + { + name: "missing single path param", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users/{userId}: + get: + responses: + '200': + description: ok +`, + expectedErrors: []string{ + "[9:7] error semantic-path-params path parameter `{userId}` is not defined in operation parameters", + }, + }, + { + name: "missing one of multiple path params", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users/{userId}/posts/{postId}: + get: + parameters: + - name: userId + in: path + required: true + schema: + type: string + responses: + '200': + description: ok +`, + expectedErrors: []string{ + "[9:7] error semantic-path-params path parameter `{postId}` is not defined in operation parameters", + }, + }, + { + name: "missing all path params", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users/{userId}/posts/{postId}: + get: + responses: + '200': + description: ok +`, + expectedErrors: []string{ + "[9:7] error semantic-path-params path parameter `{userId}` is not defined in operation parameters", + "[9:7] error semantic-path-params path parameter `{postId}` is not defined in operation parameters", + }, + }, + { + name: "missing path param in one operation but not another", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users/{userId}: + get: + parameters: + - name: userId + in: path + required: true + schema: + type: string + responses: + '200': + description: ok + post: + responses: + '201': + description: created +`, + expectedErrors: []string{ + "[19:7] error semantic-path-params path parameter `{userId}` is not defined in operation parameters", + }, + }, + { + name: "case sensitive param names", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users/{userId}: + get: + parameters: + - name: userid + in: path + required: true + schema: + type: string + responses: + '200': + description: ok +`, + expectedErrors: []string{ + "[9:7] error semantic-path-params path parameter `{userId}` is not defined in operation parameters", + "[9:7] error semantic-path-params parameter `userid` is declared as path parameter but not used in path template `/users/{userId}`", + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err, "unmarshal should succeed") + + rule := &rules.PathParamsRule{} + config := &linter.RuleConfig{} + + docInfo := createDocInfoWithIndex(t, ctx, doc, "test.yaml") + + errs := rule.Run(ctx, docInfo, config) + + var errMsgs []string + for _, err := range errs { + errMsgs = append(errMsgs, err.Error()) + } + + assert.ElementsMatch(t, tt.expectedErrors, errMsgs) + }) + } +} + +func TestPathParamsRule_UnusedPathParam(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + expectedErrors []string + }{ + { + name: "unused single path param", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 
+paths: + /users: + get: + parameters: + - name: userId + in: path + required: true + schema: + type: string + responses: + '200': + description: ok +`, + expectedErrors: []string{ + "[9:7] error semantic-path-params parameter `userId` is declared as path parameter but not used in path template `/users`", + }, + }, + { + name: "unused path param at path item level", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + parameters: + - name: userId + in: path + required: true + schema: + type: string + get: + responses: + '200': + description: ok +`, + expectedErrors: []string{ + "[15:7] error semantic-path-params parameter `userId` is declared as path parameter but not used in path template `/users`", + }, + }, + { + name: "one used one unused path param", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users/{userId}: + get: + parameters: + - name: userId + in: path + required: true + schema: + type: string + - name: postId + in: path + required: true + schema: + type: string + responses: + '200': + description: ok +`, + expectedErrors: []string{ + "[9:7] error semantic-path-params parameter `postId` is declared as path parameter but not used in path template `/users/{userId}`", + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err, "unmarshal should succeed") + + rule := &rules.PathParamsRule{} + config := &linter.RuleConfig{} + + docInfo := createDocInfoWithIndex(t, ctx, doc, "test.yaml") + + errs := rule.Run(ctx, docInfo, config) + + var errMsgs []string + for _, err := range errs { + errMsgs = append(errMsgs, err.Error()) + } + + assert.ElementsMatch(t, tt.expectedErrors, errMsgs) + }) + } +} + +func TestPathParamsRule_RuleMetadata(t *testing.T) { + t.Parallel() + + rule := &rules.PathParamsRule{} + + assert.Equal(t, "semantic-path-params", rule.ID(), "rule ID should match") + assert.Equal(t, rules.CategorySemantic, rule.Category(), "rule category should match") + assert.NotEmpty(t, rule.Description(), "rule should have description") + assert.Equal(t, validation.SeverityError, rule.DefaultSeverity(), "default severity should be error") + assert.Nil(t, rule.Versions(), "versions should be nil (all versions)") +} + +func TestPathParamsRule_SeverityOverride(t *testing.T) { + t.Parallel() + ctx := t.Context() + + yamlInput := ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users/{userId}: + get: + responses: + '200': + description: ok +` + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(yamlInput)) + require.NoError(t, err, "unmarshal should succeed") + + rule := &rules.PathParamsRule{} + warningSeverity := validation.SeverityWarning + config := &linter.RuleConfig{ + Severity: &warningSeverity, + } + + docInfo := createDocInfoWithIndex(t, ctx, doc, "test.yaml") + + errs := rule.Run(ctx, docInfo, config) + require.Len(t, errs, 1, "should have one error") + + // Check full error string includes warning severity + assert.Equal(t, "[9:7] warning semantic-path-params path parameter `{userId}` is not defined in operation parameters", errs[0].Error()) +} + +func TestPathParamsRule_ExternalReferenceResolution(t *testing.T) { + t.Parallel() + + t.Run("external reference to parameter resolved successfully", func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + // Create a mock HTTP server for this test + server := 
httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + switch r.URL.Path { + case "/params/user-id.yaml": + w.Header().Set("Content-Type", "application/yaml") + w.WriteHeader(http.StatusOK) + _, _ = w.Write([]byte(` +name: userId +in: path +required: true +schema: + type: string +`)) + default: + w.WriteHeader(http.StatusNotFound) + } + })) + defer server.Close() + + yamlInput := fmt.Sprintf(`openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users/{userId}: + get: + parameters: + - $ref: "%s/params/user-id.yaml" + responses: + "200": + description: ok +`, server.URL) + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(yamlInput)) + require.NoError(t, err, "unmarshal should succeed") + + rule := &rules.PathParamsRule{} + config := &linter.RuleConfig{} + + docInfo := createDocInfoWithIndex(t, ctx, doc, server.URL+"/openapi.yaml") + + errs := rule.Run(ctx, docInfo, config) + + // Should have no errors because the external reference resolves to a valid path param + assert.Empty(t, errs, "should have no lint errors when external ref is resolved") + }) + + t.Run("multiple external references resolved", func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + // Create a mock HTTP server for this test + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + switch r.URL.Path { + case "/params/user-id.yaml": + w.Header().Set("Content-Type", "application/yaml") + w.WriteHeader(http.StatusOK) + _, _ = w.Write([]byte(` +name: userId +in: path +required: true +schema: + type: string +`)) + case "/params/post-id.yaml": + w.Header().Set("Content-Type", "application/yaml") + w.WriteHeader(http.StatusOK) + _, _ = w.Write([]byte(` +name: postId +in: path +required: true +schema: + type: string +`)) + default: + w.WriteHeader(http.StatusNotFound) + } + })) + defer server.Close() + + yamlInput := fmt.Sprintf(`openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users/{userId}/posts/{postId}: + get: + parameters: + - $ref: "%s/params/user-id.yaml" + - $ref: "%s/params/post-id.yaml" + responses: + "200": + description: ok +`, server.URL, server.URL) + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(yamlInput)) + require.NoError(t, err, "unmarshal should succeed") + + rule := &rules.PathParamsRule{} + config := &linter.RuleConfig{} + + docInfo := createDocInfoWithIndex(t, ctx, doc, server.URL+"/openapi.yaml") + + errs := rule.Run(ctx, docInfo, config) + + // Should have no errors - both path params are defined via external refs + assert.Empty(t, errs, "should have no lint errors when all external refs resolve") + }) + + t.Run("missing path param detected even with external references", func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + // Create a mock HTTP server for this test + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + switch r.URL.Path { + case "/params/user-id.yaml": + w.Header().Set("Content-Type", "application/yaml") + w.WriteHeader(http.StatusOK) + _, _ = w.Write([]byte(` +name: userId +in: path +required: true +schema: + type: string +`)) + default: + w.WriteHeader(http.StatusNotFound) + } + })) + defer server.Close() + + yamlInput := fmt.Sprintf(`openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users/{userId}/posts/{postId}: + get: + parameters: + - $ref: "%s/params/user-id.yaml" + responses: + "200": + description: ok +`, server.URL) + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(yamlInput)) + 
require.NoError(t, err, "unmarshal should succeed") + + rule := &rules.PathParamsRule{} + config := &linter.RuleConfig{} + + docInfo := createDocInfoWithIndex(t, ctx, doc, server.URL+"/openapi.yaml") + + errs := rule.Run(ctx, docInfo, config) + + // Should have one error - postId is not defined + require.Len(t, errs, 1, "should have one lint error for missing postId") + assert.Contains(t, errs[0].Error(), "postId", "error should mention postId") + }) + + t.Run("resolution error reported when external reference fails", func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + // Create a mock HTTP server that returns 404 + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusNotFound) + })) + defer server.Close() + + yamlInput := fmt.Sprintf(`openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users/{userId}: + get: + parameters: + - $ref: "%s/params/missing.yaml" + responses: + "200": + description: ok +`, server.URL) + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(yamlInput)) + require.NoError(t, err, "unmarshal should succeed") + + rule := &rules.PathParamsRule{} + config := &linter.RuleConfig{} + + docInfo := createDocInfoWithIndex(t, ctx, doc, server.URL+"/openapi.yaml") + + errs := rule.Run(ctx, docInfo, config) + + // Should have errors for both resolution failure and missing param + require.NotEmpty(t, errs, "should have errors when resolution fails") + + // Check that we have a resolution error + var foundResolutionError bool + for _, err := range errs { + if strings.Contains(err.Error(), "failed to resolve parameter reference") { + foundResolutionError = true + break + } + } + assert.True(t, foundResolutionError, "should report resolution error") + }) + + t.Run("resolution error for invalid yaml content", func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + // Create a mock HTTP server that returns invalid YAML + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + switch r.URL.Path { + case "/params/invalid.yaml": + w.Header().Set("Content-Type", "application/yaml") + w.WriteHeader(http.StatusOK) + _, _ = w.Write([]byte(`this is not valid: yaml: content: [unclosed`)) + default: + w.WriteHeader(http.StatusNotFound) + } + })) + defer server.Close() + + yamlInput := fmt.Sprintf(`openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users/{userId}: + get: + parameters: + - $ref: "%s/params/invalid.yaml" + responses: + "200": + description: ok +`, server.URL) + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(yamlInput)) + require.NoError(t, err, "unmarshal should succeed") + + rule := &rules.PathParamsRule{} + config := &linter.RuleConfig{} + + docInfo := createDocInfoWithIndex(t, ctx, doc, server.URL+"/openapi.yaml") + + errs := rule.Run(ctx, docInfo, config) + + // Should have a resolution error for invalid YAML + require.NotEmpty(t, errs, "should have errors when YAML is invalid") + + var foundResolutionError bool + for _, err := range errs { + if strings.Contains(err.Error(), "failed to resolve parameter reference") { + foundResolutionError = true + break + } + } + assert.True(t, foundResolutionError, "should report resolution error for invalid YAML") + }) +} diff --git a/openapi/linter/rules/path_query.go b/openapi/linter/rules/path_query.go new file mode 100644 index 00000000..a324d0be --- /dev/null +++ b/openapi/linter/rules/path_query.go @@ -0,0 +1,58 @@ +package rules + +import ( + "context" + "fmt" + "strings" + + 
"github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/validation" +) + +const RuleSemanticPathQuery = "semantic-path-query" + +type PathQueryRule struct{} + +func (r *PathQueryRule) ID() string { return RuleSemanticPathQuery } +func (r *PathQueryRule) Category() string { return CategorySemantic } +func (r *PathQueryRule) Description() string { + return "Paths must not include query strings - query parameters should be defined in the parameters array instead. Including query strings in paths creates ambiguity, breaks code generation, and violates OpenAPI specification structure." +} +func (r *PathQueryRule) Summary() string { + return "Paths must not include query strings." +} +func (r *PathQueryRule) HowToFix() string { + return "Remove query strings from path keys and define them as query parameters instead." +} +func (r *PathQueryRule) Link() string { + return "https://github.com/speakeasy-api/openapi/blob/main/openapi/linter/README.md#semantic-path-query" +} +func (r *PathQueryRule) DefaultSeverity() validation.Severity { + return validation.SeverityError +} +func (r *PathQueryRule) Versions() []string { + return nil // Applies to all OpenAPI versions +} + +func (r *PathQueryRule) Run(ctx context.Context, docInfo *linter.DocumentInfo[*openapi.OpenAPI], config *linter.RuleConfig) []error { + var errs []error + + doc := docInfo.Document + + // Check each path for query strings + for pathKey := range doc.GetPaths().All() { + if strings.Contains(pathKey, "?") { + node := doc.GetPaths().GetCore().GetMapKeyNodeOrRoot(pathKey, doc.GetPaths().GetRootNode()) + + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + RuleSemanticPathQuery, + fmt.Errorf("path %q contains query string - use parameters array instead", pathKey), + node, + )) + } + } + + return errs +} diff --git a/openapi/linter/rules/path_query_test.go b/openapi/linter/rules/path_query_test.go new file mode 100644 index 00000000..252b24eb --- /dev/null +++ b/openapi/linter/rules/path_query_test.go @@ -0,0 +1,161 @@ +package rules_test + +import ( + "strings" + "testing" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/openapi/linter/rules" + "github.com/speakeasy-api/openapi/validation" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestPathQueryRule_ValidCases(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + }{ + { + name: "no query string", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '200': + description: ok +`, + }, + { + name: "path with parameters but no query", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users/{userId}: + get: + responses: + '200': + description: ok +`, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.PathQueryRule{} + config := &linter.RuleConfig{} + docInfo := &linter.DocumentInfo[*openapi.OpenAPI]{Document: doc} + + errs := rule.Run(ctx, docInfo, config) + assert.Empty(t, errs) + }) + } +} + +func TestPathQueryRule_QueryInPath(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + expectedError string + }{ + { + name: "query string at end", + yaml: ` 
+openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users?filter=active: + get: + responses: + '200': + description: ok +`, + expectedError: `[7:3] error semantic-path-query path "/users?filter=active" contains query string - use parameters array instead`, + }, + { + name: "query string with parameter", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users/{id}?include=details: + get: + responses: + '200': + description: ok +`, + expectedError: `[7:3] error semantic-path-query path "/users/{id}?include=details" contains query string - use parameters array instead`, + }, + { + name: "single question mark at end", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users?: + get: + responses: + '200': + description: ok +`, + expectedError: `[7:3] error semantic-path-query path "/users?" contains query string - use parameters array instead`, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.PathQueryRule{} + config := &linter.RuleConfig{} + docInfo := &linter.DocumentInfo[*openapi.OpenAPI]{Document: doc} + + errs := rule.Run(ctx, docInfo, config) + + require.Len(t, errs, 1) + assert.Equal(t, tt.expectedError, errs[0].Error()) + }) + } +} + +func TestPathQueryRule_RuleMetadata(t *testing.T) { + t.Parallel() + + rule := &rules.PathQueryRule{} + + assert.Equal(t, "semantic-path-query", rule.ID()) + assert.Equal(t, rules.CategorySemantic, rule.Category()) + assert.NotEmpty(t, rule.Description()) + assert.NotEmpty(t, rule.Link()) + assert.Equal(t, validation.SeverityError, rule.DefaultSeverity()) + assert.Nil(t, rule.Versions()) +} diff --git a/openapi/linter/rules/path_trailing_slash.go b/openapi/linter/rules/path_trailing_slash.go new file mode 100644 index 00000000..87a5aabe --- /dev/null +++ b/openapi/linter/rules/path_trailing_slash.go @@ -0,0 +1,64 @@ +package rules + +import ( + "context" + "fmt" + "strings" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/validation" +) + +const RuleStylePathTrailingSlash = "style-path-trailing-slash" + +type PathTrailingSlashRule struct{} + +func (r *PathTrailingSlashRule) ID() string { return RuleStylePathTrailingSlash } +func (r *PathTrailingSlashRule) Category() string { return CategoryStyle } +func (r *PathTrailingSlashRule) Description() string { + return "Path definitions should not end with a trailing slash to maintain consistency and avoid routing ambiguity. Trailing slashes in paths can cause mismatches with server routing rules and create duplicate endpoint definitions." +} +func (r *PathTrailingSlashRule) Summary() string { + return "Paths should not end with a trailing slash." +} +func (r *PathTrailingSlashRule) HowToFix() string { + return "Remove trailing slashes from path keys (except the root '/')." 
+} +func (r *PathTrailingSlashRule) Link() string { + return "https://github.com/speakeasy-api/openapi/blob/main/openapi/linter/README.md#style-path-trailing-slash" +} +func (r *PathTrailingSlashRule) DefaultSeverity() validation.Severity { + return validation.SeverityWarning +} +func (r *PathTrailingSlashRule) Versions() []string { + return nil // Applies to all OpenAPI versions +} + +func (r *PathTrailingSlashRule) Run(ctx context.Context, docInfo *linter.DocumentInfo[*openapi.OpenAPI], config *linter.RuleConfig) []error { + if docInfo == nil || docInfo.Document == nil { + return nil + } + + var errs []error + doc := docInfo.Document + + paths := doc.GetPaths() + if paths == nil { + return nil + } + + for pathKey := range paths.All() { + if strings.HasSuffix(pathKey, "/") && pathKey != "/" { + node := paths.GetCore().GetMapKeyNodeOrRoot(pathKey, paths.GetRootNode()) + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + RuleStylePathTrailingSlash, + fmt.Errorf("path `%s` must not end with a trailing slash", pathKey), + node, + )) + } + } + + return errs +} diff --git a/openapi/linter/rules/path_trailing_slash_test.go b/openapi/linter/rules/path_trailing_slash_test.go new file mode 100644 index 00000000..dd93c0a1 --- /dev/null +++ b/openapi/linter/rules/path_trailing_slash_test.go @@ -0,0 +1,160 @@ +package rules_test + +import ( + "strings" + "testing" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/openapi/linter/rules" + "github.com/speakeasy-api/openapi/validation" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestPathTrailingSlashRule_ValidCases(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + }{ + { + name: "path without trailing slash", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users: + get: + responses: + '200': + description: ok +`, + }, + { + name: "root path", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /: + get: + responses: + '200': + description: ok +`, + }, + { + name: "path with parameters", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users/{id}: + get: + responses: + '200': + description: ok +`, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.PathTrailingSlashRule{} + config := &linter.RuleConfig{} + docInfo := linter.NewDocumentInfo(doc, "test.yaml") + + errs := rule.Run(ctx, docInfo, config) + assert.Empty(t, errs) + }) + } +} + +func TestPathTrailingSlashRule_Violations(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + expectedError string + }{ + { + name: "path with trailing slash", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users/: + get: + responses: + '200': + description: ok +`, + expectedError: "[7:3] warning style-path-trailing-slash path `/users/` must not end with a trailing slash", + }, + { + name: "nested path with trailing slash", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /api/users/: + get: + responses: + '200': + description: ok +`, + expectedError: "[7:3] warning style-path-trailing-slash path `/api/users/` must not end with a trailing slash", + }, + } + + for _, tt := range tests { + 
t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.PathTrailingSlashRule{} + config := &linter.RuleConfig{} + docInfo := linter.NewDocumentInfo(doc, "test.yaml") + + errs := rule.Run(ctx, docInfo, config) + + require.Len(t, errs, 1) + assert.Equal(t, tt.expectedError, errs[0].Error()) + }) + } +} + +func TestPathTrailingSlashRule_RuleMetadata(t *testing.T) { + t.Parallel() + + rule := &rules.PathTrailingSlashRule{} + + assert.Equal(t, "style-path-trailing-slash", rule.ID()) + assert.Equal(t, rules.CategoryStyle, rule.Category()) + assert.NotEmpty(t, rule.Description()) + assert.NotEmpty(t, rule.Link()) + assert.Equal(t, validation.SeverityWarning, rule.DefaultSeverity()) + assert.Nil(t, rule.Versions()) +} diff --git a/openapi/linter/rules/paths_kebab_case.go b/openapi/linter/rules/paths_kebab_case.go new file mode 100644 index 00000000..e724a1c9 --- /dev/null +++ b/openapi/linter/rules/paths_kebab_case.go @@ -0,0 +1,107 @@ +package rules + +import ( + "context" + "fmt" + "regexp" + "strings" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/validation" +) + +const RuleStylePathsKebabCase = "style-paths-kebab-case" + +type PathsKebabCaseRule struct{} + +func (r *PathsKebabCaseRule) ID() string { + return RuleStylePathsKebabCase +} + +func (r *PathsKebabCaseRule) Description() string { + return "Path segments should use kebab-case (lowercase with hyphens) for consistency and readability. Kebab-case paths are easier to read, follow REST conventions, and avoid case-sensitivity issues across different systems." +} + +func (r *PathsKebabCaseRule) Summary() string { + return "Path segments should use kebab-case." +} + +func (r *PathsKebabCaseRule) HowToFix() string { + return "Rename non-kebab-case path segments to lowercase with hyphens." 
+} + +func (r *PathsKebabCaseRule) Category() string { + return CategoryStyle +} + +func (r *PathsKebabCaseRule) DefaultSeverity() validation.Severity { + return validation.SeverityWarning +} + +func (r *PathsKebabCaseRule) Link() string { + return "https://github.com/speakeasy-api/openapi/blob/main/openapi/linter/README.md#style-paths-kebab-case" +} + +func (r *PathsKebabCaseRule) Versions() []string { + return nil // applies to all versions +} + +var pathKebabCaseRegex = regexp.MustCompile(`^[{}a-z\d-.]+$`) +var variableRegex = regexp.MustCompile(`^\{(\w.*)}\.?.*$`) + +// checkPathKebabCase returns non-kebab-case segments in the path +func checkPathKebabCase(path string) []string { + parts := strings.Split(path, "/") + if len(parts) == 0 { + return nil + } + segments := parts[1:] // skip first empty segment + var invalidSegments []string + + for _, seg := range segments { + if seg == "" { + continue + } + // Skip variable segments like {id} or {userId} + if variableRegex.MatchString(seg) { + continue + } + // Check if segment matches kebab-case pattern + if !pathKebabCaseRegex.MatchString(seg) { + invalidSegments = append(invalidSegments, seg) + } + } + + return invalidSegments +} + +func (r *PathsKebabCaseRule) Run(ctx context.Context, docInfo *linter.DocumentInfo[*openapi.OpenAPI], config *linter.RuleConfig) []error { + if docInfo == nil || docInfo.Document == nil { + return nil + } + + var errs []error + doc := docInfo.Document + + paths := doc.GetPaths() + if paths == nil { + return nil + } + + for pathKey := range paths.All() { + invalidSegments := checkPathKebabCase(pathKey) + + if len(invalidSegments) > 0 { + node := paths.GetCore().GetMapKeyNodeOrRoot(pathKey, paths.GetRootNode()) + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + RuleStylePathsKebabCase, + fmt.Errorf("path segments `%s` are not kebab-case", strings.Join(invalidSegments, "`, `")), + node, + )) + } + } + + return errs +} diff --git a/openapi/linter/rules/paths_kebab_case_test.go b/openapi/linter/rules/paths_kebab_case_test.go new file mode 100644 index 00000000..7c651251 --- /dev/null +++ b/openapi/linter/rules/paths_kebab_case_test.go @@ -0,0 +1,222 @@ +package rules_test + +import ( + "strings" + "testing" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/openapi/linter/rules" + "github.com/speakeasy-api/openapi/validation" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestPathsKebabCaseRule_ValidCases(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + }{ + { + name: "kebab-case path", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /api-users: + get: + responses: + '200': + description: ok +`, + }, + { + name: "kebab-case with numbers", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /api-v1-users: + get: + responses: + '200': + description: ok +`, + }, + { + name: "path with variables", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /users/{userId}: + get: + responses: + '200': + description: ok +`, + }, + { + name: "mixed kebab-case and variables", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /api-users/{userId}/user-profile: + get: + responses: + '200': + description: ok +`, + }, + { + name: "path with dots and dashes", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + 
/api.v1/user-data: + get: + responses: + '200': + description: ok +`, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.PathsKebabCaseRule{} + config := &linter.RuleConfig{} + docInfo := linter.NewDocumentInfo(doc, "test.yaml") + + errs := rule.Run(ctx, docInfo, config) + assert.Empty(t, errs) + }) + } +} + +func TestPathsKebabCaseRule_Violations(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + expectedError string + }{ + { + name: "camelCase path", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /apiUsers: + get: + responses: + '200': + description: ok +`, + expectedError: "[7:3] warning style-paths-kebab-case path segments `apiUsers` are not kebab-case", + }, + { + name: "snake_case path", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /api_users: + get: + responses: + '200': + description: ok +`, + expectedError: "[7:3] warning style-paths-kebab-case path segments `api_users` are not kebab-case", + }, + { + name: "uppercase path", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /API/USERS: + get: + responses: + '200': + description: ok +`, + expectedError: "[7:3] warning style-paths-kebab-case path segments `API`, `USERS` are not kebab-case", + }, + { + name: "mixed valid and invalid segments", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /api-users/userId: + get: + responses: + '200': + description: ok +`, + expectedError: "[7:3] warning style-paths-kebab-case path segments `userId` are not kebab-case", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.PathsKebabCaseRule{} + config := &linter.RuleConfig{} + docInfo := linter.NewDocumentInfo(doc, "test.yaml") + + errs := rule.Run(ctx, docInfo, config) + + require.Len(t, errs, 1) + assert.Equal(t, tt.expectedError, errs[0].Error()) + }) + } +} + +func TestPathsKebabCaseRule_RuleMetadata(t *testing.T) { + t.Parallel() + + rule := &rules.PathsKebabCaseRule{} + + assert.Equal(t, "style-paths-kebab-case", rule.ID()) + assert.Equal(t, rules.CategoryStyle, rule.Category()) + assert.NotEmpty(t, rule.Description()) + assert.NotEmpty(t, rule.Link()) + assert.Equal(t, validation.SeverityWarning, rule.DefaultSeverity()) + assert.Nil(t, rule.Versions()) +} diff --git a/openapi/linter/rules/rule_metadata_test.go b/openapi/linter/rules/rule_metadata_test.go new file mode 100644 index 00000000..cc0333e8 --- /dev/null +++ b/openapi/linter/rules/rule_metadata_test.go @@ -0,0 +1,103 @@ +package rules_test + +import ( + "testing" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/openapi/linter/rules" + "github.com/stretchr/testify/assert" +) + +// howToFixer is the interface satisfied by rules that provide fix guidance. +type howToFixer interface { + HowToFix() string +} + +// allRules returns every built-in rule instance. 
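+// Newly added rules must be registered here so that TestAllRules_MetadataPopulated below exercises their metadata.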
+func allRules() []linter.RuleRunner[*openapi.OpenAPI] { + return []linter.RuleRunner[*openapi.OpenAPI]{ + &rules.PathParamsRule{}, + &rules.PathDeclarationsRule{}, + &rules.PathQueryRule{}, + &rules.TypedEnumRule{}, + &rules.DuplicatedEnumRule{}, + &rules.NoEvalInMarkdownRule{}, + &rules.OAS3APIServersRule{}, + &rules.NoRefSiblingsRule{}, + &rules.NoScriptTagsInMarkdownRule{}, + &rules.OAS3HostNotExampleRule{}, + &rules.OperationIdRule{}, + &rules.OperationSuccessResponseRule{}, + &rules.OperationErrorResponseRule{}, + &rules.OperationTagDefinedRule{}, + &rules.OperationSingularTagRule{}, + &rules.OperationDescriptionRule{}, + &rules.OperationTagsRule{}, + &rules.InfoDescriptionRule{}, + &rules.InfoContactRule{}, + &rules.InfoLicenseRule{}, + &rules.LicenseURLRule{}, + &rules.PathTrailingSlashRule{}, + &rules.PathsKebabCaseRule{}, + &rules.NoVerbsInPathRule{}, + &rules.TagDescriptionRule{}, + &rules.TagsAlphabeticalRule{}, + &rules.ContactPropertiesRule{}, + &rules.OpenAPITagsRule{}, + &rules.ComponentDescriptionRule{}, + &rules.UnusedComponentRule{}, + &rules.OperationIDValidInURLRule{}, + &rules.LinkOperationRule{}, + &rules.OAS3HostTrailingSlashRule{}, + &rules.OAS3ParameterDescriptionRule{}, + &rules.NoAmbiguousPathsRule{}, + &rules.DescriptionDuplicationRule{}, + &rules.OwaspNoHttpBasicRule{}, + &rules.OwaspNoAPIKeysInURLRule{}, + &rules.OwaspNoCredentialsInURLRule{}, + &rules.OwaspAuthInsecureSchemesRule{}, + &rules.OwaspDefineErrorResponses401Rule{}, + &rules.OwaspDefineErrorResponses500Rule{}, + &rules.OwaspDefineErrorResponses429Rule{}, + &rules.OwaspSecurityHostsHttpsOAS3Rule{}, + &rules.OwaspDefineErrorValidationRule{}, + &rules.OwaspProtectionGlobalUnsafeRule{}, + &rules.OwaspProtectionGlobalUnsafeStrictRule{}, + &rules.OwaspProtectionGlobalSafeRule{}, + &rules.OwaspRateLimitRule{}, + &rules.OwaspRateLimitRetryAfterRule{}, + &rules.OwaspNoNumericIDsRule{}, + &rules.OwaspJWTBestPracticesRule{}, + &rules.OwaspArrayLimitRule{}, + &rules.OwaspStringLimitRule{}, + &rules.OwaspStringRestrictedRule{}, + &rules.OwaspIntegerFormatRule{}, + &rules.OwaspIntegerLimitRule{}, + &rules.OwaspNoAdditionalPropertiesRule{}, + &rules.OwaspAdditionalPropertiesConstrainedRule{}, + &rules.OAS3NoNullableRule{}, + &rules.OAS3ExampleMissingRule{}, + &rules.OASSchemaCheckRule{}, + } +} + +func TestAllRules_MetadataPopulated(t *testing.T) { + t.Parallel() + + for _, rule := range allRules() { + t.Run(rule.ID(), func(t *testing.T) { + t.Parallel() + + assert.NotEmpty(t, rule.ID(), "rule ID should not be empty") + assert.NotEmpty(t, rule.Category(), "rule category should not be empty") + assert.NotEmpty(t, rule.Description(), "rule description should not be empty") + assert.NotEmpty(t, rule.Summary(), "rule summary should not be empty") + assert.NotEmpty(t, rule.DefaultSeverity(), "rule default severity should not be empty") + + if fixer, ok := rule.(howToFixer); ok { + assert.NotEmpty(t, fixer.HowToFix(), "rule HowToFix should not be empty") + } + }) + } +} diff --git a/openapi/linter/rules/tag_description.go b/openapi/linter/rules/tag_description.go new file mode 100644 index 00000000..d8e28114 --- /dev/null +++ b/openapi/linter/rules/tag_description.go @@ -0,0 +1,80 @@ +package rules + +import ( + "context" + "fmt" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/validation" +) + +const RuleStyleTagDescription = "style-tag-description" + +type TagDescriptionRule struct{} + +func (r *TagDescriptionRule) ID() string { + return 
RuleStyleTagDescription +} + +func (r *TagDescriptionRule) Description() string { + return "Tags should include descriptions that explain the purpose and scope of the operations they group. Tag descriptions provide context in documentation and help developers understand the organization of API functionality." +} + +func (r *TagDescriptionRule) Summary() string { + return "Tags should include descriptions." +} + +func (r *TagDescriptionRule) HowToFix() string { + return "Add descriptions to each tag to explain the grouped operations." +} + +func (r *TagDescriptionRule) Category() string { + return CategoryStyle +} + +func (r *TagDescriptionRule) DefaultSeverity() validation.Severity { + return validation.SeverityHint +} + +func (r *TagDescriptionRule) Link() string { + return "https://github.com/speakeasy-api/openapi/blob/main/openapi/linter/README.md#style-tag-description" +} + +func (r *TagDescriptionRule) Versions() []string { + return nil // applies to all versions +} + +func (r *TagDescriptionRule) Run(ctx context.Context, docInfo *linter.DocumentInfo[*openapi.OpenAPI], config *linter.RuleConfig) []error { + if docInfo == nil || docInfo.Document == nil { + return nil + } + + var errs []error + doc := docInfo.Document + + tags := doc.GetTags() + if len(tags) == 0 { + return nil + } + + for _, tag := range tags { + if tag == nil { + continue + } + + description := tag.GetDescription() + name := tag.GetName() + + if description == "" { + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + RuleStyleTagDescription, + fmt.Errorf("tag `%s` must have a description", name), + tag.GetRootNode(), + )) + } + } + + return errs +} diff --git a/openapi/linter/rules/tag_description_test.go b/openapi/linter/rules/tag_description_test.go new file mode 100644 index 00000000..353e1b52 --- /dev/null +++ b/openapi/linter/rules/tag_description_test.go @@ -0,0 +1,163 @@ +package rules_test + +import ( + "strings" + "testing" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/openapi/linter/rules" + "github.com/speakeasy-api/openapi/validation" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestTagDescriptionRule_ValidCases(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + }{ + { + name: "tag with description", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +tags: + - name: users + description: User management endpoints +paths: {} +`, + }, + { + name: "multiple tags with descriptions", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +tags: + - name: users + description: User management endpoints + - name: products + description: Product management endpoints +paths: {} +`, + }, + { + name: "no tags defined", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: {} +`, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.TagDescriptionRule{} + config := &linter.RuleConfig{} + docInfo := linter.NewDocumentInfo(doc, "test.yaml") + + errs := rule.Run(ctx, docInfo, config) + assert.Empty(t, errs) + }) + } +} + +func TestTagDescriptionRule_Violations(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + expectedError string + }{ + { + name: "tag without description", + 
yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +tags: + - name: users +paths: {} +`, + expectedError: "[7:5] hint style-tag-description tag `users` must have a description", + }, + { + name: "tag with empty description", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +tags: + - name: users + description: "" +paths: {} +`, + expectedError: "[7:5] hint style-tag-description tag `users` must have a description", + }, + { + name: "one tag with description, one without", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +tags: + - name: users + description: User management endpoints + - name: products +paths: {} +`, + expectedError: "[9:5] hint style-tag-description tag `products` must have a description", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.TagDescriptionRule{} + config := &linter.RuleConfig{} + docInfo := linter.NewDocumentInfo(doc, "test.yaml") + + errs := rule.Run(ctx, docInfo, config) + + require.NotEmpty(t, errs) + assert.Equal(t, tt.expectedError, errs[0].Error()) + }) + } +} + +func TestTagDescriptionRule_RuleMetadata(t *testing.T) { + t.Parallel() + + rule := &rules.TagDescriptionRule{} + + assert.Equal(t, "style-tag-description", rule.ID()) + assert.Equal(t, rules.CategoryStyle, rule.Category()) + assert.NotEmpty(t, rule.Description()) + assert.NotEmpty(t, rule.Link()) + assert.Equal(t, validation.SeverityHint, rule.DefaultSeverity()) + assert.Nil(t, rule.Versions()) +} diff --git a/openapi/linter/rules/tags_alphabetical.go b/openapi/linter/rules/tags_alphabetical.go new file mode 100644 index 00000000..ca12214a --- /dev/null +++ b/openapi/linter/rules/tags_alphabetical.go @@ -0,0 +1,94 @@ +package rules + +import ( + "context" + "fmt" + "strings" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/validation" +) + +const RuleStyleTagsAlphabetical = "style-tags-alphabetical" + +type TagsAlphabeticalRule struct{} + +func (r *TagsAlphabeticalRule) ID() string { + return RuleStyleTagsAlphabetical +} + +func (r *TagsAlphabeticalRule) Description() string { + return "Tags should be listed in alphabetical order to improve documentation organization and navigation. Alphabetical ordering makes it easier for developers to find specific tag groups in API documentation." +} + +func (r *TagsAlphabeticalRule) Summary() string { + return "Tags should be listed in alphabetical order." +} + +func (r *TagsAlphabeticalRule) HowToFix() string { + return "Sort the tags array alphabetically by tag name." 
+} + +func (r *TagsAlphabeticalRule) Category() string { + return CategoryStyle +} + +func (r *TagsAlphabeticalRule) DefaultSeverity() validation.Severity { + return validation.SeverityWarning +} + +func (r *TagsAlphabeticalRule) Link() string { + return "https://github.com/speakeasy-api/openapi/blob/main/openapi/linter/README.md#style-tags-alphabetical" +} + +func (r *TagsAlphabeticalRule) Versions() []string { + return nil // applies to all versions +} + +func (r *TagsAlphabeticalRule) Run(ctx context.Context, docInfo *linter.DocumentInfo[*openapi.OpenAPI], config *linter.RuleConfig) []error { + if docInfo == nil || docInfo.Document == nil { + return nil + } + + var errs []error + doc := docInfo.Document + + tags := doc.GetTags() + if len(tags) < 2 { + return nil // Need at least 2 tags to check ordering + } + + // Check if tags are in alphabetical order by name + for i := 0; i < len(tags)-1; i++ { + currentTag := tags[i] + nextTag := tags[i+1] + + if currentTag == nil || nextTag == nil { + continue + } + + currentName := currentTag.GetName() + nextName := nextTag.GetName() + + // Compare case-insensitively + if strings.Compare(strings.ToLower(currentName), strings.ToLower(nextName)) > 0 { + // Get the node for the tags array + tagsNode := doc.GetCore().Tags.ValueNode + if tagsNode == nil { + tagsNode = doc.GetRootNode() + } + + errs = append(errs, validation.NewValidationError( + config.GetSeverity(r.DefaultSeverity()), + RuleStyleTagsAlphabetical, + fmt.Errorf("tag `%s` must be placed before `%s` (alphabetical)", nextName, currentName), + tagsNode, + )) + // Report only the first violation for deterministic behavior + break + } + } + + return errs +} diff --git a/openapi/linter/rules/tags_alphabetical_test.go b/openapi/linter/rules/tags_alphabetical_test.go new file mode 100644 index 00000000..b8941a51 --- /dev/null +++ b/openapi/linter/rules/tags_alphabetical_test.go @@ -0,0 +1,192 @@ +package rules_test + +import ( + "strings" + "testing" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/openapi/linter/rules" + "github.com/speakeasy-api/openapi/validation" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestTagsAlphabeticalRule_ValidCases(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + }{ + { + name: "tags in alphabetical order", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +tags: + - name: auth + description: Authentication + - name: products + description: Products + - name: users + description: Users +paths: {} +`, + }, + { + name: "single tag", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +tags: + - name: users + description: Users +paths: {} +`, + }, + { + name: "no tags", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: {} +`, + }, + { + name: "tags with case variations in alphabetical order", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +tags: + - name: Auth + description: Authentication + - name: products + description: Products + - name: Users + description: Users +paths: {} +`, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.TagsAlphabeticalRule{} + config := &linter.RuleConfig{} + docInfo := linter.NewDocumentInfo(doc, "test.yaml") + + errs := rule.Run(ctx, 
docInfo, config) + assert.Empty(t, errs) + }) + } +} + +func TestTagsAlphabeticalRule_Violations(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + expectedError string + }{ + { + name: "tags not in alphabetical order", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +tags: + - name: users + description: Users + - name: auth + description: Authentication +paths: {} +`, + expectedError: "[7:3] warning style-tags-alphabetical tag `auth` must be placed before `users` (alphabetical)", + }, + { + name: "tags reversed", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +tags: + - name: users + description: Users + - name: products + description: Products + - name: auth + description: Authentication +paths: {} +`, + expectedError: "[7:3] warning style-tags-alphabetical tag `products` must be placed before `users` (alphabetical)", + }, + { + name: "middle tags out of order", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +tags: + - name: auth + description: Authentication + - name: users + description: Users + - name: products + description: Products +paths: {} +`, + expectedError: "[7:3] warning style-tags-alphabetical tag `products` must be placed before `users` (alphabetical)", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.TagsAlphabeticalRule{} + config := &linter.RuleConfig{} + docInfo := linter.NewDocumentInfo(doc, "test.yaml") + + errs := rule.Run(ctx, docInfo, config) + + require.NotEmpty(t, errs) + assert.Equal(t, tt.expectedError, errs[0].Error()) + }) + } +} + +func TestTagsAlphabeticalRule_RuleMetadata(t *testing.T) { + t.Parallel() + + rule := &rules.TagsAlphabeticalRule{} + + assert.Equal(t, "style-tags-alphabetical", rule.ID()) + assert.Equal(t, rules.CategoryStyle, rule.Category()) + assert.NotEmpty(t, rule.Description()) + assert.NotEmpty(t, rule.Link()) + assert.Equal(t, validation.SeverityWarning, rule.DefaultSeverity()) + assert.Nil(t, rule.Versions()) +} diff --git a/openapi/linter/rules/typed_enum.go b/openapi/linter/rules/typed_enum.go new file mode 100644 index 00000000..9cece69f --- /dev/null +++ b/openapi/linter/rules/typed_enum.go @@ -0,0 +1,207 @@ +package rules + +import ( + "context" + "fmt" + "strings" + + "github.com/speakeasy-api/openapi/jsonschema/oas3" + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/validation" + "gopkg.in/yaml.v3" +) + +const RuleSemanticTypedEnum = "semantic-typed-enum" + +type TypedEnumRule struct{} + +func (r *TypedEnumRule) ID() string { return RuleSemanticTypedEnum } +func (r *TypedEnumRule) Category() string { return CategorySemantic } +func (r *TypedEnumRule) Description() string { + return "Enum values must match the specified type - for example, if type is `string`, all enum values must be strings. Type mismatches in enums cause validation failures and break code generation tools." +} +func (r *TypedEnumRule) Summary() string { + return "Enum values must match the specified schema type." +} +func (r *TypedEnumRule) HowToFix() string { + return "Update enum values to match the schema type or adjust the schema type to include the enum value types." 
+} +func (r *TypedEnumRule) Link() string { + return "https://github.com/speakeasy-api/openapi/blob/main/openapi/linter/README.md#semantic-typed-enum" +} +func (r *TypedEnumRule) DefaultSeverity() validation.Severity { + return validation.SeverityWarning +} +func (r *TypedEnumRule) Versions() []string { + return nil // Applies to all OpenAPI versions +} + +func (r *TypedEnumRule) Run(ctx context.Context, docInfo *linter.DocumentInfo[*openapi.OpenAPI], config *linter.RuleConfig) []error { + if docInfo == nil || docInfo.Document == nil || docInfo.Index == nil { + return nil + } + + var errs []error + + // Get OpenAPI version for version-specific error messages + openapiVersion := docInfo.Document.GetOpenAPI() + + // Use the pre-computed schema indexes to find all schemas with enums + for _, schemaNode := range docInfo.Index.GetAllSchemas() { + refSchema := schemaNode.Node + schema := refSchema.GetSchema() + if schema == nil { + continue + } + + // Check if schema has enum values + if len(schema.GetEnum()) == 0 { + continue + } + + // Get the schema type + schemaTypes := schema.GetType() + if len(schemaTypes) == 0 { + // No type specified, skip validation + continue + } + + // Validate each enum value against the type + for i, enumValueNode := range schema.GetEnum() { + if !isNodeMatchingType(enumValueNode, schemaTypes, schema.GetNullable()) { + errorMsg := createTypeMismatchError(i, enumValueNode, schemaTypes, schema.GetNullable(), openapiVersion) + errs = append(errs, validation.NewSliceError( + config.GetSeverity(r.DefaultSeverity()), + RuleSemanticTypedEnum, + fmt.Errorf("%s", errorMsg), + schema.GetCore(), + schema.GetCore().Enum, + i, + )) + } + } + } + + return errs +} + +// createTypeMismatchError creates an appropriate error message for type mismatches +func createTypeMismatchError(index int, node *yaml.Node, schemaTypes []oas3.SchemaType, nullable bool, openapiVersion string) string { + isNull := isNullNode(node) + + if isNull && !nullable && !containsType(schemaTypes, oas3.SchemaTypeNull) { + // Special error message for null values without proper nullable declaration + if len(openapiVersion) >= 3 && openapiVersion[:3] == "3.0" { + // OpenAPI 3.0.x - suggest nullable: true + return fmt.Sprintf("enum contains null at index `%d` but schema does not have `nullable: true`. Add `nullable: true` to allow null values", index) + } + // OpenAPI 3.1.x or later - suggest type array with null + typeWithNull := formatTypeArrayWithNull(schemaTypes) + return fmt.Sprintf("enum contains null at index `%d` but schema type does not include null. 
Change `type: %s` to `type: %s` to allow null values", index, formatTypeArray(schemaTypes), typeWithNull) + } + + // Generic type mismatch error + return fmt.Sprintf("enum value at index `%d` does not match schema type %s", index, formatTypeArray(schemaTypes)) +} + +// isNullNode checks if a YAML node represents a null value +func isNullNode(node *yaml.Node) bool { + if node == nil { + return true + } + if node.Kind == yaml.AliasNode { + return true + } + return node.Tag == "!!null" +} + +// formatTypeArray formats a type array for display in error messages +func formatTypeArray(schemaTypes []oas3.SchemaType) string { + if len(schemaTypes) == 0 { + return "[]" + } + if len(schemaTypes) == 1 { + return fmt.Sprintf("[`%s`]", schemaTypes[0]) + } + // Multiple types + var result strings.Builder + result.WriteString("[") + for i, t := range schemaTypes { + if i > 0 { + result.WriteString(", ") + } + result.WriteString("`") + result.WriteString(string(t)) + result.WriteString("`") + } + result.WriteString("]") + return result.String() +} + +// formatTypeArrayWithNull formats a type array suggestion with null included +func formatTypeArrayWithNull(schemaTypes []oas3.SchemaType) string { + if len(schemaTypes) == 0 { + return `["null"]` + } + if len(schemaTypes) == 1 { + return fmt.Sprintf(`[%q, "null"]`, schemaTypes[0]) + } + // Multiple types - add null to the array + types := make([]string, len(schemaTypes)+1) + for i, t := range schemaTypes { + types[i] = fmt.Sprintf("%q", t) + } + types[len(types)-1] = `"null"` + + var result strings.Builder + result.WriteString("[") + for i, t := range types { + if i > 0 { + result.WriteString(", ") + } + result.WriteString(t) + } + result.WriteString("]") + return result.String() +} + +// isNodeMatchingType checks if a yaml.Node value matches the schema type +func isNodeMatchingType(node *yaml.Node, schemaTypes []oas3.SchemaType, nullable bool) bool { + if node == nil || node.Kind == yaml.AliasNode { + // nil or alias nodes - check for null type or nullable schema + return containsType(schemaTypes, oas3.SchemaTypeNull) || nullable + } + + // Check based on yaml node tag + switch node.Tag { + case "!!str": + return containsType(schemaTypes, oas3.SchemaTypeString) + case "!!int": + // Integer can match both integer and number types + return containsType(schemaTypes, oas3.SchemaTypeInteger) || containsType(schemaTypes, oas3.SchemaTypeNumber) + case "!!float": + // Float can match number or integer types + return containsType(schemaTypes, oas3.SchemaTypeNumber) || containsType(schemaTypes, oas3.SchemaTypeInteger) + case "!!bool": + return containsType(schemaTypes, oas3.SchemaTypeBoolean) + case "!!seq": + return containsType(schemaTypes, oas3.SchemaTypeArray) + case "!!map": + return containsType(schemaTypes, oas3.SchemaTypeObject) + case "!!null": + return containsType(schemaTypes, oas3.SchemaTypeNull) || nullable + default: + // Unknown tag, be permissive + return true + } +} + +func containsType(schemaType []oas3.SchemaType, targetType oas3.SchemaType) bool { + for _, t := range schemaType { + if t == targetType { + return true + } + } + return false +} diff --git a/openapi/linter/rules/typed_enum_test.go b/openapi/linter/rules/typed_enum_test.go new file mode 100644 index 00000000..c6d2bb25 --- /dev/null +++ b/openapi/linter/rules/typed_enum_test.go @@ -0,0 +1,352 @@ +package rules_test + +import ( + "strings" + "testing" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + 
"github.com/speakeasy-api/openapi/openapi/linter/rules" + "github.com/speakeasy-api/openapi/references" + "github.com/speakeasy-api/openapi/validation" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestTypedEnumRule_ValidCases(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + }{ + { + name: "string enum with string values", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + Status: + type: string + enum: + - active + - inactive + - pending +`, + }, + { + name: "integer enum with integer values", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + Priority: + type: integer + enum: + - 1 + - 2 + - 3 +`, + }, + { + name: "number enum with numeric values", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + Rating: + type: number + enum: + - 1.5 + - 2.0 + - 4.5 +`, + }, + { + name: "boolean enum", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + Flag: + type: boolean + enum: + - true + - false +`, + }, + { + name: "number type with integer value is valid", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + Count: + type: number + enum: + - 1 + - 2 +`, + }, + { + name: "null type with null value", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + Nullable: + type: "null" + enum: + - null +`, + }, + { + name: "enum without type specified", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + Mixed: + enum: + - value1 + - 123 +`, + }, + { + name: "schema without enum", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + User: + type: object + properties: + name: + type: string +`, + }, + { + name: "nullable integer enum with null value", + yaml: ` +openapi: 3.0.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + NullableIntEnum: + type: integer + nullable: true + enum: + - 1 + - 2 + - 3 + - null +`, + }, + { + name: "nullable string enum with null value", + yaml: ` +openapi: 3.0.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + NullableStringEnum: + type: string + nullable: true + enum: + - First + - Second + - Third + - null +`, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.TypedEnumRule{} + config := &linter.RuleConfig{} + + // Build index for the rule + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + assert.Empty(t, errs) + }) + } +} + +func TestTypedEnumRule_TypeMismatch(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + expectedError string + }{ + { + name: "string type with integer value", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + Status: + type: string + enum: + - active + - 123 +`, + expectedError: "[12:11] warning semantic-typed-enum enum value at index `1` does not match schema type [`string`]", + }, + { + name: "integer type with string value", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 
+components: + schemas: + Priority: + type: integer + enum: + - 1 + - high +`, + expectedError: "[12:11] warning semantic-typed-enum enum value at index `1` does not match schema type [`integer`]", + }, + { + name: "boolean type with string value", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + Flag: + type: boolean + enum: + - true + - yes +`, + expectedError: "[12:11] warning semantic-typed-enum enum value at index `1` does not match schema type [`boolean`]", + }, + { + name: "openapi 3.0 null in enum without nullable true", + yaml: ` +openapi: 3.0.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + Status: + type: string + enum: + - active + - inactive + - null +`, + expectedError: "[13:11] warning semantic-typed-enum enum contains null at index `2` but schema does not have `nullable: true`. Add `nullable: true` to allow null values", + }, + { + name: "openapi 3.1 null in enum without null in type", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +components: + schemas: + Priority: + type: integer + enum: + - 1 + - 2 + - null +`, + expectedError: `[13:11] warning semantic-typed-enum enum contains null at index ` + "`2`" + ` but schema type does not include null. Change ` + "`type: [`integer`]`" + ` to ` + "`type: [\"integer\", \"null\"]`" + ` to allow null values`, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err) + + rule := &rules.TypedEnumRule{} + config := &linter.RuleConfig{} + + // Build index for the rule + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + docInfo := linter.NewDocumentInfoWithIndex(doc, "test.yaml", idx) + + errs := rule.Run(ctx, docInfo, config) + + require.NotEmpty(t, errs, "should have lint errors") + assert.Equal(t, tt.expectedError, errs[0].Error()) + }) + } +} + +func TestTypedEnumRule_RuleMetadata(t *testing.T) { + t.Parallel() + + rule := &rules.TypedEnumRule{} + + assert.Equal(t, "semantic-typed-enum", rule.ID()) + assert.Equal(t, rules.CategorySemantic, rule.Category()) + assert.NotEmpty(t, rule.Description()) + assert.NotEmpty(t, rule.Link()) + assert.Equal(t, validation.SeverityWarning, rule.DefaultSeverity()) + assert.Nil(t, rule.Versions()) +} diff --git a/openapi/linter/rules/unused_components.go b/openapi/linter/rules/unused_components.go new file mode 100644 index 00000000..a64ce75e --- /dev/null +++ b/openapi/linter/rules/unused_components.go @@ -0,0 +1,467 @@ +package rules + +import ( + "context" + "fmt" + "strings" + + "github.com/speakeasy-api/openapi/extensions" + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/references" + "github.com/speakeasy-api/openapi/validation" + "gopkg.in/yaml.v3" +) + +const RuleSemanticUnusedComponent = "semantic-unused-component" + +type UnusedComponentRule struct{} + +func (r *UnusedComponentRule) ID() string { return RuleSemanticUnusedComponent } +func (r *UnusedComponentRule) Category() string { return CategorySemantic } +func (r *UnusedComponentRule) Description() string { + return "Components that are declared but never referenced should be removed to keep the specification clean. 
Unused components create maintenance burden, increase specification size, and may confuse developers about which schemas are actually used." +} +func (r *UnusedComponentRule) Summary() string { + return "Components should not be declared if they are never referenced." +} +func (r *UnusedComponentRule) HowToFix() string { + return "Remove unused components or reference them where needed in the specification." +} +func (r *UnusedComponentRule) Link() string { + return "https://github.com/speakeasy-api/openapi/blob/main/openapi/linter/README.md#semantic-unused-component" +} +func (r *UnusedComponentRule) DefaultSeverity() validation.Severity { + return validation.SeverityWarning +} +func (r *UnusedComponentRule) Versions() []string { + // Applies to all OAS3 versions + return nil +} + +func (r *UnusedComponentRule) Run(_ context.Context, docInfo *linter.DocumentInfo[*openapi.OpenAPI], config *linter.RuleConfig) []error { + if docInfo == nil || docInfo.Document == nil || docInfo.Index == nil { + return nil + } + + doc := docInfo.Document + + // Step 1: Collect all referenced component pointers from $ref strings + referencedPointers := collectReferencedComponentPointers(docInfo.Index, doc, docInfo.Location) + + // Step 2: Check each component against the referenced set + return checkUnusedComponents(doc, docInfo.Index, referencedPointers, config, r.DefaultSeverity()) +} + +// collectReferencedComponentPointers iterates through all reference slices in the index +// and collects the component JSON pointers (e.g., "/components/schemas/Pet"). +func collectReferencedComponentPointers(idx *openapi.Index, doc *openapi.OpenAPI, docLocation string) map[string]struct{} { + refs := make(map[string]struct{}) + self := "" + if doc != nil { + self = doc.GetSelf() + } + + // Schema references + for _, node := range idx.SchemaReferences { + if node == nil || node.Node == nil { + continue + } + if ptr := extractComponentPointer(node.Node.GetReference(), docLocation, self); ptr != "" { + refs[ptr] = struct{}{} + } + } + + // Parameter references + for _, node := range idx.ParameterReferences { + if node == nil || node.Node == nil { + continue + } + if ptr := extractComponentPointer(node.Node.GetReference(), docLocation, self); ptr != "" { + refs[ptr] = struct{}{} + } + } + + // Response references + for _, node := range idx.ResponseReferences { + if node == nil || node.Node == nil { + continue + } + if ptr := extractComponentPointer(node.Node.GetReference(), docLocation, self); ptr != "" { + refs[ptr] = struct{}{} + } + } + + // RequestBody references + for _, node := range idx.RequestBodyReferences { + if node == nil || node.Node == nil { + continue + } + if ptr := extractComponentPointer(node.Node.GetReference(), docLocation, self); ptr != "" { + refs[ptr] = struct{}{} + } + } + + // Header references + for _, node := range idx.HeaderReferences { + if node == nil || node.Node == nil { + continue + } + if ptr := extractComponentPointer(node.Node.GetReference(), docLocation, self); ptr != "" { + refs[ptr] = struct{}{} + } + } + + // Example references + for _, node := range idx.ExampleReferences { + if node == nil || node.Node == nil { + continue + } + if ptr := extractComponentPointer(node.Node.GetReference(), docLocation, self); ptr != "" { + refs[ptr] = struct{}{} + } + } + + // Link references + for _, node := range idx.LinkReferences { + if node == nil || node.Node == nil { + continue + } + if ptr := extractComponentPointer(node.Node.GetReference(), docLocation, self); ptr != "" { + refs[ptr] = struct{}{} + 
} + } + + // Callback references + for _, node := range idx.CallbackReferences { + if node == nil || node.Node == nil { + continue + } + if ptr := extractComponentPointer(node.Node.GetReference(), docLocation, self); ptr != "" { + refs[ptr] = struct{}{} + } + } + + // PathItem references + for _, node := range idx.PathItemReferences { + if node == nil || node.Node == nil { + continue + } + if ptr := extractComponentPointer(node.Node.GetReference(), docLocation, self); ptr != "" { + refs[ptr] = struct{}{} + } + } + + // SecurityScheme references + for _, node := range idx.SecuritySchemeReferences { + if node == nil || node.Node == nil { + continue + } + if ptr := extractComponentPointer(node.Node.GetReference(), docLocation, self); ptr != "" { + refs[ptr] = struct{}{} + } + } + + // Security requirements reference security schemes by name (not $ref) + for _, node := range idx.SecurityRequirements { + if node == nil || node.Node == nil { + continue + } + for schemeName := range node.Node.All() { + // Security requirements reference security schemes by name + refs["/components/securitySchemes/"+schemeName] = struct{}{} + } + } + + return refs +} + +// extractComponentPointer extracts the top-level component JSON pointer from a $ref. +// For example, "#/components/schemas/Pet/properties/name" becomes "/components/schemas/Pet". +// Returns empty string if the reference is not to a component or is external. +func extractComponentPointer(ref references.Reference, docLocation string, docSelf string) string { + if ref == "" { + return "" + } + + uri := ref.GetURI() + if uri != "" && uri != docLocation && uri != docSelf { + return "" + } + + pointer := ref.GetJSONPointer().String() + if pointer == "" { + return "" + } + + // Must start with /components/ + if !strings.HasPrefix(pointer, "/components/") { + return "" + } + + // Extract the component type and name: /components/{type}/{name} + // Skip "/components/" (12 chars), then find the type and name + rest := strings.TrimPrefix(pointer, "/components/") + parts := strings.SplitN(rest, "/", 3) // Split into at most 3 parts: type, name, rest + if len(parts) < 2 || parts[0] == "" || parts[1] == "" { + return "" + } + + // Return the normalized component pointer (keep escaped form for comparison) + return "/components/" + parts[0] + "/" + parts[1] +} + +// checkUnusedComponents iterates through all component entries in the index +// and flags those not in the referenced set using ToJSONPointer. 
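+// Components carrying a usage-marking extension (see hasUsageMarkingExtension) are treated as used and skipped.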
+func checkUnusedComponents(doc *openapi.OpenAPI, idx *openapi.Index, refs map[string]struct{}, config *linter.RuleConfig, severity validation.Severity) []error { + var errs []error + + // Check component schemas + for _, node := range idx.ComponentSchemas { + if node == nil || node.Node == nil { + continue + } + pointer := node.Location.ToJSONPointer().String() + if _, found := refs[pointer]; !found { + // Skip if component has a usage-marking extension + if hasUsageMarkingExtension(node.Node.GetExtensions()) { + continue + } + errNode := getComponentKeyNode(doc, node.Location) + errs = append(errs, createUnusedComponentError(pointer, errNode, config, severity)) + } + } + + // Check component parameters + for _, node := range idx.ComponentParameters { + if node == nil || node.Node == nil { + continue + } + pointer := node.Location.ToJSONPointer().String() + if _, found := refs[pointer]; !found { + // Skip if component has a usage-marking extension + if obj := node.Node.GetObject(); obj != nil && hasUsageMarkingExtension(obj.GetExtensions()) { + continue + } + errNode := getComponentKeyNode(doc, node.Location) + errs = append(errs, createUnusedComponentError(pointer, errNode, config, severity)) + } + } + + // Check component responses + for _, node := range idx.ComponentResponses { + if node == nil || node.Node == nil { + continue + } + pointer := node.Location.ToJSONPointer().String() + if _, found := refs[pointer]; !found { + // Skip if component has a usage-marking extension + if obj := node.Node.GetObject(); obj != nil && hasUsageMarkingExtension(obj.GetExtensions()) { + continue + } + errNode := getComponentKeyNode(doc, node.Location) + errs = append(errs, createUnusedComponentError(pointer, errNode, config, severity)) + } + } + + // Check component request bodies + for _, node := range idx.ComponentRequestBodies { + if node == nil || node.Node == nil { + continue + } + pointer := node.Location.ToJSONPointer().String() + if _, found := refs[pointer]; !found { + // Skip if component has a usage-marking extension + if obj := node.Node.GetObject(); obj != nil && hasUsageMarkingExtension(obj.Extensions) { + continue + } + errNode := getComponentKeyNode(doc, node.Location) + errs = append(errs, createUnusedComponentError(pointer, errNode, config, severity)) + } + } + + // Check component headers + for _, node := range idx.ComponentHeaders { + if node == nil || node.Node == nil { + continue + } + pointer := node.Location.ToJSONPointer().String() + if _, found := refs[pointer]; !found { + // Skip if component has a usage-marking extension + if obj := node.Node.GetObject(); obj != nil && hasUsageMarkingExtension(obj.GetExtensions()) { + continue + } + errNode := getComponentKeyNode(doc, node.Location) + errs = append(errs, createUnusedComponentError(pointer, errNode, config, severity)) + } + } + + // Check component examples + for _, node := range idx.ComponentExamples { + if node == nil || node.Node == nil { + continue + } + pointer := node.Location.ToJSONPointer().String() + if _, found := refs[pointer]; !found { + // Skip if component has a usage-marking extension + if obj := node.Node.GetObject(); obj != nil && hasUsageMarkingExtension(obj.GetExtensions()) { + continue + } + errNode := getComponentKeyNode(doc, node.Location) + errs = append(errs, createUnusedComponentError(pointer, errNode, config, severity)) + } + } + + // Check component links + for _, node := range idx.ComponentLinks { + if node == nil || node.Node == nil { + continue + } + pointer := 
node.Location.ToJSONPointer().String() + if _, found := refs[pointer]; !found { + // Skip if component has a usage-marking extension + if obj := node.Node.GetObject(); obj != nil && hasUsageMarkingExtension(obj.GetExtensions()) { + continue + } + errNode := getComponentKeyNode(doc, node.Location) + errs = append(errs, createUnusedComponentError(pointer, errNode, config, severity)) + } + } + + // Check component callbacks + for _, node := range idx.ComponentCallbacks { + if node == nil || node.Node == nil { + continue + } + pointer := node.Location.ToJSONPointer().String() + if _, found := refs[pointer]; !found { + // Skip if component has a usage-marking extension + if obj := node.Node.GetObject(); obj != nil && hasUsageMarkingExtension(obj.GetExtensions()) { + continue + } + errNode := getComponentKeyNode(doc, node.Location) + errs = append(errs, createUnusedComponentError(pointer, errNode, config, severity)) + } + } + + // Check component path items + for _, node := range idx.ComponentPathItems { + if node == nil || node.Node == nil { + continue + } + pointer := node.Location.ToJSONPointer().String() + if _, found := refs[pointer]; !found { + // Skip if component has a usage-marking extension + if obj := node.Node.GetObject(); obj != nil && hasUsageMarkingExtension(obj.GetExtensions()) { + continue + } + errNode := getComponentKeyNode(doc, node.Location) + errs = append(errs, createUnusedComponentError(pointer, errNode, config, severity)) + } + } + + // Check component security schemes + for _, node := range idx.ComponentSecuritySchemes { + if node == nil || node.Node == nil { + continue + } + pointer := node.Location.ToJSONPointer().String() + if _, found := refs[pointer]; !found { + // Skip if component has a usage-marking extension + if obj := node.Node.GetObject(); obj != nil && hasUsageMarkingExtension(obj.GetExtensions()) { + continue + } + errNode := getComponentKeyNode(doc, node.Location) + errs = append(errs, createUnusedComponentError(pointer, errNode, config, severity)) + } + } + + return errs +} + +func getComponentKeyNode(doc *openapi.OpenAPI, location openapi.Locations) *yaml.Node { + if doc == nil || len(location) == 0 { + return nil + } + last := location[len(location)-1] + if last.ParentKey == nil { + return nil + } + componentName := *last.ParentKey + componentType := last.ParentField + + core := doc.GetCore() + if core == nil { + return nil + } + rootNode := core.GetRootNode() + if !core.Components.Present || core.Components.Value == nil { + return rootNode + } + componentsCore := core.Components.Value + componentsRoot := componentsCore.GetRootNode() + if componentsRoot == nil { + componentsRoot = rootNode + } + + switch componentType { + case "schemas": + return componentsCore.Schemas.GetMapKeyNodeOrRoot(componentName, componentsRoot) + case "responses": + return componentsCore.Responses.GetMapKeyNodeOrRoot(componentName, componentsRoot) + case "parameters": + return componentsCore.Parameters.GetMapKeyNodeOrRoot(componentName, componentsRoot) + case "examples": + return componentsCore.Examples.GetMapKeyNodeOrRoot(componentName, componentsRoot) + case "requestBodies": + return componentsCore.RequestBodies.GetMapKeyNodeOrRoot(componentName, componentsRoot) + case "headers": + return componentsCore.Headers.GetMapKeyNodeOrRoot(componentName, componentsRoot) + case "securitySchemes": + return componentsCore.SecuritySchemes.GetMapKeyNodeOrRoot(componentName, componentsRoot) + case "links": + return componentsCore.Links.GetMapKeyNodeOrRoot(componentName, componentsRoot) + 
case "callbacks": + return componentsCore.Callbacks.GetMapKeyNodeOrRoot(componentName, componentsRoot) + case "pathItems": + return componentsCore.PathItems.GetMapKeyNodeOrRoot(componentName, componentsRoot) + default: + return componentsRoot + } +} + +// usageMarkingExtensions is the list of extensions that mark a component as being used +// even when not directly referenced in the specification. +var usageMarkingExtensions = []string{ + "x-speakeasy-include", + "x-include", + "x-used", +} + +// hasUsageMarkingExtension checks if the extensions contain any of the usage-marking +// extensions (x-speakeasy-include, x-include, x-used) set to true. +func hasUsageMarkingExtension(exts *extensions.Extensions) bool { + if exts == nil { + return false + } + + for _, ext := range usageMarkingExtensions { + val, err := extensions.GetExtensionValue[bool](exts, ext) + if err == nil && val != nil && *val { + return true + } + } + + return false +} + +// createUnusedComponentError creates a validation error for an unused component. +func createUnusedComponentError(pointer string, errNode *yaml.Node, config *linter.RuleConfig, severity validation.Severity) error { + componentRef := "#" + pointer + return validation.NewValidationError( + config.GetSeverity(severity), + RuleSemanticUnusedComponent, + fmt.Errorf("`%s` is potentially unused or has been orphaned", componentRef), + errNode, + ) +} diff --git a/openapi/linter/rules/unused_components_test.go b/openapi/linter/rules/unused_components_test.go new file mode 100644 index 00000000..0f41f307 --- /dev/null +++ b/openapi/linter/rules/unused_components_test.go @@ -0,0 +1,513 @@ +package rules_test + +import ( + "context" + "fmt" + "net/http" + "net/http/httptest" + "strings" + "testing" + + "github.com/speakeasy-api/openapi/linter" + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/openapi/linter/rules" + "github.com/speakeasy-api/openapi/references" + "github.com/speakeasy-api/openapi/validation" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func createDocInfoWithIndexUnusedComponents(t *testing.T, ctx context.Context, doc *openapi.OpenAPI, location string) *linter.DocumentInfo[*openapi.OpenAPI] { + t.Helper() + idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: location, + }) + return linter.NewDocumentInfoWithIndex(doc, location, idx) +} + +func TestUnusedComponentRule_ValidCases(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + }{ + { + name: "all components referenced", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /pets: + get: + security: + - ApiKey: [] + parameters: + - $ref: '#/components/parameters/PetId' + responses: + '200': + $ref: '#/components/responses/PetResponse' +components: + schemas: + Pet: + type: string + parameters: + PetId: + name: petId + in: query + schema: + type: string + responses: + PetResponse: + description: ok + content: + application/json: + schema: + $ref: '#/components/schemas/Pet' + securitySchemes: + ApiKey: + type: apiKey + in: header + name: X-API-Key +`, + }, + { + name: "no components", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /pets: + get: + responses: + '200': + description: ok +`, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + 
require.NoError(t, err, "unmarshal should succeed") + + rule := &rules.UnusedComponentRule{} + config := &linter.RuleConfig{} + + docInfo := createDocInfoWithIndexUnusedComponents(t, ctx, doc, "test.yaml") + + errs := rule.Run(ctx, docInfo, config) + assert.Empty(t, errs, "should have no lint errors") + }) + } +} + +func TestUnusedComponentRule_Violations(t *testing.T) { + t.Parallel() + + yamlInput := ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /pets: + get: + responses: + '200': + description: ok + content: + application/json: + schema: + $ref: '#/components/schemas/Pet' +components: + schemas: + Pet: + type: string + Orphan: + type: string + responses: + UnusedResponse: + description: not used + securitySchemes: + ApiKey: + type: apiKey + in: header + name: X-API-Key +security: + - ApiKey: [] +` + + ctx := t.Context() + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(yamlInput)) + require.NoError(t, err, "unmarshal should succeed") + + rule := &rules.UnusedComponentRule{} + config := &linter.RuleConfig{} + + docInfo := createDocInfoWithIndexUnusedComponents(t, ctx, doc, "test.yaml") + + errs := rule.Run(ctx, docInfo, config) + + expectedErrors := []string{ + "[20:5] warning semantic-unused-component `#/components/schemas/Orphan` is potentially unused or has been orphaned", + "[23:5] warning semantic-unused-component `#/components/responses/UnusedResponse` is potentially unused or has been orphaned", + } + + var errMsgs []string + for _, lintErr := range errs { + errMsgs = append(errMsgs, lintErr.Error()) + } + + assert.ElementsMatch(t, expectedErrors, errMsgs) +} + +func TestUnusedComponentRule_RuleMetadata(t *testing.T) { + t.Parallel() + + rule := &rules.UnusedComponentRule{} + + assert.Equal(t, "semantic-unused-component", rule.ID(), "rule ID should match") + assert.Equal(t, rules.CategorySemantic, rule.Category(), "rule category should match") + assert.NotEmpty(t, rule.Description(), "rule should have description") + assert.NotEmpty(t, rule.Link(), "rule should have documentation link") + assert.Equal(t, validation.SeverityWarning, rule.DefaultSeverity(), "default severity should be warning") + assert.Nil(t, rule.Versions(), "versions should be nil (all versions)") +} + +func TestUnusedComponentRule_UsageMarkingExtensions(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yaml string + expectedErrors []string + }{ + { + name: "x-speakeasy-include marks schema as used", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /pets: + get: + responses: + '200': + description: ok +components: + schemas: + MarkedUsed: + type: string + x-speakeasy-include: true + ActuallyUnused: + type: string +`, + expectedErrors: []string{ + "[17:5] warning semantic-unused-component `#/components/schemas/ActuallyUnused` is potentially unused or has been orphaned", + }, + }, + { + name: "x-include marks schema as used", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /pets: + get: + responses: + '200': + description: ok +components: + schemas: + MarkedUsed: + type: string + x-include: true + ActuallyUnused: + type: string +`, + expectedErrors: []string{ + "[17:5] warning semantic-unused-component `#/components/schemas/ActuallyUnused` is potentially unused or has been orphaned", + }, + }, + { + name: "x-used marks schema as used", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /pets: + get: + responses: + '200': + description: ok +components: + schemas: + MarkedUsed: + type: string + 
x-used: true + ActuallyUnused: + type: string +`, + expectedErrors: []string{ + "[17:5] warning semantic-unused-component `#/components/schemas/ActuallyUnused` is potentially unused or has been orphaned", + }, + }, + { + name: "extension set to false does not mark as used", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /pets: + get: + responses: + '200': + description: ok +components: + schemas: + NotMarked: + type: string + x-speakeasy-include: false +`, + expectedErrors: []string{ + "[14:5] warning semantic-unused-component `#/components/schemas/NotMarked` is potentially unused or has been orphaned", + }, + }, + { + name: "x-speakeasy-include marks parameter as used", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /pets: + get: + responses: + '200': + description: ok +components: + parameters: + MarkedUsed: + name: test + in: query + x-speakeasy-include: true + ActuallyUnused: + name: unused + in: query +`, + expectedErrors: []string{ + "[18:5] warning semantic-unused-component `#/components/parameters/ActuallyUnused` is potentially unused or has been orphaned", + }, + }, + { + name: "x-include marks response as used", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /pets: + get: + responses: + '200': + description: ok +components: + responses: + MarkedUsed: + description: marked + x-include: true + ActuallyUnused: + description: unused +`, + expectedErrors: []string{ + "[17:5] warning semantic-unused-component `#/components/responses/ActuallyUnused` is potentially unused or has been orphaned", + }, + }, + { + name: "x-used marks security scheme as used", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /pets: + get: + responses: + '200': + description: ok +components: + securitySchemes: + MarkedUsed: + type: apiKey + in: header + name: X-API-Key + x-used: true + ActuallyUnused: + type: apiKey + in: header + name: X-Unused +`, + expectedErrors: []string{ + "[19:5] warning semantic-unused-component `#/components/securitySchemes/ActuallyUnused` is potentially unused or has been orphaned", + }, + }, + { + name: "all components with extensions are not flagged", + yaml: ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /pets: + get: + responses: + '200': + description: ok +components: + schemas: + Schema1: + type: string + x-speakeasy-include: true + Schema2: + type: string + x-include: true + Schema3: + type: string + x-used: true +`, + expectedErrors: []string{}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + ctx := t.Context() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(tt.yaml)) + require.NoError(t, err, "unmarshal should succeed") + + rule := &rules.UnusedComponentRule{} + config := &linter.RuleConfig{} + + docInfo := createDocInfoWithIndexUnusedComponents(t, ctx, doc, "test.yaml") + + errs := rule.Run(ctx, docInfo, config) + + var errMsgs []string + for _, lintErr := range errs { + errMsgs = append(errMsgs, lintErr.Error()) + } + + assert.ElementsMatch(t, tt.expectedErrors, errMsgs, "should match expected errors") + }) + } +} + +func TestUnusedComponentRule_ExternalReferenceChainMarksUsed(t *testing.T) { + t.Parallel() + + ctx := t.Context() + + mainYaml := ` +openapi: 3.1.0 +info: + title: Test + version: 1.0.0 +paths: + /pets: + get: + responses: + '200': + description: ok + content: + application/json: + schema: + $ref: '%s/external.yaml#/components/schemas/ExternalSchema' +components: + schemas: + 
SharedUsed: + type: string + SharedUnused: + type: string` + + externalYaml := ` +openapi: 3.1.0 +info: + title: External + version: 1.0.0 +paths: + /external: + get: + responses: + '200': + description: ok + content: + application/json: + schema: + $ref: '#/components/schemas/ExternalUnused' +components: + schemas: + ExternalSchema: + type: object + properties: + shared: + $ref: '%s/main.yaml#/components/schemas/SharedUsed' + ExternalUnused: + type: object + properties: + unused: + $ref: '%s/main.yaml#/components/schemas/SharedUnused' +` + + var server *httptest.Server + server = httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + switch r.URL.Path { + case "/external.yaml": + w.Header().Set("Content-Type", "application/yaml") + w.WriteHeader(http.StatusOK) + _, _ = fmt.Fprintf(w, externalYaml, server.URL, server.URL) + case "/main.yaml": + w.Header().Set("Content-Type", "application/yaml") + w.WriteHeader(http.StatusOK) + _, _ = fmt.Fprintf(w, mainYaml, server.URL) + default: + w.WriteHeader(http.StatusNotFound) + } + })) + defer server.Close() + + doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(fmt.Sprintf(mainYaml, server.URL))) + require.NoError(t, err, "unmarshal should succeed") + + rule := &rules.UnusedComponentRule{} + config := &linter.RuleConfig{} + + docInfo := createDocInfoWithIndexUnusedComponents(t, ctx, doc, server.URL+"/main.yaml") + + errs := rule.Run(ctx, docInfo, config) + + require.Len(t, errs, 1, "should only flag unreferenced components in main doc") + assert.Contains(t, errs[0].Error(), "`#/components/schemas/SharedUnused`", "should flag SharedUnused as unused") +} diff --git a/openapi/localize.go b/openapi/localize.go index 82e00f70..25757109 100644 --- a/openapi/localize.go +++ b/openapi/localize.go @@ -25,6 +25,8 @@ const ( LocalizeNamingPathBased LocalizeNamingStrategy = iota // LocalizeNamingCounter uses counter-based suffixes like "address_1.yaml" for conflicts LocalizeNamingCounter + // LocalizeNamingCustom uses a user-provided function for naming + LocalizeNamingCustom ) // LocalizeOptions represents the options available when localizing an OpenAPI document. @@ -39,6 +41,11 @@ type LocalizeOptions struct { HTTPClient system.Client // NamingStrategy determines how external reference files are named when localized. NamingStrategy LocalizeNamingStrategy + // CustomNamingFunc is used when NamingStrategy is LocalizeNamingCustom. + // It receives the original reference path and the resolved file content, + // and should return the desired filename for the localized file. + // The caller is responsible for ensuring filenames are unique. 
+ CustomNamingFunc func(originalRef string, content []byte) string } // Localize transforms an OpenAPI document by copying all external reference files to a target directory @@ -143,7 +150,7 @@ func Localize(ctx context.Context, doc *OpenAPI, opts LocalizeOptions) error { } // Phase 2: Generate conflict-free filenames for all external references - generateLocalizedFilenames(localizeStorage, opts.NamingStrategy) + generateLocalizedFilenames(localizeStorage, opts.NamingStrategy, opts.CustomNamingFunc) // Phase 3: Copy external files to target directory if err := copyExternalFiles(ctx, localizeStorage, opts); err != nil { @@ -241,7 +248,7 @@ func discoverSchemaReference(ctx context.Context, schema *oas3.JSONSchema[oas3.R if resolutionInfo != nil { storage.externalRefs.Set(normalizedFilePath, "") // Will be filled in filename generation phase - if data, found := opts.RootDocument.GetCachedReferenceDocument(resolutionInfo.AbsoluteReference); found { + if data, found := opts.RootDocument.GetCachedReferenceDocument(resolutionInfo.AbsoluteDocumentPath); found { storage.resolvedContent[normalizedFilePath] = data } else { return fmt.Errorf("failed to get cached content for reference %s", normalizedFilePath) @@ -266,7 +273,7 @@ func discoverSchemaReference(ctx context.Context, schema *oas3.JSONSchema[oas3.R return discoverSchemaReference(ctx, s, ResolveOptions{ RootDocument: opts.RootDocument, TargetDocument: targetDocInfo.ResolvedDocument, - TargetLocation: targetDocInfo.AbsoluteReference, + TargetLocation: targetDocInfo.AbsoluteDocumentPath, VirtualFS: opts.VirtualFS, HTTPClient: opts.HTTPClient, }, storage) @@ -320,7 +327,7 @@ func discoverGenericReference[T any, V interfaces.Validator[T], C marshaller.Cor if resolutionInfo != nil { storage.externalRefs.Set(normalizedFilePath, "") // Will be filled in filename generation phase - if data, found := opts.RootDocument.GetCachedReferenceDocument(resolutionInfo.AbsoluteReference); found { + if data, found := opts.RootDocument.GetCachedReferenceDocument(resolutionInfo.AbsoluteDocumentPath); found { storage.resolvedContent[normalizedFilePath] = data } else { return fmt.Errorf("failed to get cached content for reference %s", normalizedFilePath) @@ -337,7 +344,7 @@ func discoverGenericReference[T any, V interfaces.Validator[T], C marshaller.Cor resolveOpts := ResolveOptions{ RootDocument: opts.RootDocument, TargetDocument: targetDocInfo.ResolvedDocument, - TargetLocation: targetDocInfo.AbsoluteReference, + TargetLocation: targetDocInfo.AbsoluteDocumentPath, VirtualFS: opts.VirtualFS, HTTPClient: opts.HTTPClient, } @@ -386,7 +393,18 @@ func discoverGenericReference[T any, V interfaces.Validator[T], C marshaller.Cor } // generateLocalizedFilenames creates conflict-free filenames for all external references -func generateLocalizedFilenames(storage *localizeStorage, strategy LocalizeNamingStrategy) { +func generateLocalizedFilenames(storage *localizeStorage, strategy LocalizeNamingStrategy, customNamingFunc func(string, []byte) string) { + // Custom naming: delegate entirely to the caller's function + if strategy == LocalizeNamingCustom && customNamingFunc != nil { + for ref := range storage.externalRefs.All() { + content := storage.resolvedContent[ref] + filename := customNamingFunc(ref, content) + storage.externalRefs.Set(ref, filename) + storage.usedFilenames[filename] = true + } + return + } + // First pass: collect all base filenames to detect conflicts baseFilenames := make(map[string][]string) // base filename -> list of full paths for ref := range 
storage.externalRefs.All() { diff --git a/openapi/localize_test.go b/openapi/localize_test.go index 2b3cae0c..d0502dd7 100644 --- a/openapi/localize_test.go +++ b/openapi/localize_test.go @@ -2,6 +2,8 @@ package openapi_test import ( "bytes" + "crypto/sha256" + "fmt" "net/http" "net/http/httptest" "os" @@ -181,6 +183,93 @@ func TestLocalize_CounterBased_Success(t *testing.T) { } } +func TestLocalize_CustomNaming_Success(t *testing.T) { + t.Parallel() + + ctx := t.Context() + + // Create a mock HTTP server to serve remote schemas + server := createMockRemoteServer(t) + defer server.Close() + + // Load the input document + inputFile, err := os.Open("testdata/localize/input/spec.yaml") + require.NoError(t, err) + defer inputFile.Close() + + inputDoc, validationErrs, err := openapi.Unmarshal(ctx, inputFile) + require.NoError(t, err) + require.Empty(t, validationErrs, "Input document should be valid") + + // Create a temporary directory for output + tempDir := t.TempDir() + + // Create custom HTTP client that redirects api.example.com to our test server + httpClient := createRedirectHTTPClient(server.URL) + + // Track which refs the custom naming function is called with + var calledRefs []string + + // Custom naming function that uses a content SHA prefix (similar to speakeasy bundler) + customNaming := func(originalRef string, content []byte) string { + calledRefs = append(calledRefs, originalRef) + + base := filepath.Base(originalRef) + ext := filepath.Ext(base) + if ext == "" { + ext = ".yaml" + } + name := strings.TrimSuffix(base, ext) + sha := sha256.Sum256(content) + return fmt.Sprintf("%s-%x%s", name, sha[:4], ext) + } + + opts := openapi.LocalizeOptions{ + DocumentLocation: "testdata/localize/input/spec.yaml", + TargetDirectory: tempDir, + VirtualFS: &system.FileSystem{}, + HTTPClient: httpClient, + NamingStrategy: openapi.LocalizeNamingCustom, + CustomNamingFunc: customNaming, + } + + err = openapi.Localize(ctx, inputDoc, opts) + require.NoError(t, err) + + // Verify the custom naming function was called for each external reference + assert.NotEmpty(t, calledRefs, "Custom naming function should have been called") + + // Verify that files with custom names exist in the target directory + entries, err := os.ReadDir(tempDir) + require.NoError(t, err) + assert.NotEmpty(t, entries, "Target directory should contain localized files") + + // All output filenames should contain a hex SHA suffix + for _, entry := range entries { + assert.Regexp(t, `-[0-9a-f]{8}\.yaml$`, entry.Name(), + "File %s should match custom naming pattern", entry.Name()) + } + + // Verify the document references were rewritten to use the custom filenames + var buf bytes.Buffer + err = openapi.Marshal(ctx, inputDoc, &buf) + require.NoError(t, err) + output := buf.String() + + // The output should not contain any of the original external references + assert.NotContains(t, output, "./components.yaml") + assert.NotContains(t, output, "./api/components.yaml") + assert.NotContains(t, output, "./shared/address.yaml") + assert.NotContains(t, output, "https://api.example.com/schemas/") + + // The main document should reference custom-named files (only direct refs, not transitive ones) + // Transitive refs (category, geo, metadata) live inside the localized files, not the main doc + assert.Contains(t, output, "components-") + assert.Contains(t, output, "address-") + assert.Contains(t, output, "UserProfile-") + assert.Contains(t, output, "UserPreferences-") +} + // createMockRemoteServer creates a mock HTTP server that serves remote 
schema files func createMockRemoteServer(t *testing.T) *httptest.Server { t.Helper() diff --git a/openapi/mediatype.go b/openapi/mediatype.go index 6cc46c90..c4018499 100644 --- a/openapi/mediatype.go +++ b/openapi/mediatype.go @@ -2,6 +2,7 @@ package openapi import ( "context" + "errors" "strings" "github.com/speakeasy-api/openapi/extensions" @@ -136,11 +137,7 @@ func (m *MediaType) Validate(ctx context.Context, opts ...validation.Option) []e // Validate mutual exclusivity: encoding MUST NOT be present with prefixEncoding or itemEncoding if core.Encoding.Present && (core.PrefixEncoding.Present || core.ItemEncoding.Present) { - errs = append(errs, validation.NewValueError( - validation.NewValueValidationError("encoding field MUST NOT be present when prefixEncoding or itemEncoding is present"), - core, - core.Encoding, - )) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationMutuallyExclusiveFields, errors.New("mediaType.encoding is mutually exclusive with mediaType.prefixEncoding and mediaType.itemEncoding"), core, core.Encoding)) } // Validate multipart-only constraint for encoding, prefixEncoding, and itemEncoding @@ -151,27 +148,15 @@ func (m *MediaType) Validate(ctx context.Context, opts ...validation.Option) []e isFormURLEncoded := strings.ToLower(mtCtx.MediaType) == "application/x-www-form-urlencoded" if core.PrefixEncoding.Present && !isMultipart { - errs = append(errs, validation.NewValueError( - validation.NewValueValidationError("prefixEncoding field SHALL only apply when the media type is multipart"), - core, - core.PrefixEncoding, - )) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationAllowedValues, errors.New("mediaType.prefixEncoding is only valid when the media type is multipart"), core, core.PrefixEncoding)) } if core.ItemEncoding.Present && !isMultipart { - errs = append(errs, validation.NewValueError( - validation.NewValueValidationError("itemEncoding field SHALL only apply when the media type is multipart"), - core, - core.ItemEncoding, - )) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationAllowedValues, errors.New("mediaType.itemEncoding is only valid when the media type is multipart"), core, core.ItemEncoding)) } if core.Encoding.Present && !isMultipart && !isFormURLEncoded { - errs = append(errs, validation.NewValueError( - validation.NewValueValidationError("encoding field SHALL only apply when the media type is multipart or application/x-www-form-urlencoded"), - core, - core.Encoding, - )) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationAllowedValues, errors.New("mediaType.encoding is only valid when the media type is multipart or application/x-www-form-urlencoded"), core, core.Encoding)) } } diff --git a/openapi/mediatype_multipart_validate_test.go b/openapi/mediatype_multipart_validate_test.go index 03a66e39..0e796cec 100644 --- a/openapi/mediatype_multipart_validate_test.go +++ b/openapi/mediatype_multipart_validate_test.go @@ -108,14 +108,14 @@ func TestMediaType_MultipartValidation_Error(t *testing.T) { yml: ` description: Test response content: - application/json: - schema: - type: array - prefixEncoding: - - contentType: application/json + application/json: + schema: + type: array + prefixEncoding: + - contentType: application/json `, wantErrs: []string{ - "prefixEncoding field SHALL only apply when the media type is multipart", + "error validation-allowed-values 
mediaType.prefixEncoding is only valid when the media type is multipart", }, }, { @@ -123,14 +123,14 @@ content: yml: ` description: Test response content: - application/json: - itemSchema: - type: object - itemEncoding: - contentType: application/json + application/json: + itemSchema: + type: object + itemEncoding: + contentType: application/json `, wantErrs: []string{ - "itemEncoding field SHALL only apply when the media type is multipart", + "error validation-allowed-values mediaType.itemEncoding is only valid when the media type is multipart", }, }, { @@ -138,18 +138,18 @@ content: yml: ` description: Test response content: - application/json: - schema: - type: object - properties: - file: - type: string - encoding: - file: - contentType: image/png + application/json: + schema: + type: object + properties: + file: + type: string + encoding: + file: + contentType: image/png `, wantErrs: []string{ - "encoding field SHALL only apply when the media type is multipart or application/x-www-form-urlencoded", + "error validation-allowed-values mediaType.encoding is only valid when the media type is multipart or application/x-www-form-urlencoded", }, }, } diff --git a/openapi/mediatype_validate_test.go b/openapi/mediatype_validate_test.go index eace23f0..cf224582 100644 --- a/openapi/mediatype_validate_test.go +++ b/openapi/mediatype_validate_test.go @@ -220,8 +220,8 @@ encoding: type: invalid-type `, wantErrs: []string{ - "[13:17] schema.type value must be one of 'array', 'boolean', 'integer', 'null', 'number', 'object', 'string'", - "[13:17] schema.type expected array, got string", + "[13:17] error validation-invalid-schema schema.type value must be one of 'array', 'boolean', 'integer', 'null', 'number', 'object', 'string'", + "[13:17] error validation-type-mismatch schema.type expected `array`, got `string`", }, }, { @@ -239,7 +239,7 @@ prefixEncoding: - contentType: application/json `, wantErrs: []string{ - "[8:3] encoding field MUST NOT be present when prefixEncoding or itemEncoding is present", + "[8:3] error validation-mutually-exclusive-fields mediaType.encoding is mutually exclusive with mediaType.prefixEncoding and mediaType.itemEncoding", }, }, { @@ -257,7 +257,7 @@ itemEncoding: contentType: application/json `, wantErrs: []string{ - "[8:3] encoding field MUST NOT be present when prefixEncoding or itemEncoding is present", + "[8:3] error validation-mutually-exclusive-fields mediaType.encoding is mutually exclusive with mediaType.prefixEncoding and mediaType.itemEncoding", }, }, } diff --git a/openapi/multifile_test.go b/openapi/multifile_test.go new file mode 100644 index 00000000..5267de20 --- /dev/null +++ b/openapi/multifile_test.go @@ -0,0 +1,156 @@ +package openapi_test + +import ( + "bytes" + "os" + "testing" + + "github.com/speakeasy-api/openapi/openapi" + "github.com/speakeasy-api/openapi/references" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func Test_MultiFile_BasicReferences(t *testing.T) { + t.Parallel() + ctx := t.Context() + + // Read the root OpenAPI file + data, err := os.ReadFile("testdata/multifile-basic/openapi.yaml") + require.NoError(t, err, "failed to read openapi.yaml") + + // Unmarshal the document + doc, validationErrs, err := openapi.Unmarshal(ctx, bytes.NewReader(data)) + require.NoError(t, err, "unmarshal should succeed") + require.NotNil(t, doc, "document should not be nil") + + t.Logf("Unmarshal validation errors: %d", len(validationErrs)) + for _, verr := range validationErrs { + t.Logf(" - %v", verr) + } + + // 
Build index with proper resolve options
+	resolveOpts := references.ResolveOptions{
+		RootDocument:   doc,
+		TargetDocument: doc,
+		TargetLocation: "testdata/multifile-basic/openapi.yaml",
+	}
+
+	index := openapi.BuildIndex(ctx, doc, resolveOpts)
+	require.NotNil(t, index, "index should not be nil")
+
+	// Check for index errors
+	indexErrors := index.GetAllErrors()
+	t.Logf("Index errors: %d", len(indexErrors))
+	for i, ierr := range indexErrors {
+		t.Logf("  %d: %v", i+1, ierr)
+	}
+
+	// The key test: verify no "source is nil" errors
+	sourceNilErrors := []error{}
+	for _, ierr := range indexErrors {
+		if !assert.NotContains(t, ierr.Error(), "source is nil") { // the error does mention "source is nil"
+			sourceNilErrors = append(sourceNilErrors, ierr)
+		}
+	}
+
+	if len(sourceNilErrors) > 0 {
+		t.Errorf("Found %d 'source is nil' errors - this indicates reference resolution failure:", len(sourceNilErrors))
+		for i, err := range sourceNilErrors {
+			t.Logf("  %d: %v", i+1, err)
+		}
+		t.FailNow()
+	}
+
+	// Verify external components were indexed
+	t.Logf("External schemas: %d", len(index.ExternalSchemas))
+	t.Logf("External responses: %d", len(index.ExternalResponses))
+	t.Logf("External headers: %d", len(index.ExternalHeaders))
+	t.Logf("External links: %d", len(index.ExternalLinks))
+	t.Logf("External callbacks: %d", len(index.ExternalCallbacks))
+	t.Logf("External pathItems: %d", len(index.ExternalPathItems))
+
+	// We should have external components from components.yaml
+	assert.NotEmpty(t, index.ExternalSchemas, "should have external schemas")
+	assert.NotEmpty(t, index.ExternalResponses, "should have external responses")
+	assert.NotEmpty(t, index.ExternalHeaders, "should have external headers")
+	assert.NotEmpty(t, index.ExternalLinks, "should have external links")
+	assert.NotEmpty(t, index.ExternalCallbacks, "should have external callbacks")
+	assert.NotEmpty(t, index.ExternalPathItems, "should have external pathItems")
+
+	// Verify references can be resolved
+	t.Run("references resolve successfully", func(t *testing.T) {
+		t.Parallel()
+		// Get the operation that has the external response reference
+		require.NotNil(t, doc.Paths, "paths should not be nil")
+		pathItemRef, found := doc.Paths.Get("/test")
+		require.True(t, found, "should find /test path")
+		require.NotNil(t, pathItemRef, "pathItem should not be nil")
+
+		// Get the actual PathItem (may need to resolve if it's a reference)
+		if pathItemRef.IsReference() {
+			_, err := pathItemRef.Resolve(ctx, resolveOpts)
+			require.NoError(t, err, "pathItem should resolve")
+		}
+		pathItem := pathItemRef.GetObject()
+		require.NotNil(t, pathItem, "resolved pathItem should not be nil")
+		getOp := pathItem.Get()
+		require.NotNil(t, getOp, "should have GET operation")
+
+		// Try to resolve the 200 response reference
+		require.NotNil(t, getOp.Responses, "responses should not be nil")
+		response200, found := getOp.Responses.Get("200")
+		require.True(t, found, "should have 200 response")
+		require.True(t, response200.IsReference(), "200 response should be a reference")
+
+		// Resolve it
+		validationErrs, err := response200.Resolve(ctx, resolveOpts)
+		require.NoError(t, err, "resolving 200 response should not error")
+		assert.Empty(t, validationErrs, "should have no validation errors")
+
+		// Verify the resolved object
+		resolvedResponse := response200.GetObject()
+		require.NotNil(t, resolvedResponse, "resolved response should not be nil")
+		assert.Equal(t, "Successful response", resolvedResponse.Description, "should have correct description")
+	})
+}
+
+// Test the specific pattern: external 
file with internal component references +// This is failing because when resolving a component from an external file, +// internal references within that component (#/components/...) cannot be resolved +func Test_MultiFile_InternalReferencesInExternalFile(t *testing.T) { + t.Parallel() + ctx := t.Context() + + data, err := os.ReadFile("testdata/multifile-simple/openapi.yaml") + require.NoError(t, err) + + doc, _, err := openapi.Unmarshal(ctx, bytes.NewReader(data)) + require.NoError(t, err) + require.NotNil(t, doc) + + resolveOpts := references.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "testdata/multifile-simple/openapi.yaml", + } + + index := openapi.BuildIndex(ctx, doc, resolveOpts) + require.NotNil(t, index) + + indexErrors := index.GetAllErrors() + t.Logf("Index errors: %d", len(indexErrors)) + for i, ierr := range indexErrors { + t.Logf(" %d: %v", i+1, ierr) + } + + // This test demonstrates the bug: + // When TestResponse (from components.yaml) contains an internal reference + // to #/components/schemas/TestSchema, that reference cannot be resolved + // because the external file is not unmarshalled as a full document + for _, ierr := range indexErrors { + if assert.Contains(t, ierr.Error(), "source is nil") { + t.Logf("BUG CONFIRMED: %v", ierr) + } + } +} diff --git a/openapi/nested_reference_tracking_test.go b/openapi/nested_reference_tracking_test.go index f706c7ab..39d01141 100644 --- a/openapi/nested_reference_tracking_test.go +++ b/openapi/nested_reference_tracking_test.go @@ -393,3 +393,105 @@ paths: topLevelRef := resolvedSchema.GetTopLevelReference() assert.Nil(t, topLevelRef, "inline schema should have no top-level reference") } + +// TestGetReferenceChain_NestedProperty tests that reference chains are properly maintained +// when accessing a schema through a property of another schema. +// +// This is a regression test for a bug where nested property references would lose their +// parent chain context, returning only the immediate reference instead of the full chain. 
+// +// Expected: ContainerSchema.nested → SharedSchema should have chain [ContainerSchema, SharedSchema] +// Bug was: Chain only contained [SharedSchema], losing the parent context +func TestGetReferenceChain_NestedProperty(t *testing.T) { + t.Parallel() + + ctx := t.Context() + + yml := `openapi: "3.1.0" +info: + title: Test API + version: 1.0.0 +paths: + /container: + get: + responses: + '200': + description: OK + content: + application/json: + schema: + $ref: "#/components/schemas/ContainerSchema" +components: + schemas: + SharedSchema: + type: object + properties: + value: + type: string + ContainerSchema: + type: object + properties: + nested: + $ref: "#/components/schemas/SharedSchema" +` + + doc, validationErrs, err := Unmarshal(ctx, bytes.NewBufferString(yml)) + require.NoError(t, err, "should parse OpenAPI document") + assert.Empty(t, validationErrs, "should have no validation errors") + + // Get the ContainerSchema from the path response + pathItem, _ := doc.Paths.Get("/container") + require.NotNil(t, pathItem.GetObject(), "should have path item") + + getOp := pathItem.GetObject().Get() + require.NotNil(t, getOp, "should have GET operation") + + response, _ := getOp.Responses.Get("200") + require.NotNil(t, response.GetObject(), "should have response") + + containerSchema, _ := response.GetObject().Content.Get("application/json") + require.NotNil(t, containerSchema.Schema, "should have schema") + + // Resolve ContainerSchema + _, err = containerSchema.Schema.Resolve(ctx, oas3.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + require.NoError(t, err, "should resolve container schema") + + containerResolved := containerSchema.Schema.MustGetResolvedSchema() + props := containerResolved.GetSchema().GetProperties() + nestedProp, _ := props.Get("nested") + require.NotNil(t, nestedProp, "should have nested property") + + // Resolve the nested property + _, err = nestedProp.Resolve(ctx, oas3.ResolveOptions{ + RootDocument: doc, + TargetDocument: doc, + TargetLocation: "test.yaml", + }) + require.NoError(t, err, "should resolve nested property") + + // Get the reference chain for the nested property + // This should include BOTH ContainerSchema and SharedSchema + nestedResolved := nestedProp.MustGetResolvedSchema() + chain := nestedResolved.GetReferenceChain() + + require.Len(t, chain, 2, "reference chain should include both parent and target schemas") + assert.Equal(t, "#/components/schemas/ContainerSchema", string(chain[0].Reference), + "first entry should be the parent schema (ContainerSchema)") + assert.Equal(t, "#/components/schemas/SharedSchema", string(chain[1].Reference), + "second entry should be the target schema (SharedSchema)") + + // Verify convenience methods + immediateRef := nestedResolved.GetImmediateReference() + require.NotNil(t, immediateRef, "should have immediate reference") + assert.Equal(t, "#/components/schemas/SharedSchema", string(immediateRef.Reference), + "immediate reference should be SharedSchema") + + topLevelRef := nestedResolved.GetTopLevelReference() + require.NotNil(t, topLevelRef, "should have top-level reference") + assert.Equal(t, "#/components/schemas/ContainerSchema", string(topLevelRef.Reference), + "top-level reference should be ContainerSchema") +} diff --git a/openapi/openapi.go b/openapi/openapi.go index 0cb42c17..bfcf2efa 100644 --- a/openapi/openapi.go +++ b/openapi/openapi.go @@ -2,7 +2,9 @@ package openapi import ( "context" + "fmt" "net/url" + "strings" 
"github.com/speakeasy-api/openapi/extensions" "github.com/speakeasy-api/openapi/internal/interfaces" @@ -13,6 +15,7 @@ import ( "github.com/speakeasy-api/openapi/pointer" "github.com/speakeasy-api/openapi/sequencedmap" "github.com/speakeasy-api/openapi/validation" + "gopkg.in/yaml.v3" ) // Version is the version of the OpenAPI Specification that this package conforms to. @@ -219,11 +222,11 @@ func (o *OpenAPI) Validate(ctx context.Context, opts ...validation.Option) []err docVersion, err := version.Parse(o.OpenAPI) if err != nil { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("openapi.openapi invalid OpenAPI version %s: %s", o.OpenAPI, err.Error()), core, core.OpenAPI)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationSupportedVersion, fmt.Errorf("openapi.openapi invalid OpenAPI version %s: %w", o.OpenAPI, err), core, core.OpenAPI)) } if docVersion != nil { if docVersion.LessThan(*MinimumSupportedVersion) || docVersion.GreaterThan(*MaximumSupportedVersion) { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("openapi.openapi only OpenAPI versions between %s and %s are supported", MinimumSupportedVersion, MaximumSupportedVersion), core, core.OpenAPI)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationSupportedVersion, fmt.Errorf("openapi.openapi only OpenAPI versions between %s and %s are supported", MinimumSupportedVersion, MaximumSupportedVersion), core, core.OpenAPI)) } } @@ -259,17 +262,191 @@ func (o *OpenAPI) Validate(ctx context.Context, opts ...validation.Option) []err if core.Self.Present && o.Self != nil { if _, err := url.Parse(*o.Self); err != nil { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("openapi.$self is not a valid uri reference: %s", err), core, core.Self)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("openapi.$self is not a valid uri reference: %w", err), core, core.Self)) } } if core.JSONSchemaDialect.Present && o.JSONSchemaDialect != nil { if _, err := url.Parse(*o.JSONSchemaDialect); err != nil { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("openapi.jsonSchemaDialect is not a valid uri: %s", err), core, core.JSONSchemaDialect)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("`openapi.jsonSchemaDialect` is not a valid uri: %w", err), core, core.JSONSchemaDialect)) } } + operationIdErrs := validateOperationIDUniqueness(ctx, o) + errs = append(errs, operationIdErrs...) + + operationParameterErrs := validateOperationParameterUniqueness(ctx, o) + errs = append(errs, operationParameterErrs...) 
+ o.Valid = len(errs) == 0 && core.GetValid() return errs } + +func validateOperationIDUniqueness(ctx context.Context, doc *OpenAPI) []error { + if doc == nil { + return nil + } + + seen := make(map[string]struct{}) + var errs []error + + for item := range Walk(ctx, doc) { + if err := item.Match(Matcher{ + Operation: func(op *Operation) error { + method, path := ExtractMethodAndPath(item.Location) + if method == "" || path == "" { + return nil + } + + operationID := op.GetOperationID() + if operationID == "" { + return nil + } + + if _, ok := seen[operationID]; ok { + errNode := getOperationIDValueNode(op) + if errNode == nil { + errNode = op.GetRootNode() + } + err := validation.NewValidationError( + validation.SeverityError, + validation.RuleValidationOperationIdUnique, + fmt.Errorf("the `%s` operation at path `%s` contains a duplicate operationId `%s`", method, path, operationID), + errNode, + ) + errs = append(errs, err) + return nil + } + + seen[operationID] = struct{}{} + return nil + }, + }); err != nil { + errs = append(errs, err) + } + } + + return errs +} + +func getOperationIDValueNode(op *Operation) *yaml.Node { + if op == nil { + return nil + } + + core := op.GetCore() + if core == nil || !core.OperationID.Present { + return nil + } + + return core.OperationID.ValueNode +} + +// validateParameterUniqueness checks for duplicate parameters in a list +// methodOrLevel should be the HTTP method (GET, POST, etc.) or "TOP" for path-level +func validateParameterUniqueness(parameters []*ReferencedParameter, methodOrLevel, path string, fallbackNode *yaml.Node) []error { + if len(parameters) == 0 { + return nil + } + + var errs []error + seen := make(map[string]bool) + + for _, paramRef := range parameters { + param := paramRef.GetObject() + if param == nil { + continue + } + + paramName := param.GetName() + paramIn := param.GetIn().String() + if paramName == "" || paramIn == "" { + continue + } + + key := paramName + "::" + paramIn + if seen[key] { + core := param.GetCore() + errNode := core.GetRootNode() + if errNode == nil { + errNode = fallbackNode + } + + var errMsg string + if methodOrLevel == "TOP" { + errMsg = fmt.Sprintf("parameter %q is duplicated in path %q", paramName, path) + } else { + errMsg = fmt.Sprintf("parameter %q is duplicated in %s operation at path %q", paramName, methodOrLevel, path) + } + + err := validation.NewValidationError( + validation.SeverityError, + validation.RuleValidationOperationParameters, + fmt.Errorf("%s", errMsg), + errNode, + ) + errs = append(errs, err) + } + seen[key] = true + } + + return errs +} + +func validateOperationParameterUniqueness(ctx context.Context, doc *OpenAPI) []error { + if doc == nil { + return nil + } + + var errs []error + + for item := range Walk(ctx, doc) { + if err := item.Match(Matcher{ + // Check duplicate parameters at Operation level + Operation: func(op *Operation) error { + method, path := ExtractMethodAndPath(item.Location) + if method == "" || path == "" { + return nil + } + + paramErrs := validateParameterUniqueness( + op.GetParameters(), + strings.ToUpper(method), + path, + op.GetRootNode(), + ) + errs = append(errs, paramErrs...) 
+ + return nil + }, + // Check duplicate parameters at PathItem level + ReferencedPathItem: func(refPathItem *ReferencedPathItem) error { + pathItem := refPathItem.GetObject() + if pathItem == nil { + return nil + } + + // Get the path from the location (parent key) + path := item.Location.ParentKey() + if path == "" { + return nil + } + + paramErrs := validateParameterUniqueness( + pathItem.Parameters, + "TOP", + path, + pathItem.GetRootNode(), + ) + errs = append(errs, paramErrs...) + + return nil + }, + }); err != nil { + errs = append(errs, err) + } + } + + return errs +} diff --git a/openapi/openapi_examples_test.go b/openapi/openapi_examples_test.go index 01565b08..f4f50c86 100644 --- a/openapi/openapi_examples_test.go +++ b/openapi/openapi_examples_test.go @@ -254,22 +254,22 @@ func Example_validating() { fmt.Printf("\nReduced validation errors from %d to %d\n", len(validationErrs), len(newValidationErrs)) // Output: Initial validation errors: 16 - // [3:3] info.version is missing - // [22:17] schema.type.0 expected string, got null - // [28:30] response.content.application/json expected object, got `` - // [31:18] responses must have at least one response code - // [34:7] operation.responses is missing - // [43:17] schema.properties.required failed to validate either Schema [schema.properties.required expected object, got sequence] or bool [schema.properties.required expected bool, got sequence] - // [51:25] schema.properties.name.type expected array, got string - // [51:25] schema.properties.name.type value must be one of 'array', 'boolean', 'integer', 'null', 'number', 'object', 'string' - // [56:7] schema.examples expected array, got object - // [59:15] schema.properties.name expected one of [boolean, object], got string - // [59:15] schema.properties.name expected one of [boolean, object], got string - // [59:15] schema.properties.name failed to validate either Schema [schema.properties.name expected object, got `str...`] or bool [schema.properties.name line 59: cannot unmarshal !!str `string` into bool] - // [60:18] schema.properties.example expected one of [boolean, object], got string - // [60:18] schema.properties.example expected one of [boolean, object], got string - // [60:18] schema.properties.example failed to validate either Schema [schema.properties.example expected object, got `John Do...`] or bool [schema.properties.example line 60: cannot unmarshal !!str `John Doe` into bool] - // [63:9] schema.examples expected sequence, got object + // [3:3] error validation-required-field `info.version` is required + // [22:17] error validation-type-mismatch schema.type.0 expected `string`, got `null` + // [28:30] error validation-type-mismatch response.content.application/json expected `object`, got `` + // [31:18] error validation-allowed-values responses must have at least one response code + // [34:7] error validation-required-field `operation.responses` is required + // [43:17] error validation-type-mismatch schema.properties.required failed to validate either Schema [schema.properties.required expected `object`, got `sequence`] or bool [schema.properties.required expected `bool`, got `sequence`] + // [51:25] error validation-invalid-schema schema.properties.name.type value must be one of 'array', 'boolean', 'integer', 'null', 'number', 'object', 'string' + // [51:25] error validation-type-mismatch schema.properties.name.type expected `array`, got `string` + // [56:7] error validation-type-mismatch schema.examples expected `array`, got `object` + // [59:15] error 
validation-type-mismatch schema.properties.name expected one of [`boolean`, `object`], got `string` + // [59:15] error validation-type-mismatch schema.properties.name expected one of [`boolean`, `object`], got `string` + // [59:15] error validation-type-mismatch schema.properties.name failed to validate either Schema [schema.properties.name expected `object`, got `str...`] or bool [schema.properties.name line 59: cannot unmarshal !!str `string` into bool] + // [60:18] error validation-type-mismatch schema.properties.example expected one of [`boolean`, `object`], got `string` + // [60:18] error validation-type-mismatch schema.properties.example expected one of [`boolean`, `object`], got `string` + // [60:18] error validation-type-mismatch schema.properties.example failed to validate either Schema [schema.properties.example expected `object`, got `John Do...`] or bool [schema.properties.example line 60: cannot unmarshal !!str `John Doe` into bool] + // [63:9] error validation-type-mismatch schema.examples expected `sequence`, got `object` // // Fixing validation errors... // ✓ Added missing info.version @@ -277,13 +277,13 @@ func Example_validating() { // ✓ Added missing responses to POST /invalid // // Validation errors after fixes: 7 - // [51:25] schema.properties.name.type expected array, got string - // [51:25] schema.properties.name.type value must be one of 'array', 'boolean', 'integer', 'null', 'number', 'object', 'string' - // [56:7] schema.examples expected array, got object - // [59:15] schema.properties.name expected one of [boolean, object], got string - // [59:15] schema.properties.name expected one of [boolean, object], got string - // [60:18] schema.properties.example expected one of [boolean, object], got string - // [60:18] schema.properties.example expected one of [boolean, object], got string + // [51:25] error validation-invalid-schema schema.properties.name.type value must be one of 'array', 'boolean', 'integer', 'null', 'number', 'object', 'string' + // [51:25] error validation-type-mismatch schema.properties.name.type expected `array`, got `string` + // [56:7] error validation-type-mismatch schema.examples expected `array`, got `object` + // [59:15] error validation-type-mismatch schema.properties.name expected one of [`boolean`, `object`], got `string` + // [59:15] error validation-type-mismatch schema.properties.name expected one of [`boolean`, `object`], got `string` + // [60:18] error validation-type-mismatch schema.properties.example expected one of [`boolean`, `object`], got `string` + // [60:18] error validation-type-mismatch schema.properties.example expected one of [`boolean`, `object`], got `string` // // Reduced validation errors from 16 to 7 } diff --git a/openapi/openapi_unmarshal_test.go b/openapi/openapi_unmarshal_test.go index b3ed5de5..51d659f9 100644 --- a/openapi/openapi_unmarshal_test.go +++ b/openapi/openapi_unmarshal_test.go @@ -120,15 +120,15 @@ func TestOpenAPI_Unmarshal_Error(t *testing.T) { version: 1.0.0 paths: {}`, wantErrs: []string{ - "[1:1] openapi.openapi invalid OpenAPI version : invalid version ", - "[1:1] openapi.openapi is missing", + "[1:1] error validation-required-field `openapi.openapi` is required", + "[1:1] error validation-supported-version openapi.openapi invalid OpenAPI version : invalid version ", }, }, { name: "missing info field", yaml: `openapi: 3.1.0 paths: {}`, - wantErrs: []string{"[1:1] openapi.info is missing"}, + wantErrs: []string{"[1:1] error validation-required-field `openapi.info` is required"}, }, { name: 
"invalid openapi version", @@ -137,7 +137,7 @@ info: title: Test API version: 1.0.0 paths: {}`, - wantErrs: []string{fmt.Sprintf("[1:10] openapi.openapi only OpenAPI versions between %s and %s are supported", openapi.MinimumSupportedVersion, openapi.MaximumSupportedVersion)}, + wantErrs: []string{fmt.Sprintf("[1:10] error validation-supported-version openapi.openapi only OpenAPI versions between %s and %s are supported", openapi.MinimumSupportedVersion, openapi.MaximumSupportedVersion)}, }, } diff --git a/openapi/openapi_validate_test.go b/openapi/openapi_validate_test.go index f7bd6703..a06c7762 100644 --- a/openapi/openapi_validate_test.go +++ b/openapi/openapi_validate_test.go @@ -196,7 +196,7 @@ info: version: 1.0.0 paths: {} `, - wantErrs: []string{"openapi.openapi invalid OpenAPI version invalid-version"}, + wantErrs: []string{"error validation-supported-version openapi.openapi invalid OpenAPI version invalid-version"}, }, { name: "unsupported_openapi_version", @@ -207,7 +207,7 @@ info: version: 1.0.0 paths: {} `, - wantErrs: []string{"openapi.openapi only OpenAPI versions between"}, + wantErrs: []string{"error validation-supported-version openapi.openapi only OpenAPI versions between"}, }, { name: "invalid_info_missing_title", @@ -217,7 +217,7 @@ info: version: 1.0.0 paths: {} `, - wantErrs: []string{"[4:3] info.title is missing"}, + wantErrs: []string{"[4:3] error validation-required-field `info.title` is required"}, }, { name: "invalid_info_missing_version", @@ -227,7 +227,7 @@ info: title: Test API paths: {} `, - wantErrs: []string{"[4:3] info.version is missing"}, + wantErrs: []string{"[4:3] error validation-required-field `info.version` is required"}, }, { name: "invalid_server", @@ -240,7 +240,7 @@ servers: - description: Invalid server without URL paths: {} `, - wantErrs: []string{"[7:5] server.url is missing"}, + wantErrs: []string{"[7:5] error validation-required-field `server.url` is required"}, }, { name: "invalid_tag", @@ -253,7 +253,7 @@ tags: - description: Tag without name paths: {} `, - wantErrs: []string{"[7:5] tag.name is missing"}, + wantErrs: []string{"[7:5] error validation-required-field `tag.name` is required"}, }, { name: "invalid_external_docs", @@ -266,7 +266,7 @@ externalDocs: description: External docs without URL paths: {} `, - wantErrs: []string{"[7:3] externalDocumentation.url is missing"}, + wantErrs: []string{"[7:3] error validation-required-field `externalDocumentation.url` is required"}, }, { name: "invalid_self_not_uri", @@ -278,7 +278,84 @@ info: version: 1.0.0 paths: {} `, - wantErrs: []string{"openapi.$self is not a valid uri reference"}, + wantErrs: []string{"error validation-invalid-format openapi.$self is not a valid uri reference"}, + }, + { + name: "duplicate_operation_id", + yml: ` +openapi: 3.1.0 +info: + title: Test API + version: 1.0.0 +paths: + /melody: + post: + operationId: littleSong + responses: + '200': + description: ok + /ember: + get: + operationId: littleSong + responses: + '200': + description: ok +`, + wantErrs: []string{"error validation-operation-id-unique the `get` operation at path `/ember` contains a duplicate operationId `littleSong`"}, + }, + { + name: "duplicate_operation_parameter", + yml: ` +openapi: 3.1.0 +info: + title: Test API + version: 1.0.0 +paths: + /users/{id}: + get: + parameters: + - in: path + name: id + required: true + schema: + type: string + - in: path + name: id + required: true + schema: + type: string + responses: + '200': + description: ok +`, + wantErrs: []string{"error 
validation-operation-parameters parameter \"id\" is duplicated in GET operation at path \"/users/{id}\""}, + }, + { + name: "duplicate_pathitem_parameter", + yml: ` +openapi: 3.1.0 +info: + title: Test API + version: 1.0.0 +paths: + /users/{id}: + parameters: + - in: path + name: id + required: true + schema: + type: string + - in: path + name: id + required: true + schema: + type: string + get: + responses: + '200': + description: ok +`, + wantErrs: []string{"error validation-operation-parameters parameter \"id\" is duplicated in path \"/users/{id}\""}, }, } diff --git a/openapi/operation_validate_test.go b/openapi/operation_validate_test.go index 92c878d6..30ebd333 100644 --- a/openapi/operation_validate_test.go +++ b/openapi/operation_validate_test.go @@ -139,7 +139,7 @@ externalDocs: description: Invalid docs url: ":invalid" `, - wantErrs: []string{"[7:8] externalDocumentation.url is not a valid uri: parse \":invalid\": missing protocol scheme"}, + wantErrs: []string{"[7:8] warning validation-invalid-format externalDocumentation.url is not a valid uri: parse \":invalid\": missing protocol scheme"}, }, { name: "invalid server URL", @@ -151,7 +151,7 @@ servers: - url: ":invalid" description: Invalid server `, - wantErrs: []string{"[6:10] server.url is not a valid uri: parse \":invalid\": missing protocol scheme"}, + wantErrs: []string{"[6:10] error validation-invalid-format `server.url` is not a valid uri: parse \":invalid\": missing protocol scheme"}, }, } diff --git a/openapi/parameter.go b/openapi/parameter.go index 68304dce..0ff38226 100644 --- a/openapi/parameter.go +++ b/openapi/parameter.go @@ -2,6 +2,7 @@ package openapi import ( "context" + "errors" "fmt" "slices" "strings" @@ -212,51 +213,51 @@ func (p *Parameter) Validate(ctx context.Context, opts ...validation.Option) []e errs := []error{} if core.Name.Present && p.Name == "" { - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("parameter.name is required"), core, core.Name)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("`parameter.name` is required"), core, core.Name)) } if core.In.Present && p.In == "" { - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("parameter.in is required"), core, core.In)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("`parameter.in` is required"), core, core.In)) } else { switch p.In { case ParameterInQuery, ParameterInQueryString, ParameterInHeader, ParameterInPath, ParameterInCookie: default: - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("parameter.in must be one of [%s]", strings.Join([]string{string(ParameterInQuery), string(ParameterInQueryString), string(ParameterInHeader), string(ParameterInPath), string(ParameterInCookie)}, ", ")), core, core.In)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationAllowedValues, fmt.Errorf("parameter.in must be one of [`%s`]", strings.Join([]string{string(ParameterInQuery), string(ParameterInQueryString), string(ParameterInHeader), string(ParameterInPath), string(ParameterInCookie)}, ", ")), core, core.In)) } } if p.In == ParameterInPath && (!core.Required.Present || !*p.Required) { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("parameter.in=path requires required=true"), core, core.Required)) + errs = append(errs, 
validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("`parameter.in=path` requires `required=true`"), core, core.Required)) } if core.AllowEmptyValue.Present && p.In != ParameterInQuery { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("parameter.allowEmptyValue is only valid for in=query"), core, core.AllowEmptyValue)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationAllowedValues, errors.New("`parameter.allowEmptyValue` is only valid for `in=query`"), core, core.AllowEmptyValue)) } if core.Style.Present { switch p.In { case ParameterInQueryString: - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("parameter field style is not allowed for in=querystring"), core, core.Style)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationAllowedValues, errors.New("parameter field style is not allowed for in=querystring"), core, core.Style)) case ParameterInPath: allowedStyles := []string{string(SerializationStyleSimple), string(SerializationStyleLabel), string(SerializationStyleMatrix)} if !slices.Contains(allowedStyles, string(*p.Style)) { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("parameter.style must be one of [%s] for in=path", strings.Join(allowedStyles, ", ")), core, core.Style)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationAllowedValues, fmt.Errorf("parameter.style must be one of [`%s`] for in=path", strings.Join(allowedStyles, ", ")), core, core.Style)) } case ParameterInQuery: allowedStyles := []string{string(SerializationStyleForm), string(SerializationStyleSpaceDelimited), string(SerializationStylePipeDelimited), string(SerializationStyleDeepObject)} if !slices.Contains(allowedStyles, string(*p.Style)) { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("parameter.style must be one of [%s] for in=query", strings.Join(allowedStyles, ", ")), core, core.Style)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationAllowedValues, fmt.Errorf("parameter.style must be one of [`%s`] for in=query", strings.Join(allowedStyles, ", ")), core, core.Style)) } case ParameterInHeader: allowedStyles := []string{string(SerializationStyleSimple)} if !slices.Contains(allowedStyles, string(*p.Style)) { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("parameter.style must be one of [%s] for in=header", strings.Join(allowedStyles, ", ")), core, core.Style)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationAllowedValues, fmt.Errorf("parameter.style must be one of [`%s`] for in=header", strings.Join(allowedStyles, ", ")), core, core.Style)) } case ParameterInCookie: allowedStyles := []string{string(SerializationStyleForm)} if !slices.Contains(allowedStyles, string(*p.Style)) { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("parameter.style must be one of [%s] for in=cookie", strings.Join(allowedStyles, ", ")), core, core.Style)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationAllowedValues, fmt.Errorf("parameter.style must be one of [`%s`] for in=cookie", strings.Join(allowedStyles, ", ")), core, core.Style)) } } } @@ -264,7 +265,7 @@ func (p *Parameter) Validate(ctx 
context.Context, opts ...validation.Option) []e if core.Schema.Present { switch p.In { case ParameterInQueryString: - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("parameter field schema is not allowed for in=querystring"), core, core.Schema)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationAllowedValues, errors.New("`parameter.schema` is not allowed for `in=querystring`"), core, core.Schema)) default: errs = append(errs, p.Schema.Validate(ctx, opts...)...) } @@ -273,11 +274,11 @@ func (p *Parameter) Validate(ctx context.Context, opts ...validation.Option) []e if !core.Content.Present || p.Content == nil { // Querystring parameters must use content instead of schema if p.In == ParameterInQueryString { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("parameter field content is required for in=querystring"), core, core.Content)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("`parameter.content` is required for `in=querystring`"), core, core.Content)) } } else if p.Content.Len() != 1 { // If present, content must have exactly one entry - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("parameter field content must have exactly one entry"), core, core.Content)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationAllowedValues, errors.New("`parameter.content` must have exactly one entry"), core, core.Content)) } for mediaType, obj := range p.Content.All() { diff --git a/openapi/parameter_validate_test.go b/openapi/parameter_validate_test.go index 84a17768..729546f5 100644 --- a/openapi/parameter_validate_test.go +++ b/openapi/parameter_validate_test.go @@ -181,7 +181,7 @@ in: query schema: type: string `, - wantErrs: []string{"[2:1] parameter.name is missing"}, + wantErrs: []string{"[2:1] error validation-required-field `parameter.name` is required"}, }, { name: "empty name", @@ -191,7 +191,7 @@ in: query schema: type: string `, - wantErrs: []string{"[2:7] parameter.name is required"}, + wantErrs: []string{"[2:7] error validation-required-field `parameter.name` is required"}, }, { name: "missing in", @@ -200,7 +200,7 @@ name: test schema: type: string `, - wantErrs: []string{"[2:1] parameter.in is missing"}, + wantErrs: []string{"[2:1] error validation-required-field `parameter.in` is required"}, }, { name: "path parameter not required", @@ -211,7 +211,7 @@ required: false schema: type: string `, - wantErrs: []string{"[4:11] parameter.in=path requires required=true"}, + wantErrs: []string{"[4:11] error validation-required-field `parameter.in=path` requires `required=true`"}, }, { name: "invalid parameter location", @@ -221,7 +221,7 @@ in: invalid schema: type: string `, - wantErrs: []string{"[3:5] parameter.in must be one of [query, querystring, header, path, cookie]"}, + wantErrs: []string{"[3:5] error validation-allowed-values parameter.in must be one of [`query, querystring, header, path, cookie`]"}, }, { name: "multiple validation errors", @@ -231,8 +231,8 @@ in: path required: false `, wantErrs: []string{ - "[2:7] parameter.name is required", - "[4:11] parameter.in=path requires required=true", + "[2:7] error validation-required-field `parameter.name` is required", + "[4:11] error validation-required-field `parameter.in=path` requires `required=true`", }, }, { @@ -244,8 +244,8 @@ schema: type: object `, wantErrs: 
[]string{ - "parameter field schema is not allowed for in=querystring", - "parameter field content is required for in=querystring", + "error validation-allowed-values `parameter.schema` is not allowed for `in=querystring`", + "error validation-required-field `parameter.content` is required for `in=querystring`", }, }, { @@ -259,7 +259,7 @@ content: schema: type: object `, - wantErrs: []string{"parameter field style is not allowed for in=querystring"}, + wantErrs: []string{"error validation-allowed-values parameter field style is not allowed for in=querystring"}, }, { name: "querystring parameter missing content", @@ -268,7 +268,7 @@ name: filter in: querystring description: Missing content field `, - wantErrs: []string{"parameter field content is required for in=querystring"}, + wantErrs: []string{"error validation-required-field `parameter.content` is required for `in=querystring`"}, }, { name: "parameter with multiple content entries", @@ -283,7 +283,7 @@ content: schema: type: object `, - wantErrs: []string{"parameter field content must have exactly one entry"}, + wantErrs: []string{"error validation-allowed-values `parameter.content` must have exactly one entry"}, }, } diff --git a/openapi/paths.go b/openapi/paths.go index 1b597781..57fba06e 100644 --- a/openapi/paths.go +++ b/openapi/paths.go @@ -2,6 +2,8 @@ package openapi import ( "context" + "fmt" + "iter" "slices" "strings" @@ -41,6 +43,14 @@ func (p *Paths) Len() int { return p.Map.Len() } +// All returns an iterator over all path items in the paths map. nil safe. +func (p *Paths) All() iter.Seq2[string, *ReferencedPathItem] { + if p == nil { + return func(yield func(string, *ReferencedPathItem) bool) {} + } + return p.Map.All() +} + // GetExtensions returns the value of the Extensions field. Returns an empty extensions map if not set. func (p *Paths) GetExtensions() *extensions.Extensions { if p == nil || p.Extensions == nil { @@ -102,6 +112,10 @@ func (m HTTPMethod) Is(method string) bool { return strings.EqualFold(string(m), method) } +func (m HTTPMethod) String() string { + return string(m) +} + func IsStandardMethod(s string) bool { return slices.Contains(standardHttpMethods, HTTPMethod(s)) } @@ -316,20 +330,20 @@ func (p *PathItem) Validate(ctx context.Context, opts ...validation.Option) []er for methodName, op := range p.AdditionalOperations.All() { errs = append(errs, op.Validate(ctx, opts...)...) 
if IsStandardMethod(strings.ToLower(methodName)) { - errs = append(errs, validation.NewMapKeyError(validation.NewValueValidationError("method [%s] is a standard HTTP method and must be defined in its own field", methodName), core, core.AdditionalOperations, methodName)) + errs = append(errs, validation.NewMapKeyError(validation.SeverityError, validation.RuleValidationAllowedValues, fmt.Errorf("pathItem.additionalOperations method [%s] is a standardized HTTP method and must be defined in its own field", methodName), core, core.AdditionalOperations, methodName)) } } } for methodName := range p.Keys() { if !IsStandardMethod(strings.ToLower(string(methodName))) { - errs = append(errs, validation.NewMapKeyError(validation.NewValueValidationError("method [%s] is not a standard HTTP method and must be defined in the additionalOperations field", methodName), core, core, string(methodName))) + errs = append(errs, validation.NewMapKeyError(validation.SeverityError, validation.RuleValidationAllowedValues, fmt.Errorf("pathItem method [%s] is not a standardized HTTP method and must be listed under additionalOperations", methodName), core, core, methodName.String())) } } case !supportsAdditionalOperations: if core.AdditionalOperations.Present { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("additionalOperations is not supported in OpenAPI version %s", openapiVersion), core, core.AdditionalOperations)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationSupportedVersion, fmt.Errorf("pathItem.additionalOperations is not supported in OpenAPI version %s", openapiVersion), core, core.AdditionalOperations)) } } diff --git a/openapi/paths_validate_test.go b/openapi/paths_validate_test.go index 30461c26..3e51294e 100644 --- a/openapi/paths_validate_test.go +++ b/openapi/paths_validate_test.go @@ -309,7 +309,7 @@ get: '200': description: Successful response `, - wantErrs: []string{"[3:5] server.url is missing"}, + wantErrs: []string{"[3:5] error validation-required-field `server.url` is required"}, }, { name: "invalid_parameter", @@ -324,7 +324,7 @@ get: '200': description: Successful response `, - wantErrs: []string{"[3:5] parameter.name is missing"}, + wantErrs: []string{"[3:5] error validation-required-field `parameter.name` is required"}, }, { name: "unexpected_additional_operations", @@ -360,7 +360,7 @@ additionalOperations: description: Successful response x-test: some-value `, - wantErrs: []string{"method [GET] is a standard HTTP method and must be defined in its own field"}, + wantErrs: []string{"method [GET] is a standardized HTTP method and must be defined in its own field"}, }, { name: "invalid_openapi_version", @@ -384,7 +384,7 @@ copy: '201': description: Resource copied `, - wantErrs: []string{"method [copy] is not a standard HTTP method and must be defined in the additionalOperations field"}, + wantErrs: []string{"method [copy] is not a standardized HTTP method and must be listed under additionalOperations"}, }, } diff --git a/openapi/reference.go b/openapi/reference.go index c4b4c649..ec5d7c24 100644 --- a/openapi/reference.go +++ b/openapi/reference.go @@ -340,6 +340,29 @@ func (r *Reference[T, V, C]) GetDescription() string { return *r.Description } +// GetRootNode returns the root YAML node of the referenced object if it exists. +// Returns nil if the object is not resolved or doesn't have a root node. 
+func (r *Reference[T, V, C]) GetRootNode() *yaml.Node { + if r == nil { + return nil + } + + obj := r.GetObject() + if obj == nil { + return nil + } + + // Try to get the root node from the object via GetRootNode method + type nodeWithRootNode interface { + GetRootNode() *yaml.Node + } + if nodeWithRoot, ok := any(obj).(nodeWithRootNode); ok { + return nodeWithRoot.GetRootNode() + } + + return nil +} + // GetParent returns the immediate parent reference if this reference was resolved via a reference chain. // // Returns nil if: @@ -413,7 +436,7 @@ func (r *Reference[T, V, C]) Validate(ctx context.Context, opts ...validation.Op if core.Reference.Present { if err := r.Reference.Validate(); err != nil { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("reference.$ref is invalid: %s", err.Error()), core, core.Reference)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("reference.$ref is invalid: %w", err), core, core.Reference)) } } else if r.Object != nil { // Use the validator interface V to validate the object @@ -617,7 +640,7 @@ func resolveObjectWithTracking[T any, V interfaces.Validator[T], C marshaller.Co ref.ensureMutex() ref.cacheMutex.RLock() targetDoc := ref.referenceResolutionCache.ResolvedDocument - targetLoc := ref.referenceResolutionCache.AbsoluteReference + targetLoc := ref.referenceResolutionCache.AbsoluteDocumentPath ref.cacheMutex.RUnlock() opts.TargetDocument = targetDoc diff --git a/openapi/reference_validate_test.go b/openapi/reference_validate_test.go index 4468ff55..87e74cba 100644 --- a/openapi/reference_validate_test.go +++ b/openapi/reference_validate_test.go @@ -218,7 +218,7 @@ value: id: 123 externalValue: https://example.com/user.json `, - wantErrs: []string{"value and externalValue are mutually exclusive"}, + wantErrs: []string{"error validation-mutually-exclusive-fields example.value and example.externalValue are mutually exclusive"}, }, } diff --git a/openapi/requests.go b/openapi/requests.go index 07560005..9ab850f5 100644 --- a/openapi/requests.go +++ b/openapi/requests.go @@ -2,6 +2,7 @@ package openapi import ( "context" + "errors" "github.com/speakeasy-api/openapi/extensions" "github.com/speakeasy-api/openapi/internal/interfaces" @@ -57,7 +58,7 @@ func (r *RequestBody) Validate(ctx context.Context, opts ...validation.Option) [ errs := []error{} if core.Content.Present && r.Content.Len() == 0 { - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("requestBody.content is required"), core, core.Content)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("`requestBody.content` is required"), core, core.Content)) } for mediaType, content := range r.Content.All() { diff --git a/openapi/requests_validate_test.go b/openapi/requests_validate_test.go index 0a571639..79db033b 100644 --- a/openapi/requests_validate_test.go +++ b/openapi/requests_validate_test.go @@ -130,7 +130,7 @@ func TestRequestBody_Validate_Error(t *testing.T) { description: Request body without content required: true `, - wantErrs: []string{"[2:1] requestBody.content is missing"}, + wantErrs: []string{"[2:1] error validation-required-field `requestBody.content` is required"}, }, { name: "empty content", @@ -138,7 +138,7 @@ required: true content: {} description: Request body with empty content `, - wantErrs: []string{"[2:10] requestBody.content is required"}, + wantErrs: []string{"[2:10] 
error validation-required-field `requestBody.content` is required"}, }, { name: "invalid schema in content", @@ -150,8 +150,8 @@ content: description: Request body with invalid schema `, wantErrs: []string{ - "[5:13] schema.type value must be one of 'array', 'boolean', 'integer', 'null', 'number', 'object', 'string'", - "[5:13] schema.type expected array, got string", + "[5:13] error validation-invalid-schema schema.type value must be one of 'array', 'boolean', 'integer', 'null', 'number', 'object', 'string'", + "[5:13] error validation-type-mismatch schema.type expected `array`, got `string`", }, }, } diff --git a/openapi/responses.go b/openapi/responses.go index 00714624..970ce6cc 100644 --- a/openapi/responses.go +++ b/openapi/responses.go @@ -2,6 +2,7 @@ package openapi import ( "context" + "errors" "fmt" "github.com/speakeasy-api/openapi/extensions" @@ -114,7 +115,7 @@ func (r *Responses) Validate(ctx context.Context, opts ...validation.Option) []e } if r.Len() == 0 && r.Default == nil { - errs = append(errs, validation.NewValidationError(validation.NewValueValidationError("responses must have at least one response code"), core.RootNode)) + errs = append(errs, validation.NewValidationError(validation.SeverityError, validation.RuleValidationAllowedValues, errors.New("responses must have at least one response code"), core.RootNode)) } for _, response := range r.All() { @@ -191,7 +192,7 @@ func (r *Response) Validate(ctx context.Context, opts ...validation.Option) []er errs := []error{} if core.Description.Present && r.Description == "" { - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("response.description is required"), core, core.Description)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("`response.description` is required"), core, core.Description)) } for _, header := range r.GetHeaders().All() { diff --git a/openapi/responses_validate_test.go b/openapi/responses_validate_test.go index 2a44a5f9..c7ae35b2 100644 --- a/openapi/responses_validate_test.go +++ b/openapi/responses_validate_test.go @@ -148,7 +148,7 @@ content: schema: type: object `, - wantErrs: []string{"[2:1] response.description is missing"}, + wantErrs: []string{"[2:1] error validation-required-field `response.description` is required"}, }, { name: "empty description", @@ -159,7 +159,7 @@ content: schema: type: object `, - wantErrs: []string{"[2:14] response.description is required"}, + wantErrs: []string{"[2:14] error validation-required-field `response.description` is required"}, }, { name: "invalid schema in content", @@ -171,8 +171,8 @@ content: type: invalid-type `, wantErrs: []string{ - "[6:13] schema.type value must be one of 'array', 'boolean', 'integer', 'null', 'number', 'object', 'string'", - "[6:13] schema.type expected array, got string", + "[6:13] error validation-invalid-schema schema.type value must be one of 'array', 'boolean', 'integer', 'null', 'number', 'object', 'string'", + "[6:13] error validation-type-mismatch schema.type expected `array`, got `string`", }, }, } @@ -298,19 +298,19 @@ func TestResponses_Validate_Error(t *testing.T) { "404": description: Not found `, - wantErrs: []string{"description is required"}, + wantErrs: []string{"error validation-required-field `response.description` is required"}, }, { name: "no response codes", yml: ` x-test: some-value `, - wantErrs: []string{"responses must have at least one response code"}, + wantErrs: []string{"error validation-allowed-values 
responses must have at least one response code"}, }, { name: "empty responses object", yml: `{}`, - wantErrs: []string{"responses must have at least one response code"}, + wantErrs: []string{"error validation-allowed-values responses must have at least one response code"}, }, } diff --git a/openapi/sanitize.go b/openapi/sanitize.go index bf37826f..2aa6a985 100644 --- a/openapi/sanitize.go +++ b/openapi/sanitize.go @@ -606,10 +606,17 @@ func getCoreModelFromAny(model any) any { GetCoreAny() any } + var directCore any if coreModel, ok := model.(coreGetter); ok { - core := coreModel.GetCoreAny() - if core != nil { - return core + directCore = coreModel.GetCoreAny() + if directCore != nil { + if coreModeler, ok := directCore.(marshaller.CoreModeler); ok { + if len(coreModeler.GetUnknownProperties()) > 0 { + return directCore + } + } else { + return directCore + } } } @@ -622,7 +629,48 @@ func getCoreModelFromAny(model any) any { inner, err := navigable.GetNavigableNode() if err == nil && inner != nil { // Recursively try to get core from the inner value - return getCoreModelFromAny(inner) + if innerCore := getCoreModelFromAny(inner); innerCore != nil { + return innerCore + } + } + } + + return directCore +} + +// getRootNodeFromAny attempts to extract the root yaml.Node from various OpenAPI types. +// This is used for node-to-operation mapping during indexing. +func getRootNodeFromAny(model any) *yaml.Node { + if model == nil { + return nil + } + + // Try direct GetRootNode() + type rootNodeGetter interface { + GetRootNode() *yaml.Node + } + + if getter, ok := model.(rootNodeGetter); ok { + return getter.GetRootNode() + } + + // Try navigable node (for EitherValue wrappers) + type navigableNoder interface { + GetNavigableNode() (any, error) + } + + if navigable, ok := model.(navigableNoder); ok { + inner, err := navigable.GetNavigableNode() + if err == nil && inner != nil { + // Recursively try to get root node from the inner value + return getRootNodeFromAny(inner) + } + } + + // Try to get core model and extract root node from there + if core := getCoreModelFromAny(model); core != nil { + if getter, ok := core.(rootNodeGetter); ok { + return getter.GetRootNode() } } diff --git a/openapi/security.go b/openapi/security.go index 82d77151..eca14325 100644 --- a/openapi/security.go +++ b/openapi/security.go @@ -2,6 +2,7 @@ package openapi import ( "context" + "errors" "fmt" "net/url" "strings" @@ -169,47 +170,59 @@ func (s *SecurityScheme) Validate(ctx context.Context, opts ...validation.Option if core.Type.Present { if s.Type == "" { - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("securityScheme.type is required"), core, core.Type)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("`securityScheme.type` is required"), core, core.Type)) } else { switch s.Type { case SecuritySchemeTypeAPIKey: if !core.Name.Present || *s.Name == "" { - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("securityScheme.name is required for type=apiKey"), core, core.Name)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("`securityScheme.name` is required for type=apiKey"), core, core.Name)) } if !core.In.Present || *s.In == "" { - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("securityScheme.in is required for type=apiKey"), core, core.In)) + errs = append(errs, 
validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("`securityScheme.in` is required for type=apiKey"), core, core.In)) } else { switch *s.In { case SecuritySchemeInHeader: case SecuritySchemeInQuery: case SecuritySchemeInCookie: default: - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("securityScheme.in must be one of [%s] for type=apiKey", strings.Join([]string{string(SecuritySchemeInHeader), string(SecuritySchemeInQuery), string(SecuritySchemeInCookie)}, ", ")), core, core.In)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationAllowedValues, fmt.Errorf("securityScheme.in must be one of [`%s`] for type=apiKey", strings.Join([]string{string(SecuritySchemeInHeader), string(SecuritySchemeInQuery), string(SecuritySchemeInCookie)}, ", ")), core, core.In)) } } case SecuritySchemeTypeHTTP: if !core.Scheme.Present || *s.Scheme == "" { - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("securityScheme.scheme is required for type=http"), core, core.Scheme)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("`securityScheme.scheme` is required for type=http"), core, core.Scheme)) } case SecuritySchemeTypeMutualTLS: case SecuritySchemeTypeOAuth2: if !core.Flows.Present || s.Flows == nil { - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("securityScheme.flows is required for type=oauth2"), core, core.Flows)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("`securityScheme.flows` is required for type=oauth2"), core, core.Flows)) } else { errs = append(errs, s.Flows.Validate(ctx, opts...)...) 
} // Validate oauth2MetadataUrl if present if core.OAuth2MetadataUrl.Present && s.OAuth2MetadataUrl != nil && *s.OAuth2MetadataUrl != "" { if _, err := url.Parse(*s.OAuth2MetadataUrl); err != nil { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("securityScheme.oauth2MetadataUrl is not a valid uri: %s", err), core, core.OAuth2MetadataUrl)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("`securityScheme.oauth2MetadataUrl` is not a valid uri: %w", err), core, core.OAuth2MetadataUrl)) } } case SecuritySchemeTypeOpenIDConnect: if !core.OpenIdConnectUrl.Present || *s.OpenIdConnectUrl == "" { - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("securityScheme.openIdConnectUrl is required for type=openIdConnect"), core, core.OpenIdConnectUrl)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("`securityScheme.openIdConnectUrl` is required for type=openIdConnect"), core, core.OpenIdConnectUrl)) + } else { + if _, err := url.Parse(*s.OpenIdConnectUrl); err != nil { + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("`securityScheme.openIdConnectUrl` is not a valid uri: %w", err), core, core.OpenIdConnectUrl)) + } } default: - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("securityScheme.type must be one of [%s]", strings.Join([]string{string(SecuritySchemeTypeAPIKey), string(SecuritySchemeTypeHTTP), string(SecuritySchemeTypeMutualTLS), string(SecuritySchemeTypeOAuth2), string(SecuritySchemeTypeOpenIDConnect)}, ", ")), core, core.Type)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationAllowedValues, fmt.Errorf("securityScheme.type must be one of [`%s`]", strings.Join([]string{string(SecuritySchemeTypeAPIKey), string(SecuritySchemeTypeHTTP), string(SecuritySchemeTypeMutualTLS), string(SecuritySchemeTypeOAuth2), string(SecuritySchemeTypeOpenIDConnect)}, ", ")), core, core.Type)) + } + } + + // Check for unused properties based on security scheme type + if s.Type != "" { + unusedFields := getUnusedFields(s.Type.String(), core) + for _, field := range unusedFields { + errs = append(errs, field.error) } } } @@ -219,6 +232,115 @@ func (s *SecurityScheme) Validate(ctx context.Context, opts ...validation.Option return errs } +type unusedFieldError struct { + fieldName string + error error +} + +func getUnusedFields(schemeType string, coreScheme *core.SecurityScheme) []unusedFieldError { + var unused []unusedFieldError + + // Define which fields are invalid for each type + invalidFields := map[string]struct { + name bool + in bool + scheme bool + bearerFormat bool + flows bool + openIdConnectUrl bool + oauth2MetadataUrl bool + }{ + string(SecuritySchemeTypeAPIKey): { + scheme: true, + bearerFormat: true, + flows: true, + openIdConnectUrl: true, + oauth2MetadataUrl: true, + }, + string(SecuritySchemeTypeHTTP): { + name: true, + in: true, + flows: true, + openIdConnectUrl: true, + oauth2MetadataUrl: true, + }, + string(SecuritySchemeTypeMutualTLS): { + name: true, + in: true, + scheme: true, + bearerFormat: true, + flows: true, + openIdConnectUrl: true, + oauth2MetadataUrl: true, + }, + string(SecuritySchemeTypeOAuth2): { + name: true, + in: true, + scheme: true, + bearerFormat: true, + openIdConnectUrl: true, + }, + 
string(SecuritySchemeTypeOpenIDConnect): { + name: true, + in: true, + scheme: true, + bearerFormat: true, + flows: true, + oauth2MetadataUrl: true, + }, + } + + invalid, exists := invalidFields[schemeType] + if !exists { + return unused + } + + if invalid.name && coreScheme.Name.Present { + unused = append(unused, unusedFieldError{ + fieldName: "name", + error: validation.NewValueError(validation.SeverityWarning, validation.RuleValidationAllowedValues, errors.New("securityScheme.name is not used for type="+schemeType+" (only valid for type=apiKey)"), coreScheme, coreScheme.Name), + }) + } + if invalid.in && coreScheme.In.Present { + unused = append(unused, unusedFieldError{ + fieldName: "in", + error: validation.NewValueError(validation.SeverityWarning, validation.RuleValidationAllowedValues, errors.New("securityScheme.in is not used for type="+schemeType+" (only valid for type=apiKey)"), coreScheme, coreScheme.In), + }) + } + if invalid.scheme && coreScheme.Scheme.Present { + unused = append(unused, unusedFieldError{ + fieldName: "scheme", + error: validation.NewValueError(validation.SeverityWarning, validation.RuleValidationAllowedValues, errors.New("securityScheme.scheme is not used for type="+schemeType+" (only valid for type=http)"), coreScheme, coreScheme.Scheme), + }) + } + if invalid.bearerFormat && coreScheme.BearerFormat.Present { + unused = append(unused, unusedFieldError{ + fieldName: "bearerFormat", + error: validation.NewValueError(validation.SeverityWarning, validation.RuleValidationAllowedValues, errors.New("securityScheme.bearerFormat is not used for type="+schemeType+" (only valid for type=http)"), coreScheme, coreScheme.BearerFormat), + }) + } + if invalid.flows && coreScheme.Flows.Present { + unused = append(unused, unusedFieldError{ + fieldName: "flows", + error: validation.NewValueError(validation.SeverityWarning, validation.RuleValidationAllowedValues, errors.New("securityScheme.flows is not used for type="+schemeType+" (only valid for type=oauth2)"), coreScheme, coreScheme.Flows), + }) + } + if invalid.openIdConnectUrl && coreScheme.OpenIdConnectUrl.Present { + unused = append(unused, unusedFieldError{ + fieldName: "openIdConnectUrl", + error: validation.NewValueError(validation.SeverityWarning, validation.RuleValidationAllowedValues, errors.New("securityScheme.openIdConnectUrl is not used for type="+schemeType+" (only valid for type=openIdConnect)"), coreScheme, coreScheme.OpenIdConnectUrl), + }) + } + if invalid.oauth2MetadataUrl && coreScheme.OAuth2MetadataUrl.Present { + unused = append(unused, unusedFieldError{ + fieldName: "oauth2MetadataUrl", + error: validation.NewValueError(validation.SeverityWarning, validation.RuleValidationAllowedValues, errors.New("securityScheme.oauth2MetadataUrl is not used for type="+schemeType+" (only valid for type=oauth2)"), coreScheme, coreScheme.OAuth2MetadataUrl), + }) + } + + return unused +} + // SecurityRequirement represents a security requirement for an API or operation. // Each name in the map represents a security scheme that can be used to secure the API or operation. // If the security scheme is of type "oauth2" or "openIdConnect", then the value is a list of scope names required by the operation. 
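Illustrative only, not part of the change set: a minimal sketch of how the new unused-property warnings surface to a caller, mirroring the pattern used in the TestSecurityScheme_Validate_UnusedProperties table further down. The import paths for the openapi, marshaller, and validation packages are assumed from the module layout.

package main

import (
	"bytes"
	"context"
	"errors"
	"fmt"

	"github.com/speakeasy-api/openapi/marshaller" // assumed import path
	"github.com/speakeasy-api/openapi/openapi"    // assumed import path
	"github.com/speakeasy-api/openapi/validation" // assumed import path
)

func main() {
	ctx := context.Background()

	// An http scheme carrying fields that are only meaningful for apiKey schemes.
	yml := `
type: http
scheme: bearer
name: X-API-Key
in: header
`

	var scheme openapi.SecurityScheme
	unmarshalErrs, err := marshaller.Unmarshal(ctx, bytes.NewBufferString(yml), &scheme)
	if err != nil {
		panic(err)
	}

	// With this change the unused name/in fields are reported as warnings rather
	// than hard errors, so callers can decide how strict to be.
	for _, e := range append(unmarshalErrs, scheme.Validate(ctx)...) {
		var verr *validation.Error
		if errors.As(e, &verr) && verr.Severity == validation.SeverityWarning {
			fmt.Println("warning:", e)
		} else {
			fmt.Println("error:", e)
		}
	}
}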
@@ -302,7 +424,7 @@ func (s *SecurityRequirement) Validate(ctx context.Context, opts ...validation.O } // Not found as component name and not a valid URI - errs = append(errs, validation.NewMapKeyError(validation.NewValueValidationError("securityRequirement scheme %s is not defined in components.securitySchemes and is not a valid URI reference", securityScheme), core, core, securityScheme)) + errs = append(errs, validation.NewMapKeyError(validation.SeverityError, validation.RuleValidationSchemeNotFound, fmt.Errorf("securityRequirement scheme %s is not defined in components.securitySchemes and is not a valid URI reference", securityScheme), core, core, securityScheme)) } s.Valid = len(errs) == 0 && core.GetValid() @@ -498,68 +620,68 @@ func (o *OAuthFlow) Validate(ctx context.Context, opts ...validation.Option) []e switch *oAuthFlowType { case OAuthFlowTypeImplicit: if !core.AuthorizationURL.Present || *o.AuthorizationURL == "" { - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("oAuthFlow.authorizationUrl is required for type=implicit"), core, core.AuthorizationURL)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("oAuthFlow.authorizationUrl is required for type=implicit"), core, core.AuthorizationURL)) } else { if _, err := url.Parse(*o.AuthorizationURL); err != nil { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("oAuthFlow.authorizationUrl is not a valid uri: %s", err), core, core.AuthorizationURL)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("oAuthFlow.authorizationUrl is not a valid uri: %w", err), core, core.AuthorizationURL)) } } case OAuthFlowTypePassword: if !core.TokenURL.Present || *o.TokenURL == "" { - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("oAuthFlow.tokenUrl is required for type=password"), core, core.TokenURL)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("oAuthFlow.tokenUrl is required for type=password"), core, core.TokenURL)) } else { if _, err := url.Parse(*o.TokenURL); err != nil { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("oAuthFlow.tokenUrl is not a valid uri: %s", err), core, core.TokenURL)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("oAuthFlow.tokenUrl is not a valid uri: %w", err), core, core.TokenURL)) } } case OAuthFlowTypeClientCredentials: if !core.TokenURL.Present || *o.TokenURL == "" { - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("oAuthFlow.tokenUrl is required for type=clientCredentials"), core, core.TokenURL)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("oAuthFlow.tokenUrl is required for type=clientCredentials"), core, core.TokenURL)) } else { if _, err := url.Parse(*o.TokenURL); err != nil { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("oAuthFlow.tokenUrl is not a valid uri: %s", err), core, core.TokenURL)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("oAuthFlow.tokenUrl is not a valid uri: %w", err), core, core.TokenURL)) } } case OAuthFlowTypeAuthorizationCode: if 
!core.AuthorizationURL.Present || *o.AuthorizationURL == "" { - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("oAuthFlow.authorizationUrl is required for type=authorizationCode"), core, core.AuthorizationURL)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("oAuthFlow.authorizationUrl is required for type=authorizationCode"), core, core.AuthorizationURL)) } else { if _, err := url.Parse(*o.AuthorizationURL); err != nil { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("oAuthFlow.authorizationUrl is not a valid uri: %s", err), core, core.AuthorizationURL)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("oAuthFlow.authorizationUrl is not a valid uri: %w", err), core, core.AuthorizationURL)) } } if !core.TokenURL.Present || *o.TokenURL == "" { - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("oAuthFlow.tokenUrl is required for type=authorizationCode"), core, core.TokenURL)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("oAuthFlow.tokenUrl is required for type=authorizationCode"), core, core.TokenURL)) } else { if _, err := url.Parse(*o.TokenURL); err != nil { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("oAuthFlow.tokenUrl is not a valid uri: %s", err), core, core.TokenURL)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("oAuthFlow.tokenUrl is not a valid uri: %w", err), core, core.TokenURL)) } } case OAuthFlowTypeDeviceAuthorization: if !core.DeviceAuthorizationURL.Present || *o.DeviceAuthorizationURL == "" { - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("oAuthFlow.deviceAuthorizationUrl is required for type=deviceAuthorization"), core, core.DeviceAuthorizationURL)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("oAuthFlow.deviceAuthorizationUrl is required for type=deviceAuthorization"), core, core.DeviceAuthorizationURL)) } else { if _, err := url.Parse(*o.DeviceAuthorizationURL); err != nil { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("oAuthFlow.deviceAuthorizationUrl is not a valid uri: %s", err), core, core.DeviceAuthorizationURL)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("oAuthFlow.deviceAuthorizationUrl is not a valid uri: %w", err), core, core.DeviceAuthorizationURL)) } } if !core.TokenURL.Present || *o.TokenURL == "" { - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("oAuthFlow.tokenUrl is required for type=deviceAuthorization"), core, core.TokenURL)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("oAuthFlow.tokenUrl is required for type=deviceAuthorization"), core, core.TokenURL)) } else { if _, err := url.Parse(*o.TokenURL); err != nil { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("oAuthFlow.tokenUrl is not a valid uri: %s", err), core, core.TokenURL)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidFormat, 
fmt.Errorf("oAuthFlow.tokenUrl is not a valid uri: %w", err), core, core.TokenURL)) } } } if core.RefreshURL.Present { if _, err := url.Parse(*o.RefreshURL); err != nil { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("oAuthFlow.refreshUrl is not a valid uri: %s", err), core, core.RefreshURL)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("oAuthFlow.refreshUrl is not a valid uri: %w", err), core, core.RefreshURL)) } } if !core.Scopes.Present { - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("oAuthFlow.scopes is required (empty map is allowed)"), core, core.Scopes)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("oAuthFlow.scopes is required (empty map is allowed)"), core, core.Scopes)) } o.Valid = len(errs) == 0 && core.GetValid() diff --git a/openapi/security_validate_test.go b/openapi/security_validate_test.go index 306ae1f6..ac82a99e 100644 --- a/openapi/security_validate_test.go +++ b/openapi/security_validate_test.go @@ -2,6 +2,7 @@ package openapi_test import ( "bytes" + "errors" "strings" "testing" @@ -160,7 +161,7 @@ func TestSecurityScheme_Validate_Error(t *testing.T) { yml: ` description: Some security scheme `, - wantErrs: []string{"[2:1] securityScheme.type is missing"}, + wantErrs: []string{"[2:1] error validation-required-field `securityScheme.type` is required"}, }, { name: "invalid_type", @@ -175,7 +176,7 @@ type: invalid type: apiKey in: header `, - wantErrs: []string{"name is required for type=apiKey"}, + wantErrs: []string{"`securityScheme.name` is required for type=apiKey"}, }, { name: "api_key_missing_in", @@ -183,7 +184,7 @@ in: header type: apiKey name: X-API-Key `, - wantErrs: []string{"in is required for type=apiKey"}, + wantErrs: []string{"`securityScheme.in` is required for type=apiKey"}, }, { name: "api_key_invalid_in", @@ -192,28 +193,28 @@ type: apiKey name: X-API-Key in: invalid `, - wantErrs: []string{"in must be one of"}, + wantErrs: []string{"securityScheme.in must be one of"}, }, { name: "http_missing_scheme", yml: ` type: http `, - wantErrs: []string{"scheme is required for type=http"}, + wantErrs: []string{"`securityScheme.scheme` is required for type=http"}, }, { name: "oauth2_missing_flows", yml: ` type: oauth2 `, - wantErrs: []string{"flows is required for type=oauth2"}, + wantErrs: []string{"`securityScheme.flows` is required for type=oauth2"}, }, { name: "openid_missing_url", yml: ` type: openIdConnect `, - wantErrs: []string{"openIdConnectUrl is required for type=openIdConnect"}, + wantErrs: []string{"`securityScheme.openIdConnectUrl` is required for type=openIdConnect"}, }, { name: "oauth2_invalid_metadata_url", @@ -227,7 +228,27 @@ flows: read: Read access oauth2MetadataUrl: ://invalid-url `, - wantErrs: []string{"oauth2MetadataUrl is not a valid uri"}, + wantErrs: []string{"`securityScheme.oauth2MetadataUrl` is not a valid uri"}, + }, + { + name: "oauth2_flow_invalid_authorization_url", + yml: ` +type: oauth2 +flows: + implicit: + authorizationUrl: http:// blah. + scopes: + read: Read access +`, + wantErrs: []string{"oAuthFlow.authorizationUrl is not a valid uri"}, + }, + { + name: "openid_invalid_url", + yml: ` +type: openIdConnect +openIdConnectUrl: http:// blah. 
+`, + wantErrs: []string{"`securityScheme.openIdConnectUrl` is not a valid uri"}, }, } @@ -263,6 +284,191 @@ oauth2MetadataUrl: ://invalid-url } } +func TestSecurityScheme_Validate_UnusedProperties(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + yml string + wantWarnings []string + }{ + { + name: "http_with_in_property", + yml: ` +type: http +scheme: bearer +in: header +`, + wantWarnings: []string{"in is not used for type=http"}, + }, + { + name: "http_with_name_property", + yml: ` +type: http +scheme: bearer +name: X-API-Key +`, + wantWarnings: []string{"name is not used for type=http"}, + }, + { + name: "http_with_flows_property", + yml: ` +type: http +scheme: bearer +flows: + implicit: + authorizationUrl: https://example.com/oauth/authorize + scopes: {} +`, + wantWarnings: []string{"flows is not used for type=http"}, + }, + { + name: "apiKey_with_scheme_property", + yml: ` +type: apiKey +name: X-API-Key +in: header +scheme: bearer +`, + wantWarnings: []string{"scheme is not used for type=apiKey"}, + }, + { + name: "apiKey_with_bearerFormat_property", + yml: ` +type: apiKey +name: X-API-Key +in: header +bearerFormat: JWT +`, + wantWarnings: []string{"bearerFormat is not used for type=apiKey"}, + }, + { + name: "apiKey_with_flows_property", + yml: ` +type: apiKey +name: X-API-Key +in: header +flows: + implicit: + authorizationUrl: https://example.com/oauth/authorize + scopes: {} +`, + wantWarnings: []string{"flows is not used for type=apiKey"}, + }, + { + name: "mutualTLS_with_scheme_property", + yml: ` +type: mutualTLS +scheme: bearer +`, + wantWarnings: []string{"scheme is not used for type=mutualTLS"}, + }, + { + name: "mutualTLS_with_name_and_in_properties", + yml: ` +type: mutualTLS +name: X-API-Key +in: header +`, + wantWarnings: []string{ + "name is not used for type=mutualTLS", + "in is not used for type=mutualTLS", + }, + }, + { + name: "oauth2_with_scheme_property", + yml: ` +type: oauth2 +flows: + authorizationCode: + authorizationUrl: https://example.com/oauth/authorize + tokenUrl: https://example.com/oauth/token + scopes: {} +scheme: bearer +`, + wantWarnings: []string{"scheme is not used for type=oauth2"}, + }, + { + name: "oauth2_with_name_and_in_properties", + yml: ` +type: oauth2 +flows: + authorizationCode: + authorizationUrl: https://example.com/oauth/authorize + tokenUrl: https://example.com/oauth/token + scopes: {} +name: X-API-Key +in: header +`, + wantWarnings: []string{ + "name is not used for type=oauth2", + "in is not used for type=oauth2", + }, + }, + { + name: "openIdConnect_with_scheme_property", + yml: ` +type: openIdConnect +openIdConnectUrl: https://example.com/.well-known/openid-configuration +scheme: bearer +`, + wantWarnings: []string{"scheme is not used for type=openIdConnect"}, + }, + { + name: "openIdConnect_with_flows_property", + yml: ` +type: openIdConnect +openIdConnectUrl: https://example.com/.well-known/openid-configuration +flows: + implicit: + authorizationUrl: https://example.com/oauth/authorize + scopes: {} +`, + wantWarnings: []string{"flows is not used for type=openIdConnect"}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + var securityScheme openapi.SecurityScheme + + validationErrs, err := marshaller.Unmarshal(t.Context(), bytes.NewBufferString(tt.yml), &securityScheme) + require.NoError(t, err) + + errs := securityScheme.Validate(t.Context()) + + // Combine unmarshalling and validation errors + validationErrs = append(validationErrs, errs...) 
+ + // Extract warnings (severity = warning) + var warnings []error + for _, e := range validationErrs { + var verr *validation.Error + if errors.As(e, &verr) && verr.Severity == validation.SeverityWarning { + warnings = append(warnings, e) + } + } + + require.NotEmpty(t, warnings, "Expected validation warnings") + require.Len(t, warnings, len(tt.wantWarnings), "Expected %d warnings, got %d: %v", len(tt.wantWarnings), len(warnings), warnings) + + // Check that all expected warnings are present + for _, wantWarning := range tt.wantWarnings { + found := false + for _, gotWarning := range warnings { + if gotWarning != nil && strings.Contains(gotWarning.Error(), wantWarning) { + found = true + break + } + } + require.True(t, found, "Expected warning containing '%s' not found in: %v", wantWarning, warnings) + } + }) + } +} + func TestSecurityRequirement_Validate_Success(t *testing.T) { t.Parallel() diff --git a/openapi/server.go b/openapi/server.go index 83789bd6..f71f644d 100644 --- a/openapi/server.go +++ b/openapi/server.go @@ -87,16 +87,23 @@ func (s *Server) Validate(ctx context.Context, opts ...validation.Option) []erro if core.URL.Present { switch { case s.URL == "": - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("server.url is required"), core, core.URL)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("`server.url` is required"), core, core.URL)) case !strings.Contains(s.URL, "{"): if _, err := url.Parse(s.URL); err != nil { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("server.url is not a valid uri: %s", err), core, core.URL)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("`server.url` is not a valid uri: %w", err), core, core.URL)) } default: - if resolvedURL, err := resolveServerVariables(s.URL, s.Variables); err != nil { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("server.url is not a valid uri: %s", err), core, core.URL)) + resolvedURL, resolveErrs := resolveServerVariables(s.URL, s.Variables) + if len(resolveErrs) > 0 { + for _, resolveErr := range resolveErrs { + err := resolveErr + if err == nil { + continue + } + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, err, core, core.URL)) + } } else if _, err := url.Parse(resolvedURL); err != nil { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("server.url is not a valid uri: %s", err), core, core.URL)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("`server.url` is not a valid uri: %w", err), core, core.URL)) } } } @@ -157,12 +164,12 @@ func (v *ServerVariable) Validate(ctx context.Context, opts ...validation.Option errs := []error{} if core.Default.Present && v.Default == "" { - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("serverVariable.default is required"), core, core.Default)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("`serverVariable.default` is required"), core, core.Default)) } if core.Enum.Present { if !slices.Contains(v.Enum, v.Default) { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("serverVariable.default must be one of [%s]", 
strings.Join(v.Enum, ", ")), core, core.Enum)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationAllowedValues, fmt.Errorf("serverVariable.default must be one of [`%s`]", strings.Join(v.Enum, ", ")), core, core.Default)) } } @@ -171,13 +178,15 @@ func (v *ServerVariable) Validate(ctx context.Context, opts ...validation.Option return errs } -func resolveServerVariables(serverURL string, variables *sequencedmap.Map[string, *ServerVariable]) (string, error) { +func resolveServerVariables(serverURL string, variables *sequencedmap.Map[string, *ServerVariable]) (string, []error) { if variables.Len() == 0 { - return "", errors.New("serverURL contains variables but no variables are defined") + return "", []error{errors.New("serverURL contains variables but no variables are defined")} } resolvedURL := serverURL + var resolveErrs []error + matches := variablePattern.FindAllStringSubmatch(serverURL, -1) for _, match := range matches { if len(match) < 2 { @@ -189,15 +198,39 @@ func resolveServerVariables(serverURL string, variables *sequencedmap.Map[string variable, exists := variables.Get(variableName) if !exists { - return "", fmt.Errorf("server variable '%s' is not defined", variableName) + err := fmt.Errorf("server variable `%s` is not defined%s", formatServerVariableName(variableName), doubleCurlyBraceHint(serverURL)) + resolveErrs = append(resolveErrs, err) + continue } if variable.Default == "" { - return "", fmt.Errorf("server variable '%s' has no default value", variableName) + err := fmt.Errorf("server variable `%s` has no default value", formatServerVariableName(variableName)) + resolveErrs = append(resolveErrs, err) + continue } resolvedURL = strings.ReplaceAll(resolvedURL, placeholder, variable.Default) } + if len(resolveErrs) > 0 { + return "", resolveErrs + } + return resolvedURL, nil } + +func formatServerVariableName(variableName string) string { + if strings.HasPrefix(variableName, "{") { + return "{" + strings.TrimPrefix(variableName, "{") + "}" + } + + return variableName +} + +func doubleCurlyBraceHint(serverURL string) string { + if strings.Contains(serverURL, "{{") || strings.Contains(serverURL, "}}") { + return ". 
Use single curly braces for variable substitution" + } + + return "" +} diff --git a/openapi/server_test.go b/openapi/server_test.go index 2b927ce5..5d4eaa2a 100644 --- a/openapi/server_test.go +++ b/openapi/server_test.go @@ -1,6 +1,7 @@ package openapi import ( + "strings" "testing" "github.com/speakeasy-api/openapi/sequencedmap" @@ -103,8 +104,8 @@ func Test_resolveServerVariables_Success(t *testing.T) { t.Run(tt.name, func(t *testing.T) { t.Parallel() - result, err := resolveServerVariables(tt.args.serverURL, tt.args.variables) - require.NoError(t, err) + result, errs := resolveServerVariables(tt.args.serverURL, tt.args.variables) + require.Empty(t, errs, "expected no errors") assert.Equal(t, tt.expected, result) }) } @@ -118,9 +119,9 @@ func Test_resolveServerVariables_Error(t *testing.T) { variables *sequencedmap.Map[string, *ServerVariable] } tests := []struct { - name string - args args - expectedErr string + name string + args args + expectedErrs []string }{ { name: "no variables defined", @@ -128,7 +129,7 @@ func Test_resolveServerVariables_Error(t *testing.T) { serverURL: "https://{host}/api", variables: sequencedmap.New[string, *ServerVariable](), }, - expectedErr: "serverURL contains variables but no variables are defined", + expectedErrs: []string{"serverURL contains variables but no variables are defined"}, }, { name: "undefined variable", @@ -140,7 +141,7 @@ func Test_resolveServerVariables_Error(t *testing.T) { return vars }(), }, - expectedErr: "server variable 'host' is not defined", + expectedErrs: []string{"server variable `host` is not defined"}, }, { name: "variable with empty default", @@ -152,7 +153,7 @@ func Test_resolveServerVariables_Error(t *testing.T) { return vars }(), }, - expectedErr: "server variable 'host' has no default value", + expectedErrs: []string{"server variable `host` has no default value"}, }, { name: "multiple variables with one undefined", @@ -164,7 +165,7 @@ func Test_resolveServerVariables_Error(t *testing.T) { return vars }(), }, - expectedErr: "server variable 'port' is not defined", + expectedErrs: []string{"server variable `port` is not defined"}, }, { name: "multiple variables with one having empty default", @@ -177,7 +178,7 @@ func Test_resolveServerVariables_Error(t *testing.T) { return vars }(), }, - expectedErr: "server variable 'port' has no default value", + expectedErrs: []string{"server variable `port` has no default value"}, }, { name: "malformed nested brackets creates invalid variable name", @@ -189,7 +190,23 @@ func Test_resolveServerVariables_Error(t *testing.T) { return vars }(), }, - expectedErr: "server variable 'incomplete/path/{host' is not defined", + expectedErrs: []string{"server variable `incomplete/path/{host` is not defined"}, + }, + { + name: "double curly braces produces multiple errors", + args: args{ + serverURL: "https://{{host}}{{port}}/api", + variables: func() *sequencedmap.Map[string, *ServerVariable] { + vars := sequencedmap.New[string, *ServerVariable]() + vars.Set("host", &ServerVariable{Default: "api.example.com"}) + vars.Set("port", &ServerVariable{Default: "8080"}) + return vars + }(), + }, + expectedErrs: []string{ + "server variable `{host}` is not defined. Use single curly braces for variable substitution", + "server variable `{port}` is not defined. 
Use single curly braces for variable substitution", + }, }, } @@ -197,10 +214,27 @@ func Test_resolveServerVariables_Error(t *testing.T) { t.Run(tt.name, func(t *testing.T) { t.Parallel() - result, err := resolveServerVariables(tt.args.serverURL, tt.args.variables) - require.Error(t, err) + result, errs := resolveServerVariables(tt.args.serverURL, tt.args.variables) + require.NotEmpty(t, errs, "expected errors") assert.Empty(t, result) - assert.Contains(t, err.Error(), tt.expectedErr) + for _, expectedErr := range tt.expectedErrs { + assertErrorContains(t, errs, expectedErr) + } }) } } + +func assertErrorContains(t *testing.T, errs []error, expected string) { + t.Helper() + + for _, err := range errs { + if err == nil { + continue + } + if strings.Contains(err.Error(), expected) { + return + } + } + + assert.Fail(t, "expected error not found", "expected %q in errors: %v", expected, errs) +} diff --git a/openapi/server_validate_test.go b/openapi/server_validate_test.go index 6ed72673..27dcf557 100644 --- a/openapi/server_validate_test.go +++ b/openapi/server_validate_test.go @@ -109,7 +109,7 @@ func TestServer_Validate_Error(t *testing.T) { yml: ` description: Server without URL `, - wantErrs: []string{"[2:1] server.url is missing"}, + wantErrs: []string{"[2:1] error validation-required-field `server.url` is required"}, }, { name: "empty URL", @@ -117,7 +117,7 @@ description: Server without URL url: "" description: Server with empty URL `, - wantErrs: []string{"[2:6] server.url is required"}, + wantErrs: []string{"[2:6] error validation-required-field `server.url` is required"}, }, { name: "variable without default value", @@ -127,7 +127,7 @@ variables: environment: description: Environment name `, - wantErrs: []string{"[5:5] serverVariable.default is missing"}, + wantErrs: []string{"[5:5] error validation-required-field `serverVariable.default` is required"}, }, { name: "variable with empty default", @@ -138,7 +138,7 @@ variables: default: "" description: Environment name `, - wantErrs: []string{"[5:14] serverVariable.default is required"}, + wantErrs: []string{"[5:14] error validation-required-field `serverVariable.default` is required"}, }, { name: "variable with invalid enum value", @@ -152,7 +152,7 @@ variables: - development description: Environment name `, - wantErrs: []string{"[7:7] serverVariable.default must be one of [staging, development]"}, + wantErrs: []string{"[5:14] error validation-allowed-values serverVariable.default must be one of [`staging, development`]"}, }, { name: "multiple validation errors", @@ -164,8 +164,47 @@ variables: description: Environment name `, wantErrs: []string{ - "[2:6] server.url is required", - "[5:14] serverVariable.default is required", + "[2:6] error validation-required-field `server.url` is required", + "[5:14] error validation-required-field `serverVariable.default` is required", + }, + }, + { + name: "double curly braces variable", + yml: ` +url: http://{{hostname}}:8080 +variables: + hostname: + default: api +`, + wantErrs: []string{ + "error validation-invalid-syntax server variable `{hostname}` is not defined. Use single curly braces for variable substitution", + }, + }, + { + name: "double curly braces multiple variables", + yml: ` +url: http://{{hostname}}{{port}} +variables: + hostname: + default: api + port: + default: "8080" +`, + wantErrs: []string{ + "error validation-invalid-syntax server variable `{hostname}` is not defined. 
Use single curly braces for variable substitution", + "error validation-invalid-syntax server variable `{port}` is not defined. Use single curly braces for variable substitution", + }, + }, + { + name: "missing variable with single braces", + yml: ` +url: http://{hostname}:8080 +variables: + port: + default: "8080" +`, + wantErrs: []string{ + "error validation-invalid-syntax server variable `hostname` is not defined", }, }, } @@ -280,7 +319,7 @@ func TestServerVariable_Validate_Error(t *testing.T) { yml: ` description: Variable without default `, - wantErrs: []string{"[2:1] serverVariable.default is missing"}, + wantErrs: []string{"[2:1] error validation-required-field `serverVariable.default` is required"}, }, { name: "empty default", @@ -288,7 +327,7 @@ description: Variable without default default: "" description: Variable with empty default `, - wantErrs: []string{"[2:10] serverVariable.default is required"}, + wantErrs: []string{"[2:10] error validation-required-field `serverVariable.default` is required"}, }, { name: "default not in enum", @@ -299,7 +338,7 @@ enum: - valid2 description: Variable with invalid default `, - wantErrs: []string{"[4:3] serverVariable.default must be one of [valid1, valid2]"}, + wantErrs: []string{"[2:10] error validation-allowed-values serverVariable.default must be one of [`valid1, valid2`]"}, }, } diff --git a/openapi/tag.go b/openapi/tag.go index bba656f7..4c851abc 100644 --- a/openapi/tag.go +++ b/openapi/tag.go @@ -2,6 +2,8 @@ package openapi import ( "context" + "errors" + "fmt" "github.com/speakeasy-api/openapi/extensions" "github.com/speakeasy-api/openapi/internal/interfaces" @@ -96,7 +98,7 @@ func (t *Tag) Validate(ctx context.Context, opts ...validation.Option) []error { errs := []error{} if core.Name.Present && t.Name == "" { - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("tag.name is required"), core, core.Name)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("`tag.name` is required"), core, core.Name)) } if t.ExternalDocs != nil { @@ -121,15 +123,15 @@ func (t *Tag) Validate(ctx context.Context, opts ...validation.Option) []error { } if !parentExists { - errs = append(errs, validation.NewValueError( - validation.NewMissingValueError("parent tag '%s' does not exist", *t.Parent), + errs = append(errs, validation.NewValueError(validation.SeverityWarning, validation.RuleValidationTagNotFound, + fmt.Errorf("parent tag `%s` does not exist", *t.Parent), core, core.Parent)) } // Check for circular references if t.hasCircularParentReference(allTags, make(map[string]bool)) { - errs = append(errs, validation.NewValueError( - validation.NewValueValidationError("circular parent reference detected for tag '%s'", t.Name), + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationCircularReference, + fmt.Errorf("circular parent reference detected for tag `%s`", t.Name), core, core.Parent)) } } diff --git a/openapi/tag_validate_test.go b/openapi/tag_validate_test.go index 95a5a22d..a24e6319 100644 --- a/openapi/tag_validate_test.go +++ b/openapi/tag_validate_test.go @@ -117,7 +117,7 @@ func TestTag_Validate_Error(t *testing.T) { yml: ` description: A tag without name `, - wantErrs: []string{"[2:1] tag.name is missing"}, + wantErrs: []string{"[2:1] error validation-required-field `tag.name` is required"}, }, { name: "empty name", @@ -125,7 +125,7 @@ description: A tag without name name: "" description: A tag with 
empty name `, - wantErrs: []string{"[2:7] tag.name is required"}, + wantErrs: []string{"[2:7] error validation-required-field `tag.name` is required"}, }, { name: "invalid external docs URL", @@ -134,7 +134,7 @@ name: test externalDocs: url: ":invalid" `, - wantErrs: []string{"[4:8] externalDocumentation.url is not a valid uri: parse \":invalid\": missing protocol scheme"}, + wantErrs: []string{"[4:8] warning validation-invalid-format externalDocumentation.url is not a valid uri: parse \":invalid\": missing protocol scheme"}, }, { name: "external docs without URL", @@ -143,7 +143,7 @@ name: test externalDocs: description: Documentation without URL `, - wantErrs: []string{"[4:3] externalDocumentation.url is missing"}, + wantErrs: []string{"[4:3] error validation-required-field `externalDocumentation.url` is required"}, }, { name: "multiple validation errors", @@ -153,8 +153,8 @@ externalDocs: url: ":invalid" `, wantErrs: []string{ - "[2:7] tag.name is required", - "[4:8] externalDocumentation.url is not a valid uri: parse \":invalid\": missing protocol scheme", + "[2:7] error validation-required-field `tag.name` is required", + "[4:8] warning validation-invalid-format externalDocumentation.url is not a valid uri: parse \":invalid\": missing protocol scheme", }, }, } @@ -247,7 +247,7 @@ func TestTag_ValidateWithTags_ParentNotFound_Error(t *testing.T) { found := false for _, err := range errs { - if strings.Contains(err.Error(), "parent tag 'nonexistent' does not exist") { + if strings.Contains(err.Error(), "parent tag `nonexistent` does not exist") { found = true break } diff --git a/openapi/testdata/bundle/issue50/expected.yaml b/openapi/testdata/bundle/issue50/expected.yaml index 1ed26581..e3dc9d51 100644 --- a/openapi/testdata/bundle/issue50/expected.yaml +++ b/openapi/testdata/bundle/issue50/expected.yaml @@ -28,7 +28,7 @@ paths: "500": $ref: "#/components/responses/internal-server-error" parameters: - - $ref: "#/components/parameters/X-Idempotency-Key" + - $ref: '#/components/parameters/X-Idempotency-Key' components: securitySchemes: bearerAuth: diff --git a/openapi/testdata/inline/bundled_counter_expected.yaml b/openapi/testdata/inline/bundled_counter_expected.yaml index cf9b92a6..35a7c435 100644 --- a/openapi/testdata/inline/bundled_counter_expected.yaml +++ b/openapi/testdata/inline/bundled_counter_expected.yaml @@ -108,7 +108,7 @@ paths: tags: - posts parameters: - - $ref: "#/components/parameters/DestinationParam" + - $ref: '#/components/parameters/DestinationParam' requestBody: $ref: "#/components/requestBodies/CopyRequest" responses: @@ -300,8 +300,8 @@ paths: tags: [external] summary: Test external parameter with complex reference chain parameters: - - $ref: "#/components/parameters/ComplexFilterParam" - - $ref: "#/components/parameters/PaginationParam" + - $ref: '#/components/parameters/ComplexFilterParam' + - $ref: '#/components/parameters/PaginationParam' responses: "200": description: Filtered results diff --git a/openapi/testdata/inline/bundled_expected.yaml b/openapi/testdata/inline/bundled_expected.yaml index f5e76158..2824f374 100644 --- a/openapi/testdata/inline/bundled_expected.yaml +++ b/openapi/testdata/inline/bundled_expected.yaml @@ -108,7 +108,7 @@ paths: tags: - posts parameters: - - $ref: "#/components/parameters/DestinationParam" + - $ref: '#/components/parameters/DestinationParam' requestBody: $ref: "#/components/requestBodies/CopyRequest" responses: @@ -300,8 +300,8 @@ paths: tags: [external] summary: Test external parameter with complex reference chain 
parameters: - - $ref: "#/components/parameters/ComplexFilterParam" - - $ref: "#/components/parameters/PaginationParam" + - $ref: '#/components/parameters/ComplexFilterParam' + - $ref: '#/components/parameters/PaginationParam' responses: "200": description: Filtered results diff --git a/openapi/testdata/multifile-basic/components.yaml b/openapi/testdata/multifile-basic/components.yaml new file mode 100644 index 00000000..4983adb2 --- /dev/null +++ b/openapi/testdata/multifile-basic/components.yaml @@ -0,0 +1,78 @@ +components: + schemas: + TestObject: + type: object + properties: + id: + type: string + name: + type: string + required: + - id + ErrorObject: + type: object + properties: + error: + type: string + message: + type: string + + responses: + TestResponse: + description: Successful response + content: + application/json: + schema: + $ref: "#/components/schemas/TestObject" + headers: + X-Custom-Header: + $ref: "#/components/headers/CustomHeader" + links: + TestLink: + $ref: "#/components/links/TestLink" + + ErrorResponse: + description: Error response + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorObject" + + headers: + CustomHeader: + description: Custom header + schema: + type: string + + links: + TestLink: + operationId: getTest + description: Link back to test operation + + callbacks: + TestCallback: + "{$request.body#/callbackUrl}": + post: + operationId: callbackOperation + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/TestObject" + responses: + "200": + description: Callback acknowledged + + pathItems: + TestWebhook: + post: + operationId: webhookOperation + summary: Webhook operation + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/TestObject" + responses: + "200": + $ref: "#/components/responses/TestResponse" diff --git a/openapi/testdata/multifile-basic/openapi.yaml b/openapi/testdata/multifile-basic/openapi.yaml new file mode 100644 index 00000000..bf0b8f14 --- /dev/null +++ b/openapi/testdata/multifile-basic/openapi.yaml @@ -0,0 +1,26 @@ +openapi: 3.1.0 +info: + title: Multi-file Reference Test + version: 1.0.0 +paths: + /test: + get: + operationId: getTest + summary: Test operation + responses: + "200": + $ref: "./components.yaml#/components/responses/TestResponse" + "400": + $ref: "./components.yaml#/components/responses/ErrorResponse" + callbacks: + testCallback: + $ref: "./components.yaml#/components/callbacks/TestCallback" +webhooks: + testWebhook: + $ref: "./components.yaml#/components/pathItems/TestWebhook" +components: + securitySchemes: + apiKey: + type: apiKey + in: header + name: X-API-Key diff --git a/openapi/testdata/multifile-simple/components.yaml b/openapi/testdata/multifile-simple/components.yaml new file mode 100644 index 00000000..bab37a73 --- /dev/null +++ b/openapi/testdata/multifile-simple/components.yaml @@ -0,0 +1,14 @@ +components: + schemas: + TestSchema: + type: object + properties: + id: + type: string + responses: + TestResponse: + description: Test response + content: + application/json: + schema: + $ref: "#/components/schemas/TestSchema" diff --git a/openapi/testdata/multifile-simple/openapi.yaml b/openapi/testdata/multifile-simple/openapi.yaml new file mode 100644 index 00000000..cb089558 --- /dev/null +++ b/openapi/testdata/multifile-simple/openapi.yaml @@ -0,0 +1,10 @@ +openapi: 3.1.0 +info: + title: Simple Test + version: 1.0.0 +paths: + /test: + get: + responses: + "200": + $ref: "./components.yaml#/components/responses/TestResponse" 
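
The new `multifile-simple` and `multifile-basic` fixtures above exercise cross-file `$ref` resolution, where the entry document points into a sibling `components.yaml`. As a rough mental model only — hand-merged here for illustration, not the library's actual bundled output (that is what the `bundled_*_expected.yaml` fixtures capture) — resolving the single external reference in `multifile-simple` is equivalent to this flattened document:

```yaml
# Hand-merged view of multifile-simple after resolving the external $ref.
# Illustrative only; real bundling output may differ in naming and layout.
openapi: 3.1.0
info:
  title: Simple Test
  version: 1.0.0
paths:
  /test:
    get:
      responses:
        "200":
          description: Test response
          content:
            application/json:
              schema:
                type: object
                properties:
                  id:
                    type: string
```
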
diff --git a/overlay/README.md b/overlay/README.md
index 44e70e4d..6a005f32 100644
--- a/overlay/README.md
+++ b/overlay/README.md
@@ -3,7 +3,7 @@ OpenAPI

 OpenAPI Overlay

-An implementation of the OpenAPI Overlay Specification 1.1.0 for applying modifications to OpenAPI documents
+An implementation of the OpenAPI Overlay Specification for applying modifications to OpenAPI documents

@@ -25,19 +25,16 @@
 ## Features

-- **OpenAPI Overlay Specification Compliance**: Full implementation of the [OpenAPI Overlay Specification 1.0.0](https://github.com/OAI/Overlay-Specification/blob/main/versions/1.0.0.md) and [1.1.0](https://github.com/OAI/Overlay-Specification/blob/main/versions/1.1.0.md)
+- **OpenAPI Overlay Specification Compliance**: Full implementation of the [OpenAPI Overlay Specification](https://github.com/OAI/Overlay-Specification/blob/3f398c6/versions/1.0.0.md) (2023-10-12) and [version 1.1.0](https://github.com/OAI/Overlay-Specification/blob/e2c3cec/versions/1.1.0-dev.md)
 - **JSONPath Target Selection**: Uses JSONPath expressions to select nodes for modification
-- **RFC 9535 JSONPath**: Version 1.1.0 uses RFC 9535-compliant JSONPath by default for improved interoperability
 - **Remove, Update, and Copy Actions**: Support for remove actions (pruning nodes), update actions (merging values), and copy actions (duplicating or moving nodes)
-- **Upgrade Support**: Built-in `Upgrade()` function to upgrade overlay documents from 1.0.0 to 1.1.0
-- **Info Description Field**: Version 1.1.0 supports a `description` field in the overlay info section
 - **Flexible Input/Output**: Works with both YAML and JSON formats
 - **Batch Operations**: Apply multiple modifications to large numbers of nodes in a single operation
 - **YAML v1.2 Support**: Uses [gopkg.in/yaml.v3](https://pkg.go.dev/gopkg.in/yaml.v3) for YAML v1.2 parsing (superset of JSON)

 ## About OpenAPI Overlays

-This specification defines a means of editing an OpenAPI Specification file by applying a list of actions. Each action is either a remove action that prunes nodes or an update that merges a value into nodes. The nodes impacted are selected by a target expression which uses JSONPath. This implementation supports [version 1.1.0](https://github.com/OAI/Overlay-Specification/blob/main/versions/1.1.0.md) which adds a `copy` action for duplicating or moving nodes within the document, RFC 9535 JSONPath as the default, and a description field in the info section.
+This specification defines a means of editing an OpenAPI Specification file by applying a list of actions. Each action is either a remove action that prunes nodes or an update action that merges a value into nodes. The nodes impacted are selected by a target expression which uses JSONPath. This implementation also supports [version 1.1.0](https://github.com/OAI/Overlay-Specification/blob/e2c3cec/versions/1.1.0-dev.md), which adds a `copy` action for duplicating or moving nodes within the document.

 The specification itself says very little about the input file to be modified or the output file. The presumed intention is that the input and output be an OpenAPI Specification, but that is not required.
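
For readers skimming this part of the diff, a minimal sketch of an overlay document as defined by the 1.0.0 specification may help; the target paths and values below are invented purely for illustration:

```yaml
overlay: 1.0.0
info:
  title: Example overlay   # illustrative values only
  version: 0.0.1
actions:
  # update merges the given value into every node matched by the JSONPath target
  - target: $.info
    update:
      description: Adds or replaces the API description
  # remove prunes every matched node from the document
  - target: $.paths['/legacy']
    remove: true
```

The 1.1.0 draft linked above additionally defines a `copy` action for duplicating or moving matched nodes; its exact shape follows that draft rather than anything shown here.
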
diff --git a/overlay/validate.go b/overlay/validate.go index af1f5d59..15221112 100644 --- a/overlay/validate.go +++ b/overlay/validate.go @@ -17,7 +17,7 @@ var ( // Errors var ( ErrOverlayVersionInvalid = errors.New("overlay version is invalid") - ErrOverlayVersionNotSupported = fmt.Errorf("overlay version must be one of: %s", strings.Join(sliceutil.Map(SupportedVersions, func(v *version.Version) string { return v.String() }), ", ")) + ErrOverlayVersionNotSupported = fmt.Errorf("overlay version must be one of: `%s`", strings.Join(sliceutil.Map(SupportedVersions, func(v *version.Version) string { return v.String() }), ", ")) ErrOverlayVersionMustBeDefined = errors.New("overlay version must be defined") ErrOverlayInfoTitleMustBeDefined = errors.New("overlay info title must be defined") ErrOverlayInfoVersionMustBeDefined = errors.New("overlay info version must be defined") diff --git a/overlay/validate_test.go b/overlay/validate_test.go index c5d95a92..d68dc571 100644 --- a/overlay/validate_test.go +++ b/overlay/validate_test.go @@ -67,7 +67,7 @@ func TestOverlay_Validate(t *testing.T) { }, }, }, - expectedErrors: []string{"overlay version must be one of: 1.0.0, 1.1.0"}, + expectedErrors: []string{"overlay version must be one of: `1.0.0, 1.1.0`"}, }, { name: "empty overlay version", diff --git a/references/resolution.go b/references/resolution.go index 3906fd10..c310840f 100644 --- a/references/resolution.go +++ b/references/resolution.go @@ -23,11 +23,15 @@ type ResolutionTarget interface { GetCachedReferenceDocument(key string) ([]byte, bool) StoreReferenceDocumentInCache(key string, doc []byte) + + GetCachedExternalDocument(key string) (any, bool) + StoreExternalDocumentInCache(key string, doc any) } type Resolvable[T any] interface { GetReference() Reference Resolve(ctx context.Context, opts ResolveOptions) ([]error, error) + IsResolved() bool GetResolvedObject() *T } @@ -53,8 +57,10 @@ type Unmarshal[T any] func(ctx context.Context, node *yaml.Node, skipValidation type ResolveResult[T any] struct { // Object is the resolved object Object *T + // AbsoluteDocumentPath is the absolute reference that was resolved + AbsoluteDocumentPath string // AbsoluteReference is the absolute reference that was resolved - AbsoluteReference string + AbsoluteReference Reference // ResolvedDocument is the document that was resolved against (for chaining resolutions) ResolvedDocument any } @@ -103,10 +109,10 @@ func Resolve[T any](ctx context.Context, ref Reference, unmarshaler Unmarshal[T] return nil, nil, err } - absRef := result.AbsoluteReference + absDocPath := result.AbsoluteReference finalClassification := result.Classification - absRefWithJP := utils.BuildAbsoluteReference(absRef, string(jp)) + absRefWithJP := utils.BuildAbsoluteReference(absDocPath, string(jp)) // Try and get the object from the cache as we should avoid recreating it if possible var obj *T @@ -119,9 +125,10 @@ func Resolve[T any](ctx context.Context, ref Reference, unmarshaler Unmarshal[T] if uri == "" { if coOK { return &ResolveResult[T]{ - Object: obj, - AbsoluteReference: absRef, - ResolvedDocument: opts.TargetDocument, + Object: obj, + AbsoluteDocumentPath: absDocPath, + AbsoluteReference: Reference(absRefWithJP), + ResolvedDocument: opts.TargetDocument, }, nil, nil } @@ -134,41 +141,44 @@ func Resolve[T any](ctx context.Context, ref Reference, unmarshaler Unmarshal[T] opts.RootDocument.StoreReferencedObjectInCache(absRefWithJP, obj) return &ResolveResult[T]{ - Object: obj, - AbsoluteReference: opts.TargetLocation, - 
ResolvedDocument: opts.TargetDocument, + Object: obj, + AbsoluteDocumentPath: opts.TargetLocation, + AbsoluteReference: Reference(utils.BuildAbsoluteReference(opts.TargetLocation, string(jp))), + ResolvedDocument: opts.TargetDocument, }, validationErrs, nil } else if opts.DisableExternalRefs { return nil, nil, errors.New("external reference not allowed") } - cd, cdOK := opts.RootDocument.GetCachedReferenceDocument(absRef) + cd, cdOK := opts.RootDocument.GetCachedReferenceDocument(absDocPath) if coOK && cdOK { return &ResolveResult[T]{ - Object: obj, - AbsoluteReference: absRef, - ResolvedDocument: cd, + Object: obj, + AbsoluteDocumentPath: absDocPath, + AbsoluteReference: Reference(absRefWithJP), + ResolvedDocument: cd, }, nil, nil } // If we have a cached document, try and resolve against it if cdOK { - obj, resolvedDoc, validationErrs, err := resolveAgainstData(ctx, absRef, bytes.NewReader(cd), jp, unmarshaler, opts) + obj, resolvedDoc, validationErrs, err := resolveAgainstData(ctx, absDocPath, bytes.NewReader(cd), jp, unmarshaler, opts) if err != nil { return nil, validationErrs, err } return &ResolveResult[T]{ - Object: obj, - AbsoluteReference: absRef, - ResolvedDocument: resolvedDoc, + Object: obj, + AbsoluteDocumentPath: absDocPath, + AbsoluteReference: Reference(absRefWithJP), + ResolvedDocument: resolvedDoc, }, validationErrs, nil } // Otherwise resolve the reference switch finalClassification.Type { case utils.ReferenceTypeURL: - obj, resolvedDoc, validationErrs, err := resolveAgainstURL(ctx, absRef, jp, unmarshaler, opts) + obj, resolvedDoc, validationErrs, err := resolveAgainstURL(ctx, absDocPath, jp, unmarshaler, opts) if err != nil { return nil, validationErrs, err } @@ -177,12 +187,13 @@ func Resolve[T any](ctx context.Context, ref Reference, unmarshaler Unmarshal[T] opts.RootDocument.StoreReferencedObjectInCache(absRefWithJP, obj) return &ResolveResult[T]{ - Object: obj, - AbsoluteReference: absRef, - ResolvedDocument: resolvedDoc, + Object: obj, + AbsoluteDocumentPath: absDocPath, + AbsoluteReference: Reference(absRefWithJP), + ResolvedDocument: resolvedDoc, }, validationErrs, nil case utils.ReferenceTypeFilePath: - obj, resolvedDoc, validationErrs, err := resolveAgainstFilePath(ctx, absRef, jp, unmarshaler, opts) + obj, resolvedDoc, validationErrs, err := resolveAgainstFilePath(ctx, absDocPath, jp, unmarshaler, opts) if err != nil { return nil, validationErrs, err } @@ -191,9 +202,10 @@ func Resolve[T any](ctx context.Context, ref Reference, unmarshaler Unmarshal[T] opts.RootDocument.StoreReferencedObjectInCache(absRefWithJP, obj) return &ResolveResult[T]{ - Object: obj, - AbsoluteReference: absRef, - ResolvedDocument: resolvedDoc, + Object: obj, + AbsoluteDocumentPath: absDocPath, + AbsoluteReference: Reference(absRefWithJP), + ResolvedDocument: resolvedDoc, }, validationErrs, nil default: return nil, nil, fmt.Errorf("unsupported reference type: %d", finalClassification.Type) @@ -257,19 +269,35 @@ func resolveAgainstData[T any](ctx context.Context, absRef string, reader io.Rea return nil, nil, nil, err } - var node yaml.Node - if err := yaml.Unmarshal(data, &node); err != nil { - return nil, nil, nil, err + opts.RootDocument.InitCache() + + // Check if we have a cached parsed YAML node for this external document + var node *yaml.Node + if cachedDoc, ok := opts.RootDocument.GetCachedExternalDocument(absRef); ok { + if cachedNode, ok := cachedDoc.(*yaml.Node); ok { + node = cachedNode + } + } + + // If not cached, parse and cache the YAML node + if node == nil { + var 
parsedNode yaml.Node + if err := yaml.Unmarshal(data, &parsedNode); err != nil { + return nil, nil, nil, err + } + node = &parsedNode + // Cache the parsed YAML node so internal references can navigate it + opts.RootDocument.StoreExternalDocumentInCache(absRef, node) } var target any // Handle empty JSON pointer case - if jp is empty, target the root node directly if jp == "" { - target = &node + target = node } else { var jpErr error - target, jpErr = jsonpointer.GetTarget(node, jp) + target, jpErr = jsonpointer.GetTarget(*node, jp) if jpErr != nil { return nil, nil, nil, jpErr } @@ -284,6 +312,12 @@ func resolveAgainstData[T any](ctx context.Context, absRef string, reader io.Rea return nil, nil, nil, fmt.Errorf("expected *yaml.Node, got %T", target) } + // CRITICAL FIX: Update the TargetDocument to be the parsed YAML node + // This allows internal references (like #/components/schemas/...) within + // the resolved component to navigate through the external file's structure + resolveOptsWithDocument := opts + resolveOptsWithDocument.TargetDocument = node + resolved, validationErrs, err := unmarshaler(ctx, targetNode, opts.SkipValidation) if err != nil { return nil, nil, validationErrs, err @@ -293,10 +327,9 @@ func resolveAgainstData[T any](ctx context.Context, absRef string, reader io.Rea return nil, nil, validationErrs, fmt.Errorf("nil %T returned from unmarshaler", target) } - opts.RootDocument.InitCache() opts.RootDocument.StoreReferenceDocumentInCache(absRef, data) - return resolved, &node, validationErrs, nil + return resolved, node, validationErrs, nil } func cast[T any](target any) (*T, error) { diff --git a/references/resolution_test.go b/references/resolution_test.go index ea4a27be..afa8bd48 100644 --- a/references/resolution_test.go +++ b/references/resolution_test.go @@ -21,14 +21,16 @@ import ( // MockResolutionTarget implements ResolutionTarget for testing type MockResolutionTarget struct { - objCache map[string]any - docCache map[string][]byte + objCache map[string]any + docCache map[string][]byte + extDocCache map[string]any } func NewMockResolutionTarget() *MockResolutionTarget { return &MockResolutionTarget{ - objCache: make(map[string]any), - docCache: make(map[string][]byte), + objCache: make(map[string]any), + docCache: make(map[string][]byte), + extDocCache: make(map[string]any), } } @@ -57,6 +59,18 @@ func (m *MockResolutionTarget) InitCache() { if m.docCache == nil { m.docCache = make(map[string][]byte) } + if m.extDocCache == nil { + m.extDocCache = make(map[string]any) + } +} + +func (m *MockResolutionTarget) GetCachedExternalDocument(key string) (any, bool) { + data, exists := m.extDocCache[key] + return data, exists +} + +func (m *MockResolutionTarget) StoreExternalDocumentInCache(key string, doc any) { + m.extDocCache[key] = doc } // MockVirtualFS implements system.VirtualFS for testing @@ -392,7 +406,7 @@ func TestResolve_Errors(t *testing.T) { require.NoError(t, err) assert.Nil(t, validationErrs) require.NotNil(t, result) - assert.Equal(t, ".", result.AbsoluteReference) + assert.Equal(t, ".", result.AbsoluteDocumentPath) }) t.Run("missing root document", func(t *testing.T) { @@ -788,15 +802,15 @@ func TestResolve_AbsoluteVsRelativeReferenceHandling(t *testing.T) { require.NotNil(t, result.Object) // Verify the absolute reference is what we expect - assert.Equal(t, tt.expectedAbsoluteRef, result.AbsoluteReference, tt.description) + assert.Equal(t, tt.expectedAbsoluteRef, result.AbsoluteDocumentPath, tt.description) // Verify the behavior matches our 
expectation about absolute vs relative if tt.isAbsolute { // For absolute references, the result should be exactly the same as the original URI - assert.Equal(t, tt.referenceURI, result.AbsoluteReference, "Absolute reference should remain unchanged") + assert.Equal(t, tt.referenceURI, result.AbsoluteDocumentPath, "Absolute reference should remain unchanged") } else { // For relative references, the result should be different from the original URI - assert.NotEqual(t, tt.referenceURI, result.AbsoluteReference, "Relative reference should be resolved") + assert.NotEqual(t, tt.referenceURI, result.AbsoluteDocumentPath, "Relative reference should be resolved") } }) } @@ -838,7 +852,7 @@ func TestResolve_RootDocumentDifferentFromTargetDocument(t *testing.T) { assert.Nil(t, validationErrs) require.NotNil(t, result) require.NotNil(t, result.Object) - assert.Equal(t, "/project/api/schemas/user.yaml", result.AbsoluteReference) + assert.Equal(t, "/project/api/schemas/user.yaml", result.AbsoluteDocumentPath) // Verify the cache was stored in the ROOT document, not the target document cachedData, exists := rootDoc.GetCachedReferenceDocument("/project/api/schemas/user.yaml") @@ -878,7 +892,7 @@ func TestResolve_RootDocumentDifferentFromTargetDocument(t *testing.T) { assert.Nil(t, validationErrs) require.NotNil(t, result) require.NotNil(t, result.Object) - assert.Equal(t, "https://external.com/schemas/common.yaml", result.AbsoluteReference) + assert.Equal(t, "https://external.com/schemas/common.yaml", result.AbsoluteDocumentPath) // Verify the cache was stored in the ROOT document, not the target document cachedData, exists := rootDoc.GetCachedReferenceDocument("https://external.com/schemas/common.yaml") @@ -920,7 +934,7 @@ func TestResolve_RootDocumentDifferentFromTargetDocument(t *testing.T) { assert.Nil(t, validationErrs) require.NotNil(t, result) require.NotNil(t, result.Object) - assert.Equal(t, "/project/api/schemas/cached.yaml", result.AbsoluteReference) + assert.Equal(t, "/project/api/schemas/cached.yaml", result.AbsoluteDocumentPath) // Verify the cache from root document was used (not the file system) retrievedCache, exists := rootDoc.GetCachedReferenceDocument("/project/api/schemas/cached.yaml") @@ -953,7 +967,7 @@ func TestResolve_RootDocumentDifferentFromTargetDocument(t *testing.T) { require.NotNil(t, result) require.NotNil(t, result.Object) assert.Equal(t, "nested-string", result.Object.StringField) - assert.Equal(t, "/project/external.yaml", result.AbsoluteReference) + assert.Equal(t, "/project/external.yaml", result.AbsoluteDocumentPath) // Verify that the resolved document is the target document assert.Equal(t, targetDoc, result.ResolvedDocument) diff --git a/swagger/core/reference.go b/swagger/core/reference.go index d95f5fbe..32efa31b 100644 --- a/swagger/core/reference.go +++ b/swagger/core/reference.go @@ -31,7 +31,7 @@ func (r *Reference[T]) Unmarshal(ctx context.Context, parentName string, node *y if resolvedNode.Kind != yaml.MappingNode { r.SetValid(false, false) - return []error{validation.NewValidationError(validation.NewTypeMismatchError(parentName, "reference expected object, got %s", yml.NodeKindToString(resolvedNode.Kind)), resolvedNode)}, nil + return []error{validation.NewValidationError(validation.SeverityError, validation.RuleValidationTypeMismatch, validation.NewTypeMismatchError(parentName, "reference expected `object`, got %s", yml.NodeKindToString(resolvedNode.Kind)), resolvedNode)}, nil } if _, _, ok := yml.GetMapElementNodes(ctx, resolvedNode, "$ref"); ok { diff 
--git a/swagger/externaldocs.go b/swagger/externaldocs.go index c85c8be4..e6ab8745 100644 --- a/swagger/externaldocs.go +++ b/swagger/externaldocs.go @@ -2,6 +2,8 @@ package swagger import ( "context" + "errors" + "fmt" "net/url" "github.com/speakeasy-api/openapi/extensions" @@ -55,12 +57,12 @@ func (e *ExternalDocumentation) Validate(ctx context.Context, opts ...validation errs := []error{} if c.URL.Present && e.URL == "" { - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("externalDocs.url is required"), c, c.URL)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("`externalDocs.url` is required"), c, c.URL)) } if c.URL.Present { if _, err := url.Parse(e.URL); err != nil { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("externalDocs.url is not a valid uri: %s", err), c, c.URL)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("externalDocs.url is not a valid uri: %w", err), c, c.URL)) } } diff --git a/swagger/info.go b/swagger/info.go index 80b864ef..8b3913cc 100644 --- a/swagger/info.go +++ b/swagger/info.go @@ -2,6 +2,8 @@ package swagger import ( "context" + "errors" + "fmt" "net/mail" "net/url" @@ -96,16 +98,16 @@ func (i *Info) Validate(ctx context.Context, opts ...validation.Option) []error errs := []error{} if c.Title.Present && i.Title == "" { - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("info.title is required"), c, c.Title)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("`info.title` is required"), c, c.Title)) } if c.Version.Present && i.Version == "" { - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("info.version is required"), c, c.Version)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("`info.version` is required"), c, c.Version)) } if c.TermsOfService.Present { if _, err := url.Parse(*i.TermsOfService); err != nil { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("info.termsOfService is not a valid uri: %s", err), c, c.TermsOfService)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("info.termsOfService is not a valid uri: %w", err), c, c.TermsOfService)) } } @@ -177,13 +179,13 @@ func (c *Contact) Validate(ctx context.Context, opts ...validation.Option) []err if core.URL.Present { if _, err := url.Parse(*c.URL); err != nil { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("contact.url is not a valid uri: %s", err), core, core.URL)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("contact.url is not a valid uri: %w", err), core, core.URL)) } } if core.Email.Present { if _, err := mail.ParseAddress(*c.Email); err != nil { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("contact.email is not a valid email address: %s", err), core, core.Email)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("contact.email is not a valid email address: %w", err), core, core.Email)) } } @@ -236,12 +238,12 @@ func (l *License) Validate(ctx 
context.Context, opts ...validation.Option) []err errs := []error{} if core.Name.Present && l.Name == "" { - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("license.name is required"), core, core.Name)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("`license.name` is required"), core, core.Name)) } if core.URL.Present { if _, err := url.Parse(*l.URL); err != nil { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("license.url is not a valid uri: %s", err), core, core.URL)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("license.url is not a valid uri: %w", err), core, core.URL)) } } diff --git a/swagger/info_validate_test.go b/swagger/info_validate_test.go index f12c20fe..71c42b00 100644 --- a/swagger/info_validate_test.go +++ b/swagger/info_validate_test.go @@ -65,12 +65,12 @@ func TestInfo_Validate_Error(t *testing.T) { { name: "missing_title", yml: `version: 1.0.0`, - wantErrs: []string{"info.title is missing"}, + wantErrs: []string{"`info.title` is required"}, }, { name: "missing_version", yml: `title: Test API`, - wantErrs: []string{"info.version is missing"}, + wantErrs: []string{"`info.version` is required"}, }, { name: "invalid_contact_email", @@ -86,7 +86,7 @@ contact: version: 1.0.0 license: url: https://example.com/license`, - wantErrs: []string{"license.name is missing"}, + wantErrs: []string{"`license.name` is required"}, }, } @@ -249,7 +249,7 @@ func TestLicense_Validate_Error(t *testing.T) { { name: "missing_name", yml: `url: https://example.com/license`, - wantErrs: []string{"license.name is missing"}, + wantErrs: []string{"`license.name` is required"}, }, } diff --git a/swagger/operation.go b/swagger/operation.go index 90cb167b..22719261 100644 --- a/swagger/operation.go +++ b/swagger/operation.go @@ -2,6 +2,8 @@ package swagger import ( "context" + "errors" + "fmt" "mime" "github.com/speakeasy-api/openapi/extensions" @@ -155,7 +157,7 @@ func (o *Operation) Validate(ctx context.Context, opts ...validation.Option) []e errs := []error{} if !c.Responses.Present { - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("operation.responses is required"), c, c.Responses)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("`operation.responses` is required"), c, c.Responses)) } else if o.Responses != nil { errs = append(errs, o.Responses.Validate(ctx, opts...)...) 
} @@ -173,7 +175,9 @@ func (o *Operation) Validate(ctx context.Context, opts ...validation.Option) []e } if !valid { errs = append(errs, validation.NewValueError( - validation.NewValueValidationError("operation.scheme must be one of [http, https, ws, wss], got '%s'", scheme), + validation.SeverityError, + validation.RuleValidationAllowedValues, + fmt.Errorf("operation.scheme must be one of [http, https, ws, wss], got `%s`", scheme), c, c.Schemes)) } } @@ -184,7 +188,9 @@ func (o *Operation) Validate(ctx context.Context, opts ...validation.Option) []e for _, mimeType := range o.Consumes { if _, _, err := mime.ParseMediaType(mimeType); err != nil { errs = append(errs, validation.NewValueError( - validation.NewValueValidationError("operation.consumes contains invalid MIME type '%s': %s", mimeType, err), + validation.SeverityError, + validation.RuleValidationInvalidFormat, + fmt.Errorf("operation.consumes contains invalid MIME type `%s`: %w", mimeType, err), c, c.Consumes)) } } @@ -195,7 +201,9 @@ func (o *Operation) Validate(ctx context.Context, opts ...validation.Option) []e for _, mimeType := range o.Produces { if _, _, err := mime.ParseMediaType(mimeType); err != nil { errs = append(errs, validation.NewValueError( - validation.NewValueValidationError("operation.produces contains invalid MIME type '%s': %s", mimeType, err), + validation.SeverityError, + validation.RuleValidationInvalidFormat, + fmt.Errorf("operation.produces contains invalid MIME type `%s`: %w", mimeType, err), c, c.Produces)) } } diff --git a/swagger/operation_validate_test.go b/swagger/operation_validate_test.go index 899b1eca..1242af13 100644 --- a/swagger/operation_validate_test.go +++ b/swagger/operation_validate_test.go @@ -81,7 +81,7 @@ func TestOperation_Validate_Error(t *testing.T) { { name: "missing_responses", yml: `summary: Test operation`, - wantErrs: []string{"operation.responses is required"}, + wantErrs: []string{"`operation.responses` is required"}, }, } diff --git a/swagger/parameter.go b/swagger/parameter.go index 0dc46007..8f9bd7f7 100644 --- a/swagger/parameter.go +++ b/swagger/parameter.go @@ -2,6 +2,8 @@ package swagger import ( "context" + "errors" + "fmt" "strings" "github.com/speakeasy-api/openapi/extensions" @@ -168,11 +170,11 @@ func (p *Parameter) Validate(ctx context.Context, opts ...validation.Option) []e errs := []error{} if c.Name.Present && p.Name == "" { - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("parameter.name is required"), c, c.Name)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("`parameter.name` is required"), c, c.Name)) } if c.In.Present && p.In == "" { - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("parameter.in is required"), c, c.In)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("parameter.in is required"), c, c.In)) } else if c.In.Present { errs = append(errs, p.validateIn(c)...) errs = append(errs, p.validateParameterType(ctx, c, opts...)...) 
@@ -180,7 +182,7 @@ func (p *Parameter) Validate(ctx context.Context, opts ...validation.Option) []e // allowEmptyValue only valid for query or formData if c.AllowEmptyValue.Present && p.In != ParameterInQuery && p.In != ParameterInFormData { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("parameter.allowEmptyValue is only valid for in=query or in=formData"), c, c.AllowEmptyValue)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationAllowedValues, errors.New("parameter.allowEmptyValue is only valid for in=query or in=formData"), c, c.AllowEmptyValue)) } // Validate items if present @@ -195,7 +197,9 @@ func (p *Parameter) Validate(ctx context.Context, opts ...validation.Option) []e opCore := operation.GetCore() if !opCore.Consumes.Present || len(operation.Consumes) == 0 { errs = append(errs, validation.NewValueError( - validation.NewValueValidationError("parameter with type=file requires operation to have consumes defined"), + validation.SeverityError, + validation.RuleValidationRequiredField, + errors.New("parameter with type=file requires operation to have consumes defined"), c, c.Type)) } else { hasValidConsumes := false @@ -207,7 +211,9 @@ func (p *Parameter) Validate(ctx context.Context, opts ...validation.Option) []e } if !hasValidConsumes { errs = append(errs, validation.NewValueError( - validation.NewValueValidationError("parameter with type=file requires operation consumes to be 'multipart/form-data' or 'application/x-www-form-urlencoded'"), + validation.SeverityError, + validation.RuleValidationAllowedValues, + errors.New("parameter with type=file requires operation consumes to be 'multipart/form-data' or 'application/x-www-form-urlencoded'"), c, c.Type)) } } @@ -231,7 +237,7 @@ func (p *Parameter) validateIn(c *core.Parameter) []error { } } if !valid { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("parameter.in must be one of [%s]", strings.Join([]string{string(ParameterInQuery), string(ParameterInHeader), string(ParameterInPath), string(ParameterInFormData), string(ParameterInBody)}, ", ")), c, c.In)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationAllowedValues, fmt.Errorf("parameter.in must be one of [`%s`]", strings.Join([]string{string(ParameterInQuery), string(ParameterInHeader), string(ParameterInPath), string(ParameterInFormData), string(ParameterInBody)}, ", ")), c, c.In)) } return errs @@ -242,13 +248,13 @@ func (p *Parameter) validateParameterType(ctx context.Context, c *core.Parameter // Path parameters must be required if p.In == ParameterInPath && (!c.Required.Present || !p.GetRequired()) { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("parameter.in=path requires required=true"), c, c.Required)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("parameter.in=path requires required=true"), c, c.Required)) } // Body parameters require schema if p.In == ParameterInBody { if !c.Schema.Present { - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("parameter.schema is required for in=body"), c, c.Schema)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("parameter.schema is required for in=body"), c, c.Schema)) return errs } errs = append(errs, p.Schema.Validate(ctx, opts...)...) 
@@ -257,12 +263,12 @@ func (p *Parameter) validateParameterType(ctx context.Context, c *core.Parameter // Non-body parameters require type if !c.Type.Present { - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("parameter.type is required for non-body parameters"), c, c.Type)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("parameter.type is required for non-body parameters"), c, c.Type)) return errs } if c.Type.Present && (p.Type == nil || *p.Type == "") { - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("parameter.type is required for non-body parameters"), c, c.Type)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("parameter.type is required for non-body parameters"), c, c.Type)) return errs } @@ -276,24 +282,26 @@ func (p *Parameter) validateParameterType(ctx context.Context, c *core.Parameter } } if !valid { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("parameter.type must be one of [%s]", strings.Join(validTypes, ", ")), c, c.Type)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationAllowedValues, fmt.Errorf("parameter.type must be one of [`%s`]", strings.Join(validTypes, ", ")), c, c.Type)) } // File type only allowed for formData if *p.Type == "file" && p.In != ParameterInFormData { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("parameter.type=file requires in=formData"), c, c.Type)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationAllowedValues, errors.New("parameter.type=file requires in=formData"), c, c.Type)) } // Array type requires items if *p.Type == "array" && !c.Items.Present { - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("parameter.items is required when type=array"), c, c.Items)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("parameter.items is required when type=array"), c, c.Items)) } // Validate collectionFormat=multi only for query or formData if p.CollectionFormat != nil && *p.CollectionFormat == CollectionFormatMulti { if p.In != ParameterInQuery && p.In != ParameterInFormData { errs = append(errs, validation.NewValueError( - validation.NewValueValidationError("collectionFormat='multi' is only valid for in=query or in=formData"), + validation.SeverityError, + validation.RuleValidationAllowedValues, + errors.New("collectionFormat='multi' is only valid for in=query or in=formData"), c, c.CollectionFormat)) } } @@ -369,7 +377,7 @@ func (i *Items) Validate(ctx context.Context, opts ...validation.Option) []error errs := []error{} if c.Type.Present && i.Type == "" { - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("items.type is required"), c, c.Type)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("`items.type` is required"), c, c.Type)) } else if c.Type.Present { validTypes := []string{"string", "number", "integer", "boolean", "array"} valid := false @@ -380,12 +388,12 @@ func (i *Items) Validate(ctx context.Context, opts ...validation.Option) []error } } if !valid { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("items.type must be one of [%s]", 
strings.Join(validTypes, ", ")), c, c.Type)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationAllowedValues, fmt.Errorf("items.type must be one of [`%s`]", strings.Join(validTypes, ", ")), c, c.Type)) } // Array type requires items if i.Type == "array" && !c.Items.Present { - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("items.items is required when type=array"), c, c.Items)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("items.items is required when type=array"), c, c.Items)) } } diff --git a/swagger/parameter_test.go b/swagger/parameter_test.go index ef50b238..4a619cd4 100644 --- a/swagger/parameter_test.go +++ b/swagger/parameter_test.go @@ -172,7 +172,7 @@ func TestParameter_Validate_Error(t *testing.T) { in: query type: string `, - wantErrs: []string{"parameter.name is missing"}, + wantErrs: []string{"`parameter.name` is required"}, }, { name: "empty name", @@ -181,7 +181,7 @@ name: "" in: query type: string `, - wantErrs: []string{"parameter.name is required"}, + wantErrs: []string{"`parameter.name` is required"}, }, { name: "missing in", @@ -189,7 +189,7 @@ type: string name: test type: string `, - wantErrs: []string{"parameter.in is missing"}, + wantErrs: []string{"`parameter.in` is required"}, }, { name: "path parameter not required", @@ -270,7 +270,7 @@ in: path required: false `, wantErrs: []string{ - "parameter.name is required", + "`parameter.name` is required", "parameter.in=path requires required=true", }, }, @@ -372,7 +372,7 @@ func TestItems_Validate_Error(t *testing.T) { yml: ` format: int32 `, - wantErrs: []string{"items.type is missing"}, + wantErrs: []string{"`items.type` is required"}, }, { name: "array items without nested items", diff --git a/swagger/paths.go b/swagger/paths.go index 47e10166..63629b86 100644 --- a/swagger/paths.go +++ b/swagger/paths.go @@ -2,6 +2,7 @@ package swagger import ( "context" + "fmt" "strings" "github.com/speakeasy-api/openapi/extensions" @@ -48,7 +49,9 @@ func (p *Paths) Validate(ctx context.Context, opts ...validation.Option) []error if !strings.HasPrefix(path, "/") { pathKeyNode := c.GetMapKeyNodeOrRoot(path, c.RootNode) errs = append(errs, validation.NewValidationError( - validation.NewValueValidationError("path '%s' must begin with a slash '/'", path), + validation.SeverityError, + validation.RuleValidationInvalidSyntax, + fmt.Errorf("path `%s` must begin with a slash '/'", path), pathKeyNode)) } errs = append(errs, pathItem.Validate(ctx, opts...)...) 
diff --git a/swagger/response.go b/swagger/response.go index baea119b..ce835fc7 100644 --- a/swagger/response.go +++ b/swagger/response.go @@ -2,6 +2,7 @@ package swagger import ( "context" + "errors" "github.com/speakeasy-api/openapi/extensions" "github.com/speakeasy-api/openapi/internal/interfaces" @@ -58,7 +59,9 @@ func (r *Responses) Validate(ctx context.Context, opts ...validation.Option) []e hasResponse := (c.Default.Present && r.Default != nil) || (r.Map != nil && r.Len() > 0) if !hasResponse { errs = append(errs, validation.NewValueError( - validation.NewMissingValueError("responses must contain at least one response code or default"), + validation.SeverityError, + validation.RuleValidationRequiredField, + errors.New("responses must contain at least one response code or default"), c, c.Default)) } @@ -139,7 +142,7 @@ func (r *Response) Validate(ctx context.Context, opts ...validation.Option) []er errs := []error{} if c.Description.Present && r.Description == "" { - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("response.description is required"), c, c.Description)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("`response.description` is required"), c, c.Description)) } for _, header := range r.Headers.All() { @@ -228,7 +231,7 @@ func (h *Header) Validate(ctx context.Context, opts ...validation.Option) []erro errs := []error{} if c.Type.Present && h.Type == "" { - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("header.type is required"), c, c.Type)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("`header.type` is required"), c, c.Type)) } else if c.Type.Present { validTypes := []string{"string", "number", "integer", "boolean", "array"} valid := false @@ -239,12 +242,12 @@ func (h *Header) Validate(ctx context.Context, opts ...validation.Option) []erro } } if !valid { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("header.type must be one of [string, number, integer, boolean, array]"), c, c.Type)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationAllowedValues, errors.New("header.type must be one of [string, number, integer, boolean, array]"), c, c.Type)) } // Array type requires items if h.Type == "array" && !c.Items.Present { - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("header.items is required when type=array"), c, c.Items)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("header.items is required when type=array"), c, c.Items)) } } diff --git a/swagger/response_validate_test.go b/swagger/response_validate_test.go index 4c8292f7..04e4dd5a 100644 --- a/swagger/response_validate_test.go +++ b/swagger/response_validate_test.go @@ -77,7 +77,7 @@ func TestResponse_Validate_Error(t *testing.T) { { name: "missing_description", yml: `schema: {type: object}`, - wantErrs: []string{"response.description is missing"}, + wantErrs: []string{"`response.description` is required"}, }, } @@ -164,7 +164,7 @@ func TestHeader_Validate_Error(t *testing.T) { { name: "missing_type", yml: `description: Some header`, - wantErrs: []string{"header.type is missing"}, + wantErrs: []string{"`header.type` is required"}, }, { name: "invalid_type", diff --git a/swagger/security.go b/swagger/security.go index 
56fa8861..6acae8c7 100644 --- a/swagger/security.go +++ b/swagger/security.go @@ -2,6 +2,8 @@ package swagger import ( "context" + "errors" + "fmt" "net/url" "strings" @@ -153,7 +155,7 @@ func (s *SecurityScheme) Validate(ctx context.Context, opts ...validation.Option errs := []error{} if c.Type.Present && s.Type == "" { - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("securityScheme.type is required"), c, c.Type)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("`securityScheme.type` is required"), c, c.Type)) } else { validTypes := []SecuritySchemeType{SecuritySchemeTypeBasic, SecuritySchemeTypeAPIKey, SecuritySchemeTypeOAuth2} valid := false @@ -164,26 +166,26 @@ func (s *SecurityScheme) Validate(ctx context.Context, opts ...validation.Option } } if !valid { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("securityScheme.type must be one of [%s]", strings.Join([]string{string(SecuritySchemeTypeBasic), string(SecuritySchemeTypeAPIKey), string(SecuritySchemeTypeOAuth2)}, ", ")), c, c.Type)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationAllowedValues, fmt.Errorf("securityScheme.type must be one of [`%s`]", strings.Join([]string{string(SecuritySchemeTypeBasic), string(SecuritySchemeTypeAPIKey), string(SecuritySchemeTypeOAuth2)}, ", ")), c, c.Type)) } } // Validate apiKey specific fields if s.Type == SecuritySchemeTypeAPIKey { if !c.Name.Present || s.Name == nil || *s.Name == "" { - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("securityScheme.name is required for type=apiKey"), c, c.Name)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("`securityScheme.name` is required for type=apiKey"), c, c.Name)) } if !c.In.Present || s.In == nil { - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("securityScheme.in is required for type=apiKey"), c, c.In)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("`securityScheme.in` is required for type=apiKey"), c, c.In)) } else if *s.In != SecuritySchemeInQuery && *s.In != SecuritySchemeInHeader { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("securityScheme.in must be one of [%s] for type=apiKey", strings.Join([]string{string(SecuritySchemeInQuery), string(SecuritySchemeInHeader)}, ", ")), c, c.In)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationAllowedValues, fmt.Errorf("securityScheme.in must be one of [`%s`] for type=apiKey", strings.Join([]string{string(SecuritySchemeInQuery), string(SecuritySchemeInHeader)}, ", ")), c, c.In)) } } // Validate oauth2 specific fields if s.Type == SecuritySchemeTypeOAuth2 { if !c.Flow.Present || s.Flow == nil { - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("securityScheme.flow is required for type=oauth2"), c, c.Flow)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("`securityScheme.flow` is required for type=oauth2"), c, c.Flow)) } else { validFlows := []OAuth2Flow{OAuth2FlowImplicit, OAuth2FlowPassword, OAuth2FlowApplication, OAuth2FlowAccessCode} valid := false @@ -194,37 +196,37 @@ func (s *SecurityScheme) Validate(ctx 
context.Context, opts ...validation.Option } } if !valid { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("securityScheme.flow must be one of [%s] for type=oauth2", strings.Join([]string{string(OAuth2FlowImplicit), string(OAuth2FlowPassword), string(OAuth2FlowApplication), string(OAuth2FlowAccessCode)}, ", ")), c, c.Flow)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationAllowedValues, fmt.Errorf("securityScheme.flow must be one of [`%s`] for type=oauth2", strings.Join([]string{string(OAuth2FlowImplicit), string(OAuth2FlowPassword), string(OAuth2FlowApplication), string(OAuth2FlowAccessCode)}, ", ")), c, c.Flow)) } if s.Flow != nil { // authorizationUrl required for implicit and accessCode flows if (*s.Flow == OAuth2FlowImplicit || *s.Flow == OAuth2FlowAccessCode) && (!c.AuthorizationURL.Present || s.AuthorizationURL == nil || *s.AuthorizationURL == "") { - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("securityScheme.authorizationUrl is required for flow=%s", *s.Flow), c, c.AuthorizationURL)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, fmt.Errorf("securityScheme.authorizationUrl is required for flow=%s", *s.Flow), c, c.AuthorizationURL)) } // tokenUrl required for password, application and accessCode flows if (*s.Flow == OAuth2FlowPassword || *s.Flow == OAuth2FlowApplication || *s.Flow == OAuth2FlowAccessCode) && (!c.TokenURL.Present || s.TokenURL == nil || *s.TokenURL == "") { - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("securityScheme.tokenUrl is required for flow=%s", *s.Flow), c, c.TokenURL)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, fmt.Errorf("securityScheme.tokenUrl is required for flow=%s", *s.Flow), c, c.TokenURL)) } } } if !c.Scopes.Present { - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("securityScheme.scopes is required for type=oauth2"), c, c.Scopes)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("`securityScheme.scopes` is required for type=oauth2"), c, c.Scopes)) } } // Validate URLs if c.AuthorizationURL.Present && s.AuthorizationURL != nil && *s.AuthorizationURL != "" { if _, err := url.Parse(*s.AuthorizationURL); err != nil { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("securityScheme.authorizationUrl is not a valid uri: %s", err), c, c.AuthorizationURL)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("securityScheme.authorizationUrl is not a valid uri: %w", err), c, c.AuthorizationURL)) } } if c.TokenURL.Present && s.TokenURL != nil && *s.TokenURL != "" { if _, err := url.Parse(*s.TokenURL); err != nil { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("securityScheme.tokenUrl is not a valid uri: %s", err), c, c.TokenURL)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("securityScheme.tokenUrl is not a valid uri: %w", err), c, c.TokenURL)) } } @@ -272,7 +274,9 @@ func (s *SecurityRequirement) Validate(ctx context.Context, opts ...validation.O secScheme, exists := swagger.SecurityDefinitions.Get(name) if !exists { errs = append(errs, 
validation.NewValidationError( - validation.NewValueValidationError("security requirement '%s' does not match any security scheme in securityDefinitions", name), + validation.SeverityError, + validation.RuleValidationSchemeNotFound, + fmt.Errorf("security requirement `%s` does not match any security scheme in securityDefinitions", name), c.RootNode)) continue } @@ -281,7 +285,9 @@ func (s *SecurityRequirement) Validate(ctx context.Context, opts ...validation.O if secScheme.Type != SecuritySchemeTypeOAuth2 { if len(scopes) > 0 { errs = append(errs, validation.NewValidationError( - validation.NewValueValidationError("security requirement '%s' must have empty scopes array for non-oauth2 security scheme (type=%s)", name, secScheme.Type), + validation.SeverityError, + validation.RuleValidationAllowedValues, + fmt.Errorf("security requirement `%s` must have empty scopes array for non-oauth2 security scheme (type=`%s`)", name, secScheme.Type), c.RootNode)) } } diff --git a/swagger/security_validate_test.go b/swagger/security_validate_test.go index 4da59fda..b53170f7 100644 --- a/swagger/security_validate_test.go +++ b/swagger/security_validate_test.go @@ -99,7 +99,7 @@ func TestSecurityScheme_Validate_Error(t *testing.T) { { name: "missing_type", yml: `description: Some security scheme`, - wantErrs: []string{"securityScheme.type is missing"}, + wantErrs: []string{"`securityScheme.type` is required"}, }, { name: "invalid_type", @@ -111,13 +111,13 @@ description: Test`, name: "apiKey_missing_name", yml: `type: apiKey in: header`, - wantErrs: []string{"securityScheme.name is required for type=apiKey"}, + wantErrs: []string{"`securityScheme.name` is required for type=apiKey"}, }, { name: "apiKey_missing_in", yml: `type: apiKey name: X-API-Key`, - wantErrs: []string{"securityScheme.in is required for type=apiKey"}, + wantErrs: []string{"`securityScheme.in` is required for type=apiKey"}, }, { name: "apiKey_invalid_in", @@ -131,7 +131,7 @@ in: invalid`, yml: `type: oauth2 scopes: read: Read access`, - wantErrs: []string{"securityScheme.flow is required for type=oauth2"}, + wantErrs: []string{"`securityScheme.flow` is required for type=oauth2"}, }, { name: "oauth2_invalid_flow", @@ -180,7 +180,7 @@ scopes: yml: `type: oauth2 flow: password tokenUrl: https://example.com/token`, - wantErrs: []string{"securityScheme.scopes is required for type=oauth2"}, + wantErrs: []string{"`securityScheme.scopes` is required for type=oauth2"}, }, } diff --git a/swagger/swagger.go b/swagger/swagger.go index 9e1f86d7..13984732 100644 --- a/swagger/swagger.go +++ b/swagger/swagger.go @@ -2,6 +2,8 @@ package swagger import ( "context" + "errors" + "fmt" "mime" "strings" @@ -191,9 +193,9 @@ func (s *Swagger) Validate(ctx context.Context, opts ...validation.Option) []err errs := []error{} if c.Swagger.Present && s.Swagger == "" { - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("swagger is required"), c, c.Swagger)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("`swagger` is required"), c, c.Swagger)) } else if c.Swagger.Present && s.Swagger != "2.0" { - errs = append(errs, validation.NewValueError(validation.NewValueValidationError("swagger must be '2.0'"), c, c.Swagger)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationSupportedVersion, errors.New("swagger must be '2.0'"), c, c.Swagger)) } if c.Info.Present { @@ -204,7 +206,9 @@ func (s *Swagger) Validate(ctx 
context.Context, opts ...validation.Option) []err if c.BasePath.Present && s.BasePath != nil && *s.BasePath != "" { if !strings.HasPrefix(*s.BasePath, "/") { errs = append(errs, validation.NewValueError( - validation.NewValueValidationError("basePath must start with a leading slash '/'"), + validation.SeverityError, + validation.RuleValidationInvalidSyntax, + errors.New("basePath must start with a leading slash '/'"), c, c.BasePath)) } } @@ -222,7 +226,9 @@ func (s *Swagger) Validate(ctx context.Context, opts ...validation.Option) []err } if !valid { errs = append(errs, validation.NewValueError( - validation.NewValueValidationError("scheme must be one of [http, https, ws, wss], got '%s'", scheme), + validation.SeverityError, + validation.RuleValidationAllowedValues, + fmt.Errorf("scheme must be one of [http, https, ws, wss], got `%s`", scheme), c, c.Schemes)) } } @@ -233,7 +239,9 @@ func (s *Swagger) Validate(ctx context.Context, opts ...validation.Option) []err for _, mimeType := range s.Consumes { if _, _, err := mime.ParseMediaType(mimeType); err != nil { errs = append(errs, validation.NewValueError( - validation.NewValueValidationError("consumes contains invalid MIME type '%s': %s", mimeType, err), + validation.SeverityError, + validation.RuleValidationInvalidFormat, + fmt.Errorf("consumes contains invalid MIME type `%s`: %w", mimeType, err), c, c.Consumes)) } } @@ -244,7 +252,9 @@ func (s *Swagger) Validate(ctx context.Context, opts ...validation.Option) []err for _, mimeType := range s.Produces { if _, _, err := mime.ParseMediaType(mimeType); err != nil { errs = append(errs, validation.NewValueError( - validation.NewValueValidationError("produces contains invalid MIME type '%s': %s", mimeType, err), + validation.SeverityError, + validation.RuleValidationInvalidFormat, + fmt.Errorf("produces contains invalid MIME type `%s`: %w", mimeType, err), c, c.Produces)) } } @@ -261,7 +271,9 @@ func (s *Swagger) Validate(ctx context.Context, opts ...validation.Option) []err if tag != nil && tag.Name != "" { if tagNames[tag.Name] { errs = append(errs, validation.NewValueError( - validation.NewValueValidationError("tag name '%s' must be unique", tag.Name), + validation.SeverityError, + validation.RuleValidationDuplicateKey, + fmt.Errorf("tag name `%s` must be unique", tag.Name), c, c.Tags)) } tagNames[tag.Name] = true @@ -320,7 +332,9 @@ func (s *Swagger) validateOperationIDUniqueness(c *core.Swagger) []error { opID := *operation.OperationID if operationIDs[opID] { errs = append(errs, validation.NewValueError( - validation.NewValueValidationError("operationId '%s' must be unique among all operations", opID), + validation.SeverityError, + validation.RuleValidationDuplicateKey, + fmt.Errorf("operationId `%s` must be unique among all operations", opID), c, c.Paths)) } operationIDs[opID] = true diff --git a/swagger/swagger_test.go b/swagger/swagger_test.go index e63c8d3b..29ab4d52 100644 --- a/swagger/swagger_test.go +++ b/swagger/swagger_test.go @@ -79,13 +79,13 @@ func TestUnmarshal_ValidationErrors(t *testing.T) { title: Test API version: 1.0.0 paths: {}`, - expectedError: "swagger is missing", + expectedError: "`swagger.swagger` is required", }, { name: "missing info field", yaml: `swagger: "2.0" paths: {}`, - expectedError: "info is missing", + expectedError: "`swagger.info` is required", }, { name: "missing paths field", @@ -93,7 +93,7 @@ paths: {}`, info: title: Test API version: 1.0.0`, - expectedError: "paths is missing", + expectedError: "`swagger.paths` is required", }, { name: "missing 
info.title", @@ -101,7 +101,7 @@ info: info: version: 1.0.0 paths: {}`, - expectedError: "info.title is missing", + expectedError: "`info.title` is required", }, { name: "missing info.version", @@ -109,7 +109,7 @@ paths: {}`, info: title: Test API paths: {}`, - expectedError: "info.version is missing", + expectedError: "`info.version` is required", }, { name: "invalid swagger version", diff --git a/swagger/swagger_validate_test.go b/swagger/swagger_validate_test.go index 7a4b0f85..c611ddb4 100644 --- a/swagger/swagger_validate_test.go +++ b/swagger/swagger_validate_test.go @@ -584,7 +584,7 @@ tags: - name: users description: Duplicate tag paths: {}`, - wantErrs: []string{"tag name 'users' must be unique"}, + wantErrs: []string{"tag name `users` must be unique"}, }, } @@ -690,7 +690,7 @@ paths: responses: 200: description: Success`, - wantErrs: []string{"operationId 'getItems' must be unique"}, + wantErrs: []string{"operationId `getItems` must be unique"}, }, } @@ -1101,17 +1101,17 @@ func TestSecurityRequirement_Validate_Error(t *testing.T) { { name: "undefined_security_scheme", yml: `undefined: []`, - wantErrs: []string{"security requirement 'undefined' does not match any security scheme"}, + wantErrs: []string{"security requirement `undefined` does not match any security scheme"}, }, { name: "apiKey_with_non_empty_scopes", yml: `apiKey: ["some_scope"]`, - wantErrs: []string{"security requirement 'apiKey' must have empty scopes array for non-oauth2"}, + wantErrs: []string{"security requirement `apiKey` must have empty scopes array for non-oauth2"}, }, { name: "basic_with_non_empty_scopes", yml: `basic: ["some_scope"]`, - wantErrs: []string{"security requirement 'basic' must have empty scopes array for non-oauth2"}, + wantErrs: []string{"security requirement `basic` must have empty scopes array for non-oauth2"}, }, } diff --git a/swagger/tag.go b/swagger/tag.go index 60b6aba0..e294744f 100644 --- a/swagger/tag.go +++ b/swagger/tag.go @@ -2,6 +2,7 @@ package swagger import ( "context" + "errors" "github.com/speakeasy-api/openapi/extensions" "github.com/speakeasy-api/openapi/internal/interfaces" @@ -64,7 +65,7 @@ func (t *Tag) Validate(ctx context.Context, opts ...validation.Option) []error { errs := []error{} if c.Name.Present && t.Name == "" { - errs = append(errs, validation.NewValueError(validation.NewMissingValueError("tag.name is required"), c, c.Name)) + errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("`tag.name` is required"), c, c.Name)) } if c.ExternalDocs.Present { diff --git a/swagger/tag_validate_test.go b/swagger/tag_validate_test.go index e3bf4a79..ab755e87 100644 --- a/swagger/tag_validate_test.go +++ b/swagger/tag_validate_test.go @@ -63,7 +63,7 @@ func TestTag_Validate_Error(t *testing.T) { { name: "missing_name", yml: `description: Some description`, - wantErrs: []string{"tag.name is missing"}, + wantErrs: []string{"`tag.name` is required"}, }, } @@ -142,7 +142,7 @@ func TestExternalDocumentation_Validate_Error(t *testing.T) { { name: "missing_url", yml: `description: Some description`, - wantErrs: []string{"externalDocumentation.url is missing"}, + wantErrs: []string{"`externalDocumentation.url` is required"}, }, } diff --git a/validation/errors.go b/validation/errors.go index 447757f8..d3ff660c 100644 --- a/validation/errors.go +++ b/validation/errors.go @@ -6,23 +6,72 @@ import ( "gopkg.in/yaml.v3" ) +type Severity string + +const ( + SeverityError Severity = "error" + SeverityWarning Severity = 
"warning" + SeverityHint Severity = "hint" +) + +func (s Severity) String() string { + return string(s) +} + +// Rank returns a numeric rank for severity comparison. +// Higher rank means worse severity. +// SeverityError = 2, SeverityWarning = 1, SeverityHint = 0. +// Unknown severities are treated as SeverityError. +func (s Severity) Rank() int { + switch s { + case SeverityError: + return 2 + case SeverityWarning: + return 1 + case SeverityHint: + return 0 + default: + return 2 // Treat unknown as error + } +} + // Error represents a validation error and the line and column where it occurred // TODO allow getting the JSON path for line/column for validation errors type Error struct { UnderlyingError error Node *yaml.Node + Severity Severity + Rule string + Fix Fix + // DocumentLocation is the absolute location (URL or file path) of the document + // where the error originated. Empty means the main document. + DocumentLocation string +} + +// Fix represents a suggested fix for a error finding +type Fix interface { + Apply(doc any) error + FixDescription() string } var _ error = (*Error)(nil) func (e Error) Error() string { - return fmt.Sprintf("[%d:%d] %s", e.GetLineNumber(), e.GetColumnNumber(), e.UnderlyingError.Error()) + msg := fmt.Sprintf("[%d:%d] %s %s %s", e.GetLineNumber(), e.GetColumnNumber(), e.Severity, e.Rule, e.UnderlyingError.Error()) + if e.DocumentLocation != "" { + msg = fmt.Sprintf("%s (document: %s)", msg, e.DocumentLocation) + } + return msg } func (e Error) Unwrap() error { return e.UnderlyingError } +func (e Error) GetNode() *yaml.Node { + return e.Node +} + func (e Error) GetLineNumber() int { if e.Node == nil { return -1 @@ -37,6 +86,15 @@ func (e Error) GetColumnNumber() int { return e.Node.Column } +func (e Error) GetSeverity() Severity { + return e.Severity +} + +// GetDocumentLocation returns the document location where the error originated. +func (e Error) GetDocumentLocation() string { + return e.DocumentLocation +} + // ValueNodeGetter provides access to value nodes for error reporting. type ValueNodeGetter interface { GetValueNodeOrRoot(root *yaml.Node) *yaml.Node @@ -57,10 +115,23 @@ type MapValueNodeGetter interface { GetMapValueNodeOrRoot(key string, root *yaml.Node) *yaml.Node } -func NewValidationError(err error, node *yaml.Node) error { +func NewValidationError(severity Severity, rule string, err error, node *yaml.Node) error { return &Error{ UnderlyingError: err, Node: node, + Severity: severity, + Rule: rule, + } +} + +// NewValidationErrorWithDocumentLocation creates a validation error with document location metadata. 
+func NewValidationErrorWithDocumentLocation(severity Severity, rule string, err error, node *yaml.Node, documentLocation string) error { + return &Error{ + UnderlyingError: err, + Node: node, + Severity: severity, + Rule: rule, + DocumentLocation: documentLocation, } } @@ -68,7 +139,7 @@ type CoreModeler interface { GetRootNode() *yaml.Node } -func NewValueError(err error, core CoreModeler, node ValueNodeGetter) error { +func NewValueError(severity Severity, rule string, err error, core CoreModeler, node ValueNodeGetter) error { rootNode := core.GetRootNode() if rootNode == nil { @@ -76,6 +147,8 @@ func NewValueError(err error, core CoreModeler, node ValueNodeGetter) error { return &Error{ UnderlyingError: err, // Default to line 0, column 0 if we can't get location info + Severity: severity, + Rule: rule, } } valueNode := node.GetValueNodeOrRoot(rootNode) @@ -83,10 +156,12 @@ func NewValueError(err error, core CoreModeler, node ValueNodeGetter) error { return &Error{ UnderlyingError: err, Node: valueNode, + Severity: severity, + Rule: rule, } } -func NewSliceError(err error, core CoreModeler, node SliceNodeGetter, index int) error { +func NewSliceError(severity Severity, rule string, err error, core CoreModeler, node SliceNodeGetter, index int) error { rootNode := core.GetRootNode() if rootNode == nil { @@ -94,6 +169,8 @@ func NewSliceError(err error, core CoreModeler, node SliceNodeGetter, index int) return &Error{ UnderlyingError: err, // Default to line 0, column 0 if we can't get location info + Severity: severity, + Rule: rule, } } valueNode := node.GetSliceValueNodeOrRoot(index, rootNode) @@ -101,10 +178,12 @@ func NewSliceError(err error, core CoreModeler, node SliceNodeGetter, index int) return &Error{ UnderlyingError: err, Node: valueNode, + Severity: severity, + Rule: rule, } } -func NewMapKeyError(err error, core CoreModeler, node MapKeyNodeGetter, key string) error { +func NewMapKeyError(severity Severity, rule string, err error, core CoreModeler, node MapKeyNodeGetter, key string) error { rootNode := core.GetRootNode() if rootNode == nil { @@ -112,6 +191,8 @@ func NewMapKeyError(err error, core CoreModeler, node MapKeyNodeGetter, key stri return &Error{ UnderlyingError: err, // Default to line 0, column 0 if we can't get location info + Severity: severity, + Rule: rule, } } valueNode := node.GetMapKeyNodeOrRoot(key, rootNode) @@ -119,10 +200,12 @@ func NewMapKeyError(err error, core CoreModeler, node MapKeyNodeGetter, key stri return &Error{ UnderlyingError: err, Node: valueNode, + Severity: severity, + Rule: rule, } } -func NewMapValueError(err error, core CoreModeler, node MapValueNodeGetter, key string) error { +func NewMapValueError(severity Severity, rule string, err error, core CoreModeler, node MapValueNodeGetter, key string) error { rootNode := core.GetRootNode() if rootNode == nil { @@ -130,6 +213,8 @@ func NewMapValueError(err error, core CoreModeler, node MapValueNodeGetter, key return &Error{ UnderlyingError: err, // Default to line 0, column 0 if we can't get location info + Severity: severity, + Rule: rule, } } valueNode := node.GetMapValueNodeOrRoot(key, rootNode) @@ -137,6 +222,8 @@ func NewMapValueError(err error, core CoreModeler, node MapValueNodeGetter, key return &Error{ UnderlyingError: err, Node: valueNode, + Severity: severity, + Rule: rule, } } @@ -166,55 +253,3 @@ func (e TypeMismatchError) Error() string { return fmt.Sprintf("%s%s", name, e.Msg) } - -type MissingFieldError struct { - Msg string -} - -var _ error = (*MissingFieldError)(nil) - -func 
NewMissingFieldError(msg string, args ...any) *MissingFieldError { - return &MissingFieldError{ - Msg: fmt.Sprintf(msg, args...), - } -} - -func (e MissingFieldError) Error() string { - return e.Msg -} - -type MissingValueError struct { - Msg string -} - -var _ error = (*MissingValueError)(nil) - -func NewMissingValueError(msg string, args ...any) *MissingValueError { - return &MissingValueError{ - Msg: fmt.Sprintf(msg, args...), - } -} - -func (e MissingValueError) Error() string { - return e.Msg -} - -type ValueValidationError struct { - Msg string -} - -var _ error = (*ValueValidationError)(nil) - -func NewValueValidationError(msg string, args ...any) *ValueValidationError { - if len(args) > 0 { - msg = fmt.Sprintf(msg, args...) - } - - return &ValueValidationError{ - Msg: msg, - } -} - -func (e ValueValidationError) Error() string { - return e.Msg -} diff --git a/validation/rules.go b/validation/rules.go new file mode 100644 index 00000000..49412c1f --- /dev/null +++ b/validation/rules.go @@ -0,0 +1,139 @@ +package validation + +const ( + // Spec Validation Rules + RuleValidationRequiredField = "validation-required-field" + RuleValidationTypeMismatch = "validation-type-mismatch" + RuleValidationDuplicateKey = "validation-duplicate-key" + RuleValidationInvalidFormat = "validation-invalid-format" + RuleValidationEmptyValue = "validation-empty-value" + RuleValidationInvalidReference = "validation-invalid-reference" + RuleValidationInvalidSyntax = "validation-invalid-syntax" + RuleValidationInvalidSchema = "validation-invalid-schema" + RuleValidationInvalidTarget = "validation-invalid-target" + RuleValidationAllowedValues = "validation-allowed-values" + RuleValidationMutuallyExclusiveFields = "validation-mutually-exclusive-fields" + RuleValidationOperationNotFound = "validation-operation-not-found" + RuleValidationOperationIdUnique = "validation-operation-id-unique" + RuleValidationOperationParameters = "validation-operation-parameters" + RuleValidationSchemeNotFound = "validation-scheme-not-found" + RuleValidationTagNotFound = "validation-tag-not-found" + RuleValidationSupportedVersion = "validation-supported-version" + RuleValidationCircularReference = "validation-circular-reference" +) + +type RuleInfo struct { + Summary string + Description string + HowToFix string +} + +var ruleInfoByID = map[string]RuleInfo{ + RuleValidationRequiredField: { + Summary: "Missing required field.", + Description: "Required fields must be present in the document. Missing required fields cause validation to fail.", + HowToFix: "Provide the required field in the document.", + }, + RuleValidationTypeMismatch: { + Summary: "Type mismatch.", + Description: "Values must match the schema types defined in the specification. Mismatched types can break tooling and validation.", + HowToFix: "Update the value to match the schema type or adjust the schema.", + }, + RuleValidationDuplicateKey: { + Summary: "Duplicate key.", + Description: "Duplicate keys are not allowed in objects. Remove duplicates to avoid parsing ambiguity.", + HowToFix: "Remove or rename the duplicate key.", + }, + RuleValidationInvalidFormat: { + Summary: "Invalid format.", + Description: "Values must match the specified format. Invalid formats can lead to runtime or interoperability issues.", + HowToFix: "Use a value that conforms to the required format.", + }, + RuleValidationEmptyValue: { + Summary: "Empty value.", + Description: "Values must not be empty when the field requires content. 
Empty values typically indicate missing data.", + HowToFix: "Provide a non-empty value.", + }, + RuleValidationInvalidReference: { + Summary: "Invalid reference.", + Description: "References must resolve to existing components or locations. Broken references prevent correct validation and resolution.", + HowToFix: "Fix the $ref target or define the referenced component.", + }, + RuleValidationInvalidSyntax: { + Summary: "Invalid syntax.", + Description: "Documents must be valid YAML or JSON. Syntax errors prevent parsing.", + HowToFix: "Correct the syntax errors in the document.", + }, + RuleValidationInvalidSchema: { + Summary: "Invalid schema.", + Description: "Schemas must be valid according to the OpenAPI/JSON Schema rules. Invalid schemas can make the document unusable for tooling.", + HowToFix: "Correct schema keywords and values to match the specification.", + }, + RuleValidationInvalidTarget: { + Summary: "Invalid target.", + Description: "Validation targets must exist and be valid for the context. Invalid targets typically indicate a bad reference path.", + HowToFix: "Point to a valid target or adjust the reference context.", + }, + RuleValidationAllowedValues: { + Summary: "Value not allowed.", + Description: "Values must be one of the allowed values. Using disallowed values violates the specification.", + HowToFix: "Use a value from the allowed set.", + }, + RuleValidationMutuallyExclusiveFields: { + Summary: "Mutually exclusive fields.", + Description: "Mutually exclusive fields cannot be used together. Choose one of the conflicting fields.", + HowToFix: "Remove one of the conflicting fields.", + }, + RuleValidationOperationNotFound: { + Summary: "Operation not found.", + Description: "Referenced operations must exist in the specification. Missing operations indicate a broken link.", + HowToFix: "Add the operation or fix the reference.", + }, + RuleValidationOperationIdUnique: { + Summary: "Duplicate operationId.", + Description: "Operation IDs must be unique across the specification. Duplicate IDs cause conflicts in tooling.", + HowToFix: "Assign unique operationId values.", + }, + RuleValidationOperationParameters: { + Summary: "Invalid operation parameters.", + Description: "Operation parameters must be valid and correctly defined. Invalid parameters can break request handling.", + HowToFix: "Fix parameter definitions and resolve invalid references.", + }, + RuleValidationSchemeNotFound: { + Summary: "Security scheme not found.", + Description: "Referenced security schemes must be defined. Missing schemes make security requirements invalid.", + HowToFix: "Define the security scheme or fix the scheme reference.", + }, + RuleValidationTagNotFound: { + Summary: "Tag not found.", + Description: "Operation tags should be defined in the top-level tags array. Undefined tags make documentation inconsistent.", + HowToFix: "Add the tag to the top-level tags array or fix the tag name.", + }, + RuleValidationSupportedVersion: { + Summary: "Unsupported OpenAPI version.", + Description: "The document must use a supported OpenAPI version. Unsupported versions may not be parsed correctly.", + HowToFix: "Update the document to a supported OpenAPI version.", + }, + RuleValidationCircularReference: { + Summary: "Circular reference.", + Description: "Schemas must not contain circular references that cannot be resolved. 
Unresolvable cycles can break validation and tooling.", + HowToFix: "Refactor schemas to break the reference cycle.", + }, +} + +func RuleInfoForID(ruleID string) (RuleInfo, bool) { + info, ok := ruleInfoByID[ruleID] + return info, ok +} + +func RuleSummary(ruleID string) string { + return ruleInfoByID[ruleID].Summary +} + +func RuleDescription(ruleID string) string { + return ruleInfoByID[ruleID].Description +} + +func RuleHowToFix(ruleID string) string { + return ruleInfoByID[ruleID].HowToFix +} diff --git a/validation/rules_test.go b/validation/rules_test.go new file mode 100644 index 00000000..eb0ac09c --- /dev/null +++ b/validation/rules_test.go @@ -0,0 +1,145 @@ +package validation + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestRuleInfoForID_Success(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + ruleID string + expectOK bool + expectInfo RuleInfo + }{ + { + name: "known rule returns info", + ruleID: RuleValidationRequiredField, + expectOK: true, + expectInfo: RuleInfo{ + Summary: "Missing required field.", + Description: "Required fields must be present in the document. Missing required fields cause validation to fail.", + HowToFix: "Provide the required field in the document.", + }, + }, + { + name: "another known rule returns info", + ruleID: RuleValidationCircularReference, + expectOK: true, + expectInfo: RuleInfo{ + Summary: "Circular reference.", + Description: "Schemas must not contain circular references that cannot be resolved. Unresolvable cycles can break validation and tooling.", + HowToFix: "Refactor schemas to break the reference cycle.", + }, + }, + { + name: "unknown rule returns empty info", + ruleID: "unknown-rule-id", + expectOK: false, + expectInfo: RuleInfo{}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + info, ok := RuleInfoForID(tt.ruleID) + assert.Equal(t, tt.expectOK, ok, "ok should match expected") + assert.Equal(t, tt.expectInfo, info, "info should match expected") + }) + } +} + +func TestRuleSummary_Success(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + ruleID string + expected string + }{ + { + name: "known rule returns summary", + ruleID: RuleValidationTypeMismatch, + expected: "Type mismatch.", + }, + { + name: "unknown rule returns empty string", + ruleID: "unknown-rule", + expected: "", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + result := RuleSummary(tt.ruleID) + assert.Equal(t, tt.expected, result, "summary should match expected") + }) + } +} + +func TestRuleDescription_Success(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + ruleID string + expected string + }{ + { + name: "known rule returns description", + ruleID: RuleValidationDuplicateKey, + expected: "Duplicate keys are not allowed in objects. 
Remove duplicates to avoid parsing ambiguity.", + }, + { + name: "unknown rule returns empty string", + ruleID: "unknown-rule", + expected: "", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + result := RuleDescription(tt.ruleID) + assert.Equal(t, tt.expected, result, "description should match expected") + }) + } +} + +func TestRuleHowToFix_Success(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + ruleID string + expected string + }{ + { + name: "known rule returns how to fix", + ruleID: RuleValidationInvalidReference, + expected: "Fix the $ref target or define the referenced component.", + }, + { + name: "unknown rule returns empty string", + ruleID: "unknown-rule", + expected: "", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + result := RuleHowToFix(tt.ruleID) + assert.Equal(t, tt.expected, result, "how to fix should match expected") + }) + } +} diff --git a/validation/utils_test.go b/validation/utils_test.go index 7b42dee6..9e9c2a30 100644 --- a/validation/utils_test.go +++ b/validation/utils_test.go @@ -1,7 +1,7 @@ package validation import ( - stderrors "errors" + "errors" "testing" "github.com/stretchr/testify/assert" @@ -27,13 +27,13 @@ func TestSortValidationErrors_Success(t *testing.T) { name: "single validation error", errors: []error{ &Error{ - UnderlyingError: stderrors.New("error1"), + UnderlyingError: errors.New("error1"), Node: &yaml.Node{Line: 5, Column: 10}, }, }, expected: []error{ &Error{ - UnderlyingError: stderrors.New("error1"), + UnderlyingError: errors.New("error1"), Node: &yaml.Node{Line: 5, Column: 10}, }, }, @@ -42,29 +42,29 @@ func TestSortValidationErrors_Success(t *testing.T) { name: "multiple validation errors sorted by line", errors: []error{ &Error{ - UnderlyingError: stderrors.New("error3"), + UnderlyingError: errors.New("error3"), Node: &yaml.Node{Line: 10, Column: 5}, }, &Error{ - UnderlyingError: stderrors.New("error1"), + UnderlyingError: errors.New("error1"), Node: &yaml.Node{Line: 2, Column: 3}, }, &Error{ - UnderlyingError: stderrors.New("error2"), + UnderlyingError: errors.New("error2"), Node: &yaml.Node{Line: 5, Column: 8}, }, }, expected: []error{ &Error{ - UnderlyingError: stderrors.New("error1"), + UnderlyingError: errors.New("error1"), Node: &yaml.Node{Line: 2, Column: 3}, }, &Error{ - UnderlyingError: stderrors.New("error2"), + UnderlyingError: errors.New("error2"), Node: &yaml.Node{Line: 5, Column: 8}, }, &Error{ - UnderlyingError: stderrors.New("error3"), + UnderlyingError: errors.New("error3"), Node: &yaml.Node{Line: 10, Column: 5}, }, }, @@ -73,29 +73,29 @@ func TestSortValidationErrors_Success(t *testing.T) { name: "validation errors with same line sorted by column", errors: []error{ &Error{ - UnderlyingError: stderrors.New("error2"), + UnderlyingError: errors.New("error2"), Node: &yaml.Node{Line: 5, Column: 15}, }, &Error{ - UnderlyingError: stderrors.New("error1"), + UnderlyingError: errors.New("error1"), Node: &yaml.Node{Line: 5, Column: 3}, }, &Error{ - UnderlyingError: stderrors.New("error3"), + UnderlyingError: errors.New("error3"), Node: &yaml.Node{Line: 5, Column: 20}, }, }, expected: []error{ &Error{ - UnderlyingError: stderrors.New("error1"), + UnderlyingError: errors.New("error1"), Node: &yaml.Node{Line: 5, Column: 3}, }, &Error{ - UnderlyingError: stderrors.New("error2"), + UnderlyingError: errors.New("error2"), Node: &yaml.Node{Line: 5, Column: 15}, }, &Error{ - UnderlyingError: stderrors.New("error3"), + 
UnderlyingError: errors.New("error3"), Node: &yaml.Node{Line: 5, Column: 20}, }, }, @@ -103,54 +103,54 @@ func TestSortValidationErrors_Success(t *testing.T) { { name: "mix of validation errors and regular errors", errors: []error{ - stderrors.New("regular error 2"), + errors.New("regular error 2"), &Error{ - UnderlyingError: stderrors.New("validation error"), + UnderlyingError: errors.New("validation error"), Node: &yaml.Node{Line: 5, Column: 10}, }, - stderrors.New("regular error 1"), + errors.New("regular error 1"), }, expected: []error{ &Error{ - UnderlyingError: stderrors.New("validation error"), + UnderlyingError: errors.New("validation error"), Node: &yaml.Node{Line: 5, Column: 10}, }, - stderrors.New("regular error 2"), - stderrors.New("regular error 1"), + errors.New("regular error 2"), + errors.New("regular error 1"), }, }, { name: "only regular errors", errors: []error{ - stderrors.New("error C"), - stderrors.New("error A"), - stderrors.New("error B"), + errors.New("error C"), + errors.New("error A"), + errors.New("error B"), }, expected: []error{ - stderrors.New("error C"), - stderrors.New("error A"), - stderrors.New("error B"), + errors.New("error C"), + errors.New("error A"), + errors.New("error B"), }, }, { name: "validation errors with nil nodes", errors: []error{ &Error{ - UnderlyingError: stderrors.New("error with nil node"), + UnderlyingError: errors.New("error with nil node"), Node: nil, }, &Error{ - UnderlyingError: stderrors.New("error with node"), + UnderlyingError: errors.New("error with node"), Node: &yaml.Node{Line: 5, Column: 10}, }, }, expected: []error{ &Error{ - UnderlyingError: stderrors.New("error with nil node"), + UnderlyingError: errors.New("error with nil node"), Node: nil, }, &Error{ - UnderlyingError: stderrors.New("error with node"), + UnderlyingError: errors.New("error with node"), Node: &yaml.Node{Line: 5, Column: 10}, }, }, @@ -158,65 +158,65 @@ func TestSortValidationErrors_Success(t *testing.T) { { name: "complex mixed scenario", errors: []error{ - stderrors.New("regular error"), + errors.New("regular error"), &Error{ - UnderlyingError: stderrors.New("validation error line 10"), + UnderlyingError: errors.New("validation error line 10"), Node: &yaml.Node{Line: 10, Column: 5}, }, &Error{ - UnderlyingError: stderrors.New("validation error line 2 col 15"), + UnderlyingError: errors.New("validation error line 2 col 15"), Node: &yaml.Node{Line: 2, Column: 15}, }, &Error{ - UnderlyingError: stderrors.New("validation error line 2 col 3"), + UnderlyingError: errors.New("validation error line 2 col 3"), Node: &yaml.Node{Line: 2, Column: 3}, }, - stderrors.New("another regular error"), + errors.New("another regular error"), }, expected: []error{ &Error{ - UnderlyingError: stderrors.New("validation error line 2 col 3"), + UnderlyingError: errors.New("validation error line 2 col 3"), Node: &yaml.Node{Line: 2, Column: 3}, }, &Error{ - UnderlyingError: stderrors.New("validation error line 2 col 15"), + UnderlyingError: errors.New("validation error line 2 col 15"), Node: &yaml.Node{Line: 2, Column: 15}, }, &Error{ - UnderlyingError: stderrors.New("validation error line 10"), + UnderlyingError: errors.New("validation error line 10"), Node: &yaml.Node{Line: 10, Column: 5}, }, - stderrors.New("regular error"), - stderrors.New("another regular error"), + errors.New("regular error"), + errors.New("another regular error"), }, }, { name: "validation errors with zero line/column", errors: []error{ &Error{ - UnderlyingError: stderrors.New("error at 0,0"), + 
UnderlyingError: errors.New("error at 0,0"), Node: &yaml.Node{Line: 0, Column: 0}, }, &Error{ - UnderlyingError: stderrors.New("error at 1,1"), + UnderlyingError: errors.New("error at 1,1"), Node: &yaml.Node{Line: 1, Column: 1}, }, &Error{ - UnderlyingError: stderrors.New("error at 0,5"), + UnderlyingError: errors.New("error at 0,5"), Node: &yaml.Node{Line: 0, Column: 5}, }, }, expected: []error{ &Error{ - UnderlyingError: stderrors.New("error at 0,0"), + UnderlyingError: errors.New("error at 0,0"), Node: &yaml.Node{Line: 0, Column: 0}, }, &Error{ - UnderlyingError: stderrors.New("error at 0,5"), + UnderlyingError: errors.New("error at 0,5"), Node: &yaml.Node{Line: 0, Column: 5}, }, &Error{ - UnderlyingError: stderrors.New("error at 1,1"), + UnderlyingError: errors.New("error at 1,1"), Node: &yaml.Node{Line: 1, Column: 1}, }, }, @@ -245,8 +245,8 @@ func TestSortValidationErrors_Success(t *testing.T) { // Check if both are validation errors var expectedValidationErr, actualValidationErr *Error - expectedIsValidation := stderrors.As(expectedErr, &expectedValidationErr) - actualIsValidation := stderrors.As(actualErr, &actualValidationErr) + expectedIsValidation := errors.As(expectedErr, &expectedValidationErr) + actualIsValidation := errors.As(actualErr, &actualValidationErr) switch { case expectedIsValidation && actualIsValidation: @@ -290,7 +290,7 @@ func TestSortValidationErrors_EdgeCases_Success(t *testing.T) { errors := []error{ nil, &Error{ - UnderlyingError: stderrors.New("valid error"), + UnderlyingError: errors.New("valid error"), Node: &yaml.Node{Line: 1, Column: 1}, }, nil, @@ -313,15 +313,15 @@ func TestSortValidationErrors_EdgeCases_Success(t *testing.T) { errors := []error{ &Error{ - UnderlyingError: stderrors.New("error with negative line"), + UnderlyingError: errors.New("error with negative line"), Node: &yaml.Node{Line: -1, Column: 5}, }, &Error{ - UnderlyingError: stderrors.New("error with positive line"), + UnderlyingError: errors.New("error with positive line"), Node: &yaml.Node{Line: 1, Column: 5}, }, &Error{ - UnderlyingError: stderrors.New("error with negative column"), + UnderlyingError: errors.New("error with negative column"), Node: &yaml.Node{Line: 1, Column: -1}, }, } @@ -337,4 +337,137 @@ func TestSortValidationErrors_EdgeCases_Success(t *testing.T) { assert.Equal(t, "error with negative column", err1.UnderlyingError.Error()) assert.Equal(t, "error with positive line", err2.UnderlyingError.Error()) }) + + t.Run("same line and column sorted by error message", func(t *testing.T) { + t.Parallel() + + errors := []error{ + &Error{ + UnderlyingError: errors.New("zzz error"), + Node: &yaml.Node{Line: 5, Column: 10}, + }, + &Error{ + UnderlyingError: errors.New("aaa error"), + Node: &yaml.Node{Line: 5, Column: 10}, + }, + &Error{ + UnderlyingError: errors.New("mmm error"), + Node: &yaml.Node{Line: 5, Column: 10}, + }, + } + + SortValidationErrors(errors) + + var err0, err1, err2 *Error + require.ErrorAs(t, errors[0], &err0) + require.ErrorAs(t, errors[1], &err1) + require.ErrorAs(t, errors[2], &err2) + assert.Equal(t, "aaa error", err0.UnderlyingError.Error()) + assert.Equal(t, "mmm error", err1.UnderlyingError.Error()) + assert.Equal(t, "zzz error", err2.UnderlyingError.Error()) + }) + + t.Run("same line column and identical error message", func(t *testing.T) { + t.Parallel() + + errors := []error{ + &Error{ + UnderlyingError: errors.New("same error"), + Node: &yaml.Node{Line: 5, Column: 10}, + Severity: SeverityError, + }, + &Error{ + UnderlyingError: errors.New("same 
error"), + Node: &yaml.Node{Line: 5, Column: 10}, + Severity: SeverityWarning, + }, + } + + SortValidationErrors(errors) + + // Both have same message so order should remain stable + var err0, err1 *Error + require.ErrorAs(t, errors[0], &err0) + require.ErrorAs(t, errors[1], &err1) + // Both should have the same message + assert.Equal(t, "same error", err0.UnderlyingError.Error()) + assert.Equal(t, "same error", err1.UnderlyingError.Error()) + // Stable sort means first stays first + assert.Equal(t, SeverityError, err0.Severity) + assert.Equal(t, SeverityWarning, err1.Severity) + }) + + t.Run("interleaved regular and validation errors forces all comparison branches", func(t *testing.T) { + t.Parallel() + + // Interleave regular and validation errors to force the sorting algorithm + // to compare them in both directions (a=regular/b=validation AND a=validation/b=regular) + errors := []error{ + errors.New("regular error 1"), + &Error{ + UnderlyingError: errors.New("validation error 1"), + Node: &yaml.Node{Line: 10, Column: 5}, + }, + errors.New("regular error 2"), + &Error{ + UnderlyingError: errors.New("validation error 2"), + Node: &yaml.Node{Line: 5, Column: 3}, + }, + errors.New("regular error 3"), + &Error{ + UnderlyingError: errors.New("validation error 3"), + Node: &yaml.Node{Line: 15, Column: 7}, + }, + errors.New("regular error 4"), + } + + SortValidationErrors(errors) + + // Validation errors should come first, sorted by line number + var validationErr0, validationErr1, validationErr2 *Error + require.ErrorAs(t, errors[0], &validationErr0) + require.ErrorAs(t, errors[1], &validationErr1) + require.ErrorAs(t, errors[2], &validationErr2) + assert.Equal(t, 5, validationErr0.Node.Line, "first validation error should be line 5") + assert.Equal(t, 10, validationErr1.Node.Line, "second validation error should be line 10") + assert.Equal(t, 15, validationErr2.Node.Line, "third validation error should be line 15") + + // Regular errors should follow, preserving stable order + var notValidation *Error + assert.NotErrorAs(t, errors[3], ¬Validation, "index 3 should be regular error") + assert.NotErrorAs(t, errors[4], ¬Validation, "index 4 should be regular error") + assert.NotErrorAs(t, errors[5], ¬Validation, "index 5 should be regular error") + assert.NotErrorAs(t, errors[6], ¬Validation, "index 6 should be regular error") + }) + + t.Run("validation errors first then regular errors forces bIsValidationErr", func(t *testing.T) { + t.Parallel() + + // Start with validation errors, then regular errors + // The merge sort should compare elements in the opposite direction during some phase + errors := []error{ + &Error{ + UnderlyingError: errors.New("validation error 1"), + Node: &yaml.Node{Line: 20, Column: 10}, + }, + &Error{ + UnderlyingError: errors.New("validation error 2"), + Node: &yaml.Node{Line: 10, Column: 5}, + }, + errors.New("regular error 1"), + errors.New("regular error 2"), + } + + SortValidationErrors(errors) + + // Validation errors should come first, sorted by line + var validationErr0, validationErr1 *Error + require.ErrorAs(t, errors[0], &validationErr0) + require.ErrorAs(t, errors[1], &validationErr1) + assert.Equal(t, 10, validationErr0.Node.Line) + assert.Equal(t, 20, validationErr1.Node.Line) + // Regular errors follow + assert.Equal(t, "regular error 1", errors[2].Error()) + assert.Equal(t, "regular error 2", errors[3].Error()) + }) } diff --git a/validation/validation_test.go b/validation/validation_test.go index ce7cf5b9..d53c2ed9 100644 --- 
a/validation/validation_test.go +++ b/validation/validation_test.go @@ -22,31 +22,37 @@ func TestError_Error_Success(t *testing.T) { name: "error with valid node", err: &Error{ UnderlyingError: errors.New("test error"), + Severity: SeverityError, + Rule: RuleValidationTypeMismatch, Node: &yaml.Node{ Line: 10, Column: 5, }, }, - expected: "[10:5] test error", + expected: "[10:5] error validation-type-mismatch test error", }, { name: "error with nil node", err: &Error{ UnderlyingError: errors.New("test error"), + Severity: SeverityWarning, + Rule: RuleValidationInvalidFormat, Node: nil, }, - expected: "[-1:-1] test error", + expected: "[-1:-1] warning validation-invalid-format test error", }, { name: "error with zero line/column", err: &Error{ UnderlyingError: errors.New("test error"), + Severity: SeverityError, + Rule: RuleValidationRequiredField, Node: &yaml.Node{ Line: 0, Column: 0, }, }, - expected: "[0:0] test error", + expected: "[0:0] error validation-required-field test error", }, } @@ -162,12 +168,14 @@ func TestNewValidationError_Success(t *testing.T) { underlyingErr := errors.New("test error") node := &yaml.Node{Line: 5, Column: 10} - result := NewValidationError(underlyingErr, node) + result := NewValidationError(SeverityError, RuleValidationTypeMismatch, underlyingErr, node) var validationErr *Error require.ErrorAs(t, result, &validationErr, "should return *Error type") assert.Equal(t, underlyingErr, validationErr.UnderlyingError) assert.Equal(t, node, validationErr.Node) + assert.Equal(t, SeverityError, validationErr.Severity) + assert.Equal(t, RuleValidationTypeMismatch, validationErr.Rule) } // Mock types for testing the error creation functions @@ -270,12 +278,14 @@ func TestNewValueError_Success(t *testing.T) { t.Parallel() underlyingErr := errors.New("test error") - result := NewValueError(underlyingErr, tt.core, tt.nodeGetter) + result := NewValueError(SeverityError, RuleValidationTypeMismatch, underlyingErr, tt.core, tt.nodeGetter) var validationErr *Error require.ErrorAs(t, result, &validationErr, "should return *Error type") assert.Equal(t, underlyingErr, validationErr.UnderlyingError) assert.Equal(t, tt.expectedNode, validationErr.Node) + assert.Equal(t, SeverityError, validationErr.Severity) + assert.Equal(t, RuleValidationTypeMismatch, validationErr.Rule) }) } } @@ -320,12 +330,14 @@ func TestNewSliceError_Success(t *testing.T) { t.Parallel() underlyingErr := errors.New("slice error") - result := NewSliceError(underlyingErr, tt.core, tt.nodeGetter, tt.index) + result := NewSliceError(SeverityError, RuleValidationTypeMismatch, underlyingErr, tt.core, tt.nodeGetter, tt.index) var validationErr *Error require.ErrorAs(t, result, &validationErr, "should return *Error type") assert.Equal(t, underlyingErr, validationErr.UnderlyingError) assert.Equal(t, tt.expectedNode, validationErr.Node) + assert.Equal(t, SeverityError, validationErr.Severity) + assert.Equal(t, RuleValidationTypeMismatch, validationErr.Rule) }) } } @@ -370,12 +382,14 @@ func TestNewMapKeyError_Success(t *testing.T) { t.Parallel() underlyingErr := errors.New("map key error") - result := NewMapKeyError(underlyingErr, tt.core, tt.nodeGetter, tt.key) + result := NewMapKeyError(SeverityError, RuleValidationTypeMismatch, underlyingErr, tt.core, tt.nodeGetter, tt.key) var validationErr *Error require.ErrorAs(t, result, &validationErr, "should return *Error type") assert.Equal(t, underlyingErr, validationErr.UnderlyingError) assert.Equal(t, tt.expectedNode, validationErr.Node) + assert.Equal(t, SeverityError, 
validationErr.Severity) + assert.Equal(t, RuleValidationTypeMismatch, validationErr.Rule) }) } } @@ -420,7 +434,7 @@ func TestNewMapValueError_Success(t *testing.T) { t.Parallel() underlyingErr := errors.New("map value error") - result := NewMapValueError(underlyingErr, tt.core, tt.nodeGetter, tt.key) + result := NewMapValueError(SeverityError, RuleValidationTypeMismatch, underlyingErr, tt.core, tt.nodeGetter, tt.key) var validationErr *Error require.ErrorAs(t, result, &validationErr, "should return *Error type") @@ -471,27 +485,29 @@ func TestTypeMismatchError_Success(t *testing.T) { } } -// Test MissingFieldError -func TestMissingFieldError_Success(t *testing.T) { +// Test Severity.String() method +func TestSeverity_String_Success(t *testing.T) { t.Parallel() tests := []struct { name string - msg string - args []any + severity Severity expected string }{ { - name: "simple missing field message", - msg: "required field missing", - args: nil, - expected: "required field missing", + name: "error severity", + severity: SeverityError, + expected: "error", + }, + { + name: "warning severity", + severity: SeverityWarning, + expected: "warning", }, { - name: "missing field with field name", - msg: "required field '%s' is missing", - args: []any{"name"}, - expected: "required field 'name' is missing", + name: "hint severity", + severity: SeverityHint, + expected: "hint", }, } @@ -499,34 +515,45 @@ func TestMissingFieldError_Success(t *testing.T) { t.Run(tt.name, func(t *testing.T) { t.Parallel() - err := NewMissingFieldError(tt.msg, tt.args...) - assert.Equal(t, tt.expected, err.Error()) - assert.Equal(t, tt.expected, err.Msg) + result := tt.severity.String() + assert.Equal(t, tt.expected, result, "severity string should match") }) } } -// Test MissingValueError -func TestMissingValueError_Success(t *testing.T) { +// Test Severity.Rank() method +func TestSeverity_Rank_Success(t *testing.T) { t.Parallel() tests := []struct { name string - msg string - args []any - expected string + severity Severity + expected int }{ { - name: "simple missing value message", - msg: "value is required", - args: nil, - expected: "value is required", + name: "error severity has rank 2", + severity: SeverityError, + expected: 2, + }, + { + name: "warning severity has rank 1", + severity: SeverityWarning, + expected: 1, + }, + { + name: "hint severity has rank 0", + severity: SeverityHint, + expected: 0, + }, + { + name: "unknown severity treated as error", + severity: Severity("unknown"), + expected: 2, }, { - name: "missing value with context", - msg: "value for field '%s' is required", - args: []any{"description"}, - expected: "value for field 'description' is required", + name: "empty severity treated as error", + severity: Severity(""), + expected: 2, }, } @@ -534,40 +561,201 @@ func TestMissingValueError_Success(t *testing.T) { t.Run(tt.name, func(t *testing.T) { t.Parallel() - err := NewMissingValueError(tt.msg, tt.args...) 
- assert.Equal(t, tt.expected, err.Error()) - assert.Equal(t, tt.expected, err.Msg) + result := tt.severity.Rank() + assert.Equal(t, tt.expected, result, "severity rank should match") }) } } -// Test ValueValidationError -func TestValueValidationError_Success(t *testing.T) { +// Test Severity.Rank() ordering for comparison +func TestSeverity_Rank_Ordering(t *testing.T) { + t.Parallel() + + // Verify that error > warning > hint in terms of rank (worse severity = higher rank) + assert.Greater(t, SeverityError.Rank(), SeverityWarning.Rank(), "error should have higher rank than warning") + assert.Greater(t, SeverityWarning.Rank(), SeverityHint.Rank(), "warning should have higher rank than hint") + assert.Greater(t, SeverityError.Rank(), SeverityHint.Rank(), "error should have higher rank than hint") +} + +// Test Error.GetSeverity() method +func TestError_GetSeverity_Success(t *testing.T) { t.Parallel() tests := []struct { name string - msg string - args []any + err *Error + expected Severity + }{ + { + name: "error severity", + err: &Error{ + UnderlyingError: errors.New("test error"), + Severity: SeverityError, + }, + expected: SeverityError, + }, + { + name: "warning severity", + err: &Error{ + UnderlyingError: errors.New("test warning"), + Severity: SeverityWarning, + }, + expected: SeverityWarning, + }, + { + name: "hint severity", + err: &Error{ + UnderlyingError: errors.New("test hint"), + Severity: SeverityHint, + }, + expected: SeverityHint, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + result := tt.err.GetSeverity() + assert.Equal(t, tt.expected, result, "severity should match") + }) + } +} + +// Test Error.Error() with DocumentLocation +func TestError_Error_WithDocumentLocation(t *testing.T) { + t.Parallel() + + err := &Error{ + UnderlyingError: errors.New("test error"), + Severity: SeverityError, + Rule: RuleValidationInvalidReference, + Node: &yaml.Node{Line: 5, Column: 3}, + DocumentLocation: "https://example.com/spec.yaml", + } + + result := err.Error() + assert.Equal(t, "[5:3] error validation-invalid-reference test error (document: https://example.com/spec.yaml)", result) +} + +// Test Error.GetNode() method +func TestError_GetNode_Success(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + err *Error + expected *yaml.Node + }{ + { + name: "returns node when set", + err: &Error{ + Node: &yaml.Node{Line: 10, Column: 5}, + }, + expected: &yaml.Node{Line: 10, Column: 5}, + }, + { + name: "returns nil when node is nil", + err: &Error{ + Node: nil, + }, + expected: nil, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + result := tt.err.GetNode() + assert.Equal(t, tt.expected, result, "node should match expected") + }) + } +} + +// Test Error.GetDocumentLocation() method +func TestError_GetDocumentLocation_Success(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + err *Error expected string }{ { - name: "simple validation error", - msg: "invalid value", - args: nil, - expected: "invalid value", + name: "returns document location when set", + err: &Error{ + DocumentLocation: "https://example.com/spec.yaml", + }, + expected: "https://example.com/spec.yaml", + }, + { + name: "returns empty string when not set", + err: &Error{}, + expected: "", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + result := tt.err.GetDocumentLocation() + assert.Equal(t, tt.expected, result, "document location should match 
expected") + }) + } +} + +// Test NewValidationErrorWithDocumentLocation function +func TestNewValidationErrorWithDocumentLocation_Success(t *testing.T) { + t.Parallel() + + underlyingErr := errors.New("remote error") + node := &yaml.Node{Line: 15, Column: 8} + docLocation := "https://example.com/components.yaml" + + result := NewValidationErrorWithDocumentLocation(SeverityWarning, RuleValidationInvalidReference, underlyingErr, node, docLocation) + + var validationErr *Error + require.ErrorAs(t, result, &validationErr, "should return *Error type") + assert.Equal(t, underlyingErr, validationErr.UnderlyingError, "underlying error should match") + assert.Equal(t, node, validationErr.Node, "node should match") + assert.Equal(t, SeverityWarning, validationErr.Severity, "severity should match") + assert.Equal(t, RuleValidationInvalidReference, validationErr.Rule, "rule should match") + assert.Equal(t, docLocation, validationErr.DocumentLocation, "document location should match") +} + +// Test TypeMismatchError with ParentName +func TestTypeMismatchError_WithParentName_Success(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + parentName string + msg string + args []any + expected string + }{ + { + name: "with parent name", + parentName: "Response", + msg: "type mismatch", + args: nil, + expected: "Response type mismatch", }, { - name: "validation error with formatting", - msg: "value '%s' is not valid for field '%s'", - args: []any{"invalid", "status"}, - expected: "value 'invalid' is not valid for field 'status'", + name: "with parent name and formatting", + parentName: "Schema", + msg: "expected %s, got %s", + args: []any{"string", "int"}, + expected: "Schema expected string, got int", }, { - name: "validation error with no args but formatting placeholders", - msg: "value %s is invalid", - args: []any{}, - expected: "value %s is invalid", + name: "empty parent name", + parentName: "", + msg: "standalone error", + args: nil, + expected: "standalone error", }, } @@ -575,9 +763,8 @@ func TestValueValidationError_Success(t *testing.T) { t.Run(tt.name, func(t *testing.T) { t.Parallel() - err := NewValueValidationError(tt.msg, tt.args...) - assert.Equal(t, tt.expected, err.Error()) - assert.Equal(t, tt.expected, err.Msg) + err := NewTypeMismatchError(tt.parentName, tt.msg, tt.args...) + assert.Equal(t, tt.expected, err.Error(), "error message should match") }) } } diff --git a/values/core/eithervalue.go b/values/core/eithervalue.go index e55a87d6..b280f0e7 100644 --- a/values/core/eithervalue.go +++ b/values/core/eithervalue.go @@ -75,11 +75,14 @@ func (v *EitherValue[L, R]) Unmarshal(ctx context.Context, parentName string, no name += " " } - validationError = validation.NewValueValidationError(fmt.Sprintf("%s%s", name, msg)) + validationError = fmt.Errorf("%s%s", name, msg) } + // Get severity and rule from the worst error + severity, rule := getWorstSeverityAndRule(allParentErrs) + // Return the validation error along with all child errors separately - result := []error{validation.NewValidationError(validationError, node)} + result := []error{validation.NewValidationError(severity, rule, validationError, node)} result = append(result, leftChildErrs...) result = append(result, rightChildErrs...) 
@@ -155,7 +158,7 @@ func (v *EitherValue[L, R]) SyncChanges(ctx context.Context, model any, valueNod } if mv.Kind() != reflect.Struct { - return nil, fmt.Errorf("expected struct, got %s", mv.Kind()) + return nil, fmt.Errorf("expected `struct`, got `%s`", mv.Kind()) } lf := mv.FieldByName("Left") @@ -252,3 +255,34 @@ func typeToName[T any]() string { return name } + +// getWorstSeverityAndRule finds the worst severity and its first rule from a list of errors. +// Severity order (worst to best): error > warning > hint +// Returns the severity and rule of the first error with the worst severity. +// If no validation errors are found, returns SeverityError and RuleValidationTypeMismatch as defaults. +func getWorstSeverityAndRule(errs []error) (validation.Severity, string) { + var worstSeverity validation.Severity + var worstRule string + worstSeverityRank := -1 // -1 means no validation error found yet + + for _, err := range errs { + var validationErr *validation.Error + if !errors.As(err, &validationErr) { + continue + } + + rank := validationErr.Severity.Rank() + if rank > worstSeverityRank { + worstSeverityRank = rank + worstSeverity = validationErr.Severity + worstRule = validationErr.Rule + } + } + + // Default to error severity and type mismatch rule if no validation errors found + if worstSeverityRank == -1 { + return validation.SeverityError, validation.RuleValidationTypeMismatch + } + + return worstSeverity, worstRule +} diff --git a/values/core/eithervalue_test.go b/values/core/eithervalue_test.go index b4fb7511..6270c21d 100644 --- a/values/core/eithervalue_test.go +++ b/values/core/eithervalue_test.go @@ -105,9 +105,9 @@ func TestEitherValue_BothTypesFailValidation(t *testing.T) { foundTypeMismatchError := false for _, validationErr := range validationErrs { errStr := validationErr.Error() - // Check for type mismatch patterns like "expected X, got Y" - if (strings.Contains(errStr, "expected string") || strings.Contains(errStr, "expected bool")) && - strings.Contains(errStr, "got sequence") { + // Check for type mismatch patterns like "expected `X`, got `Y`" + if (strings.Contains(errStr, "expected `string`") || strings.Contains(errStr, "expected `bool`")) && + strings.Contains(errStr, "got `sequence`") { foundTypeMismatchError = true break } @@ -222,7 +222,7 @@ func TestEitherValue_SyncChanges_Error(t *testing.T) { name: "non-struct model", model: "not a struct", expectError: true, - errorMsg: "expected struct, got string", + errorMsg: "expected `struct`, got `string`", }, { name: "both left and right nil", @@ -349,22 +349,22 @@ func TestHasTypeMismatchErrors_Success(t *testing.T) { { name: "contains type mismatch error", errors: []error{ - validation.NewValidationError(validation.NewTypeMismatchError("", "expected string but got number"), nil), + validation.NewTypeMismatchError("", "expected string but got number"), }, expected: true, }, { name: "contains type mismatch error with parent name", errors: []error{ - validation.NewValidationError(validation.NewTypeMismatchError("", "expected object but received array"), nil), + validation.NewTypeMismatchError("", "expected object but received array"), }, expected: true, }, { name: "no type mismatch errors", errors: []error{ - validation.NewValidationError(validation.NewValueValidationError("some other validation error"), nil), - validation.NewValidationError(validation.NewMissingFieldError("missing required field"), nil), + errors.New("some other validation error"), + errors.New("missing required field"), }, expected: false, }, diff 
--git a/walk/locations.go b/walk/locations.go index 9f9df2cc..43be5de0 100644 --- a/walk/locations.go +++ b/walk/locations.go @@ -50,3 +50,34 @@ func (l Locations[T]) ToJSONPointer() jsonpointer.JSONPointer { return jsonpointer.JSONPointer(sb.String()) } + +// IsParent checks if the immediate parent field matches the given field name. +// It handles both direct struct fields and map/slice items. +func (l Locations[T]) IsParent(field string) bool { + if len(l) == 0 { + return false + } + + last := l[len(l)-1] + if last.ParentKey != nil || last.ParentIndex != nil { + if len(l) < 2 { + return false + } + return l[len(l)-2].ParentField == field + } + + return last.ParentField == field +} + +// ParentKey returns the key of the current item if it is in a map. +// Returns empty string if not in a map or key is nil. +func (l Locations[T]) ParentKey() string { + if len(l) == 0 { + return "" + } + last := l[len(l)-1] + if last.ParentKey != nil { + return *last.ParentKey + } + return "" +} diff --git a/walk/locations_test.go b/walk/locations_test.go new file mode 100644 index 00000000..6fa289a0 --- /dev/null +++ b/walk/locations_test.go @@ -0,0 +1,164 @@ +package walk_test + +import ( + "testing" + + "github.com/speakeasy-api/openapi/pointer" + "github.com/speakeasy-api/openapi/walk" + "github.com/stretchr/testify/assert" +) + +func TestLocations_IsParent_Success(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + locations walk.Locations[string] + field string + expected bool + }{ + { + name: "empty locations returns false", + locations: walk.Locations[string]{}, + field: "anything", + expected: false, + }, + { + name: "last entry matches field directly", + locations: walk.Locations[string]{ + {ParentField: "paths"}, + {ParentField: "responses"}, + }, + field: "responses", + expected: true, + }, + { + name: "last entry does not match field", + locations: walk.Locations[string]{ + {ParentField: "paths"}, + {ParentField: "responses"}, + }, + field: "schemas", + expected: false, + }, + { + name: "last entry has parent key so checks second to last", + locations: walk.Locations[string]{ + {ParentField: "paths"}, + {ParentField: "responses"}, + {ParentKey: pointer.From("200")}, + }, + field: "responses", + expected: true, + }, + { + name: "last entry has parent index so checks second to last", + locations: walk.Locations[string]{ + {ParentField: "paths"}, + {ParentField: "tags"}, + {ParentIndex: pointer.From(0)}, + }, + field: "tags", + expected: true, + }, + { + name: "last entry has parent key but second to last does not match", + locations: walk.Locations[string]{ + {ParentField: "paths"}, + {ParentField: "responses"}, + {ParentKey: pointer.From("200")}, + }, + field: "schemas", + expected: false, + }, + { + name: "single entry with parent key returns false (no second to last)", + locations: walk.Locations[string]{ + {ParentKey: pointer.From("key")}, + }, + field: "anything", + expected: false, + }, + { + name: "single entry with parent index returns false (no second to last)", + locations: walk.Locations[string]{ + {ParentIndex: pointer.From(0)}, + }, + field: "anything", + expected: false, + }, + { + name: "single entry matches field directly", + locations: walk.Locations[string]{ + {ParentField: "paths"}, + }, + field: "paths", + expected: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + result := tt.locations.IsParent(tt.field) + assert.Equal(t, tt.expected, result, "IsParent result should match expected") + }) + } +} + +func 
TestLocations_ParentKey_Success(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + locations walk.Locations[string] + expected string + }{ + { + name: "empty locations returns empty string", + locations: walk.Locations[string]{}, + expected: "", + }, + { + name: "last entry has parent key", + locations: walk.Locations[string]{ + {ParentField: "paths"}, + {ParentKey: pointer.From("/users")}, + }, + expected: "/users", + }, + { + name: "last entry has no parent key", + locations: walk.Locations[string]{ + {ParentField: "paths"}, + {ParentField: "responses"}, + }, + expected: "", + }, + { + name: "single entry with parent key", + locations: walk.Locations[string]{ + {ParentKey: pointer.From("myKey")}, + }, + expected: "myKey", + }, + { + name: "last entry has parent index but no key", + locations: walk.Locations[string]{ + {ParentField: "tags"}, + {ParentIndex: pointer.From(3)}, + }, + expected: "", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + result := tt.locations.ParentKey() + assert.Equal(t, tt.expected, result, "ParentKey result should match expected") + }) + } +}
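
Taken together, this patch replaces the old message-only error helpers with severity- and rule-aware constructors and a rule metadata registry. The short Go sketch below is not part of the patch; the node position and message are invented for illustration. It shows how a caller might build a finding with the new NewValidationError signature and then read the structured fields back out.

package main

import (
	"errors"
	"fmt"

	"github.com/speakeasy-api/openapi/validation"
	"gopkg.in/yaml.v3"
)

func main() {
	// A node position as it would come from the parsed YAML document (values made up).
	node := &yaml.Node{Line: 12, Column: 3}

	// Construct a finding using the new severity/rule-aware constructor.
	err := validation.NewValidationError(
		validation.SeverityWarning,
		validation.RuleValidationTagNotFound,
		errors.New("tag `users` is not defined in the top-level tags array"),
		node,
	)

	// Downstream consumers recover the structured fields via errors.As.
	var verr *validation.Error
	if errors.As(err, &verr) {
		fmt.Println(verr.Error())                     // [12:3] warning validation-tag-not-found tag `users` is not defined ...
		fmt.Println(verr.GetSeverity().Rank())        // 1
		fmt.Println(verr.GetDocumentLocation() == "") // true: finding is in the main document
	}

	// Rule metadata can be looked up for reporting output.
	if info, ok := validation.RuleInfoForID(validation.RuleValidationTagNotFound); ok {
		fmt.Println(info.Summary, info.HowToFix)
	}
}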
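
The Severity.Rank ordering also makes threshold-style filtering straightforward on the caller side. The helper below is a hypothetical sketch (filterAtOrAbove is not part of this change); it keeps only findings at or above a chosen severity and passes through plain errors that carry no severity.

package main

import (
	"errors"
	"fmt"

	"github.com/speakeasy-api/openapi/validation"
)

// filterAtOrAbove keeps validation findings whose severity ranks at or above min.
// Plain errors without severity information are kept as-is.
func filterAtOrAbove(errs []error, min validation.Severity) []error {
	out := make([]error, 0, len(errs))
	for _, err := range errs {
		var verr *validation.Error
		if !errors.As(err, &verr) {
			out = append(out, err)
			continue
		}
		if verr.GetSeverity().Rank() >= min.Rank() {
			out = append(out, err)
		}
	}
	return out
}

func main() {
	// Example findings (messages made up for illustration).
	findings := []error{
		validation.NewValidationError(validation.SeverityHint, validation.RuleValidationEmptyValue, errors.New("description is empty"), nil),
		validation.NewValidationError(validation.SeverityWarning, validation.RuleValidationTagNotFound, errors.New("tag `admin` is not defined"), nil),
		validation.NewValidationError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("`info.title` is required"), nil),
	}

	// Keep warnings and errors, drop hints.
	kept := filterAtOrAbove(findings, validation.SeverityWarning)
	fmt.Println(len(kept)) // 2
}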