diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml
index bbdaa840..8aca113c 100644
--- a/.github/workflows/release.yaml
+++ b/.github/workflows/release.yaml
@@ -8,6 +8,7 @@ on:
permissions:
contents: write
packages: write
+ id-token: write
jobs:
goreleaser:
@@ -52,3 +53,32 @@ jobs:
dist/
!dist/*.txt
retention-days: 30
+
+ npm-publish:
+ needs: goreleaser
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v6
+
+ - name: Setup Node.js
+ uses: actions/setup-node@v4
+ with:
+ node-version: 24
+ registry-url: https://registry.npmjs.org
+
+ - name: Install dependencies
+ working-directory: openapi/linter/customrules/types
+ run: npm ci
+
+ - name: Set version from tag
+ working-directory: openapi/linter/customrules/types
+ run: npm version "${GITHUB_REF_NAME#v}" --no-git-tag-version
+
+ - name: Build
+ working-directory: openapi/linter/customrules/types
+ run: npm run build
+
+ - name: Publish
+ working-directory: openapi/linter/customrules/types
+ run: npm publish --provenance --access public
diff --git a/.github/workflows/update-cmd-dependency.yaml b/.github/workflows/update-cmd-dependency.yaml
deleted file mode 100644
index dd3b06f7..00000000
--- a/.github/workflows/update-cmd-dependency.yaml
+++ /dev/null
@@ -1,89 +0,0 @@
-name: Update CMD OpenAPI Dependency
-
-on:
- push:
- branches: [main]
- # Only run if changes affect the root module (not cmd/openapi itself)
- paths-ignore:
- - "cmd/openapi/**"
- - ".github/workflows/update-cmd-dependency.yaml"
-
-permissions:
- contents: write
- pull-requests: write
-
-jobs:
- update-dependency:
- name: Update cmd/openapi dependency
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v6
-
- - name: Setup Go
- uses: actions/setup-go@v6
- with:
- go-version-file: "go.mod"
- cache: false # Disable caching to ensure fresh dependency resolution
-
- - name: Update cmd/openapi go.mod
- run: |
- cd cmd/openapi
-
- # Update to latest main commit
- go get github.com/speakeasy-api/openapi@main
- go mod tidy
-
- - name: Check for changes
- id: changes
- run: |
- if git diff --quiet cmd/openapi/go.mod cmd/openapi/go.sum; then
- echo "changed=false" >> $GITHUB_OUTPUT
- echo "No changes detected in cmd/openapi/go.mod or go.sum"
- else
- echo "changed=true" >> $GITHUB_OUTPUT
- echo "Changes detected in cmd/openapi/go.mod or go.sum"
-
- # Get the new version for the PR description
- NEW_VERSION=$(grep 'github.com/speakeasy-api/openapi v' cmd/openapi/go.mod | head -1 | awk '{print $2}')
- echo "version=${NEW_VERSION}" >> $GITHUB_OUTPUT
- echo "Updated to version: ${NEW_VERSION}"
- fi
-
- - name: Create Pull Request
- if: steps.changes.outputs.changed == 'true'
- uses: peter-evans/create-pull-request@v8
- with:
- token: ${{ secrets.GITHUB_TOKEN }}
- commit-message: |
- chore(cmd): update openapi dependency to latest main
-
- Updates cmd/openapi/go.mod to use the latest commit from main.
- Version: ${{ steps.changes.outputs.version }}
- branch: bot/update-cmd-openapi-dependency
- delete-branch: true
- title: "chore(cmd): update openapi dependency to latest main"
- body: |
- ## Updates cmd/openapi dependency
-
- This PR updates the `cmd/openapi/go.mod` file to reference the latest commit from main.
-
- **Updated to:** `${{ steps.changes.outputs.version }}`
-
- **Changes:**
- - Updated `github.com/speakeasy-api/openapi` dependency in `cmd/openapi/go.mod`
- - Ran `go mod tidy` to update dependencies
-
- ---
- *This PR was automatically created by the [update-cmd-dependency workflow](.github/workflows/update-cmd-dependency.yaml)*
- labels: |
- dependencies
- automated
-
- - name: Summary
- run: |
- if [ "${{ steps.changes.outputs.changed }}" == "true" ]; then
- echo "✅ Pull request created to update cmd/openapi dependency"
- echo "Version: ${{ steps.changes.outputs.version }}"
- else
- echo "ℹ️ No changes needed - cmd/openapi dependency already up to date"
- fi
diff --git a/.github/workflows/update-submodule-dependencies.yaml b/.github/workflows/update-submodule-dependencies.yaml
new file mode 100644
index 00000000..775bfbd1
--- /dev/null
+++ b/.github/workflows/update-submodule-dependencies.yaml
@@ -0,0 +1,115 @@
+name: Update Submodule Dependencies
+
+on:
+ push:
+ branches: [main]
+ # Only run if changes affect the root module (not submodules themselves)
+ paths-ignore:
+ - "cmd/openapi/**"
+ - "openapi/linter/customrules/**"
+ - ".github/workflows/update-submodule-dependencies.yaml"
+
+permissions:
+ contents: write
+ pull-requests: write
+
+jobs:
+ update-dependencies:
+ name: Update submodule dependencies
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v6
+
+ - name: Setup Go
+ uses: actions/setup-go@v6
+ with:
+ go-version-file: "go.mod"
+ cache: false # Disable caching to ensure fresh dependency resolution
+
+ - name: Update openapi/linter/customrules go.mod
+ run: |
+ cd openapi/linter/customrules
+
+ # Update to latest main commit
+ go get github.com/speakeasy-api/openapi@main
+ go mod tidy
+
+ - name: Update cmd/openapi go.mod
+ run: |
+ cd cmd/openapi
+
+ # Update to latest main commit (both main module and customrules)
+ go get github.com/speakeasy-api/openapi@main
+ go get github.com/speakeasy-api/openapi/openapi/linter/customrules@main
+ go mod tidy
+
+ - name: Check for changes
+ id: changes
+ run: |
+ CHANGED_FILES=""
+
+ # Check customrules module
+ if ! git diff --quiet openapi/linter/customrules/go.mod openapi/linter/customrules/go.sum 2>/dev/null; then
+ CHANGED_FILES="${CHANGED_FILES}customrules "
+ fi
+
+ # Check cmd/openapi module
+ if ! git diff --quiet cmd/openapi/go.mod cmd/openapi/go.sum 2>/dev/null; then
+ CHANGED_FILES="${CHANGED_FILES}cmd "
+ fi
+
+ if [ -z "$CHANGED_FILES" ]; then
+ echo "changed=false" >> $GITHUB_OUTPUT
+ echo "No changes detected"
+ else
+ echo "changed=true" >> $GITHUB_OUTPUT
+ echo "modules=${CHANGED_FILES}" >> $GITHUB_OUTPUT
+ echo "Changes detected in: ${CHANGED_FILES}"
+
+ # Get the new version for the PR description
+ NEW_VERSION=$(grep 'github.com/speakeasy-api/openapi v' cmd/openapi/go.mod | head -1 | awk '{print $2}')
+ echo "version=${NEW_VERSION}" >> $GITHUB_OUTPUT
+ echo "Updated to version: ${NEW_VERSION}"
+ fi
+
+ - name: Create Pull Request
+ if: steps.changes.outputs.changed == 'true'
+ uses: peter-evans/create-pull-request@v8
+ with:
+ token: ${{ secrets.GITHUB_TOKEN }}
+ commit-message: |
+ chore: update submodule dependencies to latest main
+
+ Updates go.mod files in submodules to use the latest commit from main.
+ Version: ${{ steps.changes.outputs.version }}
+ Updated modules: ${{ steps.changes.outputs.modules }}
+ branch: bot/update-submodule-dependencies
+ delete-branch: true
+ title: "chore: update submodule dependencies to latest main"
+ body: |
+ ## Updates submodule dependencies
+
+ This PR updates the `go.mod` files in submodules to reference the latest commit from main.
+
+ **Updated to:** `${{ steps.changes.outputs.version }}`
+ **Updated modules:** ${{ steps.changes.outputs.modules }}
+
+ **Changes:**
+ - Updated `github.com/speakeasy-api/openapi` dependency in submodule go.mod files
+ - Ran `go mod tidy` to update dependencies
+
+ ---
+ *This PR was automatically created by the [update-submodule-dependencies workflow](.github/workflows/update-submodule-dependencies.yaml)*
+ labels: |
+ dependencies
+ automated
+
+ - name: Summary
+ run: |
+ if [ "${{ steps.changes.outputs.changed }}" == "true" ]; then
+ echo "✅ Pull request created to update submodule dependencies"
+ echo "Version: ${{ steps.changes.outputs.version }}"
+ echo "Modules: ${{ steps.changes.outputs.modules }}"
+ else
+ echo "ℹ️ No changes needed - submodule dependencies already up to date"
+ fi
diff --git a/AGENTS.md b/AGENTS.md
index 3e961abc..8a40e09c 100644
--- a/AGENTS.md
+++ b/AGENTS.md
@@ -106,6 +106,66 @@ git commit -m "feat: implement prefixEncoding and itemEncoding for OpenAPI 3.2
3. **Searchability**: Easier to search and filter commits
4. **Tool Compatibility**: Works better with automated tools and scripts
+## Linter Rules
+
+This project uses `golangci-lint` with strict rules. Run `mise lint` to check. The most common violations are listed below. **When you encounter a new common lint pattern not documented here, add it to this section so future sessions avoid the same mistakes.**
+
+### perfsprint — Avoid `fmt.Sprintf` for Simple String Operations
+
+The `perfsprint` linter flags unnecessary `fmt.Sprintf` calls. Use string concatenation or `strconv` instead.
+
+#### ❌ Bad
+
+```go
+// Single %s — just use concatenation
+msg := fmt.Sprintf("prefix: %s", value)
+
+// Single %d — use strconv
+msg := fmt.Sprintf("%d", count)
+
+// Writing formatted string to a writer
+b.WriteString(fmt.Sprintf("hello %s world %d", name, n))
+```
+
+#### ✅ Good
+
+```go
+// String concatenation
+msg := "prefix: " + value
+
+// strconv for numbers
+msg := strconv.Itoa(count)
+
+// fmt.Fprintf writes directly to the writer
+fmt.Fprintf(b, "hello %s world %d", name, n)
+
+// For string-only format with multiple args, concatenation is fine
+b.WriteString(indent + "const x = " + varName + ";\n")
+```
+
+**Rule of thumb:** If `fmt.Sprintf` has a single `%s` or `%d` verb and nothing else complex, replace it with concatenation or `strconv`. If writing to an `io.Writer`/`strings.Builder`, use `fmt.Fprintf` directly instead of `WriteString(fmt.Sprintf(...))`.
+
+### staticcheck — Common Issues
+
+- **QF1012**: Use `fmt.Fprintf(w, ...)` instead of `w.WriteString(fmt.Sprintf(...))` — writes directly to the writer without an intermediate string allocation.
+- **QF1003**: Use tagged `switch` instead of `if-else` chains on the same variable.
+- **S1016**: Use type conversion `TargetType(value)` instead of struct literal when types have identical fields.
+
+### predeclared — Don't Shadow Built-in Identifiers
+
+Avoid using `min`, `max`, `new`, `len`, `cap`, `copy`, `delete`, `error`, `any` as variable names. Use descriptive alternatives like `minVal`, `maxVal`.
+
+### testifylint — Test Assertion Best Practices
+
+- Use `assert.Empty(t, val)` instead of `assert.Equal(t, "", val)`
+- Use `assert.True(t, val)` / `assert.False(t, val)` instead of `assert.Equal(t, true/false, val)`
+- Use `require.Error(t, err)` instead of `assert.Error(t, err)` for error checks
+- Use `assert.Len(t, slice, n)` instead of `assert.Equal(t, n, len(slice))`
+
+### gocritic — Code Style
+
+- Convert `if-else if` chains to `switch` statements when comparing the same variable.
+
## Testing
Follow these testing conventions when writing Go tests in this project. Run newly added or modified test immediately after changes to make sure they work as expected before continuing with more work.
diff --git a/README.md b/README.md
index b9014161..7ab05223 100644
--- a/README.md
+++ b/README.md
@@ -72,7 +72,9 @@ The `arazzo` package provides an API for working with Arazzo documents including
### [openapi](./openapi)
-The `openapi` package provides an API for working with OpenAPI documents including reading, creating, mutating, walking, validating and upgrading them. Supports OpenAPI 3.0.x, 3.1.x, and 3.2.x specifications.
+The `openapi` package provides an API for working with OpenAPI documents including reading, creating, mutating, walking, validating, upgrading, and linting them. Supports OpenAPI 3.0.x, 3.1.x, and 3.2.x specifications.
+
+The [`openapi/linter`](./openapi/linter) subpackage provides a configurable linter with 60+ built-in rules covering style, security (OWASP), and semantic validation. Custom rules can be written in TypeScript/JavaScript using the [`@speakeasy-api/openapi-linter-types`](https://www.npmjs.com/package/@speakeasy-api/openapi-linter-types) package.
### [swagger](./swagger)
@@ -125,6 +127,7 @@ The CLI provides four main command groups:
- `explore` - Interactively explore an OpenAPI specification in the terminal
- `inline` - Inline all references in an OpenAPI specification
- `join` - Join multiple OpenAPI documents into a single document
+ - `lint` - Lint an OpenAPI specification for style, security, and best practices
- `localize` - Localize an OpenAPI specification by copying external references to a target directory
- `optimize` - Optimize an OpenAPI specification by deduplicating inline schemas
- `sanitize` - Remove unwanted elements from an OpenAPI specification
@@ -150,6 +153,12 @@ The CLI provides four main command groups:
# Validate an OpenAPI specification
openapi spec validate ./spec.yaml
+# Lint for style, security, and best practices
+openapi spec lint ./spec.yaml
+
+# Lint with custom configuration
+openapi spec lint --config lint.yaml ./spec.yaml
+
# Bundle external references into components section
openapi spec bundle ./spec.yaml ./bundled-spec.yaml
diff --git a/arazzo/arazzo.go b/arazzo/arazzo.go
index 4e6d9ec4..907f25f0 100644
--- a/arazzo/arazzo.go
+++ b/arazzo/arazzo.go
@@ -109,11 +109,11 @@ func (a *Arazzo) Validate(ctx context.Context, opts ...validation.Option) []erro
arazzoVersion, err := version.Parse(a.Arazzo)
if err != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("arazzo.version is invalid %s: %s", a.Arazzo, err.Error()), core, core.Arazzo))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("arazzo.version is invalid `%s`: %w", a.Arazzo, err), core, core.Arazzo))
}
if arazzoVersion != nil {
if arazzoVersion.GreaterThan(*MaximumSupportedVersion) {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("arazzo.version only Arazzo versions between %s and %s are supported", MinimumSupportedVersion, MaximumSupportedVersion), core, core.Arazzo))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationSupportedVersion, fmt.Errorf("arazzo.version only Arazzo versions between `%s` and `%s` are supported", MinimumSupportedVersion, MaximumSupportedVersion), core, core.Arazzo))
}
}
@@ -125,7 +125,7 @@ func (a *Arazzo) Validate(ctx context.Context, opts ...validation.Option) []erro
errs = append(errs, sourceDescription.Validate(ctx, opts...)...)
if _, ok := sourceDescriptionNames[sourceDescription.Name]; ok {
- errs = append(errs, validation.NewSliceError(validation.NewValueValidationError("sourceDescription.name %s is not unique", sourceDescription.Name), core, core.SourceDescriptions, i))
+ errs = append(errs, validation.NewSliceError(validation.SeverityError, validation.RuleValidationDuplicateKey, fmt.Errorf("sourceDescription.name `%s` is not unique", sourceDescription.Name), core, core.SourceDescriptions, i))
}
sourceDescriptionNames[sourceDescription.Name] = true
@@ -137,7 +137,7 @@ func (a *Arazzo) Validate(ctx context.Context, opts ...validation.Option) []erro
errs = append(errs, workflow.Validate(ctx, opts...)...)
if _, ok := workflowIds[workflow.WorkflowID]; ok {
- errs = append(errs, validation.NewSliceError(validation.NewValueValidationError("workflow.workflowId %s is not unique", workflow.WorkflowID), core, core.Workflows, i))
+ errs = append(errs, validation.NewSliceError(validation.SeverityError, validation.RuleValidationDuplicateKey, fmt.Errorf("workflow.workflowId `%s` is not unique", workflow.WorkflowID), core, core.Workflows, i))
}
workflowIds[workflow.WorkflowID] = true
diff --git a/arazzo/arazzo_examples_test.go b/arazzo/arazzo_examples_test.go
index e2f54e38..254a7c47 100644
--- a/arazzo/arazzo_examples_test.go
+++ b/arazzo/arazzo_examples_test.go
@@ -190,6 +190,6 @@ func Example_validating() {
fmt.Printf("%s\n", err.Error())
}
// Output:
- // [3:3] info.version is missing
- // [13:9] step at least one of operationId, operationPath or workflowId fields must be set
+ // [3:3] error validation-required-field `info.version` is required
+ // [13:9] error validation-required-field step at least one of operationId, operationPath or workflowId fields must be set
}
diff --git a/arazzo/arazzo_test.go b/arazzo/arazzo_test.go
index 8c567aaf..506c27fe 100644
--- a/arazzo/arazzo_test.go
+++ b/arazzo/arazzo_test.go
@@ -300,11 +300,11 @@ sourceDescriptions:
column int
underlyingError error
}{
- {line: 1, column: 1, underlyingError: validation.NewMissingFieldError("arazzo.workflows is missing")},
- {line: 1, column: 9, underlyingError: validation.NewValueValidationError("arazzo.version only Arazzo versions between 1.0.0 and 1.0.1 are supported")},
- {line: 4, column: 3, underlyingError: validation.NewMissingFieldError("info.version is missing")},
- {line: 6, column: 5, underlyingError: validation.NewMissingFieldError("sourceDescription.url is missing")},
- {line: 7, column: 11, underlyingError: validation.NewValueValidationError("sourceDescription.type must be one of [openapi, arazzo]")},
+ {line: 1, column: 1, underlyingError: errors.New("`arazzo.workflows` is required")},
+ {line: 1, column: 9, underlyingError: errors.New("arazzo.version only Arazzo versions between `1.0.0` and `1.0.1` are supported")},
+ {line: 4, column: 3, underlyingError: errors.New("`info.version` is required")},
+ {line: 6, column: 5, underlyingError: errors.New("`sourceDescription.url` is required")},
+ {line: 7, column: 11, underlyingError: errors.New("sourceDescription.type must be one of [`openapi, arazzo`]")},
}
require.Len(t, validationErrs, len(expectedErrors), "number of validation errors should match")
@@ -546,8 +546,8 @@ var stressTests = []struct {
args: args{
location: "https://raw.githubusercontent.com/Redocly/museum-openapi-example/2770b2b2e59832d245c7b0eb0badf6568d7efb53/arazzo/museum-api.arazzo.yaml",
validationIgnores: []string{
- "[71:24] invalid jsonpath expression: Error at line 1, column 7: unexpected token when parsing segment", // legit invalid RFC 9535 syntax
- "[107:24] invalid jsonpath expression: Error at line 1, column 7: unexpected token when parsing segment", // legit invalid RFC 9535 syntax
+ "[71:24] error validation-invalid-syntax invalid jsonpath expression: Error at line 1, column 7: unexpected token when parsing segment", // legit invalid RFC 9535 syntax
+ "[107:24] error validation-invalid-syntax invalid jsonpath expression: Error at line 1, column 7: unexpected token when parsing segment", // legit invalid RFC 9535 syntax
},
},
wantTitle: "Redocly Museum API Test Workflow",
@@ -564,7 +564,7 @@ var stressTests = []struct {
args: args{
location: "https://raw.githubusercontent.com/Redocly/warp-single-sidebar/b78fc09da52d7755e92e1bc8f990edd37421cbde/apis/arazzo.yaml",
validationIgnores: []string{
- "[63:24] invalid jsonpath expression: Error at line 1, column 12: unexpected token when parsing segment", // legit invalid RFC 9535 syntax
+ "[63:24] error validation-invalid-syntax invalid jsonpath expression: Error at line 1, column 12: unexpected token when parsing segment", // legit invalid RFC 9535 syntax
},
},
wantTitle: "Warp API",
@@ -605,10 +605,10 @@ var stressTests = []struct {
args: args{
location: "https://raw.githubusercontent.com/OAI/Arazzo-Specification/23852b8b0d13ab1e3288a57a990611ffed45ab5d/examples/1.0.0/oauth.arazzo.yaml",
validationIgnores: []string{
- "[65:24] invalid jsonpath expression: Error at line 1, column 15: unexpected token when parsing segment", // legit invalid RFC 9535 syntax
- "[105:24] invalid jsonpath expression: Error at line 1, column 15: unexpected token when parsing segment", // legit invalid RFC 9535 syntax
- "[155:24] invalid jsonpath expression: Error at line 1, column 15: unexpected token when parsing segment", // legit invalid RFC 9535 syntax
- "[175:24] invalid jsonpath expression: Error at line 1, column 15: unexpected token when parsing segment", // legit invalid RFC 9535 syntax
+ "[65:24] error validation-invalid-syntax invalid jsonpath expression: Error at line 1, column 15: unexpected token when parsing segment", // legit invalid RFC 9535 syntax
+ "[105:24] error validation-invalid-syntax invalid jsonpath expression: Error at line 1, column 15: unexpected token when parsing segment", // legit invalid RFC 9535 syntax
+ "[155:24] error validation-invalid-syntax invalid jsonpath expression: Error at line 1, column 15: unexpected token when parsing segment", // legit invalid RFC 9535 syntax
+ "[175:24] error validation-invalid-syntax invalid jsonpath expression: Error at line 1, column 15: unexpected token when parsing segment", // legit invalid RFC 9535 syntax
},
},
wantTitle: "Example OAuth service",
@@ -632,7 +632,7 @@ var stressTests = []struct {
args: args{
location: "https://raw.githubusercontent.com/frankkilcommins/simple-spectral-arazzo-GA/4ec8856f1cf21c0f77597c715c150ef3e2772a89/apis/OnlineStore.arazzo.yaml",
validationIgnores: []string{
- "info.title is missing", // legit issue
+ "`info.title` is required", // legit issue
"operationId must be a valid expression if there are multiple OpenAPI source descriptions", // legit issue
"$responses.body.menuItems[0].subcategories[0].id", // legit issue
},
@@ -645,9 +645,9 @@ var stressTests = []struct {
args: args{
location: "https://raw.githubusercontent.com/leidenheit/itarazzo-library/3b335e1c4293444add52b5f2476420e2d871b1a5/src/test/resources/test.arazzo.yaml",
validationIgnores: []string{
- "expression is not valid, must begin with $: 4711Chocolate", // legit issue
- "[32:24] invalid jsonpath expression: Error at line 1, column 0: unexpected token", // unsupported version: draft-goessner-dispatch-jsonpath-00
- "[36:24] invalid jsonpath expression: Error at line 1, column 5: unexpected token when parsing segment", // unsupported version: draft-goessner-dispatch-jsonpath-00
+ "expression is not valid, must begin with $: 4711Chocolate", // legit issue
+ "[32:24] error validation-invalid-syntax invalid jsonpath expression: Error at line 1, column 0: unexpected token", // unsupported version: draft-goessner-dispatch-jsonpath-00
+ "[36:24] error validation-invalid-syntax invalid jsonpath expression: Error at line 1, column 5: unexpected token when parsing segment", // unsupported version: draft-goessner-dispatch-jsonpath-00
},
},
wantTitle: "A cookie eating workflow",
@@ -659,9 +659,9 @@ var stressTests = []struct {
validationIgnores: []string{
"jsonpointer must start with /: $.status", // legit issues TODO: improve the error returned as it is wrong
"jsonpointer must start with /: $.id", // legit issues TODO: improve the error returned as it is wrong
- "[81:24] invalid jsonpath expression: Error at line 1, column 7: unexpected token when parsing segment", // unsupported version: draft-goessner-dispatch-jsonpath-00
- "[110:24] invalid jsonpath expression: Error at line 1, column 5: unexpected token when parsing segment", // unsupported version: draft-goessner-dispatch-jsonpath-00
- "[114:24] invalid jsonpath expression: Error at line 1, column 9: unexpected token when parsing segment", // unsupported version: draft-goessner-dispatch-jsonpath-00
+ "[81:24] error validation-invalid-syntax invalid jsonpath expression: Error at line 1, column 7: unexpected token when parsing segment", // unsupported version: draft-goessner-dispatch-jsonpath-00
+ "[110:24] error validation-invalid-syntax invalid jsonpath expression: Error at line 1, column 5: unexpected token when parsing segment", // unsupported version: draft-goessner-dispatch-jsonpath-00
+ "[114:24] error validation-invalid-syntax invalid jsonpath expression: Error at line 1, column 9: unexpected token when parsing segment", // unsupported version: draft-goessner-dispatch-jsonpath-00
},
},
wantTitle: "PetStore - Example of Workflows",
@@ -671,7 +671,7 @@ var stressTests = []struct {
args: args{
location: "https://raw.githubusercontent.com/ritza-co/e2e-testing-arazzo/c0615c3708a1e4c0fcaeb79edae78ddc4eb5ba82/arazzo.yaml",
validationIgnores: []string{
- "[42:24] invalid jsonpath expression: Error at line 1, column 8: unexpected token", // legit invalid RFC 9535 syntax
+ "[42:24] error validation-invalid-syntax invalid jsonpath expression: Error at line 1, column 8: unexpected token", // legit invalid RFC 9535 syntax
},
},
wantTitle: "Build-a-Bot Workflow",
@@ -681,7 +681,7 @@ var stressTests = []struct {
args: args{
location: "https://raw.githubusercontent.com/API-Flows/openapi-workflow-registry/75c237ce1b155ba9f8dc7f065759df7ae1cbbbe5/root/adyen/adyen-giving.yaml",
validationIgnores: []string{
- "in must be one of [path, query, header, cookie] but was body",
+ "in must be one of [`path, query, header, cookie`] but was `body`",
},
},
wantTitle: "Adyen Giving",
diff --git a/arazzo/components.go b/arazzo/components.go
index 515a75ef..76a07d90 100644
--- a/arazzo/components.go
+++ b/arazzo/components.go
@@ -2,6 +2,7 @@ package arazzo
import (
"context"
+ "fmt"
"regexp"
"github.com/speakeasy-api/openapi/arazzo/core"
@@ -44,7 +45,7 @@ func (c *Components) Validate(ctx context.Context, opts ...validation.Option) []
for key, input := range c.Inputs.All() {
if !componentNameRegex.MatchString(key) {
- errs = append(errs, validation.NewMapKeyError(validation.NewValueValidationError("components.inputs key must be a valid key [%s]: %s", componentNameRegex.String(), key), core, core.Inputs, key))
+ errs = append(errs, validation.NewMapKeyError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("components.inputs key must be a valid key [`%s`]: `%s`", componentNameRegex.String(), key), core, core.Inputs, key))
}
errs = append(errs, input.Validate(ctx, opts...)...)
@@ -52,7 +53,7 @@ func (c *Components) Validate(ctx context.Context, opts ...validation.Option) []
for key, parameter := range c.Parameters.All() {
if !componentNameRegex.MatchString(key) {
- errs = append(errs, validation.NewMapKeyError(validation.NewValueValidationError("components.parameters key must be a valid key [%s]: %s", componentNameRegex.String(), key), core, core.Parameters, key))
+ errs = append(errs, validation.NewMapKeyError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("components.parameters key must be a valid key [`%s`]: `%s`", componentNameRegex.String(), key), core, core.Parameters, key))
}
paramOps := opts
@@ -63,7 +64,7 @@ func (c *Components) Validate(ctx context.Context, opts ...validation.Option) []
for key, successAction := range c.SuccessActions.All() {
if !componentNameRegex.MatchString(key) {
- errs = append(errs, validation.NewMapKeyError(validation.NewValueValidationError("components.successActions key must be a valid key [%s]: %s", componentNameRegex.String(), key), core, core.SuccessActions, key))
+ errs = append(errs, validation.NewMapKeyError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("components.successActions key must be a valid key [`%s`]: `%s`", componentNameRegex.String(), key), core, core.SuccessActions, key))
}
successActionOps := opts
@@ -74,7 +75,7 @@ func (c *Components) Validate(ctx context.Context, opts ...validation.Option) []
for key, failureAction := range c.FailureActions.All() {
if !componentNameRegex.MatchString(key) {
- errs = append(errs, validation.NewMapKeyError(validation.NewValueValidationError("components.failureActions key must be a valid key [%s]: %s", componentNameRegex.String(), key), core, core.FailureActions, key))
+ errs = append(errs, validation.NewMapKeyError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("components.failureActions key must be a valid key [`%s`]: `%s`", componentNameRegex.String(), key), core, core.FailureActions, key))
}
failureActionOps := opts
diff --git a/arazzo/core/criterion.go b/arazzo/core/criterion.go
index d5849e08..50b3c9ef 100644
--- a/arazzo/core/criterion.go
+++ b/arazzo/core/criterion.go
@@ -63,7 +63,7 @@ func (c *CriterionTypeUnion) Unmarshal(ctx context.Context, parentName string, n
c.DetermineValidity(validationErrs)
default:
return []error{
- validation.NewValidationError(validation.NewTypeMismatchError(parentName, "criterionTypeUnion expected string or object, got %s", yml.NodeKindToString(resolvedNode.Kind)), resolvedNode),
+ validation.NewValidationError(validation.SeverityError, validation.RuleValidationTypeMismatch, validation.NewTypeMismatchError(parentName, "criterionTypeUnion expected string or object, got %s", yml.NodeKindToString(resolvedNode.Kind)), resolvedNode),
}, nil
}
@@ -78,7 +78,7 @@ func (c *CriterionTypeUnion) SyncChanges(ctx context.Context, model any, valueNo
}
if mv.Kind() != reflect.Struct {
- return nil, fmt.Errorf("CriterionTypeUnion.SyncChanges expected a struct, got %s", mv.Type())
+ return nil, fmt.Errorf("CriterionTypeUnion.SyncChanges expected a struct, got `%s`", mv.Type())
}
tf := mv.FieldByName("Type")
diff --git a/arazzo/core/criterion_syncchanges_test.go b/arazzo/core/criterion_syncchanges_test.go
index 7a78888c..7c02ef96 100644
--- a/arazzo/core/criterion_syncchanges_test.go
+++ b/arazzo/core/criterion_syncchanges_test.go
@@ -41,5 +41,5 @@ func TestCriterionTypeUnion_SyncChanges_NonStruct_Error(t *testing.T) {
ctu := CriterionTypeUnion{}
_, err = ctu.SyncChanges(t.Context(), "not a struct", node.Content[0])
require.Error(t, err, "SyncChanges should fail")
- assert.Contains(t, err.Error(), "CriterionTypeUnion.SyncChanges expected a struct, got string", "error message should match")
+ assert.Contains(t, err.Error(), "CriterionTypeUnion.SyncChanges expected a struct, got `string`", "error message should match")
}
diff --git a/arazzo/core/criterion_test.go b/arazzo/core/criterion_test.go
index 30db14c2..fb35a1d5 100644
--- a/arazzo/core/criterion_test.go
+++ b/arazzo/core/criterion_test.go
@@ -250,5 +250,5 @@ func TestCriterionTypeUnion_SyncChanges_Int_Error(t *testing.T) {
_, err := union.SyncChanges(t.Context(), 42, nil)
require.Error(t, err, "should return error for int model")
- require.Contains(t, err.Error(), "expected a struct", "error should mention struct expectation")
+ require.Contains(t, err.Error(), "expected a struct, got `int`", "error should mention struct expectation")
}
diff --git a/arazzo/core/reusable.go b/arazzo/core/reusable.go
index 5d32fafa..c9022a16 100644
--- a/arazzo/core/reusable.go
+++ b/arazzo/core/reusable.go
@@ -34,10 +34,8 @@ func (r *Reusable[T]) Unmarshal(ctx context.Context, parentName string, node *ya
if resolvedNode.Kind != yaml.MappingNode {
r.SetValid(false, false)
- r.SetValid(false, false)
-
return []error{
- validation.NewValidationError(validation.NewTypeMismatchError(parentName, "reusable expected object, got %s", yml.NodeKindToString(resolvedNode.Kind)), resolvedNode),
+ validation.NewValidationError(validation.SeverityError, validation.RuleValidationTypeMismatch, validation.NewTypeMismatchError(parentName, "reusable expected `object`, got %s", yml.NodeKindToString(resolvedNode.Kind)), resolvedNode),
}, nil
}
@@ -65,7 +63,7 @@ func (r *Reusable[T]) SyncChanges(ctx context.Context, model any, valueNode *yam
}
if mv.Kind() != reflect.Struct {
- return nil, fmt.Errorf("Reusable.SyncChanges expected a struct, got %s", mv.Kind())
+ return nil, fmt.Errorf("Reusable.SyncChanges expected a struct, got `%s`", mv.Kind())
}
of := mv.FieldByName("Object")
diff --git a/arazzo/core/reusable_test.go b/arazzo/core/reusable_test.go
index dc5c6d6f..96aeea43 100644
--- a/arazzo/core/reusable_test.go
+++ b/arazzo/core/reusable_test.go
@@ -39,7 +39,7 @@ func TestReusable_Unmarshal_NonMappingNode_Error(t *testing.T) {
validationErrs, err := reusable.Unmarshal(t.Context(), "test", node.Content[0])
require.NoError(t, err, "unmarshal error should be nil")
require.NotEmpty(t, validationErrs, "validation errors should not be empty")
- assert.Contains(t, validationErrs[0].Error(), "reusable expected object", "error message should match")
+ assert.Contains(t, validationErrs[0].Error(), "reusable expected `object`", "error message should match")
assert.False(t, reusable.GetValid(), "reusable should not be valid")
}
@@ -53,7 +53,7 @@ func TestReusable_SyncChanges_NonStruct_Error(t *testing.T) {
reusable := Reusable[*Parameter]{}
_, err = reusable.SyncChanges(t.Context(), "not a struct", node.Content[0])
require.Error(t, err, "SyncChanges should fail")
- assert.Contains(t, err.Error(), "Reusable.SyncChanges expected a struct, got string", "error message should match")
+ assert.Contains(t, err.Error(), "Reusable.SyncChanges expected a struct, got `string`", "error message should match")
}
func TestReusable_Unmarshal_NilNode_Error(t *testing.T) {
diff --git a/arazzo/criterion/condition.go b/arazzo/criterion/condition.go
index 34e10c10..1289a91c 100644
--- a/arazzo/criterion/condition.go
+++ b/arazzo/criterion/condition.go
@@ -2,6 +2,7 @@ package criterion
import (
"errors"
+ "fmt"
"strings"
"github.com/speakeasy-api/openapi/expression"
@@ -81,21 +82,21 @@ func (s *Condition) Validate(valueNode *yaml.Node, opts ...validation.Option) []
errs := []error{}
if s.Expression == "" {
- errs = append(errs, validation.NewValidationError(validation.NewMissingValueError("expression is required"), valueNode))
+ errs = append(errs, validation.NewValidationError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("expression is required"), valueNode))
}
if err := s.Expression.Validate(); err != nil {
- errs = append(errs, validation.NewValidationError(validation.NewValueValidationError(err.Error()), valueNode))
+ errs = append(errs, validation.NewValidationError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("%s", err.Error()), valueNode))
}
switch s.Operator {
case OperatorLT, OperatorLTE, OperatorGT, OperatorGTE, OperatorEQ, OperatorNE, OperatorNot, OperatorAnd, OperatorOr:
default:
- errs = append(errs, validation.NewValidationError(validation.NewValueValidationError("operator must be one of [%s]", strings.Join([]string{string(OperatorLT), string(OperatorLTE), string(OperatorGT), string(OperatorGTE), string(OperatorEQ), string(OperatorNE), string(OperatorNot), string(OperatorAnd), string(OperatorOr)}, ", ")), valueNode))
+ errs = append(errs, validation.NewValidationError(validation.SeverityError, validation.RuleValidationAllowedValues, fmt.Errorf("operator must be one of [`%s`]", strings.Join([]string{string(OperatorLT), string(OperatorLTE), string(OperatorGT), string(OperatorGTE), string(OperatorEQ), string(OperatorNE), string(OperatorNot), string(OperatorAnd), string(OperatorOr)}, ", ")), valueNode))
}
if s.Value == "" {
- errs = append(errs, validation.NewValidationError(validation.NewMissingValueError("value is required"), valueNode))
+ errs = append(errs, validation.NewValidationError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("value is required"), valueNode))
}
return errs
diff --git a/arazzo/criterion/criterion.go b/arazzo/criterion/criterion.go
index 14bdbffa..dcaf3cd8 100644
--- a/arazzo/criterion/criterion.go
+++ b/arazzo/criterion/criterion.go
@@ -2,6 +2,7 @@ package criterion
import (
"context"
+ "errors"
"fmt"
"regexp"
"strings"
@@ -59,7 +60,7 @@ func (c *CriterionExpressionType) Validate(opts ...validation.Option) []error {
switch c.Version {
case CriterionTypeVersionDraftGoessnerDispatchJsonPath00:
default:
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("version must be one of [%s]", strings.Join([]string{string(CriterionTypeVersionDraftGoessnerDispatchJsonPath00)}, ", ")), core, core.Version))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationAllowedValues, fmt.Errorf("version must be one of [`%s`]", strings.Join([]string{string(CriterionTypeVersionDraftGoessnerDispatchJsonPath00)}, ", ")), core, core.Version))
}
case CriterionTypeXPath:
switch c.Version {
@@ -67,10 +68,10 @@ func (c *CriterionExpressionType) Validate(opts ...validation.Option) []error {
case CriterionTypeVersionXPath20:
case CriterionTypeVersionXPath10:
default:
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("version must be one of [%s]", strings.Join([]string{string(CriterionTypeVersionXPath30), string(CriterionTypeVersionXPath20), string(CriterionTypeVersionXPath10)}, ", ")), core, core.Version))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationAllowedValues, fmt.Errorf("version must be one of [`%s`]", strings.Join([]string{string(CriterionTypeVersionXPath30), string(CriterionTypeVersionXPath20), string(CriterionTypeVersionXPath10)}, ", ")), core, core.Version))
}
default:
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("type must be one of [%s]", strings.Join([]string{string(CriterionTypeJsonPath), string(CriterionTypeXPath)}, ", ")), core, core.Type))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationAllowedValues, fmt.Errorf("type must be one of [`%s`]", strings.Join([]string{string(CriterionTypeJsonPath), string(CriterionTypeXPath)}, ", ")), core, core.Type))
}
if len(errs) == 0 {
@@ -190,7 +191,7 @@ func (c *Criterion) Validate(opts ...validation.Option) []error {
errs := []error{}
if c.Condition == "" {
- errs = append(errs, validation.NewValueError(validation.NewMissingValueError("condition is required"), core, core.Condition))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("condition is required"), core, core.Condition))
}
if c.Type.Type != nil {
@@ -200,19 +201,19 @@ func (c *Criterion) Validate(opts ...validation.Option) []error {
case CriterionTypeJsonPath:
case CriterionTypeXPath:
default:
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("type must be one of [%s]", strings.Join([]string{string(CriterionTypeSimple), string(CriterionTypeRegex), string(CriterionTypeJsonPath), string(CriterionTypeXPath)}, ", ")), core, core.Type))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationAllowedValues, fmt.Errorf("type must be one of [`%s`]", strings.Join([]string{string(CriterionTypeSimple), string(CriterionTypeRegex), string(CriterionTypeJsonPath), string(CriterionTypeXPath)}, ", ")), core, core.Type))
}
} else if c.Type.ExpressionType != nil {
errs = append(errs, c.Type.ExpressionType.Validate(opts...)...)
}
if c.Type.IsTypeProvided() && c.Context == nil {
- errs = append(errs, validation.NewValueError(validation.NewMissingValueError("context is required, if type is set"), core, core.Context))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("context is required, if type is set"), core, core.Context))
}
if c.Context != nil {
if err := c.Context.Validate(); err != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError(err.Error()), core, core.Context))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("%s", err.Error()), core, core.Context))
}
}
@@ -235,18 +236,18 @@ func (c *Criterion) validateCondition(opts ...validation.Option) []error {
case CriterionTypeSimple:
cond, err := newCondition(c.Condition)
if err != nil && c.Context == nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError(err.Error()), core, core.Condition))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("%s", err.Error()), core, core.Condition))
} else if cond != nil {
errs = append(errs, cond.Validate(valueNode, opts...)...)
}
case CriterionTypeRegex:
_, err := regexp.Compile(c.Condition)
if err != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("invalid regex expression: %s", err.Error()), core, core.Condition))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("invalid regex expression: %w", err), core, core.Condition))
}
case CriterionTypeJsonPath:
if _, err := jsonpath.NewPath(c.Condition); err != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("invalid jsonpath expression: %s", err), core, core.Condition))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("invalid jsonpath expression: %w", err), core, core.Condition))
}
case CriterionTypeXPath:
// TODO validate xpath
diff --git a/arazzo/criterion/criterion_validate_test.go b/arazzo/criterion/criterion_validate_test.go
index a34e28f3..0b328871 100644
--- a/arazzo/criterion/criterion_validate_test.go
+++ b/arazzo/criterion/criterion_validate_test.go
@@ -72,7 +72,7 @@ func TestCriterionExpressionType_Validate_Error(t *testing.T) {
Type: criterion.CriterionTypeJsonPath,
Version: "invalid-version",
},
- expectedError: "version must be one of [draft-goessner-dispatch-jsonpath-00]",
+ expectedError: "version must be one of [`draft-goessner-dispatch-jsonpath-00`]",
},
{
name: "invalid xpath version",
@@ -80,7 +80,7 @@ func TestCriterionExpressionType_Validate_Error(t *testing.T) {
Type: criterion.CriterionTypeXPath,
Version: "invalid-version",
},
- expectedError: "version must be one of [xpath-30, xpath-20, xpath-10]",
+ expectedError: "version must be one of [`xpath-30, xpath-20, xpath-10`]",
},
{
name: "invalid type",
@@ -88,7 +88,7 @@ func TestCriterionExpressionType_Validate_Error(t *testing.T) {
Type: "invalid-type",
Version: criterion.CriterionTypeVersionNone,
},
- expectedError: "type must be one of [jsonpath, xpath]",
+ expectedError: "type must be one of [`jsonpath, xpath`]",
},
}
diff --git a/arazzo/failureaction.go b/arazzo/failureaction.go
index fb36fd8a..4a019efd 100644
--- a/arazzo/failureaction.go
+++ b/arazzo/failureaction.go
@@ -3,6 +3,7 @@ package arazzo
import (
"context"
"errors"
+ "fmt"
"strings"
"github.com/speakeasy-api/openapi/arazzo/core"
@@ -69,22 +70,22 @@ func (f *FailureAction) Validate(ctx context.Context, opts ...validation.Option)
errs := []error{}
if core.Name.Present && f.Name == "" {
- errs = append(errs, validation.NewValueError(validation.NewMissingValueError("failureAction.name is required"), core, core.Name))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("failureAction.name is required"), core, core.Name))
}
switch f.Type {
case FailureActionTypeEnd:
if f.WorkflowID != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("failureAction.workflowId is not allowed when type: end is specified"), core, core.WorkflowID))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationMutuallyExclusiveFields, errors.New("failureAction.workflowId is not allowed when type: end is specified"), core, core.WorkflowID))
}
if f.StepID != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("failureAction.stepId is not allowed when type: end is specified"), core, core.StepID))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationMutuallyExclusiveFields, errors.New("failureAction.stepId is not allowed when type: end is specified"), core, core.StepID))
}
if f.RetryAfter != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("failureAction.retryAfter is not allowed when type: end is specified"), core, core.RetryAfter))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationMutuallyExclusiveFields, errors.New("failureAction.retryAfter is not allowed when type: end is specified"), core, core.RetryAfter))
}
if f.RetryLimit != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("failureAction.retryLimit is not allowed when type: end is specified"), core, core.RetryLimit))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationMutuallyExclusiveFields, errors.New("failureAction.retryLimit is not allowed when type: end is specified"), core, core.RetryLimit))
}
case FailureActionTypeGoto:
workflowIDNode := core.WorkflowID.GetKeyNodeOrRoot(core.RootNode)
@@ -100,10 +101,10 @@ func (f *FailureAction) Validate(ctx context.Context, opts ...validation.Option)
required: true,
}, opts...)...)
if f.RetryAfter != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("failureAction.retryAfter is not allowed when type: goto is specified"), core, core.RetryAfter))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationMutuallyExclusiveFields, errors.New("failureAction.retryAfter is not allowed when type: goto is specified"), core, core.RetryAfter))
}
if f.RetryLimit != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("failureAction.retryLimit is not allowed when type: goto is specified"), core, core.RetryLimit))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationMutuallyExclusiveFields, errors.New("failureAction.retryLimit is not allowed when type: goto is specified"), core, core.RetryLimit))
}
case FailureActionTypeRetry:
workflowIDNode := core.WorkflowID.GetKeyNodeOrRoot(core.RootNode)
@@ -120,16 +121,16 @@ func (f *FailureAction) Validate(ctx context.Context, opts ...validation.Option)
}, opts...)...)
if f.RetryAfter != nil {
if *f.RetryAfter < 0 {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("failureAction.retryAfter must be greater than or equal to 0"), core, core.RetryAfter))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationAllowedValues, errors.New("failureAction.retryAfter must be greater than or equal to 0"), core, core.RetryAfter))
}
}
if f.RetryLimit != nil {
if *f.RetryLimit < 0 {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("failureAction.retryLimit must be greater than or equal to 0"), core, core.RetryLimit))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationAllowedValues, errors.New("failureAction.retryLimit must be greater than or equal to 0"), core, core.RetryLimit))
}
}
default:
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("failureAction.type must be one of [%s]", strings.Join([]string{string(FailureActionTypeEnd), string(FailureActionTypeGoto), string(FailureActionTypeRetry)}, ", ")), core, core.Type))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationAllowedValues, fmt.Errorf("failureAction.type must be one of [`%s`]", strings.Join([]string{string(FailureActionTypeEnd), string(FailureActionTypeGoto), string(FailureActionTypeRetry)}, ", ")), core, core.Type))
}
for i := range f.Criteria {
diff --git a/arazzo/info.go b/arazzo/info.go
index 48cb5b4b..3136658c 100644
--- a/arazzo/info.go
+++ b/arazzo/info.go
@@ -2,6 +2,7 @@ package arazzo
import (
"context"
+ "errors"
"github.com/speakeasy-api/openapi/arazzo/core"
"github.com/speakeasy-api/openapi/extensions"
@@ -34,11 +35,11 @@ func (i *Info) Validate(ctx context.Context, opts ...validation.Option) []error
errs := []error{}
if core.Title.Present && i.Title == "" {
- errs = append(errs, validation.NewValueError(validation.NewMissingValueError("info.title is required"), core, core.Title))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("`info.title` is required"), core, core.Title))
}
if core.Version.Present && i.Version == "" {
- errs = append(errs, validation.NewValueError(validation.NewMissingValueError("info.version is required"), core, core.Version))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("`info.version` is required"), core, core.Version))
}
i.Valid = len(errs) == 0 && core.GetValid()
diff --git a/arazzo/parameter.go b/arazzo/parameter.go
index e97e5c9f..f56d3966 100644
--- a/arazzo/parameter.go
+++ b/arazzo/parameter.go
@@ -2,6 +2,8 @@ package arazzo
import (
"context"
+ "errors"
+ "fmt"
"strings"
"github.com/speakeasy-api/openapi/arazzo/core"
@@ -55,7 +57,7 @@ func (p *Parameter) Validate(ctx context.Context, opts ...validation.Option) []e
s := validation.GetContextObject[Step](o)
if core.Name.Present && p.Name == "" {
- errs = append(errs, validation.NewValueError(validation.NewMissingValueError("parameter fieldname is required"), core, core.Name))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("parameter fieldname is required"), core, core.Name))
}
in := In("")
@@ -71,25 +73,25 @@ func (p *Parameter) Validate(ctx context.Context, opts ...validation.Option) []e
default:
if p.In == nil || in == "" {
if w == nil && s != nil && s.WorkflowID == nil {
- errs = append(errs, validation.NewValueError(validation.NewMissingValueError("parameter.in is required within a step when workflowId is not set"), core, core.In))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("parameter.in is required within a step when workflowId is not set"), core, core.In))
}
}
if in != "" {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("parameter.in must be one of [%s] but was %s", strings.Join([]string{string(InPath), string(InQuery), string(InHeader), string(InCookie)}, ", "), in), core, core.In))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationAllowedValues, fmt.Errorf("parameter.in must be one of [`%s`] but was `%s`", strings.Join([]string{string(InPath), string(InQuery), string(InHeader), string(InCookie)}, ", "), in), core, core.In))
}
}
if core.Value.Present && p.Value == nil {
- errs = append(errs, validation.NewValueError(validation.NewMissingValueError("parameter.value is required"), core, core.Value))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("`parameter.value` is required"), core, core.Value))
} else if p.Value != nil {
_, expression, err := expression.GetValueOrExpressionValue(p.Value)
if err != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError(err.Error()), core, core.Value))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("%s", err.Error()), core, core.Value))
}
if expression != nil {
if err := expression.Validate(); err != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError(err.Error()), core, core.Value))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("%s", err.Error()), core, core.Value))
}
}
}
diff --git a/arazzo/payloadreplacement.go b/arazzo/payloadreplacement.go
index a8793963..ad0e850b 100644
--- a/arazzo/payloadreplacement.go
+++ b/arazzo/payloadreplacement.go
@@ -2,6 +2,8 @@ package arazzo
import (
"context"
+ "errors"
+ "fmt"
"github.com/speakeasy-api/openapi/arazzo/core"
"github.com/speakeasy-api/openapi/expression"
@@ -32,23 +34,23 @@ func (p *PayloadReplacement) Validate(ctx context.Context, opts ...validation.Op
errs := []error{}
if core.Target.Present && p.Target == "" {
- errs = append(errs, validation.NewValueError(validation.NewMissingValueError("payloadReplacement.target is required"), core, core.Target))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("payloadReplacement.target is required"), core, core.Target))
}
if err := p.Target.Validate(); err != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("payloadReplacement.target is invalid: "+err.Error()), core, core.Target))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("payloadReplacement.target is invalid: %w", err), core, core.Target))
}
if core.Value.Present && p.Value == nil {
- errs = append(errs, validation.NewValueError(validation.NewMissingValueError("payloadReplacement.value is required"), core, core.Value))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("payloadReplacement.value is required"), core, core.Value))
} else if p.Value != nil {
_, expression, err := expression.GetValueOrExpressionValue(p.Value)
if err != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("payloadReplacement.value is invalid: "+err.Error()), core, core.Value))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("payloadReplacement.value is invalid: %w", err), core, core.Value))
}
if expression != nil {
if err := expression.Validate(); err != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("payloadReplacement.value is invalid: "+err.Error()), core, core.Value))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("payloadReplacement.value is invalid: %w", err), core, core.Value))
}
}
}
diff --git a/arazzo/requestbody.go b/arazzo/requestbody.go
index c7f2ac64..89c17533 100644
--- a/arazzo/requestbody.go
+++ b/arazzo/requestbody.go
@@ -2,6 +2,7 @@ package arazzo
import (
"context"
+ "fmt"
"mime"
"github.com/speakeasy-api/openapi/arazzo/core"
@@ -36,7 +37,7 @@ func (r *RequestBody) Validate(ctx context.Context, opts ...validation.Option) [
if r.ContentType != nil {
_, _, err := mime.ParseMediaType(*r.ContentType)
if err != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("requestBody.contentType is not valid: %s", err.Error()), core, core.ContentType))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("requestBody.contentType is not valid: %w", err), core, core.ContentType))
}
}
@@ -47,7 +48,7 @@ func (r *RequestBody) Validate(ctx context.Context, opts ...validation.Option) [
if err == nil && exp != nil {
// Only validate if the entire payload IS an expression (not just contains expressions)
if err := exp.Validate(); err != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("requestBody.payload expression is not valid: %s", err.Error()), core, core.Payload))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("requestBody.payload expression is not valid: %w", err), core, core.Payload))
}
}
// If exp is nil, the payload is a value (not an expression) - no validation needed
diff --git a/arazzo/reusable.go b/arazzo/reusable.go
index 686f5bd9..d7a2c0a0 100644
--- a/arazzo/reusable.go
+++ b/arazzo/reusable.go
@@ -3,6 +3,7 @@ package arazzo
import (
"context"
"errors"
+ "fmt"
"reflect"
"unicode"
"unicode/utf8"
@@ -117,7 +118,7 @@ func (r *Reusable[T, V, C]) Validate(ctx context.Context, opts ...validation.Opt
case "parameters":
default:
if r.Value != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("reusableParameter.value is not allowed when object is not a parameter"), core, core.Value))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationMutuallyExclusiveFields, errors.New("reusableParameter.value is not allowed when object is not a parameter"), core, core.Value))
}
}
@@ -136,7 +137,7 @@ func (r *Reusable[T, V, C]) validateReference(ctx context.Context, a *Arazzo, ob
core := r.GetCore()
if err := r.Reference.Validate(); err != nil {
return []error{
- validation.NewValueError(validation.NewValueValidationError("%s.reference is invalid: %s", componentTypeToReusableType(objComponentType), err.Error()), core, core.Reference),
+ validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("`%s`.reference is invalid: %w", componentTypeToReusableType(objComponentType), err), core, core.Reference),
}
}
@@ -144,13 +145,13 @@ func (r *Reusable[T, V, C]) validateReference(ctx context.Context, a *Arazzo, ob
if typ != expression.ExpressionTypeComponents {
return []error{
- validation.NewValueError(validation.NewValueValidationError("%s.reference must be a components expression, got %s", componentTypeToReusableType(objComponentType), r.Reference.GetType()), core, core.Reference),
+ validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("`%s`.reference must be a components expression, got `%s`", componentTypeToReusableType(objComponentType), r.Reference.GetType()), core, core.Reference),
}
}
if componentType == "" || len(references) != 1 {
return []error{
- validation.NewValueError(validation.NewValueValidationError("%s.reference must be a components expression with 3 parts, got %s", componentTypeToReusableType(objComponentType), *r.Reference), core, core.Reference),
+ validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("`%s`.reference must be a components expression with 3 parts, got `%s`", componentTypeToReusableType(objComponentType), *r.Reference), core, core.Reference),
}
}
@@ -186,7 +187,7 @@ func (r *Reusable[T, V, C]) validateReference(ctx context.Context, a *Arazzo, ob
}, opts...)
default:
return []error{
- validation.NewValueError(validation.NewValueValidationError("reference to %s is not valid, valid components are [parameters, successActions, failureActions]", componentType), core, core.Reference),
+ validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidReference, fmt.Errorf("reference to `%s` is not valid, valid components are [parameters, successActions, failureActions]", componentType), core, core.Reference),
}
}
}
@@ -203,20 +204,20 @@ type validateComponentReferenceArgs[T any] struct {
func validateComponentReference[T any, V interfaces.Validator[T]](ctx context.Context, args validateComponentReferenceArgs[V], opts ...validation.Option) []error {
if args.componentType != args.objComponentType {
return []error{
- validation.NewValidationError(validation.NewValueValidationError("%s.reference expected a %s reference got %s", componentTypeToReusableType(args.objComponentType), args.objComponentType, args.componentType), args.referenceValueNode),
+ validation.NewValidationError(validation.SeverityError, validation.RuleValidationTypeMismatch, fmt.Errorf("`%s`.reference expected a `%s` reference got `%s`", componentTypeToReusableType(args.objComponentType), args.objComponentType, args.componentType), args.referenceValueNode),
}
}
if args.components == nil {
return []error{
- validation.NewValidationError(validation.NewValueValidationError("%s.reference to missing component %s, components.%s not present", componentTypeToReusableType(args.objComponentType), *args.reference, args.componentType), args.referenceValueNode),
+ validation.NewValidationError(validation.SeverityError, validation.RuleValidationInvalidReference, fmt.Errorf("`%s`.reference to missing component `%s`, components.`%s` not present", componentTypeToReusableType(args.objComponentType), *args.reference, args.componentType), args.referenceValueNode),
}
}
component, ok := args.components.Get(args.componentName)
if !ok {
return []error{
- validation.NewValidationError(validation.NewValueValidationError("%s.reference to missing component %s, components.%s.%s not present", componentTypeToReusableType(args.objComponentType), *args.reference, args.componentType, args.componentName), args.referenceValueNode),
+ validation.NewValidationError(validation.SeverityError, validation.RuleValidationInvalidReference, fmt.Errorf("`%s`.reference to missing component `%s`, components.`%s`.`%s` not present", componentTypeToReusableType(args.objComponentType), *args.reference, args.componentType, args.componentName), args.referenceValueNode),
}
}
diff --git a/arazzo/sourcedescription.go b/arazzo/sourcedescription.go
index a9a6b61a..d4f94ab9 100644
--- a/arazzo/sourcedescription.go
+++ b/arazzo/sourcedescription.go
@@ -2,6 +2,8 @@ package arazzo
import (
"context"
+ "errors"
+ "fmt"
"net/url"
"strings"
@@ -57,14 +59,14 @@ func (s *SourceDescription) Validate(ctx context.Context, opts ...validation.Opt
errs := []error{}
if core.Name.Present && s.Name == "" {
- errs = append(errs, validation.NewValueError(validation.NewMissingValueError("sourceDescription.name is required"), core, core.Name))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("sourceDescription.name is required"), core, core.Name))
}
if core.URL.Present && s.URL == "" {
- errs = append(errs, validation.NewValueError(validation.NewMissingValueError("sourceDescription.url is required"), core, core.URL))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("sourceDescription.url is required"), core, core.URL))
} else if core.URL.Present {
if _, err := url.Parse(s.URL); err != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("sourceDescription.url is not a valid url/uri according to RFC 3986: %s", err), core, core.URL))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("sourceDescription.url is not a valid url/uri according to RFC 3986: %w", err), core, core.URL))
}
}
@@ -72,7 +74,7 @@ func (s *SourceDescription) Validate(ctx context.Context, opts ...validation.Opt
case SourceDescriptionTypeOpenAPI:
case SourceDescriptionTypeArazzo:
default:
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("sourceDescription.type must be one of [%s]", strings.Join([]string{SourceDescriptionTypeOpenAPI, SourceDescriptionTypeArazzo}, ", ")), core, core.Type))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationAllowedValues, fmt.Errorf("sourceDescription.type must be one of [`%s`]", strings.Join([]string{SourceDescriptionTypeOpenAPI, SourceDescriptionTypeArazzo}, ", ")), core, core.Type))
}
s.Valid = len(errs) == 0 && core.GetValid()
diff --git a/arazzo/step.go b/arazzo/step.go
index 40605f41..b0fe508f 100644
--- a/arazzo/step.go
+++ b/arazzo/step.go
@@ -90,10 +90,10 @@ func (s *Step) Validate(ctx context.Context, opts ...validation.Option) []error
errs := []error{}
if core.StepID.Present && s.StepID == "" {
- errs = append(errs, validation.NewValueError(validation.NewMissingValueError("step.stepId is required"), core, core.StepID))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("step.stepId is required"), core, core.StepID))
} else if s.StepID != "" {
if !stepIDRegex.MatchString(s.StepID) {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("step.stepId must be a valid name [%s]: %s", stepIDRegex.String(), s.StepID), core, core.StepID))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("step.stepId must be a valid name [`%s`]: `%s`", stepIDRegex.String(), s.StepID), core, core.StepID))
}
numStepsWithID := 0
@@ -103,7 +103,7 @@ func (s *Step) Validate(ctx context.Context, opts ...validation.Option) []error
}
}
if numStepsWithID > 1 {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("step.stepId must be unique within the workflow, found %d steps with the same stepId", numStepsWithID), core, core.StepID))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationDuplicateKey, fmt.Errorf("step.stepId must be unique within the workflow, found `%d` steps with the same stepId", numStepsWithID), core, core.StepID))
}
}
@@ -121,10 +121,10 @@ func (s *Step) Validate(ctx context.Context, opts ...validation.Option) []error
}
switch numSet {
case 0:
- errs = append(errs, validation.NewValidationError(validation.NewMissingValueError("step at least one of operationId, operationPath or workflowId fields must be set"), core.RootNode))
+ errs = append(errs, validation.NewValidationError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("step at least one of operationId, operationPath or workflowId fields must be set"), core.RootNode))
case 1:
default:
- errs = append(errs, validation.NewValidationError(validation.NewValueValidationError("step only one of operationId, operationPath or workflowId.can be set"), core.RootNode))
+ errs = append(errs, validation.NewValidationError(validation.SeverityError, validation.RuleValidationMutuallyExclusiveFields, errors.New("step only one of operationId, operationPath or workflowId can be set"), core.RootNode))
}
if s.OperationID != nil {
@@ -135,65 +135,65 @@ func (s *Step) Validate(ctx context.Context, opts ...validation.Option) []error
}
}
if numOpenAPISourceDescriptions > 1 && !s.OperationID.IsExpression() {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("step.operationId must be a valid expression if there are multiple OpenAPI source descriptions"), core, core.OperationID))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, errors.New("step.operationId must be a valid expression if there are multiple OpenAPI source descriptions"), core, core.OperationID))
}
if s.OperationID.IsExpression() {
if err := s.OperationID.Validate(); err != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("step.operationId expression is invalid: %s", err.Error()), core, core.OperationID))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("step.operationId expression is invalid: %w", err), core, core.OperationID))
}
typ, sourceDescriptionName, _, _ := s.OperationID.GetParts()
if typ != expression.ExpressionTypeSourceDescriptions {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("step.operationId must be a sourceDescriptions expression, got %s", typ), core, core.OperationID))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("step.operationId must be a sourceDescriptions expression, got `%s`", typ), core, core.OperationID))
}
if a.SourceDescriptions.Find(sourceDescriptionName) == nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("step.operationId referencing sourceDescription %s not found", sourceDescriptionName), core, core.OperationID))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidReference, fmt.Errorf("step.operationId referencing sourceDescription `%s` not found", sourceDescriptionName), core, core.OperationID))
}
}
}
if s.OperationPath != nil {
if err := s.OperationPath.Validate(); err != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("step.operationPath expression is invalid: %s", err.Error()), core, core.OperationPath))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("step.operationPath expression is invalid: %w", err), core, core.OperationPath))
}
typ, sourceDescriptionName, expressionParts, jp := s.OperationPath.GetParts()
if typ != expression.ExpressionTypeSourceDescriptions {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("step.operationPath must be a sourceDescriptions expression, got %s", typ), core, core.OperationPath))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("step.operationPath must be a sourceDescriptions expression, got `%s`", typ), core, core.OperationPath))
}
if a.SourceDescriptions.Find(sourceDescriptionName) == nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("step.operationPath referencing sourceDescription %s not found", sourceDescriptionName), core, core.OperationPath))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidReference, fmt.Errorf("step.operationPath referencing sourceDescription `%s` not found", sourceDescriptionName), core, core.OperationPath))
}
if len(expressionParts) != 1 || expressionParts[0] != "url" {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("step.operationPath must reference the url of a sourceDescription"), core, core.OperationPath))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, errors.New("step.operationPath must reference the url of a sourceDescription"), core, core.OperationPath))
}
if jp == "" {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("step.operationPath must contain a json pointer to the operation path within the sourceDescription"), core, core.OperationPath))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("step.operationPath must contain a json pointer to the operation path within the sourceDescription"), core, core.OperationPath))
}
}
if s.WorkflowID != nil {
if s.WorkflowID.IsExpression() {
if err := s.WorkflowID.Validate(); err != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("step.workflowId expression is invalid: %s", err.Error()), core, core.WorkflowID))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("step.workflowId expression is invalid: %w", err), core, core.WorkflowID))
}
typ, sourceDescriptionName, _, _ := s.WorkflowID.GetParts()
if typ != expression.ExpressionTypeSourceDescriptions {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("step.workflowId must be a sourceDescriptions expression, got %s", typ), core, core.WorkflowID))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("step.workflowId must be a sourceDescriptions expression, got `%s`", typ), core, core.WorkflowID))
}
if a.SourceDescriptions.Find((sourceDescriptionName)) == nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("step.workflowId referencing sourceDescription %s not found", sourceDescriptionName), core, core.WorkflowID))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidReference, fmt.Errorf("step.workflowId referencing sourceDescription `%s` not found", sourceDescriptionName), core, core.WorkflowID))
}
} else if a.Workflows.Find(pointer.Value(s.WorkflowID).String()) == nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("step.workflowId referencing workflow %s not found", *s.WorkflowID), core, core.WorkflowID))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidReference, fmt.Errorf("step.workflowId referencing workflow `%s` not found", *s.WorkflowID), core, core.WorkflowID))
}
}
@@ -206,14 +206,14 @@ func (s *Step) Validate(ctx context.Context, opts ...validation.Option) []error
if parameter.Reference != nil {
_, ok := parameterRefs[string(*parameter.Reference)]
if ok {
- errs = append(errs, validation.NewSliceError(validation.NewValueValidationError("step.parameters duplicate parameter found with reference %s", *parameter.Reference), core, core.Parameters, i))
+ errs = append(errs, validation.NewSliceError(validation.SeverityError, validation.RuleValidationDuplicateKey, fmt.Errorf("step.parameters duplicate parameter found with reference `%s`", *parameter.Reference), core, core.Parameters, i))
}
parameterRefs[string(*parameter.Reference)] = true
} else if parameter.Object != nil {
id := fmt.Sprintf("%s.%v", parameter.Object.Name, parameter.Object.In)
_, ok := parameters[id]
if ok {
- errs = append(errs, validation.NewSliceError(validation.NewValueValidationError("step.parameters duplicate parameter found with name %s and in %v", parameter.Object.Name, parameter.Object.In), core, core.Parameters, i))
+ errs = append(errs, validation.NewSliceError(validation.SeverityError, validation.RuleValidationDuplicateKey, fmt.Errorf("step.parameters duplicate parameter found with name `%s` and in `%v`", parameter.Object.Name, parameter.Object.In), core, core.Parameters, i))
}
parameters[id] = true
}
@@ -221,7 +221,7 @@ func (s *Step) Validate(ctx context.Context, opts ...validation.Option) []error
if s.RequestBody != nil {
if s.WorkflowID != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("step.requestBody should not be set when workflowId is set"), core, core.RequestBody))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationMutuallyExclusiveFields, errors.New("step.requestBody should not be set when workflowId is set"), core, core.RequestBody))
}
errs = append(errs, s.RequestBody.Validate(ctx, opts...)...)
@@ -240,14 +240,14 @@ func (s *Step) Validate(ctx context.Context, opts ...validation.Option) []error
if onSuccess.Reference != nil {
_, ok := successActionRefs[string(*onSuccess.Reference)]
if ok {
- errs = append(errs, validation.NewSliceError(validation.NewValueValidationError("step.onSuccess duplicate successAction found with reference %s", *onSuccess.Reference), core, core.OnSuccess, i))
+ errs = append(errs, validation.NewSliceError(validation.SeverityError, validation.RuleValidationDuplicateKey, fmt.Errorf("step.onSuccess duplicate successAction found with reference `%s`", *onSuccess.Reference), core, core.OnSuccess, i))
}
successActionRefs[string(*onSuccess.Reference)] = true
} else if onSuccess.Object != nil {
id := fmt.Sprintf("%s.%v", onSuccess.Object.Name, onSuccess.Object.Type)
_, ok := successActions[id]
if ok {
- errs = append(errs, validation.NewSliceError(validation.NewValueValidationError("step.onSuccess duplicate successAction found with name %s and type %v", onSuccess.Object.Name, onSuccess.Object.Type), core, core.OnSuccess, i))
+ errs = append(errs, validation.NewSliceError(validation.SeverityError, validation.RuleValidationDuplicateKey, fmt.Errorf("step.onSuccess duplicate successAction found with name `%s` and type `%v`", onSuccess.Object.Name, onSuccess.Object.Type), core, core.OnSuccess, i))
}
successActions[id] = true
}
@@ -262,14 +262,14 @@ func (s *Step) Validate(ctx context.Context, opts ...validation.Option) []error
if onFailure.Reference != nil {
_, ok := failureActionRefs[string(*onFailure.Reference)]
if ok {
- errs = append(errs, validation.NewSliceError(validation.NewValueValidationError("step.onFailure duplicate failureAction found with reference %s", *onFailure.Reference), core, core.OnFailure, i))
+ errs = append(errs, validation.NewSliceError(validation.SeverityError, validation.RuleValidationDuplicateKey, fmt.Errorf("step.onFailure duplicate failureAction found with reference `%s`", *onFailure.Reference), core, core.OnFailure, i))
}
failureActionRefs[string(*onFailure.Reference)] = true
} else if onFailure.Object != nil {
id := fmt.Sprintf("%s.%v", onFailure.Object.Name, onFailure.Object.Type)
_, ok := failureActions[id]
if ok {
- errs = append(errs, validation.NewSliceError(validation.NewValueValidationError("step.onFailure duplicate failureAction found with name %s and type %v", onFailure.Object.Name, onFailure.Object.Type), core, core.OnFailure, i))
+ errs = append(errs, validation.NewSliceError(validation.SeverityError, validation.RuleValidationDuplicateKey, fmt.Errorf("step.onFailure duplicate failureAction found with name `%s` and type `%v`", onFailure.Object.Name, onFailure.Object.Type), core, core.OnFailure, i))
}
failureActions[id] = true
}
@@ -277,11 +277,11 @@ func (s *Step) Validate(ctx context.Context, opts ...validation.Option) []error
for name, output := range s.Outputs.All() {
if !outputNameRegex.MatchString(name) {
- errs = append(errs, validation.NewMapKeyError(validation.NewValueValidationError("step.outputs name must be a valid name [%s]: %s", outputNameRegex.String(), name), core, core.Outputs, name))
+ errs = append(errs, validation.NewMapKeyError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("step.outputs name must be a valid name [`%s`]: `%s`", outputNameRegex.String(), name), core, core.Outputs, name))
}
if err := output.Validate(); err != nil {
- errs = append(errs, validation.NewMapValueError(validation.NewValueValidationError("step.outputs expression is invalid: %s", err.Error()), core, core.Outputs, name))
+ errs = append(errs, validation.NewMapValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("step.outputs expression is invalid: %w", err), core, core.Outputs, name))
}
}
diff --git a/arazzo/successaction.go b/arazzo/successaction.go
index e73d4feb..8fc3e5e0 100644
--- a/arazzo/successaction.go
+++ b/arazzo/successaction.go
@@ -3,6 +3,7 @@ package arazzo
import (
"context"
"errors"
+ "fmt"
"strings"
"github.com/speakeasy-api/openapi/arazzo/core"
@@ -64,16 +65,16 @@ func (s *SuccessAction) Validate(ctx context.Context, opts ...validation.Option)
errs := []error{}
if core.Name.Present && s.Name == "" {
- errs = append(errs, validation.NewValueError(validation.NewMissingValueError("successAction.name is required"), core, core.Name))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("successAction.name is required"), core, core.Name))
}
switch s.Type {
case SuccessActionTypeEnd:
if s.WorkflowID != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("successAction.workflowId is not allowed when type: end is specified"), core, core.WorkflowID))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationMutuallyExclusiveFields, errors.New("successAction.workflowId is not allowed when type: end is specified"), core, core.WorkflowID))
}
if s.StepID != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("successAction.stepId is not allowed when type: end is specified"), core, core.StepID))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationMutuallyExclusiveFields, errors.New("successAction.stepId is not allowed when type: end is specified"), core, core.StepID))
}
case SuccessActionTypeGoto:
workflowIDNode := core.WorkflowID.GetKeyNodeOrRoot(core.RootNode)
@@ -90,7 +91,7 @@ func (s *SuccessAction) Validate(ctx context.Context, opts ...validation.Option)
required: true,
}, opts...)...)
default:
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("successAction.type must be one of [%s]", strings.Join([]string{string(SuccessActionTypeEnd), string(SuccessActionTypeGoto)}, ", ")), core, core.Type))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationAllowedValues, fmt.Errorf("successAction.type must be one of [`%s`]", strings.Join([]string{string(SuccessActionTypeEnd), string(SuccessActionTypeGoto)}, ", ")), core, core.Type))
}
for i := range s.Criteria {
@@ -120,28 +121,28 @@ func validationActionWorkflowIDAndStepID(ctx context.Context, parentName string,
errs := []error{}
if params.required && params.workflowID == nil && params.stepID == nil {
- errs = append(errs, validation.NewValidationError(validation.NewMissingValueError("%s.workflowId or stepId is required", parentName), params.workflowIDNode))
+ errs = append(errs, validation.NewValidationError(validation.SeverityError, validation.RuleValidationRequiredField, fmt.Errorf("`%s`.workflowId or stepId is required", parentName), params.workflowIDNode))
}
if params.workflowID != nil && params.stepID != nil {
- errs = append(errs, validation.NewValidationError(validation.NewValueValidationError("%s.workflowId and stepId are mutually exclusive, only one can be specified", parentName), params.workflowIDNode))
+ errs = append(errs, validation.NewValidationError(validation.SeverityError, validation.RuleValidationMutuallyExclusiveFields, fmt.Errorf("`%s`.workflowId and stepId are mutually exclusive, only one can be specified", parentName), params.workflowIDNode))
}
if params.workflowID != nil {
if params.workflowID.IsExpression() {
if err := params.workflowID.Validate(); err != nil {
- errs = append(errs, validation.NewValidationError(validation.NewValueValidationError("%s.workflowId expression is invalid: %s", parentName, err.Error()), params.workflowIDNode))
+ errs = append(errs, validation.NewValidationError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("`%s`.workflowId expression is invalid: %w", parentName, err), params.workflowIDNode))
}
typ, sourceDescriptionName, _, _ := params.workflowID.GetParts()
if typ != expression.ExpressionTypeSourceDescriptions {
- errs = append(errs, validation.NewValidationError(validation.NewValueValidationError("%s.workflowId must be a sourceDescriptions expression, got %s", parentName, typ), params.workflowIDNode))
+ errs = append(errs, validation.NewValidationError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("`%s`.workflowId must be a sourceDescriptions expression, got `%s`", parentName, typ), params.workflowIDNode))
}
if params.arazzo.SourceDescriptions.Find(sourceDescriptionName) == nil {
- errs = append(errs, validation.NewValidationError(validation.NewValueValidationError("%s.sourceDescription value %s not found", parentName, sourceDescriptionName), params.workflowIDNode))
+ errs = append(errs, validation.NewValidationError(validation.SeverityError, validation.RuleValidationInvalidReference, fmt.Errorf("`%s`.sourceDescription value `%s` not found", parentName, sourceDescriptionName), params.workflowIDNode))
}
} else if params.arazzo.Workflows.Find(pointer.Value(params.workflowID).String()) == nil {
- errs = append(errs, validation.NewValidationError(validation.NewValueValidationError("%s.workflowId value %s does not exist", parentName, *params.workflowID), params.workflowIDNode))
+ errs = append(errs, validation.NewValidationError(validation.SeverityError, validation.RuleValidationInvalidReference, fmt.Errorf("`%s`.workflowId value `%s` does not exist", parentName, *params.workflowID), params.workflowIDNode))
}
}
if params.stepID != nil {
@@ -206,11 +207,11 @@ func validationActionWorkflowIDAndStepID(ctx context.Context, parentName string,
}
if !foundStepId {
- errs = append(errs, validation.NewValidationError(validation.NewValueValidationError("%s.stepId value %s does not exist in any parent workflows", parentName, pointer.Value(params.stepID)), params.workflowIDNode))
+ errs = append(errs, validation.NewValidationError(validation.SeverityError, validation.RuleValidationInvalidReference, fmt.Errorf("`%s`.stepId value `%s` does not exist in any parent workflows", parentName, pointer.Value(params.stepID)), params.workflowIDNode))
}
}
} else if w.Steps.Find(pointer.Value(params.stepID)) == nil {
- errs = append(errs, validation.NewValidationError(validation.NewValueValidationError("%s.stepId value %s does not exist in workflow %s", parentName, pointer.Value(params.stepID), w.WorkflowID), params.workflowIDNode))
+ errs = append(errs, validation.NewValidationError(validation.SeverityError, validation.RuleValidationInvalidReference, fmt.Errorf("`%s`.stepId value `%s` does not exist in workflow `%s`", parentName, pointer.Value(params.stepID), w.WorkflowID), params.workflowIDNode))
}
}
diff --git a/arazzo/workflow.go b/arazzo/workflow.go
index 411685c7..1e2ddbd6 100644
--- a/arazzo/workflow.go
+++ b/arazzo/workflow.go
@@ -3,6 +3,7 @@ package arazzo
import (
"context"
"errors"
+ "fmt"
"regexp"
"github.com/speakeasy-api/openapi/arazzo/core"
@@ -78,7 +79,7 @@ func (w *Workflow) Validate(ctx context.Context, opts ...validation.Option) []er
errs := []error{}
if core.WorkflowID.Present && w.WorkflowID == "" {
- errs = append(errs, validation.NewValueError(validation.NewMissingValueError("workflow.workflowId is required"), core, core.WorkflowID))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("workflow.workflowId is required"), core, core.WorkflowID))
}
if w.Inputs != nil {
@@ -88,20 +89,20 @@ func (w *Workflow) Validate(ctx context.Context, opts ...validation.Option) []er
for i, dependsOn := range w.DependsOn {
if dependsOn.IsExpression() {
if err := dependsOn.Validate(); err != nil {
- errs = append(errs, validation.NewSliceError(validation.NewValueValidationError("workflow.dependsOn expression is invalid: %s", err.Error()), core, core.DependsOn, i))
+ errs = append(errs, validation.NewSliceError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("workflow.dependsOn expression is invalid: %w", err), core, core.DependsOn, i))
}
typ, sourceDescriptionName, _, _ := dependsOn.GetParts()
if typ != expression.ExpressionTypeSourceDescriptions {
- errs = append(errs, validation.NewSliceError(validation.NewValueValidationError("workflow.dependsOn must be a sourceDescriptions expression if not a workflowId, got %s", typ), core, core.DependsOn, i))
+ errs = append(errs, validation.NewSliceError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("workflow.dependsOn must be a sourceDescriptions expression if not a workflowId, got `%s`", typ), core, core.DependsOn, i))
}
if a.SourceDescriptions.Find(sourceDescriptionName) == nil {
- errs = append(errs, validation.NewSliceError(validation.NewValueValidationError("workflow.dependsOn sourceDescription %s not found", sourceDescriptionName), core, core.DependsOn, i))
+ errs = append(errs, validation.NewSliceError(validation.SeverityError, validation.RuleValidationInvalidReference, fmt.Errorf("workflow.dependsOn sourceDescription `%s` not found", sourceDescriptionName), core, core.DependsOn, i))
}
} else if a.Workflows.Find(string(dependsOn)) == nil {
- errs = append(errs, validation.NewSliceError(validation.NewValueValidationError("workflow.dependsOn workflowId %s not found", dependsOn), core, core.DependsOn, i))
+ errs = append(errs, validation.NewSliceError(validation.SeverityError, validation.RuleValidationInvalidReference, fmt.Errorf("workflow.dependsOn workflowId `%s` not found", dependsOn), core, core.DependsOn, i))
}
}
@@ -119,11 +120,11 @@ func (w *Workflow) Validate(ctx context.Context, opts ...validation.Option) []er
for name, output := range w.Outputs.All() {
if !outputNameRegex.MatchString(name) {
- errs = append(errs, validation.NewMapKeyError(validation.NewValueValidationError("workflow.outputs name must be a valid name [%s]: %s", outputNameRegex.String(), name), core, core.Outputs, name))
+ errs = append(errs, validation.NewMapKeyError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("workflow.outputs name must be a valid name [`%s`]: `%s`", outputNameRegex.String(), name), core, core.Outputs, name))
}
if err := output.Validate(); err != nil {
- errs = append(errs, validation.NewMapValueError(validation.NewValueValidationError("workflow.outputs expression is invalid: %s", err.Error()), core, core.Outputs, name))
+ errs = append(errs, validation.NewMapValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("workflow.outputs expression is invalid: %w", err), core, core.Outputs, name))
}
}
diff --git a/cmd/openapi/commands/openapi/README.md b/cmd/openapi/commands/openapi/README.md
index ad443f93..41853780 100644
--- a/cmd/openapi/commands/openapi/README.md
+++ b/cmd/openapi/commands/openapi/README.md
@@ -9,6 +9,9 @@ OpenAPI specifications define REST APIs in a standard format. These commands hel
- [Table of Contents](#table-of-contents)
- [Available Commands](#available-commands)
- [`validate`](#validate)
+ - [`lint`](#lint)
+ - [Configuration File](#configuration-file)
+ - [Custom Rules](#custom-rules)
- [`upgrade`](#upgrade)
- [`inline`](#inline)
- [`clean`](#clean)
@@ -48,6 +51,282 @@ This command checks for:
- Reference resolution and validity
- Best practice recommendations
+### `lint`
+
+Lint an OpenAPI specification document for style, consistency, and best practices.
+
+```bash
+# Lint a specification file
+openapi spec lint ./spec.yaml
+
+# Lint with JSON output
+openapi spec lint -f json ./spec.yaml
+
+# Lint with a custom configuration file
+openapi spec lint -c ./lint.yaml ./spec.yaml
+
+# Lint with specific rules disabled
+openapi spec lint -d rule-id-1 -d rule-id-2 ./spec.yaml
+```
+
+**Flags:**
+
+| Flag | Short | Description |
+|------|-------|-------------|
+| `--format` | `-f` | Output format: `text` (default) or `json` |
+| `--config` | `-c` | Path to lint configuration file |
+| `--ruleset` | `-r` | Ruleset to use (default: `all`) |
+| `--disable` | `-d` | Rules to disable (can be specified multiple times) |
+
+**What lint checks:**
+
+- All validation errors (structural validity, schema compliance, references)
+- Path parameter validation
+- Operation ID requirements
+- Consistent naming conventions
+- Security best practices
+- Additional style and consistency rules
+
+**Default Configuration Path:**
+
+If no `--config` flag is provided, the linter looks for a configuration file at `~/.openapi/lint.yaml`.
+
+#### Configuration File
+
+Create a YAML configuration file to customize linting behavior:
+
+```yaml
+# lint.yaml
+
+# Extend from a base ruleset (optional)
+extends:
+ - recommended
+
+# Configure individual rules
+rules:
+ # Disable a rule entirely
+ - id: operation-operationId
+ disabled: true
+
+ # Change the severity of a rule
+ - id: path-params
+ severity: error # error, warning, or hint
+
+ # Use match patterns for bulk configuration
+ - match: "^oas3-.*"
+ severity: warning
+
+ # Disable rules matching a pattern
+ - match: "^oas2-.*"
+ disabled: true
+
+# Configure rules by category
+categories:
+ validation:
+ severity: error
+ style:
+ severity: warning
+ disabled: false
+
+# Custom rules configuration (requires TypeScript/JavaScript rules)
+custom_rules:
+ paths:
+ - ./rules/*.ts
+ - ./rules/**/*.ts
+
+# Output format (text or json)
+output_format: text
+```
+
+**Configuration Options:**
+
+| Option | Type | Description |
+|--------|------|-------------|
+| `extends` | `string[]` | Rulesets to extend from (`all`, `recommended`, `security`) |
+| `rules` | `RuleEntry[]` | Individual rule configurations |
+| `categories` | `map[string]CategoryConfig` | Category-level configurations |
+| `custom_rules` | `CustomRulesConfig` | Custom TypeScript/JavaScript rules |
+| `output_format` | `string` | Output format (`text` or `json`) |
+
+**Available Rulesets:**
+
+| Ruleset | Description |
+|---------|-------------|
+| `all` | All available rules (default) |
+| `recommended` | Balanced ruleset - semantic rules, essential style, basic security |
+| `security` | Comprehensive OWASP security rules |
+
+**Rule Entry Options:**
+
+| Option | Type | Description |
+|--------|------|-------------|
+| `id` | `string` | Exact rule ID to configure |
+| `match` | `string` | Regex pattern to match rule IDs |
+| `severity` | `string` | `error`, `warning`, or `hint` |
+| `disabled` | `bool` | Set to `true` to disable the rule |
+
+#### Custom Rules
+
+Custom rules allow you to write linting rules in TypeScript or JavaScript. Rules are loaded when you specify paths in the configuration file.
+
+**Setup:**
+
+**Step 1:** Install the types package in your rules directory:
+
+```bash
+cd ./rules
+npm init -y
+npm install @anthropic/openapi-linter-types
+```
+
+**Step 2:** Create a TypeScript rule file:
+
+```typescript
+// rules/require-operation-summary.ts
+import {
+ Rule,
+ createError,
+ registerRule,
+ type Context,
+ type DocumentInfo,
+ type RuleConfig,
+ type Severity,
+ type ValidationError,
+} from '@anthropic/openapi-linter-types';
+
+class RequireOperationSummary extends Rule {
+ id(): string {
+ return 'custom-require-operation-summary';
+ }
+
+ category(): string {
+ return 'style';
+ }
+
+ description(): string {
+ return 'All operations must have a summary for documentation.';
+ }
+
+ summary(): string {
+ return 'Operations must have summary';
+ }
+
+ defaultSeverity(): Severity {
+ return 'warning';
+ }
+
+ run(ctx: Context, docInfo: DocumentInfo, config: RuleConfig): ValidationError[] {
+ const errors: ValidationError[] = [];
+
+ // Access all operations via the index
+ for (const opNode of docInfo.getIndex().getOperations()) {
+ const op = opNode.getNode();
+ if (!op.getSummary()) {
+ errors.push(
+ createError(
+ config.getSeverity(this.defaultSeverity()),
+ this.id(),
+ `Operation "${op.getOperationID() || 'unnamed'}" is missing a summary`,
+ op.getRootNode()
+ )
+ );
+ }
+ }
+
+ return errors;
+ }
+}
+
+// Register the rule with the linter
+registerRule(new RequireOperationSummary());
+```
+
+**Step 3:** Configure the linter to load your rules:
+
+```yaml
+# lint.yaml
+custom_rules:
+ paths:
+ - ./rules/*.ts
+
+rules:
+ # Optionally configure your custom rules
+ - id: custom-require-operation-summary
+ severity: error
+```
+
+**Step 4:** Run the linter:
+
+```bash
+openapi spec lint -c ./lint.yaml ./spec.yaml
+```
+
+**Custom Rule API:**
+
+Your rule class must implement the `RuleRunner` interface:
+
+| Method | Required | Description |
+|--------|----------|-------------|
+| `id()` | Yes | Unique rule identifier |
+| `category()` | Yes | Rule category (e.g., `style`, `security`) |
+| `description()` | Yes | Full description of the rule |
+| `summary()` | Yes | Short summary for output |
+| `link()` | No | URL to documentation |
+| `defaultSeverity()` | No | Default severity (`error`, `warning`, `hint`) |
+| `versions()` | No | OpenAPI versions this rule applies to |
+| `run()` | Yes | Execute the rule and return validation errors |
+
+**Accessing Document Data:**
+
+The `DocumentInfo` object provides access to the parsed OpenAPI document:
+
+```typescript
+// Get the OpenAPI document
+const doc = docInfo.getDocument();
+
+// Get the pre-built index for efficient traversal
+const index = docInfo.getIndex();
+
+// Access indexed collections
+index.getOperations(); // All operations
+index.getComponentSchemas(); // Component schemas
+index.getInlineRequestBodies(); // Inline request bodies
+index.getInlineResponses(); // Inline responses
+index.getInlineParameters(); // Inline parameters
+index.getInlineHeaders(); // Inline headers
+index.getInlineSchemas(); // Inline schemas
+index.getPathItems(); // Path items
+index.getSecurityRequirements(); // Security requirements
+index.getCallbacks(); // Callbacks
+```
+
+**Creating Validation Errors:**
+
+Use the `createError` helper to create validation errors with proper source location:
+
+```typescript
+import { createError, Severity } from '@anthropic/openapi-linter-types';
+
+// Create an error at a specific node location
+const error = createError(
+ 'warning', // severity
+ 'my-rule-id', // rule ID
+ 'Description of the issue', // message
+ node.getRootNode() // YAML node for location
+);
+```
+
+**Console Logging:**
+
+Use `console.log`, `console.warn`, and `console.error` for debugging:
+
+```typescript
+run(ctx: Context, docInfo: DocumentInfo, config: RuleConfig): ValidationError[] {
+ console.log('Running custom rule...');
+ console.log('Operations count:', docInfo.getIndex().getOperations().length);
+ // ...
+}
+```
+
### `upgrade`
Upgrade an OpenAPI specification to the latest supported version (3.2.0).
diff --git a/cmd/openapi/commands/openapi/convert_rules.go b/cmd/openapi/commands/openapi/convert_rules.go
new file mode 100644
index 00000000..261f05d2
--- /dev/null
+++ b/cmd/openapi/commands/openapi/convert_rules.go
@@ -0,0 +1,166 @@
+package openapi
+
+import (
+ "fmt"
+ "os"
+ "path/filepath"
+ "sort"
+
+ "github.com/speakeasy-api/openapi/openapi/linter/converter"
+ "github.com/spf13/cobra"
+)
+
+var convertRulesCmd = &cobra.Command{
+	Use:   "convert-rules <config-file>",
+	Short: "Convert Spectral/Vacuum/legacy configs to native linter format",
+	Long: `Convert a Spectral, Vacuum, or legacy Speakeasy lint config into the native
+linter format. This generates:
+
+  - A lint.yaml config file with mapped rule overrides
+  - TypeScript rule files for custom rules that don't have native equivalents
+
+Supported input formats:
+  - Spectral configs (.spectral.yml / .spectral.yaml)
+  - Vacuum configs (Spectral-compatible format)
+  - Legacy Speakeasy lint.yaml (with lintVersion/defaultRuleset/rulesets)
+
+Examples:
+  openapi lint convert-rules .spectral.yml
+  openapi lint convert-rules .spectral.yml --output ./converted
+  openapi lint convert-rules lint.yaml --dry-run
+  openapi lint convert-rules .spectral.yml --force`,
+	Args: cobra.ExactArgs(1),
+	Run:  runConvertRules,
+}
+
+var (
+ convertOutput string
+ convertRulesDir string
+ convertForce bool
+ convertDryRun bool
+)
+
+func init() {
+ convertRulesCmd.Flags().StringVarP(&convertOutput, "output", "o", ".", "Output directory for generated files")
+ convertRulesCmd.Flags().StringVar(&convertRulesDir, "rules-dir", "./rules", "Subdirectory for generated .ts rule files")
+ convertRulesCmd.Flags().BoolVarP(&convertForce, "force", "f", false, "Overwrite existing files")
+ convertRulesCmd.Flags().BoolVar(&convertDryRun, "dry-run", false, "Print summary without writing files")
+
+ lintCmd.AddCommand(convertRulesCmd)
+}
+
+func runConvertRules(cmd *cobra.Command, args []string) {
+ configFile := args[0]
+
+ // Parse the input config
+ ir, err := converter.ParseFile(configFile)
+ if err != nil {
+ fmt.Fprintf(os.Stderr, "Error parsing config: %v\n", err)
+ os.Exit(1)
+ }
+
+ // Generate native output
+ result, err := converter.Generate(ir,
+ converter.WithRulesDir(convertRulesDir),
+ )
+ if err != nil {
+ fmt.Fprintf(os.Stderr, "Error generating output: %v\n", err)
+ os.Exit(1)
+ }
+
+ // Print summary
+ printConvertSummary(result, configFile)
+
+ // Print warnings
+ if len(result.Warnings) > 0 {
+ fmt.Println("\nWarnings:")
+ for _, w := range result.Warnings {
+ prefix := ""
+ if w.RuleID != "" {
+ prefix = fmt.Sprintf("[%s] ", w.RuleID)
+ }
+ fmt.Printf(" %s(%s) %s\n", prefix, w.Phase, w.Message)
+ }
+ }
+
+ if convertDryRun {
+ fmt.Println("\n--dry-run: no files written")
+ return
+ }
+
+ // Check for existing files unless --force
+ if !convertForce {
+ configPath := filepath.Join(convertOutput, "lint.yaml")
+ if _, err := os.Stat(configPath); err == nil {
+ fmt.Fprintf(os.Stderr, "Error: %s already exists (use --force to overwrite)\n", configPath)
+ os.Exit(1)
+ }
+ rulesPath := filepath.Join(convertOutput, convertRulesDir)
+ if _, err := os.Stat(rulesPath); err == nil {
+ fmt.Fprintf(os.Stderr, "Error: %s already exists (use --force to overwrite)\n", rulesPath)
+ os.Exit(1)
+ }
+ }
+
+ // Ensure output directory exists
+ if err := os.MkdirAll(convertOutput, 0o755); err != nil { //nolint:gosec
+ fmt.Fprintf(os.Stderr, "Error creating output directory: %v\n", err)
+ os.Exit(1)
+ }
+
+ // Write files
+ if err := result.WriteFiles(convertOutput); err != nil {
+ fmt.Fprintf(os.Stderr, "Error writing files: %v\n", err)
+ os.Exit(1)
+ }
+
+ fmt.Printf("\nFiles written to %s\n", convertOutput)
+}
+
+func printConvertSummary(result *converter.GenerateResult, inputFile string) {
+ fmt.Printf("Converting: %s\n\n", inputFile)
+
+ // Extends
+ if len(result.Config.Extends) > 0 {
+ fmt.Printf("Extends: %v\n", result.Config.Extends)
+ }
+
+ // Rule overrides
+ overrideCount := 0
+ for _, entry := range result.Config.Rules {
+ if entry.Disabled != nil || entry.Severity != nil {
+ overrideCount++
+ }
+ }
+ if overrideCount > 0 {
+ fmt.Printf("Rule overrides: %d\n", overrideCount)
+ }
+
+ // Generated rules
+ if len(result.GeneratedRules) > 0 {
+ ruleIDs := sortedKeys(result.GeneratedRules)
+ fmt.Printf("Generated rules: %d\n", len(result.GeneratedRules))
+ for _, ruleID := range ruleIDs {
+ fmt.Printf(" - %s.ts\n", ruleID)
+ }
+
+ // Files to be written
+ fmt.Println("\nFiles:")
+ fmt.Println(" - lint.yaml")
+ for _, ruleID := range ruleIDs {
+ fmt.Printf(" - %s/%s.ts\n", convertRulesDir, ruleID)
+ }
+ } else {
+ fmt.Println("\nFiles:")
+ fmt.Println(" - lint.yaml")
+ }
+}
+
+func sortedKeys(m map[string]string) []string {
+ keys := make([]string, 0, len(m))
+ for k := range m {
+ keys = append(keys, k)
+ }
+ sort.Strings(keys)
+ return keys
+}
diff --git a/cmd/openapi/commands/openapi/explore.go b/cmd/openapi/commands/openapi/explore.go
index 9694135e..152f2023 100644
--- a/cmd/openapi/commands/openapi/explore.go
+++ b/cmd/openapi/commands/openapi/explore.go
@@ -2,6 +2,7 @@ package openapi
import (
"context"
+ "errors"
"fmt"
"os"
"path/filepath"
@@ -56,7 +57,7 @@ func runExplore(cmd *cobra.Command, args []string) error {
}
if len(operations) == 0 {
- return fmt.Errorf("no operations found in the OpenAPI document")
+ return errors.New("no operations found in the OpenAPI document")
}
// Get document info for display
@@ -95,7 +96,7 @@ func loadOpenAPIDocument(ctx context.Context, file string) (*openapi.OpenAPI, er
return nil, fmt.Errorf("failed to unmarshal OpenAPI document: %w", err)
}
if doc == nil {
- return nil, fmt.Errorf("failed to parse OpenAPI document: document is nil")
+ return nil, errors.New("failed to parse OpenAPI document: document is nil")
}
// Report validation errors as warnings but continue
diff --git a/cmd/openapi/commands/openapi/lint.go b/cmd/openapi/commands/openapi/lint.go
new file mode 100644
index 00000000..d3011696
--- /dev/null
+++ b/cmd/openapi/commands/openapi/lint.go
@@ -0,0 +1,192 @@
+package openapi
+
+import (
+ "context"
+ "fmt"
+ "os"
+ "path/filepath"
+
+ "github.com/speakeasy-api/openapi/linter"
+ "github.com/speakeasy-api/openapi/openapi"
+ openapiLinter "github.com/speakeasy-api/openapi/openapi/linter"
+ "github.com/spf13/cobra"
+
+ // Enable custom rules support
+ _ "github.com/speakeasy-api/openapi/openapi/linter/customrules"
+)
+
+var lintCmd = &cobra.Command{
+	Use:   "lint <file>",
+	Short: "Lint an OpenAPI specification document",
+	Long: `Lint an OpenAPI specification document for style, consistency, and best practices.
+
+This command runs both spec validation and additional lint rules including:
+- Path parameter validation
+- Operation ID requirements
+- Consistent naming conventions
+- Security best practices (OWASP)
+
+CONFIGURATION:
+
+By default, the linter looks for a configuration file at ~/.openapi/lint.yaml.
+Use --config to specify a custom configuration file.
+
+Available rulesets: all (default), recommended, security
+
+Example configuration (lint.yaml):
+
+  extends: recommended
+
+  rules:
+    - id: operation-operationId
+      severity: error
+    - id: some-rule
+      disabled: true
+
+  custom_rules:
+    paths:
+      - ./rules/*.ts
+
+CUSTOM RULES:
+
+Write custom linting rules in TypeScript or JavaScript. Install the types package
+in your rules directory:
+
+  npm install @speakeasy-api/openapi-linter-types
+
+Then configure the paths in your lint.yaml under custom_rules.paths.
+
+See the full documentation at:
+https://github.com/speakeasy-api/openapi/blob/main/cmd/openapi/commands/openapi/README.md#lint`,
+	Args: cobra.ExactArgs(1),
+	Run:  runLint,
+}
+
+var (
+ lintOutputFormat string
+ lintRuleset string
+ lintConfigFile string
+ lintDisableRules []string
+)
+
+func init() {
+	lintCmd.Flags().StringVarP(&lintOutputFormat, "format", "f", "text", "Output format: text or json")
+	lintCmd.Flags().StringVarP(&lintRuleset, "ruleset", "r", "all", "Ruleset to use (default loads from config)") // NOTE(review): lintRuleset is never read in this file (buildLintConfig ignores it) — flag appears to be a no-op; wire it into the config or remove it
+	lintCmd.Flags().StringVarP(&lintConfigFile, "config", "c", "", "Path to lint config file (default: ~/.openapi/lint.yaml)")
+	lintCmd.Flags().StringSliceVarP(&lintDisableRules, "disable", "d", nil, "Rule IDs to disable (can be repeated)")
+}
+
+func runLint(cmd *cobra.Command, args []string) {
+ ctx := cmd.Context()
+ file := args[0]
+
+ if err := lintOpenAPI(ctx, file); err != nil {
+ fmt.Fprintf(os.Stderr, "Error: %v\n", err)
+ os.Exit(1)
+ }
+}
+
+func lintOpenAPI(ctx context.Context, file string) error {
+ cleanFile := filepath.Clean(file)
+
+ // Get absolute path for document location
+ absPath, err := filepath.Abs(cleanFile)
+ if err != nil {
+ return fmt.Errorf("failed to get absolute path: %w", err)
+ }
+
+ // Load the OpenAPI document
+ f, err := os.Open(cleanFile)
+ if err != nil {
+ return fmt.Errorf("failed to open file: %w", err)
+ }
+ defer f.Close()
+
+ // Unmarshal with validation to get validation errors
+ doc, validationErrors, err := openapi.Unmarshal(ctx, f)
+ if err != nil {
+ return fmt.Errorf("failed to unmarshal file: %w", err)
+ }
+
+ // Build linter configuration
+ config := buildLintConfig()
+
+ // Create the OpenAPI linter with default rules
+ lint, err := openapiLinter.NewLinter(config)
+ if err != nil {
+ return fmt.Errorf("failed to create linter: %w", err)
+ }
+
+ // Create document info with location
+ docInfo := linter.NewDocumentInfo(doc, absPath)
+
+ // Run linting with validation errors passed in
+ output, err := lint.Lint(ctx, docInfo, validationErrors, nil)
+ if err != nil {
+ return fmt.Errorf("linting failed: %w", err)
+ }
+
+ // Format and print output
+ switch lintOutputFormat {
+ case "json":
+ fmt.Println(output.FormatJSON())
+ default:
+ fmt.Printf("%s\n", cleanFile)
+ fmt.Println(output.FormatText())
+ }
+
+ // Exit with error code if there are errors
+ if output.HasErrors() {
+ return fmt.Errorf("linting found %d errors", output.ErrorCount())
+ }
+
+ return nil
+}
+
+func buildLintConfig() *linter.Config {
+ config := linter.NewConfig()
+
+ // Load from config file if specified
+ if lintConfigFile != "" {
+ loaded, err := linter.LoadConfigFromFile(lintConfigFile)
+ if err != nil {
+ fmt.Fprintf(os.Stderr, "Error: %v\n", err)
+ os.Exit(1)
+ }
+ config = loaded
+ } else {
+ homeDir, err := os.UserHomeDir()
+ if err != nil {
+ fmt.Fprintf(os.Stderr, "Error: %v\n", err)
+ os.Exit(1)
+ }
+ defaultPath := filepath.Join(homeDir, ".openapi", "lint.yaml")
+ loaded, err := linter.LoadConfigFromFile(defaultPath)
+ if err == nil {
+ config = loaded
+ }
+ }
+
+ // Disable specified rules
+ for _, rule := range lintDisableRules {
+ disabled := true
+ config.Rules = append(config.Rules, linter.RuleEntry{
+ ID: rule,
+ Disabled: &disabled,
+ })
+ }
+
+ // Set output format
+ switch lintOutputFormat {
+ case "json":
+ config.OutputFormat = linter.OutputFormatJSON
+ default:
+ config.OutputFormat = linter.OutputFormatText
+ }
+
+ return config
+}
+
+func ptr[T any](v T) *T { // NOTE(review): not referenced anywhere in this file — remove if unused, or keep only if callers outside this view need it
+	return &v
+}
diff --git a/cmd/openapi/commands/openapi/root.go b/cmd/openapi/commands/openapi/root.go
index e492e60f..f2619205 100644
--- a/cmd/openapi/commands/openapi/root.go
+++ b/cmd/openapi/commands/openapi/root.go
@@ -5,6 +5,7 @@ import "github.com/spf13/cobra"
// Apply adds OpenAPI commands to the provided root command
func Apply(rootCmd *cobra.Command) {
rootCmd.AddCommand(validateCmd)
+ rootCmd.AddCommand(lintCmd)
rootCmd.AddCommand(upgradeCmd)
rootCmd.AddCommand(inlineCmd)
rootCmd.AddCommand(cleanCmd)
diff --git a/cmd/openapi/commands/openapi/snip.go b/cmd/openapi/commands/openapi/snip.go
index 8ccf1a53..5bc265b2 100644
--- a/cmd/openapi/commands/openapi/snip.go
+++ b/cmd/openapi/commands/openapi/snip.go
@@ -2,6 +2,7 @@ package openapi
import (
"context"
+ "errors"
"fmt"
"strings"
@@ -95,7 +96,7 @@ func runSnip(cmd *cobra.Command, args []string) error {
// If -w is specified without any operation selection flags, error
if snipWriteInPlace && !(hasRemoveFlags || hasKeepFlags) {
- return fmt.Errorf("--write flag requires specifying operations via --operationId/--operation or --keepOperationId/--keepOperation")
+ return errors.New("--write flag requires specifying operations via --operationId/--operation or --keepOperationId/--keepOperation")
}
// Interactive mode when no flags provided
@@ -105,7 +106,7 @@ func runSnip(cmd *cobra.Command, args []string) error {
// Disallow mixing keep + remove flags; ambiguous intent
if hasRemoveFlags && hasKeepFlags {
- return fmt.Errorf("cannot combine keep and remove flags; use either --operationId/--operation or --keepOperationId/--keepOperation")
+ return errors.New("cannot combine keep and remove flags; use either --operationId/--operation or --keepOperationId/--keepOperation")
}
// CLI mode
@@ -138,7 +139,7 @@ func runSnipCLI(ctx context.Context, inputFile, outputFile string) error {
}
if len(operationsToRemove) == 0 {
- return fmt.Errorf("no operations specified for removal")
+ return errors.New("no operations specified for removal")
}
// Perform the snip
@@ -175,7 +176,7 @@ func runSnipCLIKeep(ctx context.Context, inputFile, outputFile string) error {
return err
}
if len(keepOps) == 0 {
- return fmt.Errorf("no operations specified to keep")
+ return errors.New("no operations specified to keep")
}
// Collect all operations from the document
@@ -184,7 +185,7 @@ func runSnipCLIKeep(ctx context.Context, inputFile, outputFile string) error {
return fmt.Errorf("failed to collect operations: %w", err)
}
if len(allOps) == 0 {
- return fmt.Errorf("no operations found in the OpenAPI document")
+ return errors.New("no operations found in the OpenAPI document")
}
// Build lookup sets for keep filters
@@ -248,7 +249,7 @@ func runSnipInteractive(ctx context.Context, inputFile, outputFile string) error
}
if len(operations) == 0 {
- return fmt.Errorf("no operations found in the OpenAPI document")
+ return errors.New("no operations found in the OpenAPI document")
}
// Get document info
@@ -296,7 +297,7 @@ func runSnipInteractive(ctx context.Context, inputFile, outputFile string) error
// Get the final model state
tuiModel, ok := finalModel.(tui.Model)
if !ok {
- return fmt.Errorf("unexpected model type")
+ return errors.New("unexpected model type")
}
// Check if user performed an action or just quit
diff --git a/cmd/openapi/commands/overlay/README.md b/cmd/openapi/commands/overlay/README.md
index ded98676..19adc340 100644
--- a/cmd/openapi/commands/overlay/README.md
+++ b/cmd/openapi/commands/overlay/README.md
@@ -11,7 +11,6 @@ OpenAPI Overlays provide a way to modify OpenAPI and Arazzo specifications witho
- [`apply`](#apply)
- [`validate`](#validate)
- [`compare`](#compare)
- - [`upgrade`](#upgrade)
- [What are OpenAPI Overlays?](#what-are-openapi-overlays)
- [Example Overlay](#example-overlay)
- [Common Use Cases](#common-use-cases)
@@ -100,40 +99,6 @@ Features:
- Creates overlay files that can recreate the transformation
- Supports both positional arguments and explicit flags
-### `upgrade`
-
-Upgrade an Overlay document to the latest supported version (1.1.0).
-
-```bash
-# Preview upgrade (output to stdout)
-openapi overlay upgrade my-overlay.yaml
-
-# Upgrade and save to new file
-openapi overlay upgrade my-overlay.yaml upgraded-overlay.yaml
-
-# Upgrade in-place
-openapi overlay upgrade -w my-overlay.yaml
-```
-
-Features:
-
-- Updates the Overlay version field from 1.0.0 to 1.1.0
-- Enables RFC 9535 JSONPath as the default implementation
-- Clears redundant `x-speakeasy-jsonpath: rfc9535` (now default in 1.1.0)
-- All existing actions remain valid and functional
-- Validates overlay before and after upgrade
-
-Version Differences:
-
-| Version | Default JSONPath | Setting |
-| ------- | ---------------- | -------------------------------------------- |
-| 1.0.0 | Legacy yamlpath | `x-speakeasy-jsonpath: rfc9535` for RFC 9535 |
-| 1.1.0+ | RFC 9535 | `x-speakeasy-jsonpath: legacy` for legacy |
-
-Options:
-
-- `-w, --write`: Write result in-place to input file
-
## What are OpenAPI Overlays?
OpenAPI Overlays are documents that describe modifications to be applied to OpenAPI specifications. They allow you to:
diff --git a/cmd/openapi/commands/overlay/root.go b/cmd/openapi/commands/overlay/root.go
index b949db06..1aecb92c 100644
--- a/cmd/openapi/commands/overlay/root.go
+++ b/cmd/openapi/commands/overlay/root.go
@@ -5,6 +5,5 @@ import "github.com/spf13/cobra"
func Apply(rootCmd *cobra.Command) {
rootCmd.AddCommand(applyCmd)
rootCmd.AddCommand(compareCmd)
- rootCmd.AddCommand(upgradeCmd)
rootCmd.AddCommand(validateCmd)
}
diff --git a/cmd/openapi/commands/overlay/upgrade.go b/cmd/openapi/commands/overlay/upgrade.go
deleted file mode 100644
index b04f8358..00000000
--- a/cmd/openapi/commands/overlay/upgrade.go
+++ /dev/null
@@ -1,122 +0,0 @@
-package overlay
-
-import (
- "fmt"
- "os"
-
- "github.com/speakeasy-api/openapi/overlay"
- "github.com/speakeasy-api/openapi/overlay/loader"
- "github.com/spf13/cobra"
- "gopkg.in/yaml.v3"
-)
-
-var upgradeCmd = &cobra.Command{
- Use: "upgrade [output-file]",
- Short: "Upgrade an Overlay document to the latest supported version (1.1.0)",
- Long: `Upgrade an Overlay specification document to the latest supported version (1.1.0).
-
-The upgrade process includes:
-- Updating the Overlay version field from 1.0.0 to 1.1.0
-- Enabling RFC 9535 JSONPath as the default implementation
-- Clearing redundant x-speakeasy-jsonpath: rfc9535 (now default in 1.1.0)
-- All existing actions remain valid and functional
-- Support for new 1.1.0 features like copy actions and info description
-
-Version Differences:
- 1.0.0: Legacy JSONPath by default, RFC 9535 opt-in with x-speakeasy-jsonpath: rfc9535
- 1.1.0: RFC 9535 JSONPath by default, legacy opt-out with x-speakeasy-jsonpath: legacy
-
-Output options:
- - No output file specified: writes to stdout (pipe-friendly)
- - Output file specified: writes to the specified file
- - --write flag: writes in-place to the input file`,
- Example: ` # Preview upgrade (output to stdout)
- openapi overlay upgrade my-overlay.yaml
-
- # Upgrade and save to new file
- openapi overlay upgrade my-overlay.yaml upgraded-overlay.yaml
-
- # Upgrade in-place
- openapi overlay upgrade -w my-overlay.yaml`,
- Args: cobra.RangeArgs(1, 2),
- Run: runOverlayUpgrade,
-}
-
-var overlayWriteInPlace bool
-
-func init() {
- upgradeCmd.Flags().BoolVarP(&overlayWriteInPlace, "write", "w", false,
- "write result in-place to input file")
-}
-
-func runOverlayUpgrade(cmd *cobra.Command, args []string) {
- ctx := cmd.Context()
- inputFile := args[0]
-
- var outputFile string
- if len(args) > 1 {
- outputFile = args[1]
- }
-
- // Load the overlay
- o, err := loader.LoadOverlay(inputFile)
- if err != nil {
- Dief("Failed to load overlay: %v", err)
- }
-
- // Validate the overlay before upgrade
- if err := o.Validate(); err != nil {
- Dief("Overlay validation failed: %v", err)
- }
-
- originalVersion := o.Version
-
- // Perform the upgrade
- upgraded, err := overlay.Upgrade(ctx, o)
- if err != nil {
- Dief("Failed to upgrade overlay: %v", err)
- }
-
- // Print status
- if !upgraded {
- fmt.Fprintf(os.Stderr, "No upgrade needed - overlay is already at version %s\n", originalVersion)
- } else {
- fmt.Fprintf(os.Stderr, "Successfully upgraded overlay from %s to %s\n", originalVersion, o.Version)
- }
-
- // Validate the upgraded overlay
- if err := o.Validate(); err != nil {
- Dief("Upgraded overlay failed validation: %v", err)
- }
-
- // Serialize output
- output, err := o.ToString()
- if err != nil {
- Dief("Failed to serialize overlay: %v", err)
- }
-
- // Determine output destination
- switch {
- case overlayWriteInPlace:
- if err := os.WriteFile(inputFile, []byte(output), 0644); err != nil {
- Dief("Failed to write to input file: %v", err)
- }
- fmt.Fprintf(os.Stderr, "Wrote upgraded overlay to %s\n", inputFile)
- case outputFile != "":
- if err := os.WriteFile(outputFile, []byte(output), 0644); err != nil {
- Dief("Failed to write to output file: %v", err)
- }
- fmt.Fprintf(os.Stderr, "Wrote upgraded overlay to %s\n", outputFile)
- default:
- // Write to stdout
- var node yaml.Node
- if err := yaml.Unmarshal([]byte(output), &node); err != nil {
- Dief("Failed to parse output: %v", err)
- }
- encoder := yaml.NewEncoder(os.Stdout)
- encoder.SetIndent(2)
- if err := encoder.Encode(&node); err != nil {
- Dief("Failed to write to stdout: %v", err)
- }
- }
-}
diff --git a/cmd/openapi/go.mod b/cmd/openapi/go.mod
index d0e1e36e..a5b36ff9 100644
--- a/cmd/openapi/go.mod
+++ b/cmd/openapi/go.mod
@@ -6,7 +6,8 @@ require (
github.com/charmbracelet/bubbles v0.21.0
github.com/charmbracelet/bubbletea v1.3.10
github.com/charmbracelet/lipgloss v1.1.0
- github.com/speakeasy-api/openapi v1.15.1-0.20260123232020-443f8a84b64c
+ github.com/speakeasy-api/openapi v1.15.2-0.20260205050808-54a315b347f2
+ github.com/speakeasy-api/openapi/openapi/linter/customrules v0.0.0-20260205050808-54a315b347f2
github.com/spf13/cobra v1.10.1
github.com/stretchr/testify v1.11.1
gopkg.in/yaml.v3 v3.0.1
@@ -20,10 +21,14 @@ require (
github.com/charmbracelet/x/cellbuf v0.0.13-0.20250311204145-2c3ea96c31dd // indirect
github.com/charmbracelet/x/term v0.2.1 // indirect
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
+ github.com/dlclark/regexp2 v1.11.4 // indirect
+ github.com/dop251/goja v0.0.0-20260106131823-651366fbe6e3 // indirect
github.com/dprotaso/go-yit v0.0.0-20191028211022-135eb7262960 // indirect
github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f // indirect
+ github.com/evanw/esbuild v0.27.2 // indirect
+ github.com/go-sourcemap/sourcemap v2.1.4+incompatible // indirect
+ github.com/google/pprof v0.0.0-20230207041349-798e818bf904 // indirect
github.com/inconshreveable/mousetrap v1.1.0 // indirect
- github.com/kr/text v0.2.0 // indirect
github.com/lucasb-eyer/go-colorful v1.2.0 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
github.com/mattn/go-localereader v0.0.1 // indirect
diff --git a/cmd/openapi/go.sum b/cmd/openapi/go.sum
index 9bde1a69..6f06a7d2 100644
--- a/cmd/openapi/go.sum
+++ b/cmd/openapi/go.sum
@@ -1,3 +1,5 @@
+github.com/Masterminds/semver/v3 v3.2.1 h1:RN9w6+7QoMeJVGyfmbcgs28Br8cvmnucEXnY0rYXWg0=
+github.com/Masterminds/semver/v3 v3.2.1/go.mod h1:qvl/7zhW3nngYb5+80sSMF+FG2BjYrf8m9wsX0PNOMQ=
github.com/atotto/clipboard v0.1.4 h1:EH0zSVneZPSuFR11BlR9YppQTVDbh5+16AmcJi4g1z4=
github.com/atotto/clipboard v0.1.4/go.mod h1:ZY9tmq7sm5xIbd9bOK4onWV4S6X0u6GY7Vn0Yu86PYI=
github.com/aymanbagabas/go-osc52/v2 v2.0.1 h1:HwpRHbFMcZLEVr42D4p7XBqjyuxQH5SMiErDT4WkJ2k=
@@ -17,19 +19,26 @@ github.com/charmbracelet/x/cellbuf v0.0.13-0.20250311204145-2c3ea96c31dd/go.mod
github.com/charmbracelet/x/term v0.2.1 h1:AQeHeLZ1OqSXhrAWpYUtZyX1T3zVxfpZuEQMIQaGIAQ=
github.com/charmbracelet/x/term v0.2.1/go.mod h1:oQ4enTYFV7QN4m0i9mzHrViD7TQKvNEEkHUMCmsxdUg=
github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g=
-github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM=
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
-github.com/dlclark/regexp2 v1.11.0 h1:G/nrcoOa7ZXlpoa/91N3X7mM3r8eIlMBBJZvsz/mxKI=
-github.com/dlclark/regexp2 v1.11.0/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8=
+github.com/dlclark/regexp2 v1.11.4 h1:rPYF9/LECdNymJufQKmri9gV604RvvABwgOA8un7yAo=
+github.com/dlclark/regexp2 v1.11.4/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8=
+github.com/dop251/goja v0.0.0-20260106131823-651366fbe6e3 h1:bVp3yUzvSAJzu9GqID+Z96P+eu5TKnIMJSV4QaZMauM=
+github.com/dop251/goja v0.0.0-20260106131823-651366fbe6e3/go.mod h1:MxLav0peU43GgvwVgNbLAj1s/bSGboKkhuULvq/7hx4=
github.com/dprotaso/go-yit v0.0.0-20191028211022-135eb7262960 h1:aRd8M7HJVZOqn/vhOzrGcQH0lNAMkqMn+pXUYkatmcA=
github.com/dprotaso/go-yit v0.0.0-20191028211022-135eb7262960/go.mod h1:9HQzr9D/0PGwMEbC3d5AB7oi67+h4TsQqItC1GVYG58=
github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f h1:Y/CXytFA4m6baUTXGLOoWe4PQhGxaX0KpnayAqC48p4=
github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f/go.mod h1:vw97MGsxSvLiUE2X8qFplwetxpGLQrlU1Q9AUEIzCaM=
+github.com/evanw/esbuild v0.27.2 h1:3xBEws9y/JosfewXMM2qIyHAi+xRo8hVx475hVkJfNg=
+github.com/evanw/esbuild v0.27.2/go.mod h1:D2vIQZqV/vIf/VRHtViaUtViZmG7o+kKmlBfVQuRi48=
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
+github.com/go-sourcemap/sourcemap v2.1.4+incompatible h1:a+iTbH5auLKxaNwQFg0B+TCYl6lbukKPc7b5x0n1s6Q=
+github.com/go-sourcemap/sourcemap v2.1.4+incompatible/go.mod h1:F8jJfvm2KbVjc5NqelyYJmf/v5J0dwNLS2mL4sNA1Jg=
github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
+github.com/google/pprof v0.0.0-20230207041349-798e818bf904 h1:4/hN5RUoecvl+RmJRE2YxKWtnnQls6rQjjW5oV7qg2U=
+github.com/google/pprof v0.0.0-20230207041349-798e818bf904/go.mod h1:uglQLonpP8qtYCYyzA+8c/9qtqgA3qsXGYqCPKARAFg=
github.com/hpcloud/tail v1.0.0 h1:nfCOvKYfkgYP8hkirhJocXT2+zOD8yUNjXaWfTlyFKI=
github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
@@ -75,8 +84,10 @@ github.com/sergi/go-diff v1.1.0 h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0=
github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM=
github.com/speakeasy-api/jsonpath v0.6.2 h1:Mys71yd6u8kuowNCR0gCVPlVAHCmKtoGXYoAtcEbqXQ=
github.com/speakeasy-api/jsonpath v0.6.2/go.mod h1:ymb2iSkyOycmzKwbEAYPJV/yi2rSmvBCLZJcyD+VVWw=
-github.com/speakeasy-api/openapi v1.15.1-0.20260123232020-443f8a84b64c h1:nmJ3K4QQO1fNkIKM0GKfEGs/Cav7udtn9LijHF8ZaFw=
-github.com/speakeasy-api/openapi v1.15.1-0.20260123232020-443f8a84b64c/go.mod h1:aiVj+JnirrwZDtKegt0hQrj/ixl3v17EkN2YGnTuSro=
+github.com/speakeasy-api/openapi v1.15.2-0.20260205050808-54a315b347f2 h1:HAVe+/IBKXdUv/Qq1UzXIWV4RDHA8JQA0OGpgd/a0Zs=
+github.com/speakeasy-api/openapi v1.15.2-0.20260205050808-54a315b347f2/go.mod h1:aiVj+JnirrwZDtKegt0hQrj/ixl3v17EkN2YGnTuSro=
+github.com/speakeasy-api/openapi/openapi/linter/customrules v0.0.0-20260205050808-54a315b347f2 h1:pQsTBWKRf27uGkT1vbosf/GFiUfNr0qSxkd+FfdZSwE=
+github.com/speakeasy-api/openapi/openapi/linter/customrules v0.0.0-20260205050808-54a315b347f2/go.mod h1:Z2pg+iCf6izq0dVz3Ow/jHrHSlT220y899gwNHz1ZzE=
github.com/spf13/cobra v1.10.1 h1:lJeBwCfmrnXthfAupyUTzJ/J4Nc1RsHC/mSRU2dll/s=
github.com/spf13/cobra v1.10.1/go.mod h1:7SmJGaTHFVBY0jW4NXGluQoLvhqFQM+6XSKD+P4XaB0=
github.com/spf13/pflag v1.0.9 h1:9exaQaMOCwffKiiiYk6/BndUBv+iRViNW+4lEMi0PvY=
@@ -100,6 +111,7 @@ golang.org/x/sync v0.19.0 h1:vV+1eWNmZ5geRlYjzm2adRgW2/mcpevXNg50YZtPCE4=
golang.org/x/sync v0.19.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.36.0 h1:KVRy2GtZBrk1cBYA7MKu5bEZFxQk4NIDV6RLVcC8o0k=
golang.org/x/sys v0.36.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
@@ -116,8 +128,9 @@ gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkep
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
-gopkg.in/yaml.v2 v2.2.4 h1:/eiJrUcujPVeJ3xlSWaiNi3uSVmDGBK1pDHUHAnao1I=
gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
+gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
+gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
gopkg.in/yaml.v3 v3.0.0-20191026110619-0b21df46bc1d/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
diff --git a/cmd/openapi/internal/explore/tui/input.go b/cmd/openapi/internal/explore/tui/input.go
index 990af4de..a8a07a7b 100644
--- a/cmd/openapi/internal/explore/tui/input.go
+++ b/cmd/openapi/internal/explore/tui/input.go
@@ -1,6 +1,7 @@
package tui
import (
+ "errors"
"fmt"
"strings"
@@ -111,7 +112,7 @@ func PromptForFilePath(prompt, defaultValue string) (string, error) {
inputModel, ok := finalModel.(InputModel)
if !ok {
- return "", fmt.Errorf("unexpected model type")
+ return "", errors.New("unexpected model type")
}
if inputModel.IsCancelled() {
diff --git a/cmd/update-lint-docs/main.go b/cmd/update-lint-docs/main.go
new file mode 100644
index 00000000..b8dfdda8
--- /dev/null
+++ b/cmd/update-lint-docs/main.go
@@ -0,0 +1,183 @@
+package main
+
+import (
+ "fmt"
+ "os"
+ "path/filepath"
+ "regexp"
+ "sort"
+ "strings"
+
+ "github.com/speakeasy-api/openapi/linter"
+ "github.com/speakeasy-api/openapi/openapi"
+ openapiLinter "github.com/speakeasy-api/openapi/openapi/linter"
+)
+
+func main() {
+ if err := updateLintDocs(); err != nil {
+ fmt.Fprintf(os.Stderr, "Error: %v\n", err)
+ os.Exit(1)
+ }
+}
+
+func updateLintDocs() error {
+ fmt.Println("🔄 Updating lint rules in README files...")
+
+ if err := updateOpenAPILintDocs(); err != nil {
+ return fmt.Errorf("failed to update OpenAPI lint docs: %w", err)
+ }
+
+ if err := updateRuleLinks(); err != nil {
+ return fmt.Errorf("failed to update rule links: %w", err)
+ }
+
+ fmt.Println("🎉 Lint docs updated successfully!")
+ return nil
+}
+
+func updateOpenAPILintDocs() error {
+ readmeFile := "openapi/linter/README.md"
+
+ // Check if README exists
+ if _, err := os.Stat(readmeFile); os.IsNotExist(err) {
+ fmt.Printf("⚠️ No README file found: %s\n", readmeFile)
+ return nil
+ }
+
+ // Create linter to get the registry
+ config := linter.NewConfig()
+ lint, err := openapiLinter.NewLinter(config)
+ if err != nil {
+ return fmt.Errorf("failed to create linter: %w", err)
+ }
+ docGen := linter.NewDocGenerator(lint.Registry())
+
+ // Generate rules table
+ content := generateRulesTable(docGen)
+
+ // Update README file
+ if err := updateReadmeFile(readmeFile, content); err != nil {
+ return fmt.Errorf("failed to update README: %w", err)
+ }
+
+ fmt.Printf("✅ Updated %s\n", readmeFile)
+ return nil
+}
+
+func generateRulesTable(docGen *linter.DocGenerator[*openapi.OpenAPI]) string {
+	docs := docGen.GenerateAllRuleDocs()
+
+	// Sort rules alphabetically by ID
+	sort.Slice(docs, func(i, j int) bool {
+		return docs[i].ID < docs[j].ID
+	})
+
+	var content strings.Builder
+	content.WriteString("| Rule | Severity | Description |\n")
+	content.WriteString("|------|----------|-------------|\n")
+
+	for _, doc := range docs {
+		// Escape pipe characters in description
+		desc := strings.ReplaceAll(doc.Description, "|", "\\|")
+		// Replace newlines with spaces
+		desc = strings.ReplaceAll(desc, "\n", " ")
+		content.WriteString(fmt.Sprintf("| `%s` | %s | %s |\n", doc.ID, doc.DefaultSeverity, desc)) // fix: 3 verbs need exactly 3 args — doc.ID was passed twice, producing %!(EXTRA …) output and shifting the Severity column
+	}
+
+	return content.String()
+}
+
+func updateReadmeFile(filename, newContent string) error {
+	// Read the current README
+	data, err := os.ReadFile(filename) //nolint:gosec
+	if err != nil {
+		return err
+	}
+
+	content := string(data)
+
+	// Find the start and end markers
+	startMarker := "<!-- START LINT RULES -->" // fix: markers were empty strings, so strings.Index returned 0 for both, the -1 check never fired, and the table was prepended instead of replacing the marked section. NOTE(review): marker text reconstructed — confirm it matches the markers in openapi/linter/README.md
+	endMarker := "<!-- END LINT RULES -->"
+
+	startIdx := strings.Index(content, startMarker)
+	endIdx := strings.Index(content, endMarker)
+
+	if startIdx == -1 || endIdx == -1 {
+		return fmt.Errorf("could not find lint rules markers in %s", filename)
+	}
+
+	// Replace the content between markers
+	before := content[:startIdx+len(startMarker)]
+	after := content[endIdx:]
+
+	newFileContent := before + "\n\n" + newContent + "\n" + after
+
+	// Write the updated content
+	return os.WriteFile(filename, []byte(newFileContent), 0600)
+}
+
+func updateRuleLinks() error {
+ const baseURL = "https://github.com/speakeasy-api/openapi/blob/main/openapi/linter/README.md"
+ rulesDir := "openapi/linter/rules"
+
+ // Get all rule files
+ entries, err := os.ReadDir(rulesDir)
+ if err != nil {
+ return fmt.Errorf("failed to read rules directory: %w", err)
+ }
+
+ // Pattern to match Link() method - captures receiver and return value
+ linkPattern := regexp.MustCompile(`func (\([^)]+\)) Link\(\) string \{\s*return "[^"]*"\s*\}`)
+
+ updatedCount := 0
+ for _, entry := range entries {
+ if entry.IsDir() || !strings.HasSuffix(entry.Name(), ".go") || strings.HasSuffix(entry.Name(), "_test.go") {
+ continue
+ }
+
+ filePath := filepath.Join(rulesDir, entry.Name())
+
+ // Read the file
+ data, err := os.ReadFile(filePath) //nolint:gosec
+ if err != nil {
+ return fmt.Errorf("failed to read %s: %w", filePath, err)
+ }
+
+ content := string(data)
+
+ // Find the rule ID constant
+ ruleIDPattern := regexp.MustCompile(`const (Rule\w+) = "([^"]+)"`)
+ matches := ruleIDPattern.FindStringSubmatch(content)
+ if len(matches) < 3 {
+ continue // Skip if no rule ID found
+ }
+ ruleID := matches[2]
+
+ // Create the new link
+ newLink := fmt.Sprintf("%s#%s", baseURL, ruleID)
+
+ // Replace the Link() method, preserving the receiver
+ newContent := linkPattern.ReplaceAllStringFunc(content, func(match string) string {
+ receiverMatch := regexp.MustCompile(`func (\([^)]+\))`).FindStringSubmatch(match)
+ if len(receiverMatch) > 1 {
+ return fmt.Sprintf(`func %s Link() string {
+ return "%s"
+}`, receiverMatch[1], newLink)
+ }
+ return match
+ })
+
+ // Only write if content changed
+ if newContent != content {
+ if err := os.WriteFile(filePath, []byte(newContent), 0600); err != nil {
+ return fmt.Errorf("failed to write %s: %w", filePath, err)
+ }
+ updatedCount++
+ fmt.Printf("✅ Updated link in %s\n", filePath)
+ }
+ }
+
+ fmt.Printf("✅ Updated links in %d rule files\n", updatedCount)
+ return nil
+}
diff --git a/go.work b/go.work
index b115ffd4..1e38c8f4 100644
--- a/go.work
+++ b/go.work
@@ -4,4 +4,6 @@ use (
.
./cmd/openapi
./jsonschema/oas3/tests
+ ./openapi/linter/converter/tests
+ ./openapi/linter/customrules
)
diff --git a/jsonpointer/jsonpointer.go b/jsonpointer/jsonpointer.go
index 6d66e904..042ee0bb 100644
--- a/jsonpointer/jsonpointer.go
+++ b/jsonpointer/jsonpointer.go
@@ -221,6 +221,10 @@ type NavigableNoder interface {
}
func getStructTarget(sourceVal reflect.Value, currentPart navigationPart, stack []navigationPart, currentPath string, o *options) (any, []navigationPart, error) {
+ if sourceVal.Kind() == reflect.Ptr && sourceVal.IsNil() {
+ return nil, nil, ErrNotFound.Wrap(fmt.Errorf("struct is nil at %s", currentPath))
+ }
+
if interfaces.ImplementsInterface[NavigableNoder](sourceVal.Type()) {
val, stack, err := getNavigableNoderTarget(sourceVal, currentPart, stack, currentPath, o)
if err != nil {
diff --git a/jsonpointer/models_test.go b/jsonpointer/models_test.go
index 272725a5..4d2ef7bd 100644
--- a/jsonpointer/models_test.go
+++ b/jsonpointer/models_test.go
@@ -240,6 +240,37 @@ func TestNavigateModel_EmbeddedMap(t *testing.T) {
})
}
+func TestNavigateModel_NilModelPointer(t *testing.T) {
+ t.Parallel()
+
+ t.Run("nil model pointer returns error instead of panic", func(t *testing.T) {
+ t.Parallel()
+
+ // A nil pointer to a model type should return an error, not panic.
+ // This reproduces a crash when resolving a broken $ref like
+ // "#/components/schemas/DoesNotExist" where the Components pointer is nil.
+ var model *tests.TestPrimitiveHighModel
+ _, err := GetTarget(model, "/stringField")
+ require.Error(t, err)
+ assert.Contains(t, err.Error(), "not found")
+ assert.Contains(t, err.Error(), "nil")
+ })
+
+ t.Run("nil nested model pointer returns error instead of panic", func(t *testing.T) {
+ t.Parallel()
+
+ // A model with a nil nested model pointer should return an error
+ // when trying to navigate through the nil pointer.
+ model := &tests.TestComplexHighModel{
+ NestedModel: nil,
+ }
+ _, err := GetTarget(model, "/nestedModel/stringField")
+ require.Error(t, err)
+ assert.Contains(t, err.Error(), "not found")
+ assert.Contains(t, err.Error(), "nil")
+ })
+}
+
func TestNavigateModel_EmbeddedMapEscapedKeys(t *testing.T) {
t.Parallel()
diff --git a/jsonschema/oas3/core/discriminator_test.go b/jsonschema/oas3/core/discriminator_test.go
new file mode 100644
index 00000000..322032f6
--- /dev/null
+++ b/jsonschema/oas3/core/discriminator_test.go
@@ -0,0 +1,325 @@
+package core
+
+import (
+ "testing"
+
+ "github.com/speakeasy-api/openapi/marshaller"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestDiscriminator_Unmarshal_AllFields_Success(t *testing.T) {
+ t.Parallel()
+
+ tests := []struct {
+ name string
+ yaml string
+ }{
+ {
+ name: "all fields populated",
+ yaml: `
+propertyName: petType
+mapping:
+ dog: "#/components/schemas/Dog"
+ cat: "#/components/schemas/Cat"
+defaultMapping: "#/components/schemas/Pet"
+x-custom: value
+`,
+ },
+ {
+ name: "only required propertyName field",
+ yaml: `
+propertyName: type
+`,
+ },
+ {
+ name: "propertyName with mapping",
+ yaml: `
+propertyName: objectType
+mapping:
+ typeA: "#/components/schemas/TypeA"
+ typeB: "#/components/schemas/TypeB"
+`,
+ },
+ {
+ name: "propertyName with defaultMapping",
+ yaml: `
+propertyName: kind
+defaultMapping: "#/components/schemas/DefaultType"
+`,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ var target Discriminator
+ validationErrs, err := marshaller.UnmarshalCore(ctx, "", parseYAML(t, tt.yaml), &target)
+
+ require.NoError(t, err, "unmarshal should succeed")
+ require.Empty(t, validationErrs, "should not have validation errors")
+ assert.NotNil(t, target, "Discriminator should not be nil")
+ })
+ }
+}
+
+func TestDiscriminator_Unmarshal_PropertyNameField_Success(t *testing.T) {
+ t.Parallel()
+
+ tests := []struct {
+ name string
+ yaml string
+ expectedPropertyName string
+ }{
+ {
+ name: "simple property name",
+ yaml: `propertyName: type`,
+ expectedPropertyName: "type",
+ },
+ {
+ name: "camelCase property name",
+ yaml: `propertyName: petType`,
+ expectedPropertyName: "petType",
+ },
+ {
+ name: "snake_case property name",
+ yaml: `propertyName: pet_type`,
+ expectedPropertyName: "pet_type",
+ },
+ {
+ name: "kebab-case property name",
+ yaml: `propertyName: pet-type`,
+ expectedPropertyName: "pet-type",
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ var target Discriminator
+ validationErrs, err := marshaller.UnmarshalCore(ctx, "", parseYAML(t, tt.yaml), &target)
+
+ require.NoError(t, err, "unmarshal should succeed")
+ require.Empty(t, validationErrs, "should not have validation errors")
+ assert.Equal(t, tt.expectedPropertyName, target.PropertyName.Value, "should parse propertyName correctly")
+ })
+ }
+}
+
+func TestDiscriminator_Unmarshal_MappingField_Success(t *testing.T) {
+ t.Parallel()
+
+ tests := []struct {
+ name string
+ yaml string
+ key string
+ expectedRef string
+ expectedSize int
+ }{
+ {
+ name: "single mapping entry",
+ yaml: `
+propertyName: type
+mapping:
+ dog: "#/components/schemas/Dog"
+`,
+ key: "dog",
+ expectedRef: "#/components/schemas/Dog",
+ expectedSize: 1,
+ },
+ {
+ name: "multiple mapping entries",
+ yaml: `
+propertyName: type
+mapping:
+ dog: "#/components/schemas/Dog"
+ cat: "#/components/schemas/Cat"
+ bird: "#/components/schemas/Bird"
+`,
+ key: "cat",
+ expectedRef: "#/components/schemas/Cat",
+ expectedSize: 3,
+ },
+ {
+ name: "mapping with external refs",
+ yaml: `
+propertyName: type
+mapping:
+ local: "#/components/schemas/Local"
+ external: "https://example.com/schemas/External"
+`,
+ key: "external",
+ expectedRef: "https://example.com/schemas/External",
+ expectedSize: 2,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ var target Discriminator
+ validationErrs, err := marshaller.UnmarshalCore(ctx, "", parseYAML(t, tt.yaml), &target)
+
+ require.NoError(t, err, "unmarshal should succeed")
+ require.Empty(t, validationErrs, "should not have validation errors")
+ require.NotNil(t, target.Mapping.Value, "mapping should be set")
+ assert.Equal(t, tt.expectedSize, target.Mapping.Value.Len(), "should have correct number of mappings")
+
+ value, found := target.Mapping.Value.Get(tt.key)
+ require.True(t, found, "should find mapping key")
+ assert.Equal(t, tt.expectedRef, value.Value, "should parse mapping value correctly")
+ })
+ }
+}
+
+func TestDiscriminator_Unmarshal_DefaultMappingField_Success(t *testing.T) {
+ t.Parallel()
+
+ tests := []struct {
+ name string
+ yaml string
+ expectedDefaultMapping string
+ }{
+ {
+ name: "defaultMapping with component ref",
+ yaml: `
+propertyName: type
+defaultMapping: "#/components/schemas/Default"
+`,
+ expectedDefaultMapping: "#/components/schemas/Default",
+ },
+ {
+ name: "defaultMapping with external ref",
+ yaml: `
+propertyName: type
+defaultMapping: "https://example.com/schemas/Default"
+`,
+ expectedDefaultMapping: "https://example.com/schemas/Default",
+ },
+ {
+ name: "defaultMapping with path ref",
+ yaml: `
+propertyName: type
+defaultMapping: "#/definitions/Default"
+`,
+ expectedDefaultMapping: "#/definitions/Default",
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ var target Discriminator
+ validationErrs, err := marshaller.UnmarshalCore(ctx, "", parseYAML(t, tt.yaml), &target)
+
+ require.NoError(t, err, "unmarshal should succeed")
+ require.Empty(t, validationErrs, "should not have validation errors")
+ require.NotNil(t, target.DefaultMapping.Value, "defaultMapping should be set")
+ assert.Equal(t, tt.expectedDefaultMapping, *target.DefaultMapping.Value, "should parse defaultMapping correctly")
+ })
+ }
+}
+
+func TestDiscriminator_Unmarshal_Extensions_Success(t *testing.T) {
+ t.Parallel()
+
+ tests := []struct {
+ name string
+ yaml string
+ extensionKey string
+ expectedValue string
+ }{
+ {
+ name: "single extension",
+ yaml: `
+propertyName: type
+x-custom: value
+`,
+ extensionKey: "x-custom",
+ expectedValue: "value",
+ },
+ {
+ name: "multiple extensions",
+ yaml: `
+propertyName: type
+x-first: value1
+x-second: value2
+`,
+ extensionKey: "x-first",
+ expectedValue: "value1",
+ },
+ {
+ name: "extension with all fields",
+ yaml: `
+propertyName: type
+mapping:
+ dog: "#/components/schemas/Dog"
+defaultMapping: "#/components/schemas/Pet"
+x-vendor: custom-value
+`,
+ extensionKey: "x-vendor",
+ expectedValue: "custom-value",
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ var target Discriminator
+ validationErrs, err := marshaller.UnmarshalCore(ctx, "", parseYAML(t, tt.yaml), &target)
+
+ require.NoError(t, err, "unmarshal should succeed")
+ require.Empty(t, validationErrs, "should not have validation errors")
+ require.NotNil(t, target.Extensions, "extensions should be set")
+
+ ext, found := target.Extensions.Get(tt.extensionKey)
+ require.True(t, found, "should find extension")
+ assert.Equal(t, tt.expectedValue, ext.Value.Value, "should parse extension value correctly")
+ })
+ }
+}
+
+func TestDiscriminator_Unmarshal_MinimalObject_Success(t *testing.T) {
+ t.Parallel()
+
+ ctx := t.Context()
+ yaml := `propertyName: type`
+
+ var target Discriminator
+ validationErrs, err := marshaller.UnmarshalCore(ctx, "", parseYAML(t, yaml), &target)
+
+ require.NoError(t, err, "unmarshal should succeed")
+ require.Empty(t, validationErrs, "should not have validation errors")
+ assert.Equal(t, "type", target.PropertyName.Value, "should parse propertyName")
+ assert.Nil(t, target.Mapping.Value, "mapping should be nil")
+ assert.Nil(t, target.DefaultMapping.Value, "defaultMapping should be nil")
+}
+
+func TestDiscriminator_Unmarshal_EmptyMapping_Success(t *testing.T) {
+ t.Parallel()
+
+ ctx := t.Context()
+ yaml := `
+propertyName: type
+mapping: {}
+`
+
+ var target Discriminator
+ validationErrs, err := marshaller.UnmarshalCore(ctx, "", parseYAML(t, yaml), &target)
+
+ require.NoError(t, err, "unmarshal should succeed")
+ require.Empty(t, validationErrs, "should not have validation errors")
+ assert.Equal(t, "type", target.PropertyName.Value, "should parse propertyName")
+ require.NotNil(t, target.Mapping.Value, "mapping should not be nil")
+ assert.Equal(t, 0, target.Mapping.Value.Len(), "mapping should be empty")
+}
diff --git a/jsonschema/oas3/core/externaldoc_test.go b/jsonschema/oas3/core/externaldoc_test.go
new file mode 100644
index 00000000..ea4afea6
--- /dev/null
+++ b/jsonschema/oas3/core/externaldoc_test.go
@@ -0,0 +1,228 @@
+package core
+
+import (
+ "testing"
+
+ "github.com/speakeasy-api/openapi/marshaller"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestExternalDocumentation_Unmarshal_AllFields_Success(t *testing.T) {
+ t.Parallel()
+
+ tests := []struct {
+ name string
+ yaml string
+ }{
+ {
+ name: "all fields populated",
+ yaml: `
+url: https://example.com/docs
+description: Additional documentation
+x-custom: value
+`,
+ },
+ {
+ name: "only required url field",
+ yaml: `
+url: https://example.com
+`,
+ },
+ {
+ name: "url with description",
+ yaml: `
+url: https://api.example.com/reference
+description: API Reference Documentation
+`,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ var target ExternalDocumentation
+ validationErrs, err := marshaller.UnmarshalCore(ctx, "", parseYAML(t, tt.yaml), &target)
+
+ require.NoError(t, err, "unmarshal should succeed")
+ require.Empty(t, validationErrs, "should not have validation errors")
+ assert.NotNil(t, target, "ExternalDocumentation should not be nil")
+ })
+ }
+}
+
+func TestExternalDocumentation_Unmarshal_URLField_Success(t *testing.T) {
+ t.Parallel()
+
+ tests := []struct {
+ name string
+ yaml string
+ expectedURL string
+ }{
+ {
+ name: "https url",
+ yaml: `url: https://example.com/docs`,
+ expectedURL: "https://example.com/docs",
+ },
+ {
+ name: "http url",
+ yaml: `url: http://example.com/docs`,
+ expectedURL: "http://example.com/docs",
+ },
+ {
+ name: "url with path",
+ yaml: `url: https://api.example.com/v1/reference`,
+ expectedURL: "https://api.example.com/v1/reference",
+ },
+ {
+ name: "url with query params",
+ yaml: `url: https://example.com/docs?version=2.0`,
+ expectedURL: "https://example.com/docs?version=2.0",
+ },
+ {
+ name: "url with fragment",
+ yaml: `url: https://example.com/docs#section`,
+ expectedURL: "https://example.com/docs#section",
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ var target ExternalDocumentation
+ validationErrs, err := marshaller.UnmarshalCore(ctx, "", parseYAML(t, tt.yaml), &target)
+
+ require.NoError(t, err, "unmarshal should succeed")
+ require.Empty(t, validationErrs, "should not have validation errors")
+ assert.Equal(t, tt.expectedURL, target.URL.Value, "should parse url correctly")
+ })
+ }
+}
+
+func TestExternalDocumentation_Unmarshal_DescriptionField_Success(t *testing.T) {
+ t.Parallel()
+
+ tests := []struct {
+ name string
+ yaml string
+ expectedDescription string
+ }{
+ {
+ name: "simple description",
+ yaml: `
+url: https://example.com
+description: Documentation
+`,
+ expectedDescription: "Documentation",
+ },
+ {
+ name: "multi-word description",
+ yaml: `
+url: https://example.com
+description: Complete API documentation and reference guide
+`,
+ expectedDescription: "Complete API documentation and reference guide",
+ },
+ {
+ name: "description with special chars",
+ yaml: `
+url: https://example.com
+description: "Documentation: API & SDK Guide"
+`,
+ expectedDescription: "Documentation: API & SDK Guide",
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ var target ExternalDocumentation
+ validationErrs, err := marshaller.UnmarshalCore(ctx, "", parseYAML(t, tt.yaml), &target)
+
+ require.NoError(t, err, "unmarshal should succeed")
+ require.Empty(t, validationErrs, "should not have validation errors")
+ require.NotNil(t, target.Description.Value, "description should be set")
+ assert.Equal(t, tt.expectedDescription, *target.Description.Value, "should parse description correctly")
+ })
+ }
+}
+
+func TestExternalDocumentation_Unmarshal_Extensions_Success(t *testing.T) {
+ t.Parallel()
+
+ tests := []struct {
+ name string
+ yaml string
+ extensionKey string
+ expectedValue string
+ }{
+ {
+ name: "single extension",
+ yaml: `
+url: https://example.com
+x-custom: value
+`,
+ extensionKey: "x-custom",
+ expectedValue: "value",
+ },
+ {
+ name: "multiple extensions",
+ yaml: `
+url: https://example.com
+x-first: value1
+x-second: value2
+`,
+ extensionKey: "x-first",
+ expectedValue: "value1",
+ },
+ {
+ name: "extension with url and description",
+ yaml: `
+url: https://example.com/docs
+description: API docs
+x-vendor: custom-value
+`,
+ extensionKey: "x-vendor",
+ expectedValue: "custom-value",
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ var target ExternalDocumentation
+ validationErrs, err := marshaller.UnmarshalCore(ctx, "", parseYAML(t, tt.yaml), &target)
+
+ require.NoError(t, err, "unmarshal should succeed")
+ require.Empty(t, validationErrs, "should not have validation errors")
+ require.NotNil(t, target.Extensions, "extensions should be set")
+
+ ext, found := target.Extensions.Get(tt.extensionKey)
+ require.True(t, found, "should find extension")
+ assert.Equal(t, tt.expectedValue, ext.Value.Value, "should parse extension value correctly")
+ })
+ }
+}
+
+func TestExternalDocumentation_Unmarshal_MinimalObject_Success(t *testing.T) {
+ t.Parallel()
+
+ ctx := t.Context()
+ yaml := `url: https://example.com`
+
+ var target ExternalDocumentation
+ validationErrs, err := marshaller.UnmarshalCore(ctx, "", parseYAML(t, yaml), &target)
+
+ require.NoError(t, err, "unmarshal should succeed")
+ require.Empty(t, validationErrs, "should not have validation errors")
+ assert.Equal(t, "https://example.com", target.URL.Value, "should parse url")
+ assert.Nil(t, target.Description.Value, "description should be nil")
+}
diff --git a/jsonschema/oas3/core/factory_registration.go b/jsonschema/oas3/core/factory_registration.go
index 6f710f6c..8fcb91ae 100644
--- a/jsonschema/oas3/core/factory_registration.go
+++ b/jsonschema/oas3/core/factory_registration.go
@@ -20,13 +20,8 @@ func init() {
return &core.EitherValue[[]marshaller.Node[string], string]{}
})
- // Register Node-wrapped EitherValue for additionalProperties
- marshaller.RegisterType(func() *marshaller.Node[*core.EitherValue[Schema, bool]] {
- return &marshaller.Node[*core.EitherValue[Schema, bool]]{}
- })
-
- // Register sequencedmap for additionalProperties (used in properties field)
- marshaller.RegisterType(func() *sequencedmap.Map[string, marshaller.Node[*core.EitherValue[Schema, bool]]] {
- return &sequencedmap.Map[string, marshaller.Node[*core.EitherValue[Schema, bool]]]{}
+ // Register sequencedmap for properties and similar fields
+ marshaller.RegisterType(func() *sequencedmap.Map[string, *core.EitherValue[Schema, bool]] {
+ return &sequencedmap.Map[string, *core.EitherValue[Schema, bool]]{}
})
}
diff --git a/jsonschema/oas3/core/jsonschema_test.go b/jsonschema/oas3/core/jsonschema_test.go
index d5a944bb..297fe11f 100644
--- a/jsonschema/oas3/core/jsonschema_test.go
+++ b/jsonschema/oas3/core/jsonschema_test.go
@@ -70,3 +70,222 @@ minLength: 1
assert.True(t, target.Left.Value.Type.Value.IsRight, "Type should be Right type (string)")
assert.Equal(t, "string", target.Left.Value.Type.Value.Right.Value, "Type should be 'string'")
}
+
+func TestJSONSchema_Unmarshal_TypeArray_Success(t *testing.T) {
+ t.Parallel()
+
+ ctx := t.Context()
+
+ // YAML with type as array (tests EitherValue[[]marshaller.Node[string], string])
+ testYaml := `
+type: [string, number]
+`
+ var node yaml.Node
+ err := yaml.Unmarshal([]byte(testYaml), &node)
+ require.NoError(t, err)
+
+ var target JSONSchema
+ validationErrs, err := marshaller.UnmarshalCore(ctx, "", node.Content[0], &target)
+
+ require.NoError(t, err, "Should not have syntax errors")
+ require.Empty(t, validationErrs, "Should not have validation errors")
+ require.NotNil(t, target, "JSONSchema should not be nil")
+ assert.True(t, target.IsLeft, "JSONSchema should be Left type (Schema)")
+
+ // Verify type array was unmarshaled
+ require.NotNil(t, target.Left.Value.Type.Value, "Type should be set")
+ assert.True(t, target.Left.Value.Type.Value.IsLeft, "Type should be Left type (array)")
+ assert.Len(t, target.Left.Value.Type.Value.Left.Value, 2, "Should have 2 types")
+}
+
+func TestJSONSchema_Unmarshal_PropertiesWithAdditionalProperties_Success(t *testing.T) {
+ t.Parallel()
+
+ ctx := t.Context()
+
+ // YAML with properties and additionalProperties (tests sequencedmap and nested schemas)
+ testYaml := `
+type: object
+properties:
+ name:
+ type: string
+ age:
+ type: integer
+additionalProperties:
+ type: string
+`
+ var node yaml.Node
+ err := yaml.Unmarshal([]byte(testYaml), &node)
+ require.NoError(t, err)
+
+ var target JSONSchema
+ validationErrs, err := marshaller.UnmarshalCore(ctx, "", node.Content[0], &target)
+
+ require.NoError(t, err, "Should not have syntax errors")
+ require.Empty(t, validationErrs, "Should not have validation errors")
+ require.NotNil(t, target, "JSONSchema should not be nil")
+
+ // Verify properties map
+ require.NotNil(t, target.Left.Value.Properties.Value, "Properties should be set")
+ assert.Equal(t, 2, target.Left.Value.Properties.Value.Len(), "Should have 2 properties")
+
+ // Verify additionalProperties schema
+ require.NotNil(t, target.Left.Value.AdditionalProperties.Value, "AdditionalProperties should be set")
+ assert.True(t, target.Left.Value.AdditionalProperties.Value.IsLeft, "AdditionalProperties should be schema")
+}
+
+func TestJSONSchema_Unmarshal_WithDiscriminator_Success(t *testing.T) {
+ t.Parallel()
+
+ ctx := t.Context()
+
+ // YAML with discriminator (tests Discriminator type registration)
+ testYaml := `
+type: object
+discriminator:
+ propertyName: petType
+ mapping:
+ dog: "#/components/schemas/Dog"
+ cat: "#/components/schemas/Cat"
+`
+ var node yaml.Node
+ err := yaml.Unmarshal([]byte(testYaml), &node)
+ require.NoError(t, err)
+
+ var target JSONSchema
+ validationErrs, err := marshaller.UnmarshalCore(ctx, "", node.Content[0], &target)
+
+ require.NoError(t, err, "Should not have syntax errors")
+ require.Empty(t, validationErrs, "Should not have validation errors")
+ require.NotNil(t, target, "JSONSchema should not be nil")
+ assert.True(t, target.IsLeft, "JSONSchema should be Left type (Schema)")
+
+ // Verify discriminator was unmarshaled
+ require.NotNil(t, target.Left.Value.Discriminator.Value, "Discriminator should be set")
+ assert.Equal(t, "petType", target.Left.Value.Discriminator.Value.PropertyName.Value, "Should parse propertyName")
+ require.NotNil(t, target.Left.Value.Discriminator.Value.Mapping.Value, "Mapping should be set")
+ assert.Equal(t, 2, target.Left.Value.Discriminator.Value.Mapping.Value.Len(), "Should have 2 mappings")
+}
+
+func TestJSONSchema_Unmarshal_WithExternalDocs_Success(t *testing.T) {
+ t.Parallel()
+
+ ctx := t.Context()
+
+ // YAML with externalDocs (tests ExternalDocumentation type registration)
+ testYaml := `
+type: string
+description: A user identifier
+externalDocs:
+ url: https://example.com/docs/user-id
+ description: User ID documentation
+`
+ var node yaml.Node
+ err := yaml.Unmarshal([]byte(testYaml), &node)
+ require.NoError(t, err)
+
+ var target JSONSchema
+ validationErrs, err := marshaller.UnmarshalCore(ctx, "", node.Content[0], &target)
+
+ require.NoError(t, err, "Should not have syntax errors")
+ require.Empty(t, validationErrs, "Should not have validation errors")
+ require.NotNil(t, target, "JSONSchema should not be nil")
+ assert.True(t, target.IsLeft, "JSONSchema should be Left type (Schema)")
+
+ // Verify externalDocs was unmarshaled
+ require.NotNil(t, target.Left.Value.ExternalDocs.Value, "ExternalDocs should be set")
+ assert.Equal(t, "https://example.com/docs/user-id", target.Left.Value.ExternalDocs.Value.URL.Value, "Should parse URL")
+ require.NotNil(t, target.Left.Value.ExternalDocs.Value.Description.Value, "Description should be set")
+ assert.Equal(t, "User ID documentation", *target.Left.Value.ExternalDocs.Value.Description.Value, "Should parse description")
+}
+
+func TestJSONSchema_Unmarshal_WithXML_Success(t *testing.T) {
+ t.Parallel()
+
+ ctx := t.Context()
+
+ // YAML with xml (tests XML type registration)
+ testYaml := `
+type: object
+xml:
+ name: Person
+ namespace: http://example.com/schema
+ prefix: per
+ wrapped: true
+`
+ var node yaml.Node
+ err := yaml.Unmarshal([]byte(testYaml), &node)
+ require.NoError(t, err)
+
+ var target JSONSchema
+ validationErrs, err := marshaller.UnmarshalCore(ctx, "", node.Content[0], &target)
+
+ require.NoError(t, err, "Should not have syntax errors")
+ require.Empty(t, validationErrs, "Should not have validation errors")
+ require.NotNil(t, target, "JSONSchema should not be nil")
+ assert.True(t, target.IsLeft, "JSONSchema should be Left type (Schema)")
+
+ // Verify xml was unmarshaled
+ require.NotNil(t, target.Left.Value.XML.Value, "XML should be set")
+ require.NotNil(t, target.Left.Value.XML.Value.Name.Value, "Name should be set")
+ assert.Equal(t, "Person", *target.Left.Value.XML.Value.Name.Value, "Should parse name")
+ require.NotNil(t, target.Left.Value.XML.Value.Namespace.Value, "Namespace should be set")
+ assert.Equal(t, "http://example.com/schema", *target.Left.Value.XML.Value.Namespace.Value, "Should parse namespace")
+ require.NotNil(t, target.Left.Value.XML.Value.Prefix.Value, "Prefix should be set")
+ assert.Equal(t, "per", *target.Left.Value.XML.Value.Prefix.Value, "Should parse prefix")
+ require.NotNil(t, target.Left.Value.XML.Value.Wrapped.Value, "Wrapped should be set")
+ assert.True(t, *target.Left.Value.XML.Value.Wrapped.Value, "Should parse wrapped as true")
+}
+
+func TestJSONSchema_Unmarshal_ComplexSchema_Success(t *testing.T) {
+ t.Parallel()
+
+ ctx := t.Context()
+
+ // YAML with multiple nested features to test all registrations together
+ testYaml := `
+type: object
+properties:
+ id:
+ type: string
+ xml:
+ attribute: true
+ name:
+ type: string
+discriminator:
+ propertyName: type
+externalDocs:
+ url: https://example.com/docs
+`
+ var node yaml.Node
+ err := yaml.Unmarshal([]byte(testYaml), &node)
+ require.NoError(t, err)
+
+ var target JSONSchema
+ validationErrs, err := marshaller.UnmarshalCore(ctx, "", node.Content[0], &target)
+
+ require.NoError(t, err, "Should not have syntax errors")
+ require.Empty(t, validationErrs, "Should not have validation errors")
+ require.NotNil(t, target, "JSONSchema should not be nil")
+ assert.True(t, target.IsLeft, "JSONSchema should be Left type (Schema)")
+
+ // Verify properties
+ require.NotNil(t, target.Left.Value.Properties.Value, "Properties should be set")
+ assert.Equal(t, 2, target.Left.Value.Properties.Value.Len(), "Should have 2 properties")
+
+ // Verify id property has xml
+ idProp, found := target.Left.Value.Properties.Value.Get("id")
+ require.True(t, found, "Should find id property")
+ require.NotNil(t, idProp, "id property should not be nil")
+ require.NotNil(t, idProp.Left.Value.XML.Value, "id should have XML")
+ require.NotNil(t, idProp.Left.Value.XML.Value.Attribute.Value, "XML attribute should be set")
+ assert.True(t, *idProp.Left.Value.XML.Value.Attribute.Value, "XML attribute should be true")
+
+ // Verify discriminator
+ require.NotNil(t, target.Left.Value.Discriminator.Value, "Discriminator should be set")
+ assert.Equal(t, "type", target.Left.Value.Discriminator.Value.PropertyName.Value, "Should parse discriminator propertyName")
+
+ // Verify externalDocs
+ require.NotNil(t, target.Left.Value.ExternalDocs.Value, "ExternalDocs should be set")
+ assert.Equal(t, "https://example.com/docs", target.Left.Value.ExternalDocs.Value.URL.Value, "Should parse externalDocs URL")
+}
diff --git a/jsonschema/oas3/core/xml_test.go b/jsonschema/oas3/core/xml_test.go
new file mode 100644
index 00000000..493a8b86
--- /dev/null
+++ b/jsonschema/oas3/core/xml_test.go
@@ -0,0 +1,333 @@
+package core
+
+import (
+ "testing"
+
+ "github.com/speakeasy-api/openapi/marshaller"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+ "gopkg.in/yaml.v3"
+)
+
+func parseYAML(t *testing.T, yml string) *yaml.Node {
+ t.Helper()
+ var node yaml.Node
+ err := yaml.Unmarshal([]byte(yml), &node)
+ require.NoError(t, err)
+ return node.Content[0]
+}
+
+func TestXML_Unmarshal_AllFields_Success(t *testing.T) {
+ t.Parallel()
+
+ tests := []struct {
+ name string
+ yaml string
+ }{
+ {
+ name: "all fields populated",
+ yaml: `
+name: Person
+namespace: http://example.com/schema/Person
+prefix: per
+attribute: true
+wrapped: false
+x-custom: value
+`,
+ },
+ {
+ name: "only required fields",
+ yaml: `
+name: Item
+`,
+ },
+ {
+ name: "namespace and prefix",
+ yaml: `
+namespace: http://example.com/ns
+prefix: ex
+`,
+ },
+ {
+ name: "boolean flags",
+ yaml: `
+attribute: true
+wrapped: true
+`,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ var target XML
+ validationErrs, err := marshaller.UnmarshalCore(ctx, "", parseYAML(t, tt.yaml), &target)
+
+ require.NoError(t, err, "unmarshal should succeed")
+ require.Empty(t, validationErrs, "should not have validation errors")
+ assert.NotNil(t, target, "XML should not be nil")
+ })
+ }
+}
+
+func TestXML_Unmarshal_NameField_Success(t *testing.T) {
+ t.Parallel()
+
+ tests := []struct {
+ name string
+ yaml string
+ expectedName string
+ }{
+ {
+ name: "simple name",
+ yaml: `name: Person`,
+ expectedName: "Person",
+ },
+ {
+ name: "camelCase name",
+ yaml: `name: personDetails`,
+ expectedName: "personDetails",
+ },
+ {
+ name: "PascalCase name",
+ yaml: `name: PersonDetails`,
+ expectedName: "PersonDetails",
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ var target XML
+ validationErrs, err := marshaller.UnmarshalCore(ctx, "", parseYAML(t, tt.yaml), &target)
+
+ require.NoError(t, err, "unmarshal should succeed")
+ require.Empty(t, validationErrs, "should not have validation errors")
+ require.NotNil(t, target.Name.Value, "name should be set")
+ assert.Equal(t, tt.expectedName, *target.Name.Value, "should parse name correctly")
+ })
+ }
+}
+
+func TestXML_Unmarshal_NamespaceField_Success(t *testing.T) {
+ t.Parallel()
+
+ tests := []struct {
+ name string
+ yaml string
+ expectedNamespace string
+ }{
+ {
+ name: "http namespace",
+ yaml: `namespace: http://example.com/schema`,
+ expectedNamespace: "http://example.com/schema",
+ },
+ {
+ name: "https namespace",
+ yaml: `namespace: https://example.com/api/v1`,
+ expectedNamespace: "https://example.com/api/v1",
+ },
+ {
+ name: "urn namespace",
+ yaml: `namespace: urn:example:schema`,
+ expectedNamespace: "urn:example:schema",
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ var target XML
+ validationErrs, err := marshaller.UnmarshalCore(ctx, "", parseYAML(t, tt.yaml), &target)
+
+ require.NoError(t, err, "unmarshal should succeed")
+ require.Empty(t, validationErrs, "should not have validation errors")
+ require.NotNil(t, target.Namespace.Value, "namespace should be set")
+ assert.Equal(t, tt.expectedNamespace, *target.Namespace.Value, "should parse namespace correctly")
+ })
+ }
+}
+
+func TestXML_Unmarshal_PrefixField_Success(t *testing.T) {
+ t.Parallel()
+
+ tests := []struct {
+ name string
+ yaml string
+ expectedPrefix string
+ }{
+ {
+ name: "short prefix",
+ yaml: `prefix: ex`,
+ expectedPrefix: "ex",
+ },
+ {
+ name: "longer prefix",
+ yaml: `prefix: example`,
+ expectedPrefix: "example",
+ },
+ {
+ name: "single char prefix",
+ yaml: `prefix: x`,
+ expectedPrefix: "x",
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ var target XML
+ validationErrs, err := marshaller.UnmarshalCore(ctx, "", parseYAML(t, tt.yaml), &target)
+
+ require.NoError(t, err, "unmarshal should succeed")
+ require.Empty(t, validationErrs, "should not have validation errors")
+ require.NotNil(t, target.Prefix.Value, "prefix should be set")
+ assert.Equal(t, tt.expectedPrefix, *target.Prefix.Value, "should parse prefix correctly")
+ })
+ }
+}
+
+func TestXML_Unmarshal_AttributeField_Success(t *testing.T) {
+ t.Parallel()
+
+ tests := []struct {
+ name string
+ yaml string
+ expectedAttribute bool
+ }{
+ {
+ name: "attribute true",
+ yaml: `attribute: true`,
+ expectedAttribute: true,
+ },
+ {
+ name: "attribute false",
+ yaml: `attribute: false`,
+ expectedAttribute: false,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ var target XML
+ validationErrs, err := marshaller.UnmarshalCore(ctx, "", parseYAML(t, tt.yaml), &target)
+
+ require.NoError(t, err, "unmarshal should succeed")
+ require.Empty(t, validationErrs, "should not have validation errors")
+ require.NotNil(t, target.Attribute.Value, "attribute should be set")
+ assert.Equal(t, tt.expectedAttribute, *target.Attribute.Value, "should parse attribute correctly")
+ })
+ }
+}
+
+func TestXML_Unmarshal_WrappedField_Success(t *testing.T) {
+ t.Parallel()
+
+ tests := []struct {
+ name string
+ yaml string
+ expectedWrapped bool
+ }{
+ {
+ name: "wrapped true",
+ yaml: `wrapped: true`,
+ expectedWrapped: true,
+ },
+ {
+ name: "wrapped false",
+ yaml: `wrapped: false`,
+ expectedWrapped: false,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ var target XML
+ validationErrs, err := marshaller.UnmarshalCore(ctx, "", parseYAML(t, tt.yaml), &target)
+
+ require.NoError(t, err, "unmarshal should succeed")
+ require.Empty(t, validationErrs, "should not have validation errors")
+ require.NotNil(t, target.Wrapped.Value, "wrapped should be set")
+ assert.Equal(t, tt.expectedWrapped, *target.Wrapped.Value, "should parse wrapped correctly")
+ })
+ }
+}
+
+func TestXML_Unmarshal_Extensions_Success(t *testing.T) {
+ t.Parallel()
+
+ tests := []struct {
+ name string
+ yaml string
+ extensionKey string
+ expectedValue string
+ }{
+ {
+ name: "single extension",
+ yaml: `
+x-custom: value
+`,
+ extensionKey: "x-custom",
+ expectedValue: "value",
+ },
+ {
+ name: "multiple extensions",
+ yaml: `
+x-first: value1
+x-second: value2
+`,
+ extensionKey: "x-first",
+ expectedValue: "value1",
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ var target XML
+ validationErrs, err := marshaller.UnmarshalCore(ctx, "", parseYAML(t, tt.yaml), &target)
+
+ require.NoError(t, err, "unmarshal should succeed")
+ require.Empty(t, validationErrs, "should not have validation errors")
+ require.NotNil(t, target.Extensions, "extensions should be set")
+
+ ext, found := target.Extensions.Get(tt.extensionKey)
+ require.True(t, found, "should find extension")
+ assert.Equal(t, tt.expectedValue, ext.Value.Value, "should parse extension value correctly")
+ })
+ }
+}
+
+func TestXML_Unmarshal_EmptyObject_Success(t *testing.T) {
+ t.Parallel()
+
+ ctx := t.Context()
+ yaml := `{}`
+
+ var target XML
+ validationErrs, err := marshaller.UnmarshalCore(ctx, "", parseYAML(t, yaml), &target)
+
+ require.NoError(t, err, "unmarshal should succeed")
+ require.Empty(t, validationErrs, "should not have validation errors")
+ assert.Nil(t, target.Name.Value, "name should be nil")
+ assert.Nil(t, target.Namespace.Value, "namespace should be nil")
+ assert.Nil(t, target.Prefix.Value, "prefix should be nil")
+ assert.Nil(t, target.Attribute.Value, "attribute should be nil")
+ assert.Nil(t, target.Wrapped.Value, "wrapped should be nil")
+}
diff --git a/jsonschema/oas3/discriminator.go b/jsonschema/oas3/discriminator.go
index f10f00b2..4510cd23 100644
--- a/jsonschema/oas3/discriminator.go
+++ b/jsonschema/oas3/discriminator.go
@@ -2,6 +2,7 @@ package oas3
import (
"context"
+ "errors"
"github.com/speakeasy-api/openapi/extensions"
"github.com/speakeasy-api/openapi/internal/interfaces"
@@ -72,17 +73,13 @@ func (d *Discriminator) Validate(ctx context.Context, opts ...validation.Option)
errs := []error{}
// propertyName is REQUIRED in all OpenAPI versions
- if core.PropertyName.Present {
- if core.PropertyName.Value == "" {
- errs = append(errs, validation.NewValueError(validation.NewMissingValueError("discriminator.propertyName is required"), core, core.PropertyName))
- }
- } else {
- errs = append(errs, validation.NewValueError(validation.NewMissingValueError("discriminator.propertyName is required"), core, core.PropertyName))
+ if core.PropertyName.Present && d.PropertyName == "" {
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("`discriminator.propertyName` is required"), core, core.PropertyName))
}
// defaultMapping validation - must not be empty if present
- if core.DefaultMapping.Present && (core.DefaultMapping.Value == nil || *core.DefaultMapping.Value == "") {
- errs = append(errs, validation.NewValueError(validation.NewMissingValueError("discriminator.defaultMapping cannot be empty"), core, core.DefaultMapping))
+ if core.DefaultMapping.Present && d.GetDefaultMapping() == "" {
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationEmptyValue, errors.New("discriminator.defaultMapping cannot be empty"), core, core.DefaultMapping))
}
d.Valid = len(errs) == 0 && core.GetValid()
diff --git a/jsonschema/oas3/discriminator_validate_test.go b/jsonschema/oas3/discriminator_validate_test.go
index e8dbf2c8..69e7d3c6 100644
--- a/jsonschema/oas3/discriminator_validate_test.go
+++ b/jsonschema/oas3/discriminator_validate_test.go
@@ -94,8 +94,7 @@ func TestDiscriminator_Validate_Error(t *testing.T) {
dog: "#/components/schemas/Dog"
`,
wantErrs: []string{
- "[1:1] discriminator.propertyName is missing",
- "[1:1] discriminator.propertyName is required",
+ "[1:1] error validation-required-field `discriminator.propertyName` is required",
},
},
{
@@ -105,7 +104,7 @@ propertyName: ""
mapping:
dog: "#/components/schemas/Dog"
`,
- wantErrs: []string{"[2:15] discriminator.propertyName is required"},
+ wantErrs: []string{"[2:15] error validation-required-field `discriminator.propertyName` is required"},
},
}
diff --git a/jsonschema/oas3/externaldoc.go b/jsonschema/oas3/externaldoc.go
index 9063d746..507efada 100644
--- a/jsonschema/oas3/externaldoc.go
+++ b/jsonschema/oas3/externaldoc.go
@@ -2,6 +2,8 @@ package oas3
import (
"context"
+ "errors"
+ "fmt"
"net/url"
"reflect"
@@ -86,10 +88,10 @@ func (e *ExternalDocumentation) Validate(ctx context.Context, opts ...validation
if core.URL.Present {
if core.URL.Value == "" {
- errs = append(errs, validation.NewValueError(validation.NewMissingValueError("externalDocumentation.url is required"), core, core.URL))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("`externalDocumentation.url` is required"), core, core.URL))
} else {
if _, err := url.Parse(core.URL.Value); err != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("externalDocumentation.url is not a valid uri: %s", err), core, core.URL))
+ errs = append(errs, validation.NewValueError(validation.SeverityWarning, validation.RuleValidationInvalidFormat, fmt.Errorf("externalDocumentation.url is not a valid uri: %w", err), core, core.URL))
}
}
}
diff --git a/jsonschema/oas3/externaldoc_validate_test.go b/jsonschema/oas3/externaldoc_validate_test.go
index bdecc955..e17ce717 100644
--- a/jsonschema/oas3/externaldoc_validate_test.go
+++ b/jsonschema/oas3/externaldoc_validate_test.go
@@ -86,7 +86,7 @@ func TestExternalDoc_Validate_Error(t *testing.T) {
yml: `
description: Some documentation
`,
- wantErrs: []string{"[2:1] externalDocumentation.url is missing"},
+ wantErrs: []string{"[2:1] error validation-required-field `externalDocumentation.url` is required"},
},
{
name: "empty URL",
@@ -94,7 +94,7 @@ description: Some documentation
description: Some documentation
url: ""
`,
- wantErrs: []string{"[3:6] externalDocumentation.url is required"},
+ wantErrs: []string{"[3:6] error validation-required-field `externalDocumentation.url` is required"},
},
{
name: "invalid URL format",
diff --git a/jsonschema/oas3/jsonschema_validate_test.go b/jsonschema/oas3/jsonschema_validate_test.go
index 577ee2a4..29c5f230 100644
--- a/jsonschema/oas3/jsonschema_validate_test.go
+++ b/jsonschema/oas3/jsonschema_validate_test.go
@@ -227,7 +227,7 @@ func TestJSONSchema_Validate_Error(t *testing.T) {
name: "schema fails direct validation",
yml: `
"test"`,
- wantErrs: []string{"[2:1] failed to validate either Schema [expected object, got `te...`] or bool [line 2: cannot unmarshal !!str `test` into bool]"},
+ wantErrs: []string{"[2:1] error validation-type-mismatch failed to validate either Schema [expected `object`, got `te...`] or bool [line 2: cannot unmarshal !!str `test` into bool]"},
},
{
name: "child schema fails validation",
@@ -243,8 +243,8 @@ description:
$ref: "#/components/schemas/stream/properties/profiles/description"
`,
wantErrs: []string{
- "[2:1] schema.description expected string, got object",
- "[10:5] schema.description expected string, got object",
+ "[2:1] error validation-type-mismatch schema.description expected `string`, got `object`",
+ "[10:5] error validation-type-mismatch schema.description expected `string`, got `object`",
},
},
{
@@ -253,8 +253,8 @@ description:
type: invalid_type
`,
wantErrs: []string{
- "[2:7] schema.type expected array, got string",
- "[2:7] schema.type value must be one of 'array', 'boolean', 'integer', 'null', 'number', 'object', 'string'",
+ "[2:7] error validation-invalid-schema schema.type value must be one of 'array', 'boolean', 'integer', 'null', 'number', 'object', 'string'",
+ "[2:7] error validation-type-mismatch schema.type expected `array`, got `string`",
},
},
}
diff --git a/jsonschema/oas3/resolution.go b/jsonschema/oas3/resolution.go
index 22c615f6..16bff2ab 100644
--- a/jsonschema/oas3/resolution.go
+++ b/jsonschema/oas3/resolution.go
@@ -64,7 +64,7 @@ func (j *JSONSchema[Referenceable]) GetAbsRef() references.Reference {
if j.referenceResolutionCache == nil {
return ref
}
- return references.Reference(j.referenceResolutionCache.AbsoluteReference + "#" + ref.GetJSONPointer().String())
+ return references.Reference(j.referenceResolutionCache.AbsoluteDocumentPath + "#" + ref.GetJSONPointer().String())
}
// Resolve will fully resolve the reference and return the JSONSchema referenced. This will recursively resolve any intermediate references as well.
@@ -180,7 +180,7 @@ func (s *JSONSchema[Referenceable]) resolve(ctx context.Context, opts references
// The ResolveResult.ResolvedDocument should be used as the new TargetDocument
if s.referenceResolutionCache.ResolvedDocument != nil {
opts.TargetDocument = s.referenceResolutionCache.ResolvedDocument
- opts.TargetLocation = s.referenceResolutionCache.AbsoluteReference
+ opts.TargetLocation = s.referenceResolutionCache.AbsoluteDocumentPath
}
}
@@ -195,7 +195,7 @@ func (s *JSONSchema[Referenceable]) resolve(ctx context.Context, opts references
if result := s.tryResolveViaRegistry(ctx, ref, opts); result != nil {
// Compute absolute reference for circular detection
// Use the result's AbsoluteReference combined with any anchor/fragment
- absRef := result.AbsoluteReference
+ absRef := result.AbsoluteDocumentPath
if anchor := ExtractAnchor(string(ref)); anchor != "" {
absRef = absRef + "#" + anchor
} else if jp := ref.GetJSONPointer(); jp != "" {
@@ -279,7 +279,7 @@ func (s *JSONSchema[Referenceable]) resolve(ctx context.Context, opts references
// Use $id as base URI if present in the resolved schema (JSON Schema spec)
// The $id keyword identifies a schema resource with its canonical URI
// and serves as the base URI for relative references within that schema
- baseURI := result.AbsoluteReference
+ baseURI := result.AbsoluteDocumentPath
if !schema.IsBool() && schema.GetSchema() != nil {
if schemaID := schema.GetSchema().GetID(); schemaID != "" {
baseURI = schemaID
@@ -290,6 +290,9 @@ func (s *JSONSchema[Referenceable]) resolve(ctx context.Context, opts references
// This enables $id and $anchor resolution within the fetched document
setupRemoteSchemaRegistry(ctx, schema, baseURI)
+ // Collect nested reference schemas that need parent links set
+ var nestedRefs []*JSONSchemaReferenceable
+
for item := range Walk(ctx, schema) {
_ = item.Match(SchemaMatcher{
Schema: func(js *JSONSchemaReferenceable) error {
@@ -301,16 +304,36 @@ func (s *JSONSchema[Referenceable]) resolve(ctx context.Context, opts references
localBaseURI = jsID
}
}
+ // Get the ref to build absolute reference with fragment
+ jsRef := js.GetRef()
+ absRef := utils.BuildAbsoluteReference(localBaseURI, string(jsRef.GetJSONPointer()))
js.referenceResolutionCache = &references.ResolveResult[JSONSchemaReferenceable]{
- AbsoluteReference: localBaseURI,
- ResolvedDocument: result.ResolvedDocument,
+ AbsoluteDocumentPath: localBaseURI,
+ AbsoluteReference: references.Reference(absRef),
+ ResolvedDocument: result.ResolvedDocument,
}
+
+ // Collect this reference for setting parent links after the walk
+ nestedRefs = append(nestedRefs, js)
}
return nil
},
})
}
+ // Set parent links for all nested references found during the walk
+ // This maintains reference chain tracking when accessing properties of resolved schemas
+ var topLevel *JSONSchemaReferenceable
+ if s.topLevelParent != nil {
+ topLevel = s.topLevelParent
+ } else {
+ topLevel = (*JSONSchemaReferenceable)(s)
+ }
+ for _, js := range nestedRefs {
+ js.SetParent((*JSONSchemaReferenceable)(s))
+ js.SetTopLevelParent(topLevel)
+ }
+
s.referenceResolutionCache = result
s.validationErrsCache = validationErrs
diff --git a/jsonschema/oas3/resolution_defs.go b/jsonschema/oas3/resolution_defs.go
index 9d12dd86..97b09daf 100644
--- a/jsonschema/oas3/resolution_defs.go
+++ b/jsonschema/oas3/resolution_defs.go
@@ -5,6 +5,7 @@ import (
"fmt"
"strings"
+ "github.com/speakeasy-api/openapi/internal/utils"
"github.com/speakeasy-api/openapi/jsonpointer"
"github.com/speakeasy-api/openapi/references"
"gopkg.in/yaml.v3"
@@ -144,9 +145,11 @@ func (s *JSONSchema[Referenceable]) tryResolveLocalDefs(_ context.Context, ref r
absRef = schemaID
}
+ absRefWithFragment := utils.BuildAbsoluteReference(absRef, string(ref.GetJSONPointer()))
return &references.ResolveResult[JSONSchemaReferenceable]{
- Object: defSchema,
- AbsoluteReference: absRef,
+ Object: defSchema,
+ AbsoluteDocumentPath: absRef,
+ AbsoluteReference: references.Reference(absRefWithFragment),
}
}
diff --git a/jsonschema/oas3/resolution_external.go b/jsonschema/oas3/resolution_external.go
index a1860902..a97d1896 100644
--- a/jsonschema/oas3/resolution_external.go
+++ b/jsonschema/oas3/resolution_external.go
@@ -5,6 +5,7 @@ import (
"errors"
"fmt"
+ "github.com/speakeasy-api/openapi/internal/utils"
"github.com/speakeasy-api/openapi/jsonpointer"
"github.com/speakeasy-api/openapi/marshaller"
"github.com/speakeasy-api/openapi/references"
@@ -33,7 +34,7 @@ func (s *JSONSchema[Referenceable]) resolveExternalAnchorReference(ctx context.C
// Use $id as base URI if present in the resolved schema (JSON Schema spec)
// The $id keyword identifies a schema resource with its canonical URI
// and serves as the base URI for anchor lookups within that schema
- baseURI := docResult.AbsoluteReference
+ baseURI := docResult.AbsoluteDocumentPath
if !externalDoc.IsBool() && externalDoc.GetSchema() != nil {
if schemaID := externalDoc.GetSchema().GetID(); schemaID != "" {
baseURI = schemaID
@@ -60,8 +61,8 @@ func (s *JSONSchema[Referenceable]) resolveExternalAnchorReference(ctx context.C
// This handles the case where the reference uses the retrieval URL instead of the canonical $id
// Example: fetch https://example.com/a.json, but $id is https://cdn.example.com/canonical.json
// A reference to "https://example.com/a.json#foo" should still resolve
- if resolved == nil && docResult.AbsoluteReference != "" && docResult.AbsoluteReference != baseURI {
- resolved = registry.LookupByAnchor(docResult.AbsoluteReference, anchor)
+ if resolved == nil && docResult.AbsoluteDocumentPath != "" && docResult.AbsoluteDocumentPath != baseURI {
+ resolved = registry.LookupByAnchor(docResult.AbsoluteDocumentPath, anchor)
}
// Fallback: try with empty base URI
@@ -73,10 +74,12 @@ func (s *JSONSchema[Referenceable]) resolveExternalAnchorReference(ctx context.C
return nil, validationErrs, fmt.Errorf("anchor not found in external document: %s#%s", ref.GetURI(), anchor)
}
+ absRef := utils.BuildAbsoluteReference(baseURI, "#"+anchor)
return &references.ResolveResult[JSONSchemaReferenceable]{
- Object: resolved,
- AbsoluteReference: baseURI,
- ResolvedDocument: docResult.ResolvedDocument,
+ Object: resolved,
+ AbsoluteDocumentPath: baseURI,
+ AbsoluteReference: references.Reference(absRef),
+ ResolvedDocument: docResult.ResolvedDocument,
}, validationErrs, nil
}
@@ -105,7 +108,7 @@ func (s *JSONSchema[Referenceable]) resolveExternalRefWithFragment(ctx context.C
// Use $id as base URI if present in the resolved schema (JSON Schema spec)
// The $id keyword identifies a schema resource with its canonical URI
// and serves as the base URI for relative references within that schema
- baseURI := docResult.AbsoluteReference
+ baseURI := docResult.AbsoluteDocumentPath
if !externalDoc.IsBool() && externalDoc.GetSchema() != nil {
if schemaID := externalDoc.GetSchema().GetID(); schemaID != "" {
baseURI = schemaID
@@ -119,9 +122,10 @@ func (s *JSONSchema[Referenceable]) resolveExternalRefWithFragment(ctx context.C
if jp == "" {
// No fragment, return the whole document with canonical base URI
return &references.ResolveResult[JSONSchemaReferenceable]{
- Object: externalDoc,
- AbsoluteReference: baseURI,
- ResolvedDocument: docResult.ResolvedDocument,
+ Object: externalDoc,
+ AbsoluteDocumentPath: baseURI,
+ AbsoluteReference: references.Reference(baseURI),
+ ResolvedDocument: docResult.ResolvedDocument,
}, validationErrs, nil
}
@@ -150,10 +154,12 @@ func (s *JSONSchema[Referenceable]) resolveExternalRefWithFragment(ctx context.C
target.GetSchema().SetEffectiveBaseURI(baseURI)
}
+ absRef := utils.BuildAbsoluteReference(baseURI, string(jp))
return &references.ResolveResult[JSONSchemaReferenceable]{
- Object: target,
- AbsoluteReference: baseURI,
- ResolvedDocument: docResult.ResolvedDocument,
+ Object: target,
+ AbsoluteDocumentPath: baseURI,
+ AbsoluteReference: references.Reference(absRef),
+ ResolvedDocument: docResult.ResolvedDocument,
}, validationErrs, nil
}
diff --git a/jsonschema/oas3/resolution_registry.go b/jsonschema/oas3/resolution_registry.go
index 89b7c14f..2bd0e3eb 100644
--- a/jsonschema/oas3/resolution_registry.go
+++ b/jsonschema/oas3/resolution_registry.go
@@ -3,6 +3,7 @@ package oas3
import (
"context"
+ "github.com/speakeasy-api/openapi/internal/utils"
"github.com/speakeasy-api/openapi/references"
)
@@ -34,10 +35,12 @@ func (s *JSONSchema[Referenceable]) tryResolveViaRegistry(ctx context.Context, r
}
if resolved := registry.LookupByAnchor(anchorBase, anchor); resolved != nil {
+ absRef := utils.BuildAbsoluteReference(anchorBase, "#"+anchor)
return &references.ResolveResult[JSONSchemaReferenceable]{
- Object: resolved,
- AbsoluteReference: anchorBase,
- ResolvedDocument: opts.TargetDocument,
+ Object: resolved,
+ AbsoluteDocumentPath: anchorBase,
+ AbsoluteReference: references.Reference(absRef),
+ ResolvedDocument: opts.TargetDocument,
}
}
@@ -45,10 +48,12 @@ func (s *JSONSchema[Referenceable]) tryResolveViaRegistry(ctx context.Context, r
// This handles the case where anchors were registered without a document base URI
if ref.GetURI() == "" && anchorBase != "" {
if resolved := registry.LookupByAnchor("", anchor); resolved != nil {
+ absRef := "#" + anchor
return &references.ResolveResult[JSONSchemaReferenceable]{
- Object: resolved,
- AbsoluteReference: "",
- ResolvedDocument: opts.TargetDocument,
+ Object: resolved,
+ AbsoluteDocumentPath: "",
+ AbsoluteReference: references.Reference(absRef),
+ ResolvedDocument: opts.TargetDocument,
}
}
}
@@ -57,10 +62,12 @@ func (s *JSONSchema[Referenceable]) tryResolveViaRegistry(ctx context.Context, r
docBase := registry.GetDocumentBaseURI()
if docBase != "" && docBase != anchorBase {
if resolved := registry.LookupByAnchor(docBase, anchor); resolved != nil {
+ absRef := utils.BuildAbsoluteReference(docBase, "#"+anchor)
return &references.ResolveResult[JSONSchemaReferenceable]{
- Object: resolved,
- AbsoluteReference: docBase,
- ResolvedDocument: opts.TargetDocument,
+ Object: resolved,
+ AbsoluteDocumentPath: docBase,
+ AbsoluteReference: references.Reference(absRef),
+ ResolvedDocument: opts.TargetDocument,
}
}
}
@@ -108,19 +115,22 @@ func (s *JSONSchema[Referenceable]) tryResolveViaRegistry(ctx context.Context, r
// If there's no JSON pointer, return the schema directly
if jp == "" {
return &references.ResolveResult[JSONSchemaReferenceable]{
- Object: resolvedSchema,
- AbsoluteReference: absoluteReference,
- ResolvedDocument: opts.TargetDocument,
+ Object: resolvedSchema,
+ AbsoluteDocumentPath: absoluteReference,
+ AbsoluteReference: references.Reference(absoluteReference),
+ ResolvedDocument: opts.TargetDocument,
}
}
// There's a JSON pointer - navigate within the found schema
target, err := navigateJSONPointer(ctx, resolvedSchema, jp)
if err == nil && target != nil {
+ absRef := utils.BuildAbsoluteReference(absoluteReference, string(jp))
return &references.ResolveResult[JSONSchemaReferenceable]{
- Object: target,
- AbsoluteReference: absoluteReference,
- ResolvedDocument: opts.TargetDocument,
+ Object: target,
+ AbsoluteDocumentPath: absoluteReference,
+ AbsoluteReference: references.Reference(absRef),
+ ResolvedDocument: opts.TargetDocument,
}
}
// If navigation failed, fall through to external resolution
@@ -171,8 +181,8 @@ func (s *JSONSchema[Referenceable]) getEffectiveBaseURI(opts references.ResolveO
}
// Check if we have a cached absolute reference
- if s.referenceResolutionCache != nil && s.referenceResolutionCache.AbsoluteReference != "" {
- return s.referenceResolutionCache.AbsoluteReference
+ if s.referenceResolutionCache != nil && s.referenceResolutionCache.AbsoluteDocumentPath != "" {
+ return s.referenceResolutionCache.AbsoluteDocumentPath
}
// Fall back to target location
diff --git a/jsonschema/oas3/resolution_test.go b/jsonschema/oas3/resolution_test.go
index 2a5ba3e4..5100bb26 100644
--- a/jsonschema/oas3/resolution_test.go
+++ b/jsonschema/oas3/resolution_test.go
@@ -23,14 +23,16 @@ import (
// MockResolutionTarget implements references.ResolutionTarget for testing
type MockResolutionTarget struct {
- objCache map[string]any
- docCache map[string][]byte
+ objCache map[string]any
+ docCache map[string][]byte
+ extDocCache map[string]any
}
func NewMockResolutionTarget() *MockResolutionTarget {
return &MockResolutionTarget{
- objCache: make(map[string]any),
- docCache: make(map[string][]byte),
+ objCache: make(map[string]any),
+ docCache: make(map[string][]byte),
+ extDocCache: make(map[string]any),
}
}
@@ -59,6 +61,18 @@ func (m *MockResolutionTarget) InitCache() {
if m.docCache == nil {
m.docCache = make(map[string][]byte)
}
+ if m.extDocCache == nil {
+ m.extDocCache = make(map[string]any)
+ }
+}
+
+func (m *MockResolutionTarget) GetCachedExternalDocument(key string) (any, bool) {
+ data, exists := m.extDocCache[key]
+ return data, exists
+}
+
+func (m *MockResolutionTarget) StoreExternalDocumentInCache(key string, doc any) {
+ m.extDocCache[key] = doc
}
// MockVirtualFS implements system.VirtualFS for testing
@@ -485,9 +499,9 @@ func TestJSONSchema_Resolve_Caching(t *testing.T) {
// Set up cached resolved schema using the actual cache field
schema.referenceResolutionCache = &references.ResolveResult[JSONSchema[Referenceable]]{
- Object: resolved,
- AbsoluteReference: "testdata/simple_schema.yaml#/components/schemas/User",
- ResolvedDocument: resolved,
+ Object: resolved,
+ AbsoluteDocumentPath: "testdata/simple_schema.yaml#/components/schemas/User",
+ ResolvedDocument: resolved,
}
root, err := LoadTestSchemaFromFile(t.Context(), "testdata/simple_schema.yaml")
@@ -1928,7 +1942,7 @@ func TestGetEffectiveBaseURI_Success(t *testing.T) {
schema := createSchemaWithRef("#foo")
schema.referenceResolutionCache = &references.ResolveResult[JSONSchema[Referenceable]]{
- AbsoluteReference: "https://example.com/cached.json",
+ AbsoluteDocumentPath: "https://example.com/cached.json",
}
opts := ResolveOptions{
diff --git a/jsonschema/oas3/schema.go b/jsonschema/oas3/schema.go
index 88b14bfe..495f2964 100644
--- a/jsonschema/oas3/schema.go
+++ b/jsonschema/oas3/schema.go
@@ -494,6 +494,68 @@ func (s *Schema) GetFormat() string {
return *s.Format
}
+// IsReferenceOnly returns true if this schema only contains a $ref and no other properties.
+// This is used for the no-ref-siblings linter rule in OAS 3.0.x (in OAS 3.1+, $ref can have siblings).
+func (s *Schema) IsReferenceOnly() bool {
+ if !s.IsReference() {
+ return false
+ }
+
+ // Check all schema fields - if any are set, it's not reference-only
+ return s.Type == nil &&
+ len(s.AllOf) == 0 &&
+ len(s.OneOf) == 0 &&
+ len(s.AnyOf) == 0 &&
+ s.Discriminator == nil &&
+ len(s.Examples) == 0 &&
+ len(s.PrefixItems) == 0 &&
+ s.Contains == nil &&
+ s.MinContains == nil &&
+ s.MaxContains == nil &&
+ s.If == nil &&
+ s.Else == nil &&
+ s.Then == nil &&
+ (s.DependentSchemas == nil || s.DependentSchemas.Len() == 0) &&
+ (s.PatternProperties == nil || s.PatternProperties.Len() == 0) &&
+ s.PropertyNames == nil &&
+ s.UnevaluatedItems == nil &&
+ s.UnevaluatedProperties == nil &&
+ s.Items == nil &&
+ s.Anchor == nil &&
+ s.ID == nil &&
+ s.Not == nil &&
+ (s.Properties == nil || s.Properties.Len() == 0) &&
+ (s.Defs == nil || s.Defs.Len() == 0) &&
+ s.Title == nil &&
+ s.MultipleOf == nil &&
+ s.Maximum == nil &&
+ s.Minimum == nil &&
+ s.MaxLength == nil &&
+ s.MinLength == nil &&
+ s.Pattern == nil &&
+ s.Format == nil &&
+ s.MaxItems == nil &&
+ s.MinItems == nil &&
+ s.UniqueItems == nil &&
+ s.MaxProperties == nil &&
+ s.MinProperties == nil &&
+ len(s.Required) == 0 &&
+ len(s.Enum) == 0 &&
+ s.AdditionalProperties == nil &&
+ s.Description == nil &&
+ s.Default == nil &&
+ s.Const == nil &&
+ s.Nullable == nil &&
+ s.ReadOnly == nil &&
+ s.WriteOnly == nil &&
+ s.ExternalDocs == nil &&
+ s.Example == nil &&
+ s.Deprecated == nil &&
+ s.Schema == nil &&
+ s.XML == nil &&
+ (s.Extensions == nil || s.Extensions.Len() == 0)
+}
+
// GetMaxItems returns the value of the MaxItems field. Returns nil if not set.
func (s *Schema) GetMaxItems() *int64 {
if s == nil {
diff --git a/jsonschema/oas3/schema_exclusive_validation_test.go b/jsonschema/oas3/schema_exclusive_validation_test.go
index b2b16c93..dd5a98e6 100644
--- a/jsonschema/oas3/schema_exclusive_validation_test.go
+++ b/jsonschema/oas3/schema_exclusive_validation_test.go
@@ -227,7 +227,7 @@ exclusiveMinimum: true
exclusiveMaximum: false
`,
openAPIVersion: pointer.From("3.1.0"),
- wantErrs: []string{"[5:19] schema.exclusiveMinimum expected number, got boolean", "[6:19] schema.exclusiveMaximum expected number, got boolean"},
+ wantErrs: []string{"[5:19] error validation-type-mismatch schema.exclusiveMinimum expected `number`, got `boolean`", "[6:19] error validation-type-mismatch schema.exclusiveMaximum expected `number`, got `boolean`"},
},
{
name: "boolean exclusiveMinimum with 3.1 $schema should fail",
@@ -239,7 +239,7 @@ maximum: 100
exclusiveMinimum: true
exclusiveMaximum: false
`,
- wantErrs: []string{"[6:19] schema.exclusiveMinimum expected number, got boolean", "[7:19] schema.exclusiveMaximum expected number, got boolean"},
+ wantErrs: []string{"[6:19] error validation-type-mismatch schema.exclusiveMinimum expected `number`, got `boolean`", "[7:19] error validation-type-mismatch schema.exclusiveMaximum expected `number`, got `boolean`"},
},
// Invalid types should always fail
{
@@ -248,7 +248,7 @@ exclusiveMaximum: false
type: number
exclusiveMinimum: "invalid"
`,
- wantErrs: []string{"[2:1] schema.exclusiveMinimum expected number, got string", "[3:19] schema.exclusiveMinimum failed to validate either bool [schema.exclusiveMinimum line 3: cannot unmarshal !!str `invalid` into bool] or float64 [schema.exclusiveMinimum line 3: cannot unmarshal !!str `invalid` into float64]"},
+ wantErrs: []string{"[2:1] error validation-type-mismatch schema.exclusiveMinimum expected `number`, got `string`", "[3:19] error validation-type-mismatch schema.exclusiveMinimum failed to validate either bool [schema.exclusiveMinimum line 3: cannot unmarshal !!str `invalid` into bool] or float64 [schema.exclusiveMinimum line 3: cannot unmarshal !!str `invalid` into float64]"},
},
{
name: "invalid string type for exclusiveMaximum",
@@ -256,7 +256,7 @@ exclusiveMinimum: "invalid"
type: number
exclusiveMaximum: "invalid"
`,
- wantErrs: []string{"[2:1] schema.exclusiveMaximum expected number, got string", "[3:19] schema.exclusiveMaximum failed to validate either bool [schema.exclusiveMaximum line 3: cannot unmarshal !!str `invalid` into bool] or float64 [schema.exclusiveMaximum line 3: cannot unmarshal !!str `invalid` into float64]"},
+ wantErrs: []string{"[2:1] error validation-type-mismatch schema.exclusiveMaximum expected `number`, got `string`", "[3:19] error validation-type-mismatch schema.exclusiveMaximum failed to validate either bool [schema.exclusiveMaximum line 3: cannot unmarshal !!str `invalid` into bool] or float64 [schema.exclusiveMaximum line 3: cannot unmarshal !!str `invalid` into float64]"},
},
{
name: "invalid array type for exclusiveMinimum",
@@ -264,7 +264,7 @@ exclusiveMaximum: "invalid"
type: number
exclusiveMinimum: [1, 2, 3]
`,
- wantErrs: []string{"[2:1] schema.exclusiveMinimum expected number, got array", "[3:19] schema.exclusiveMinimum failed to validate either bool [schema.exclusiveMinimum expected bool, got sequence] or float64 [schema.exclusiveMinimum expected float64, got sequence]"},
+ wantErrs: []string{"[2:1] error validation-type-mismatch schema.exclusiveMinimum expected `number`, got `array`", "[3:19] error validation-type-mismatch schema.exclusiveMinimum failed to validate either bool [schema.exclusiveMinimum expected `bool`, got `sequence`] or float64 [schema.exclusiveMinimum expected `float64`, got `sequence`]"},
},
// Mixed boolean and numeric should fail with OpenAPI 3.0 (only supports boolean)
{
@@ -276,7 +276,7 @@ exclusiveMinimum: true
exclusiveMaximum: 50.5
`,
openAPIVersion: pointer.From("3.0.3"),
- wantErrs: []string{"[5:19] schema.exclusiveMaximum expected boolean, got number"},
+ wantErrs: []string{"[5:19] error validation-type-mismatch schema.exclusiveMaximum expected `boolean`, got `number`"},
},
{
name: "mixed numeric exclusiveMinimum and boolean exclusiveMaximum with OpenAPI 3.0 should fail",
@@ -287,7 +287,7 @@ exclusiveMinimum: 0.5
exclusiveMaximum: true
`,
openAPIVersion: pointer.From("3.0.3"),
- wantErrs: []string{"[4:19] schema.exclusiveMinimum expected boolean, got number"},
+ wantErrs: []string{"[4:19] error validation-type-mismatch schema.exclusiveMinimum expected `boolean`, got `number`"},
},
}
diff --git a/jsonschema/oas3/schema_validate_test.go b/jsonschema/oas3/schema_validate_test.go
index 644c6c63..8dafa271 100644
--- a/jsonschema/oas3/schema_validate_test.go
+++ b/jsonschema/oas3/schema_validate_test.go
@@ -379,8 +379,8 @@ externalDocs:
description: More information
`,
wantErrs: []string{
- "[2:1] schema.externalDocs missing property 'url'",
- "[5:3] externalDocumentation.url is missing",
+ "[2:1] error validation-required-field `schema.externalDocs` missing property `url`",
+ "[5:3] error validation-required-field `externalDocumentation.url` is required",
},
},
{
@@ -390,8 +390,8 @@ type: invalid_type
title: Invalid Type
`,
wantErrs: []string{
- "[2:7] schema.type expected array, got string",
- "[2:7] schema.type value must be one of 'array', 'boolean', 'integer', 'null', 'number', 'object', 'string'",
+ "[2:7] error validation-invalid-schema schema.type value must be one of 'array', 'boolean', 'integer', 'null', 'number', 'object', 'string'",
+ "[2:7] error validation-type-mismatch schema.type expected `array`, got `string`",
},
},
{
@@ -400,7 +400,7 @@ title: Invalid Type
type: string
minLength: -1
`,
- wantErrs: []string{"[3:12] schema.minLength minimum: got -1, want 0"},
+ wantErrs: []string{"[3:12] error validation-invalid-schema schema.minLength minimum: got -1, want 0"},
},
{
name: "negative multipleOf",
@@ -408,7 +408,7 @@ minLength: -1
type: number
multipleOf: -1
`,
- wantErrs: []string{"[3:13] schema.multipleOf exclusiveMinimum: got -1, want 0"},
+ wantErrs: []string{"[3:13] error validation-invalid-schema schema.multipleOf exclusiveMinimum: got -1, want 0"},
},
{
name: "zero multipleOf",
@@ -416,7 +416,7 @@ multipleOf: -1
type: number
multipleOf: 0
`,
- wantErrs: []string{"[3:13] schema.multipleOf exclusiveMinimum: got 0, want 0"},
+ wantErrs: []string{"[3:13] error validation-invalid-schema schema.multipleOf exclusiveMinimum: got 0, want 0"},
},
{
name: "invalid additionalProperties type",
@@ -425,9 +425,9 @@ type: object
additionalProperties: "invalid"
`,
wantErrs: []string{
- "[2:1] schema.additionalProperties expected one of [boolean, object], got string",
- "[2:1] schema.additionalProperties expected one of [boolean, object], got string",
- "[3:23] schema.additionalProperties failed to validate either Schema [schema.additionalProperties expected object, got `invalid`] or bool [schema.additionalProperties line 3: cannot unmarshal !!str `invalid` into bool]",
+ "[2:1] error validation-type-mismatch schema.additionalProperties expected one of [`boolean`, `object`], got `string`",
+ "[2:1] error validation-type-mismatch schema.additionalProperties expected one of [`boolean`, `object`], got `string`",
+ "[3:23] error validation-type-mismatch schema.additionalProperties failed to validate either Schema [schema.additionalProperties expected `object`, got `invalid`] or bool [schema.additionalProperties line 3: cannot unmarshal !!str `invalid` into bool]",
},
},
{
@@ -436,7 +436,7 @@ additionalProperties: "invalid"
type: array
minItems: -1
`,
- wantErrs: []string{"[3:11] schema.minItems minimum: got -1, want 0"},
+ wantErrs: []string{"[3:11] error validation-invalid-schema schema.minItems minimum: got -1, want 0"},
},
{
name: "negative minProperties",
@@ -444,7 +444,7 @@ minItems: -1
type: object
minProperties: -1
`,
- wantErrs: []string{"[3:16] schema.minProperties minimum: got -1, want 0"},
+ wantErrs: []string{"[3:16] error validation-invalid-schema schema.minProperties minimum: got -1, want 0"},
},
{
name: "invalid items type",
@@ -453,9 +453,9 @@ type: array
items: "invalid"
`,
wantErrs: []string{
- "[2:1] schema.items expected one of [boolean, object], got string",
- "[2:1] schema.items expected one of [boolean, object], got string",
- "[3:8] schema.items failed to validate either Schema [schema.items expected object, got `invalid`] or bool [schema.items line 3: cannot unmarshal !!str `invalid` into bool]",
+ "[2:1] error validation-type-mismatch schema.items expected one of [`boolean`, `object`], got `string`",
+ "[2:1] error validation-type-mismatch schema.items expected one of [`boolean`, `object`], got `string`",
+ "[3:8] error validation-type-mismatch schema.items failed to validate either Schema [schema.items expected `object`, got `invalid`] or bool [schema.items line 3: cannot unmarshal !!str `invalid` into bool]",
},
},
{
@@ -465,8 +465,8 @@ type: object
required: "invalid"
`,
wantErrs: []string{
- "[2:1] schema.required expected array, got string",
- "[3:11] schema.required expected sequence, got `invalid`",
+ "[2:1] error validation-type-mismatch schema.required expected `array`, got `string`",
+ "[3:11] error validation-type-mismatch schema.required expected `sequence`, got `invalid`",
},
},
{
@@ -475,8 +475,8 @@ required: "invalid"
allOf: "invalid"
`,
wantErrs: []string{
- "[2:1] schema.allOf expected array, got string",
- "[2:8] schema.allOf expected sequence, got `invalid`",
+ "[2:1] error validation-type-mismatch schema.allOf expected `array`, got `string`",
+ "[2:8] error validation-type-mismatch schema.allOf expected `sequence`, got `invalid`",
},
},
{
@@ -485,8 +485,8 @@ allOf: "invalid"
anyOf: "invalid"
`,
wantErrs: []string{
- "[2:1] schema.anyOf expected array, got string",
- "[2:8] schema.anyOf expected sequence, got `invalid`",
+ "[2:1] error validation-type-mismatch schema.anyOf expected `array`, got `string`",
+ "[2:8] error validation-type-mismatch schema.anyOf expected `sequence`, got `invalid`",
},
},
{
@@ -495,8 +495,8 @@ anyOf: "invalid"
oneOf: "invalid"
`,
wantErrs: []string{
- "[2:1] schema.oneOf expected array, got string",
- "[2:8] schema.oneOf expected sequence, got `invalid`",
+ "[2:1] error validation-type-mismatch schema.oneOf expected `array`, got `string`",
+ "[2:8] error validation-type-mismatch schema.oneOf expected `sequence`, got `invalid`",
},
},
{
@@ -506,49 +506,49 @@ $schema: "https://spec.openapis.org/oas/3.0/dialect/2024-10-18"
$ref: "#/components/schemas/User"
required: ["name", "email"]
`,
- wantErrs: []string{"[2:1] schema. additional properties '$ref' not allowed"},
+ wantErrs: []string{"[2:1] error validation-invalid-schema schema. additional properties '$ref' not allowed"},
},
{
name: "empty component name in $ref",
yml: `
$ref: "#/components/schemas/"
`,
- wantErrs: []string{"[2:1] invalid reference: component name cannot be empty"},
+ wantErrs: []string{"[2:1] error validation-invalid-reference invalid reference: component name cannot be empty"},
},
{
name: "missing component name in $ref",
yml: `
$ref: "#/components/schemas"
`,
- wantErrs: []string{"[2:1] invalid reference: component name cannot be empty"},
+ wantErrs: []string{"[2:1] error validation-invalid-reference invalid reference: component name cannot be empty"},
},
{
name: "component name with invalid characters in $ref",
yml: `
$ref: "#/components/schemas/User@Schema"
`,
- wantErrs: []string{`[2:1] invalid reference: component name "User@Schema" must match pattern ^[a-zA-Z0-9.\-_]+$`},
+ wantErrs: []string{`[2:1] error validation-invalid-reference invalid reference: component name "User@Schema" must match pattern ^[a-zA-Z0-9.\-_]+$`},
},
{
name: "component name with space in $ref",
yml: `
$ref: "#/components/schemas/User Schema"
`,
- wantErrs: []string{`[2:1] invalid reference: component name "User Schema" must match pattern ^[a-zA-Z0-9.\-_]+$`},
+ wantErrs: []string{`[2:1] error validation-invalid-reference invalid reference: component name "User Schema" must match pattern ^[a-zA-Z0-9.\-_]+$`},
},
{
name: "invalid JSON pointer - missing leading slash in $ref",
yml: `
$ref: "#components/schemas/User"
`,
- wantErrs: []string{"[2:1] invalid reference JSON pointer: validation error -- jsonpointer must start with /: components/schemas/User"},
+ wantErrs: []string{"[2:1] error validation-invalid-reference invalid reference JSON pointer: validation error -- jsonpointer must start with /: components/schemas/User"},
},
{
name: "empty JSON pointer in $ref",
yml: `
$ref: "#"
`,
- wantErrs: []string{"[2:1] invalid reference JSON pointer: empty"},
+ wantErrs: []string{"[2:1] error validation-invalid-reference invalid reference JSON pointer: empty"},
},
}
diff --git a/jsonschema/oas3/validation.go b/jsonschema/oas3/validation.go
index b27ac26f..07716155 100644
--- a/jsonschema/oas3/validation.go
+++ b/jsonschema/oas3/validation.go
@@ -85,7 +85,7 @@ func (js *Schema) Validate(ctx context.Context, opts ...validation.Option) []err
// Validate reference string if present
if js.IsReference() {
if err := js.GetRef().Validate(); err != nil {
- errs = append(errs, validation.NewValidationError(err, js.GetCore().Ref.GetKeyNodeOrRoot(js.GetRootNode())))
+ errs = append(errs, validation.NewValidationError(validation.SeverityError, validation.RuleValidationInvalidReference, err, js.GetCore().Ref.GetKeyNodeOrRoot(js.GetRootNode())))
}
}
@@ -129,14 +129,14 @@ func (js *Schema) Validate(ctx context.Context, opts ...validation.Option) []err
if err := json.YAMLToJSON(core.RootNode, 0, buf); err != nil {
return []error{
- validation.NewValidationError(fmt.Errorf("schema is not valid json: %w", err), core.RootNode),
+ validation.NewValidationError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("schema is not valid json: %w", err), core.RootNode),
}
}
jsAny, err := jsValidator.UnmarshalJSON(buf)
if err != nil {
return []error{
- validation.NewValidationError(fmt.Errorf("schema is not valid json: %w", err), core.RootNode),
+ validation.NewValidationError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("schema is not valid json: %w", err), core.RootNode),
}
}
@@ -146,7 +146,7 @@ func (js *Schema) Validate(ctx context.Context, opts ...validation.Option) []err
if errors.As(err, &validationErr) {
errs = append(errs, getRootCauses(validationErr, *core)...)
} else {
- errs = append(errs, validation.NewValidationError(validation.NewValueValidationError("schema invalid: %s", err.Error()), core.RootNode))
+ errs = append(errs, validation.NewValidationError(validation.SeverityError, validation.RuleValidationInvalidSchema, fmt.Errorf("schema invalid: %s", err.Error()), core.RootNode))
}
}
@@ -172,7 +172,7 @@ func getRootCauses(err *jsValidator.ValidationError, js core.Schema) []error {
t, err := jsonpointer.GetTarget(js, errJP, jsonpointer.WithStructTags("key"))
if err != nil {
- errs = append(errs, validation.NewValidationError(err, js.GetRootNode()))
+ errs = append(errs, validation.NewValidationError(validation.SeverityError, validation.RuleValidationInvalidTarget, err, js.GetRootNode()))
continue
}
@@ -199,18 +199,25 @@ func getRootCauses(err *jsValidator.ValidationError, js core.Schema) []error {
case *kind.Type:
var want string
if len(t.Want) == 1 {
- want = t.Want[0]
+ want = "`" + t.Want[0] + "`"
} else {
- want = fmt.Sprintf("one of [%s]", strings.Join(t.Want, ", "))
+ // Wrap each type in backticks
+ wrappedTypes := make([]string, len(t.Want))
+ for i, typ := range t.Want {
+ wrappedTypes[i] = "`" + typ + "`"
+ }
+ want = fmt.Sprintf("one of [%s]", strings.Join(wrappedTypes, ", "))
}
- msg = fmt.Sprintf("expected %s, got %s", want, t.Got)
+ msg = fmt.Sprintf("expected %s, got `%s`", want, t.Got)
- newErr = validation.NewValidationError(validation.NewTypeMismatchError(parentName, msg), valueNode)
+ newErr = validation.NewValidationError(validation.SeverityError, validation.RuleValidationTypeMismatch, validation.NewTypeMismatchError(parentName, msg), valueNode)
case *kind.Required:
- newErr = validation.NewValidationError(validation.NewMissingFieldError("%s %s", parentName, msg), valueNode)
+ // Replace single quotes with backticks in the message
+ msg = strings.ReplaceAll(msg, "'", "`")
+ newErr = validation.NewValidationError(validation.SeverityError, validation.RuleValidationRequiredField, fmt.Errorf("`%s` %s", parentName, msg), valueNode)
default:
- newErr = validation.NewValidationError(validation.NewValueValidationError("%s %s", parentName, msg), valueNode)
+ newErr = validation.NewValidationError(validation.SeverityError, validation.RuleValidationInvalidSchema, fmt.Errorf("%s %s", parentName, msg), valueNode)
}
if newErr != nil {
errs = append(errs, newErr)
diff --git a/jsonschema/oas3/walk.go b/jsonschema/oas3/walk.go
index f2a9440b..34c3f438 100644
--- a/jsonschema/oas3/walk.go
+++ b/jsonschema/oas3/walk.go
@@ -65,7 +65,7 @@ func walkSchema(ctx context.Context, schema *JSONSchema[Referenceable], loc walk
}
if schema.IsSchema() {
- js := schema.Left
+ js := schema.GetSchema()
// Walk through allOf schemas
for i, schema := range js.AllOf {
diff --git a/jsonschema/oas3/xml.go b/jsonschema/oas3/xml.go
index e8c16ac0..052dc46b 100644
--- a/jsonschema/oas3/xml.go
+++ b/jsonschema/oas3/xml.go
@@ -2,6 +2,7 @@ package oas3
import (
"context"
+ "fmt"
"net/url"
"reflect"
@@ -124,9 +125,9 @@ func (x *XML) Validate(ctx context.Context, opts ...validation.Option) []error {
if x.Namespace != nil {
u, err := url.Parse(*x.Namespace)
if err != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("xml.namespace is not a valid uri: %s", err), core, core.Namespace))
+ errs = append(errs, validation.NewValueError(validation.SeverityWarning, validation.RuleValidationInvalidFormat, fmt.Errorf("xml.namespace is not a valid uri: %w", err), core, core.Namespace))
} else if !u.IsAbs() {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("xml.namespace must be an absolute uri: %s", *x.Namespace), core, core.Namespace))
+ errs = append(errs, validation.NewValueError(validation.SeverityWarning, validation.RuleValidationInvalidFormat, fmt.Errorf("xml.namespace must be an absolute uri: %s", *x.Namespace), core, core.Namespace))
}
}
diff --git a/linter/README.md b/linter/README.md
new file mode 100644
index 00000000..81b98137
--- /dev/null
+++ b/linter/README.md
@@ -0,0 +1,260 @@
+# Linter Engine
+
+This document provides an overview of the linter engine implementation.
+
+## Architecture Overview
+
+The linter engine is a generic, spec-agnostic framework for implementing configurable linting rules across different API specifications (OpenAPI, Arazzo, Swagger).
+
+### Core Components
+
+1. **Generic Linter Engine** ([`linter/`](linter/))
+ - [`Linter[T]`](linter/linter.go) - Main linting engine with configuration support
+ - [`Registry[T]`](linter/registry.go) - Rule registry with category management
+ - [`Rule`](linter/rule.go) - Base rule interface and specialized interfaces
+ - [`RuleConfig`](linter/config.go) - Per-rule configuration with severity overrides
+ - [`DocumentInfo[T]`](linter/document.go) - Document + location for reference resolution
+ - Format types for text and JSON output
+ - Parallel rule execution for improved performance
+
+2. **OpenAPI Linter** ([`openapi/linter/`](openapi/linter/))
+ - OpenAPI-specific linter implementation
+ - Rule registry with built-in rules
+ - Integration with OpenAPI parser and validator
+
+3. **Rules** ([`openapi/linter/rules/`](openapi/linter/rules/))
+ - Individual linting rules (e.g., [`style-path-params`](openapi/linter/rules/path_params.go))
+ - Each rule implements the [`RuleRunner[*openapi.OpenAPI]`](linter/rule.go) interface
+
+4. **CLI Integration** ([`cmd/openapi/commands/openapi/lint.go`](cmd/openapi/commands/openapi/lint.go))
+ - `openapi spec lint` command
+ - Configuration file support (`lint.yaml`)
+ - Rule documentation generation (`--list-rules`)
+
+## Key Features
+
+### 1. Rule Configuration
+
+Rules can be configured via YAML configuration file:
+
+```yaml
+extends:
+ - all # or specific rulesets like "recommended", "strict"
+
+categories:
+ style:
+ enabled: true
+ severity: warning
+
+rules:
+ - id: style-path-params
+ severity: error
+
+ - id: validation-required-field
+ match: ".*info\\.title is required.*"
+ disabled: true
+```
+
+### 2. Severity Overrides
+
+Rules have default severities that can be overridden:
+- Fatal errors (terminate execution)
+- Error severity (build failures)
+- Warning severity (informational)
+
+### 3. External Reference Resolution
+
+Rules automatically resolve external references (HTTP URLs, file paths):
+
+```yaml
+paths:
+ /users/{userId}:
+ get:
+ parameters:
+ - $ref: "https://example.com/params/user-id.yaml"
+ responses:
+ '200':
+ description: ok
+```
+
+The linter:
+- Uses [`DocumentInfo.Location`](linter/document.go) as the base for resolving relative references
+- Supports custom HTTP clients and virtual filesystems via [`LintOptions.ResolveOptions`](linter/document.go)
+- Reports resolution errors as validation errors with proper severity and location
+
+### 4. Quick Fix Suggestions
+
+Rules can suggest fixes using [`validation.Error`](validation/validation.go) with quick fix support:
+
+```go
+validation.NewValidationErrorWithQuickFix(
+ severity,
+ rule,
+ fmt.Errorf("path parameter {%s} is not defined", param),
+ node,
+ &validation.QuickFix{
+ Description: "Add missing path parameter",
+ Replacement: "...",
+ },
+)
+```
+
+## Implemented Rules
+
+### style-path-params
+
+Ensures path template variables (e.g., `{userId}`) have corresponding parameter definitions with `in='path'`.
+
+**Checks:**
+- All template params must have corresponding parameter definitions
+- All path parameters must be used in the template
+- Works with parameters at PathItem level (inherited) and Operation level (can override)
+- Resolves external references to parameters
+
+**Example:**
+
+```yaml
+# ✅ Valid
+paths:
+ /users/{userId}:
+ get:
+ parameters:
+ - name: userId
+ in: path
+ required: true
+
+# ❌ Invalid - missing parameter definition
+paths:
+ /users/{userId}:
+ get:
+ responses:
+ '200':
+ description: ok
+```
+
+## Usage
+
+### CLI
+
+```bash
+# Lint with default configuration
+openapi spec lint openapi.yaml
+
+# Lint with custom config
+openapi spec lint --config /path/to/lint.yaml openapi.yaml
+
+# List all available rules
+openapi spec lint --list-rules
+
+# Output in JSON format
+openapi spec lint --format json openapi.yaml
+```
+
+### Programmatic
+
+```go
+import (
+ "context"
+ "github.com/speakeasy-api/openapi/linter"
+ openapiLinter "github.com/speakeasy-api/openapi/openapi/linter"
+)
+
+// Create linter with configuration
+config := &linter.Config{
+ Extends: []string{"all"},
+}
+lntr := openapiLinter.NewOpenAPILinter(config)
+
+// Lint document
+docInfo := &linter.DocumentInfo[*openapi.OpenAPI]{
+ Document: doc,
+ Location: "/path/to/openapi.yaml",
+}
+output, err := lntr.Lint(ctx, docInfo, nil, nil)
+if err != nil {
+ // Handle error
+}
+
+// Check results
+if output.HasErrors() {
+ fmt.Println(output.FormatText())
+}
+```
+
+## Filtering Errors After Linting
+
+To apply the config filters to additional errors after the initial lint (for example, errors discovered during lazy reference resolution), use [`FilterErrors`](linter/linter.go:237):
+
+```go
+filtered := lntr.FilterErrors(extraErrors)
+```
+
+## Adding New Rules
+
+To add a new rule:
+
+1. **Create the rule** in [`openapi/linter/rules/`](openapi/linter/rules/)
+
+```go
+type MyRule struct{}
+
+func (r *MyRule) ID() string { return "style-my-rule" }
+func (r *MyRule) Category() string { return "style" }
+func (r *MyRule) Description() string { return "..." }
+func (r *MyRule) Link() string { return "..." }
+func (r *MyRule) DefaultSeverity() validation.Severity {
+ return validation.SeverityWarning
+}
+func (r *MyRule) Versions() []string { return nil }
+
+func (r *MyRule) Run(ctx context.Context, docInfo *linter.DocumentInfo[*openapi.OpenAPI], config *linter.RuleConfig) []error {
+ doc := docInfo.Document
+ // Implement rule logic
+ // Use openapi.Walk() to traverse the document
+ // Return validation.Error instances for violations
+ return nil
+}
+```
+
+2. **Register the rule** in [`openapi/linter/linter.go`](openapi/linter/linter.go)
+
+```go
+registry.Register(&rules.MyRule{})
+```
+
+3. **Write tests** in [`openapi/linter/rules/my_rule_test.go`](openapi/linter/rules/)
+
+```go
+func TestMyRule_Success(t *testing.T) {
+ t.Parallel()
+ // ... test implementation
+}
+```
+
+## Custom Rule Loading
+
+The linter engine supports custom rule loaders that can be registered via the `RegisterCustomRuleLoader` function. This allows spec-specific linters to support custom rules written in different languages or formats.
+
+```go
+// CustomRuleLoaderFunc loads custom rules from configuration
+type CustomRuleLoaderFunc func(config *CustomRulesConfig) ([]RuleRunner[T], error)
+
+// Register a custom rule loader
+linter.RegisterCustomRuleLoader(myLoader)
+```
+
+Custom rules loaded through registered loaders:
+
+- Are automatically registered with the rule registry
+- Support the same configuration options as built-in rules (severity, disabled, match)
+- Integrate seamlessly with category-based configuration
+
+## Design Principles
+
+1. **Generic Architecture** - The core linter is spec-agnostic (`Linter[T any]`)
+2. **Type Safety** - Spec-specific rules use typed interfaces (`RuleRunner[*openapi.OpenAPI]`)
+3. **Separation of Concerns** - Core engine, spec linters, and rules are separate packages
+4. **Extensibility** - Easy to add new rules, rulesets, specs, and custom rule loaders
+5. **Configuration Over Code** - Rule behavior controlled via YAML config
+6. **Reference Resolution** - Automatic external reference resolution with proper error handling
+7. **Testing** - Comprehensive test coverage with parallel execution
diff --git a/linter/config.go b/linter/config.go
new file mode 100644
index 00000000..97d199e7
--- /dev/null
+++ b/linter/config.go
@@ -0,0 +1,207 @@
+package linter
+
+import (
+ "errors"
+ "fmt"
+ "regexp"
+ "strings"
+ "time"
+
+ "github.com/speakeasy-api/openapi/references"
+ "github.com/speakeasy-api/openapi/validation"
+ "gopkg.in/yaml.v3"
+)
+
+// Config represents the linter configuration
+type Config struct {
+ // Extends specifies rulesets to extend (e.g., "recommended", "all")
+ Extends []string `yaml:"extends,omitempty" json:"extends,omitempty"`
+
+ // Rules contains per-rule configuration
+ Rules []RuleEntry `yaml:"rules,omitempty" json:"rules,omitempty"`
+
+ // Categories contains per-category configuration
+ Categories map[string]CategoryConfig `yaml:"categories,omitempty" json:"categories,omitempty"`
+
+ // OutputFormat specifies the output format
+ OutputFormat OutputFormat `yaml:"output_format,omitempty" json:"output_format,omitempty"`
+
+ // CustomRules configures custom rule loading (requires customrules package import)
+ CustomRules *CustomRulesConfig `yaml:"custom_rules,omitempty" json:"custom_rules,omitempty"`
+}
+
+// CustomRulesConfig configures custom rule loading.
+// This is the YAML-serializable configuration. The customrules package
+// extends this with additional programmatic options like Logger.
+type CustomRulesConfig struct {
+ // Paths are glob patterns for rule files (e.g., "./rules/*.ts")
+ Paths []string `yaml:"paths,omitempty" json:"paths,omitempty"`
+
+ // Timeout is the maximum execution time per rule (default: 30s)
+ Timeout time.Duration `yaml:"timeout,omitempty" json:"timeout,omitempty"`
+}
+
+// UnmarshalYAML supports "extends" as string or list and severity aliases.
+func (c *Config) UnmarshalYAML(value *yaml.Node) error {
+ var raw struct {
+ Extends yaml.Node `yaml:"extends,omitempty"`
+ Rules []RuleEntry `yaml:"rules,omitempty"`
+ Categories map[string]CategoryConfig `yaml:"categories,omitempty"`
+ OutputFormat OutputFormat `yaml:"output_format,omitempty"`
+ CustomRules *CustomRulesConfig `yaml:"custom_rules,omitempty"`
+ }
+ if err := value.Decode(&raw); err != nil {
+ return err
+ }
+
+ if raw.Extends.Kind != 0 {
+ switch raw.Extends.Kind {
+ case yaml.ScalarNode:
+ switch raw.Extends.Tag {
+ case "!!null":
+ c.Extends = nil
+ case "!!str", "":
+ c.Extends = []string{raw.Extends.Value}
+ default:
+ return errors.New("extends must be a string or list of strings")
+ }
+ case yaml.SequenceNode:
+ var list []string
+ if err := raw.Extends.Decode(&list); err != nil {
+ return err
+ }
+ c.Extends = list
+ default:
+ return errors.New("extends must be a string or list of strings")
+ }
+ }
+
+ c.Rules = raw.Rules
+ c.Categories = raw.Categories
+ c.OutputFormat = raw.OutputFormat
+ c.CustomRules = raw.CustomRules
+ return nil
+}
+
+// RuleEntry configures rule behavior in lint.yaml.
+type RuleEntry struct {
+ ID string `yaml:"id" json:"id"`
+ Severity *validation.Severity `yaml:"severity,omitempty" json:"severity,omitempty"`
+ Disabled *bool `yaml:"disabled,omitempty" json:"disabled,omitempty"`
+ Match *regexp.Regexp `yaml:"match,omitempty" json:"match,omitempty"`
+}
+
+// UnmarshalYAML allows severity aliases (warn, info) in rule entries.
+func (r *RuleEntry) UnmarshalYAML(value *yaml.Node) error {
+ var raw struct {
+ ID string `yaml:"id"`
+ Severity *string `yaml:"severity,omitempty"`
+ Disabled *bool `yaml:"disabled,omitempty"`
+ Match *regexp.Regexp `yaml:"match,omitempty"`
+ }
+ if err := value.Decode(&raw); err != nil {
+ return err
+ }
+
+ r.ID = raw.ID
+ r.Disabled = raw.Disabled
+ r.Match = raw.Match
+ if raw.Severity != nil {
+ sev, err := parseSeverity(*raw.Severity)
+ if err != nil {
+ return err
+ }
+ r.Severity = &sev
+ }
+ return nil
+}
+
+// Validate checks for missing rule IDs in the configuration.
+func (c *Config) Validate() error {
+ for _, entry := range c.Rules {
+ if strings.TrimSpace(entry.ID) == "" {
+ return errors.New("rule entry missing id")
+ }
+ }
+ return nil
+}
+
+// RuleConfig configures a specific rule
+type RuleConfig struct {
+ // Enabled controls whether the rule is active
+ Enabled *bool `yaml:"enabled,omitempty" json:"enabled,omitempty"`
+
+ // Severity overrides the default severity
+ Severity *validation.Severity `yaml:"severity,omitempty" json:"severity,omitempty"`
+
+ // ResolveOptions contains runtime options for reference resolution (not serialized)
+ // These are set by the linter engine when running rules
+ ResolveOptions *references.ResolveOptions `yaml:"-" json:"-"`
+}
+
+// GetSeverity returns the effective severity, falling back to default if not overridden
+func (c *RuleConfig) GetSeverity(defaultSeverity validation.Severity) validation.Severity {
+ if c != nil && c.Severity != nil {
+ return *c.Severity
+ }
+ return defaultSeverity
+}
+
+// CategoryConfig configures an entire category of rules
+type CategoryConfig struct {
+ // Enabled controls whether all rules in the category are active
+ Enabled *bool `yaml:"enabled,omitempty" json:"enabled,omitempty"`
+
+ // Severity overrides the default severity for all rules in the category
+ Severity *validation.Severity `yaml:"severity,omitempty" json:"severity,omitempty"`
+}
+
+// UnmarshalYAML allows severity aliases (warn, info) in categories.
+func (c *CategoryConfig) UnmarshalYAML(value *yaml.Node) error {
+ var raw struct {
+ Enabled *bool `yaml:"enabled,omitempty"`
+ Severity *string `yaml:"severity,omitempty"`
+ }
+ if err := value.Decode(&raw); err != nil {
+ return err
+ }
+ if raw.Severity != nil {
+ sev, err := parseSeverity(*raw.Severity)
+ if err != nil {
+ return err
+ }
+ c.Severity = &sev
+ }
+ c.Enabled = raw.Enabled
+ return nil
+}
+
+type OutputFormat string
+
+const (
+ OutputFormatText OutputFormat = "text"
+ OutputFormatJSON OutputFormat = "json"
+)
+
+// NewConfig creates a new default configuration
+func NewConfig() *Config {
+ return &Config{
+ Extends: []string{"all"},
+ Rules: []RuleEntry{},
+ Categories: make(map[string]CategoryConfig),
+ OutputFormat: OutputFormatText,
+ }
+}
+
+func parseSeverity(value string) (validation.Severity, error) {
+ switch strings.ToLower(strings.TrimSpace(value)) {
+ case "error":
+ return validation.SeverityError, nil
+ case "warn", "warning":
+ return validation.SeverityWarning, nil
+ case "hint", "info":
+ return validation.SeverityHint, nil
+ default:
+ return "", fmt.Errorf("unknown severity %q", value)
+ }
+}
diff --git a/linter/config_loader.go b/linter/config_loader.go
new file mode 100644
index 00000000..0400cdf1
--- /dev/null
+++ b/linter/config_loader.go
@@ -0,0 +1,51 @@
+package linter
+
+import (
+ "fmt"
+ "io"
+ "os"
+
+ "gopkg.in/yaml.v3"
+)
+
+// LoadConfig loads lint configuration from a YAML reader.
+func LoadConfig(r io.Reader) (*Config, error) {
+ data, err := io.ReadAll(r)
+ if err != nil {
+ return nil, fmt.Errorf("failed to read config: %w", err)
+ }
+
+ var cfg Config
+ if err := yaml.Unmarshal(data, &cfg); err != nil {
+ return nil, fmt.Errorf("failed to parse config: %w", err)
+ }
+
+ if len(cfg.Extends) == 0 {
+ cfg.Extends = []string{"all"}
+ }
+ if cfg.Categories == nil {
+ cfg.Categories = make(map[string]CategoryConfig)
+ }
+ if cfg.Rules == nil {
+ cfg.Rules = []RuleEntry{}
+ }
+ if cfg.OutputFormat == "" {
+ cfg.OutputFormat = OutputFormatText
+ }
+ if err := cfg.Validate(); err != nil {
+ return nil, err
+ }
+
+ return &cfg, nil
+}
+
+// LoadConfigFromFile loads lint configuration from a YAML file.
+func LoadConfigFromFile(path string) (*Config, error) {
+ f, err := os.Open(path) //nolint:gosec
+ if err != nil {
+ return nil, fmt.Errorf("failed to open config file: %w", err)
+ }
+ defer f.Close()
+
+ return LoadConfig(f)
+}
diff --git a/linter/config_test.go b/linter/config_test.go
new file mode 100644
index 00000000..4f6009e3
--- /dev/null
+++ b/linter/config_test.go
@@ -0,0 +1,284 @@
+package linter_test
+
+import (
+ "os"
+ "regexp"
+ "strings"
+ "testing"
+
+ "github.com/speakeasy-api/openapi/linter"
+ "github.com/speakeasy-api/openapi/validation"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestRuleConfig_GetSeverity(t *testing.T) {
+ t.Parallel()
+
+ t.Run("returns configured severity when set", func(t *testing.T) {
+ t.Parallel()
+
+ warningSeverity := validation.SeverityWarning
+ config := linter.RuleConfig{
+ Severity: &warningSeverity,
+ }
+
+ assert.Equal(t, validation.SeverityWarning, config.GetSeverity(validation.SeverityError))
+ })
+
+ t.Run("returns default severity when not set", func(t *testing.T) {
+ t.Parallel()
+
+ config := linter.RuleConfig{}
+
+ assert.Equal(t, validation.SeverityError, config.GetSeverity(validation.SeverityError))
+ })
+
+ t.Run("returns configured severity overriding different default", func(t *testing.T) {
+ t.Parallel()
+
+ hintSeverity := validation.SeverityHint
+ config := linter.RuleConfig{
+ Severity: &hintSeverity,
+ }
+
+ assert.Equal(t, validation.SeverityHint, config.GetSeverity(validation.SeverityWarning))
+ })
+}
+
+func TestNewConfig(t *testing.T) {
+ t.Parallel()
+
+ config := linter.NewConfig()
+ assert.NotNil(t, config)
+ assert.Equal(t, linter.OutputFormatText, config.OutputFormat)
+ assert.NotNil(t, config.Rules)
+ assert.NotNil(t, config.Categories)
+ assert.NotNil(t, config.Extends)
+}
+
+func TestLoadConfig_ExtendsString(t *testing.T) {
+ t.Parallel()
+
+ configYAML := `extends: recommended`
+ config, err := linter.LoadConfig(strings.NewReader(configYAML))
+ require.NoError(t, err)
+ assert.Equal(t, []string{"recommended"}, config.Extends)
+}
+
+func TestLoadConfig_ExtendsList(t *testing.T) {
+ t.Parallel()
+
+ configYAML := `extends:
+ - recommended
+ - strict`
+ config, err := linter.LoadConfig(strings.NewReader(configYAML))
+ require.NoError(t, err)
+ assert.Equal(t, []string{"recommended", "strict"}, config.Extends)
+}
+
+func TestLoadConfig_MatchRegex(t *testing.T) {
+ t.Parallel()
+
+ configYAML := `rules:
+ - id: validation-required
+ match: ".*title.*"`
+ config, err := linter.LoadConfig(strings.NewReader(configYAML))
+ require.NoError(t, err)
+ require.Len(t, config.Rules, 1)
+ require.NotNil(t, config.Rules[0].Match)
+ assert.Equal(t, regexp.MustCompile(".*title.*").String(), config.Rules[0].Match.String())
+}
+
+func TestLoadConfig_CustomRulesRoundTrip(t *testing.T) {
+ t.Parallel()
+
+ configYAML := `extends: all
+custom_rules:
+ paths:
+ - "./rules/*.ts"
+ - "./extra/*.ts"`
+ config, err := linter.LoadConfig(strings.NewReader(configYAML))
+ require.NoError(t, err, "should load config with custom_rules")
+ require.NotNil(t, config.CustomRules, "custom_rules should survive UnmarshalYAML round-trip")
+ assert.Equal(t, []string{"./rules/*.ts", "./extra/*.ts"}, config.CustomRules.Paths, "custom_rules.paths should be preserved")
+}
+
+func TestLoadConfig_CategorySeverityAliases(t *testing.T) {
+ t.Parallel()
+
+ tests := []struct {
+ name string
+ yaml string
+ expectedSeverity validation.Severity
+ }{
+ {
+ name: "error severity",
+ yaml: `categories:
+ style:
+ severity: error`,
+ expectedSeverity: validation.SeverityError,
+ },
+ {
+ name: "warn alias for warning",
+ yaml: `categories:
+ style:
+ severity: warn`,
+ expectedSeverity: validation.SeverityWarning,
+ },
+ {
+ name: "warning severity",
+ yaml: `categories:
+ style:
+ severity: warning`,
+ expectedSeverity: validation.SeverityWarning,
+ },
+ {
+ name: "hint severity",
+ yaml: `categories:
+ style:
+ severity: hint`,
+ expectedSeverity: validation.SeverityHint,
+ },
+ {
+ name: "info alias for hint",
+ yaml: `categories:
+ style:
+ severity: info`,
+ expectedSeverity: validation.SeverityHint,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ t.Parallel()
+
+ config, err := linter.LoadConfig(strings.NewReader(tt.yaml))
+ require.NoError(t, err)
+ require.NotNil(t, config.Categories["style"].Severity, "severity should be set")
+ assert.Equal(t, tt.expectedSeverity, *config.Categories["style"].Severity, "severity should match expected")
+ })
+ }
+}
+
+func TestLoadConfig_CategoryEnabled(t *testing.T) {
+ t.Parallel()
+
+ configYAML := `categories:
+ security:
+ enabled: false`
+ config, err := linter.LoadConfig(strings.NewReader(configYAML))
+ require.NoError(t, err)
+ require.NotNil(t, config.Categories["security"].Enabled, "enabled should be set")
+ assert.False(t, *config.Categories["security"].Enabled, "security category should be disabled")
+}
+
+func TestLoadConfig_CategoryInvalidSeverity(t *testing.T) {
+ t.Parallel()
+
+ configYAML := `categories:
+ style:
+ severity: critical`
+ _, err := linter.LoadConfig(strings.NewReader(configYAML))
+ require.Error(t, err)
+ assert.Contains(t, err.Error(), "unknown severity")
+}
+
+func TestLoadConfig_RuleSeverityAliases(t *testing.T) {
+ t.Parallel()
+
+ tests := []struct {
+ name string
+ yaml string
+ expectedSeverity validation.Severity
+ }{
+ {
+ name: "warn alias",
+ yaml: `rules:
+ - id: test-rule
+ severity: warn`,
+ expectedSeverity: validation.SeverityWarning,
+ },
+ {
+ name: "info alias",
+ yaml: `rules:
+ - id: test-rule
+ severity: info`,
+ expectedSeverity: validation.SeverityHint,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ t.Parallel()
+
+ config, err := linter.LoadConfig(strings.NewReader(tt.yaml))
+ require.NoError(t, err)
+ require.Len(t, config.Rules, 1)
+ require.NotNil(t, config.Rules[0].Severity, "severity should be set")
+ assert.Equal(t, tt.expectedSeverity, *config.Rules[0].Severity, "severity should match expected")
+ })
+ }
+}
+
+func TestLoadConfig_RuleInvalidSeverity(t *testing.T) {
+ t.Parallel()
+
+ configYAML := `rules:
+ - id: test-rule
+ severity: critical`
+ _, err := linter.LoadConfig(strings.NewReader(configYAML))
+ require.Error(t, err)
+ assert.Contains(t, err.Error(), "unknown severity")
+}
+
+func TestLoadConfig_ExtendsInvalidType(t *testing.T) {
+ t.Parallel()
+
+ configYAML := `extends:
+ key: value`
+ _, err := linter.LoadConfig(strings.NewReader(configYAML))
+ require.Error(t, err)
+ assert.Contains(t, err.Error(), "extends must be a string or list of strings")
+}
+
+func TestLoadConfig_ExtendsNull(t *testing.T) {
+ t.Parallel()
+
+ configYAML := `extends: null`
+ config, err := linter.LoadConfig(strings.NewReader(configYAML))
+ require.NoError(t, err)
+ assert.Equal(t, []string{"all"}, config.Extends, "null extends should default to all")
+}
+
+func TestLoadConfigFromFile_Success(t *testing.T) {
+ t.Parallel()
+
+ tmpFile := t.TempDir() + "/lint.yaml"
+ err := os.WriteFile(tmpFile, []byte("extends: recommended\n"), 0644)
+ require.NoError(t, err)
+
+ config, err := linter.LoadConfigFromFile(tmpFile)
+ require.NoError(t, err)
+ assert.Equal(t, []string{"recommended"}, config.Extends)
+}
+
+func TestLoadConfigFromFile_Error(t *testing.T) {
+ t.Parallel()
+
+ _, err := linter.LoadConfigFromFile("/nonexistent/path/lint.yaml")
+ require.Error(t, err)
+ assert.Contains(t, err.Error(), "failed to open config file")
+}
+
+func TestConfig_ValidateMissingRuleID(t *testing.T) {
+ t.Parallel()
+
+ config := &linter.Config{
+ Rules: []linter.RuleEntry{{}},
+ }
+
+ err := config.Validate()
+ require.Error(t, err)
+ assert.Contains(t, err.Error(), "rule entry missing id")
+}
diff --git a/linter/doc.go b/linter/doc.go
new file mode 100644
index 00000000..efb743f8
--- /dev/null
+++ b/linter/doc.go
@@ -0,0 +1,267 @@
+package linter
+
+import (
+ "encoding/json"
+ "fmt"
+ "io"
+ "sort"
+ "strings"
+)
+
+// DocGenerator generates documentation from registered rules.
+// The registry is used both to enumerate rules and to look up which rulesets
+// contain each rule.
+type DocGenerator[T any] struct {
+ registry *Registry[T]
+}
+
+// NewDocGenerator creates a new documentation generator backed by registry.
+func NewDocGenerator[T any](registry *Registry[T]) *DocGenerator[T] {
+ return &DocGenerator[T]{registry: registry}
+}
+
+// RuleDoc represents documentation for a single rule.
+// It is the serialization-friendly shape shared by the JSON and Markdown
+// writers; the json/yaml tags define the wire field names.
+type RuleDoc struct {
+ ID string `json:"id" yaml:"id"`
+ Category string `json:"category" yaml:"category"`
+ Summary string `json:"summary" yaml:"summary"`
+ Description string `json:"description" yaml:"description"`
+ Rationale string `json:"rationale,omitempty" yaml:"rationale,omitempty"`
+ Link string `json:"link,omitempty" yaml:"link,omitempty"`
+ DefaultSeverity string `json:"default_severity" yaml:"default_severity"`
+ Versions []string `json:"versions,omitempty" yaml:"versions,omitempty"`
+ GoodExample string `json:"good_example,omitempty" yaml:"good_example,omitempty"`
+ BadExample string `json:"bad_example,omitempty" yaml:"bad_example,omitempty"`
+ FixAvailable bool `json:"fix_available" yaml:"fix_available"`
+ ConfigSchema map[string]any `json:"config_schema,omitempty" yaml:"config_schema,omitempty"`
+ ConfigDefaults map[string]any `json:"config_defaults,omitempty" yaml:"config_defaults,omitempty"`
+ Rulesets []string `json:"rulesets" yaml:"rulesets"`
+}
+
+// GenerateRuleDoc generates documentation for a single rule.
+// Base metadata comes from the RuleRunner interface; examples, rationale and
+// config information are populated only when the rule also implements the
+// optional DocumentedRule / ConfigurableRule interfaces.
+func (g *DocGenerator[T]) GenerateRuleDoc(rule RuleRunner[T]) *RuleDoc {
+ doc := &RuleDoc{
+ ID: rule.ID(),
+ Category: rule.Category(),
+ Summary: rule.Summary(),
+ Description: rule.Description(),
+ Link: rule.Link(),
+ DefaultSeverity: rule.DefaultSeverity().String(),
+ Versions: rule.Versions(),
+ Rulesets: g.registry.RulesetsContaining(rule.ID()),
+ }
+
+ // Check for optional documentation interface (examples/rationale/fix flag).
+ if documented, ok := any(rule).(DocumentedRule); ok {
+ doc.GoodExample = documented.GoodExample()
+ doc.BadExample = documented.BadExample()
+ doc.Rationale = documented.Rationale()
+ doc.FixAvailable = documented.FixAvailable()
+ }
+
+ // Check for configuration interface (schema + defaults).
+ if configurable, ok := any(rule).(ConfigurableRule); ok {
+ doc.ConfigSchema = configurable.ConfigSchema()
+ doc.ConfigDefaults = configurable.ConfigDefaults()
+ }
+
+ return doc
+}
+
+// GenerateAllRuleDocs generates documentation for all registered rules.
+// Output order follows registry.AllRules — presumably registration or sorted
+// order; determined by the Registry implementation (not visible here).
+func (g *DocGenerator[T]) GenerateAllRuleDocs() []*RuleDoc {
+ var docs []*RuleDoc
+ for _, rule := range g.registry.AllRules() {
+ docs = append(docs, g.GenerateRuleDoc(rule))
+ }
+ return docs
+}
+
+// GenerateCategoryDocs groups rules by category.
+// NOTE: the returned value is a map, so callers must not rely on iteration
+// order — sort the keys for deterministic output.
+func (g *DocGenerator[T]) GenerateCategoryDocs() map[string][]*RuleDoc {
+ categories := make(map[string][]*RuleDoc)
+ for _, rule := range g.registry.AllRules() {
+ doc := g.GenerateRuleDoc(rule)
+ categories[doc.Category] = append(categories[doc.Category], doc)
+ }
+ return categories
+}
+
+// WriteJSON writes rule documentation as indented JSON to w.
+// The top-level object has three keys: "rules" (all rule docs),
+// "categories" and "rulesets" (taken from the registry).
+func (g *DocGenerator[T]) WriteJSON(w io.Writer) error {
+ docs := g.GenerateAllRuleDocs()
+ enc := json.NewEncoder(w)
+ enc.SetIndent("", " ")
+ return enc.Encode(map[string]any{
+ "rules": docs,
+ "categories": g.registry.AllCategories(),
+ "rulesets": g.registry.AllRulesets(),
+ })
+}
+
+// WriteMarkdown writes rule documentation as Markdown to w.
+//
+// Categories are emitted in sorted order so output is deterministic across
+// runs (GenerateCategoryDocs returns a map, whose iteration order is random
+// in Go). TOC anchors are lowercased and space-hyphenated to match common
+// Markdown renderers' heading-anchor generation.
+func (g *DocGenerator[T]) WriteMarkdown(w io.Writer) error {
+ docs := g.GenerateCategoryDocs()
+
+ // Sort category names once; used for both the TOC and the sections.
+ categories := make([]string, 0, len(docs))
+ for category := range docs {
+ categories = append(categories, category)
+ }
+ sort.Strings(categories)
+
+ if err := writeLine(w, "# Lint Rules Reference"); err != nil {
+ return err
+ }
+ if err := writeEmptyLine(w); err != nil {
+ return err
+ }
+
+ // Table of contents
+ if err := writeLine(w, "## Categories"); err != nil {
+ return err
+ }
+ if err := writeEmptyLine(w); err != nil {
+ return err
+ }
+ for _, category := range categories {
+ // Slugify the anchor the way most renderers derive heading ids.
+ anchor := strings.ReplaceAll(strings.ToLower(category), " ", "-")
+ if err := writeF(w, "- [%s](#%s)\n", category, anchor); err != nil {
+ return err
+ }
+ }
+ if err := writeEmptyLine(w); err != nil {
+ return err
+ }
+
+ // Rules by category, in the same deterministic order as the TOC.
+ for _, category := range categories {
+ if err := writeF(w, "## %s\n\n", category); err != nil {
+ return err
+ }
+
+ for _, rule := range docs[category] {
+ if err := g.writeRuleMarkdown(w, rule); err != nil {
+ return err
+ }
+ }
+ }
+
+ return nil
+}
+
+// writeRuleMarkdown renders a single rule as a Markdown section: heading,
+// metadata lines, description, rationale, bad/good examples, configuration
+// table, documentation link, and a trailing "---" separator.
+//
+// Fix: the configuration table previously emitted only the header and
+// separator rows (dangling TODO), producing an empty table; it now emits one
+// row per option, sorted by name for deterministic output.
+func (g *DocGenerator[T]) writeRuleMarkdown(w io.Writer, rule *RuleDoc) error {
+ if err := writeF(w, "### %s\n\n", rule.ID); err != nil {
+ return err
+ }
+ if err := writeF(w, "**Severity:** %s \n", rule.DefaultSeverity); err != nil {
+ return err
+ }
+ if err := writeF(w, "**Category:** %s \n", rule.Category); err != nil {
+ return err
+ }
+ if rule.Summary != "" {
+ if err := writeF(w, "**Summary:** %s \n", rule.Summary); err != nil {
+ return err
+ }
+ }
+
+ if len(rule.Versions) > 0 {
+ if err := writeF(w, "**Applies to:** %s \n", strings.Join(rule.Versions, ", ")); err != nil {
+ return err
+ }
+ }
+
+ if rule.FixAvailable {
+ if err := writeLine(w, "**Auto-fix available:** Yes "); err != nil {
+ return err
+ }
+ }
+ if err := writeEmptyLine(w); err != nil {
+ return err
+ }
+
+ if err := writeF(w, "%s\n\n", rule.Description); err != nil {
+ return err
+ }
+
+ if rule.Rationale != "" {
+ if err := writeF(w, "#### Rationale\n\n%s\n\n", rule.Rationale); err != nil {
+ return err
+ }
+ }
+
+ if rule.BadExample != "" {
+ if err := writeLine(w, "#### ❌ Incorrect"); err != nil {
+ return err
+ }
+ if err := writeLine(w, "```yaml"); err != nil {
+ return err
+ }
+ if err := writeLine(w, rule.BadExample); err != nil {
+ return err
+ }
+ if err := writeLine(w, "```"); err != nil {
+ return err
+ }
+ if err := writeEmptyLine(w); err != nil {
+ return err
+ }
+ }
+
+ if rule.GoodExample != "" {
+ if err := writeLine(w, "#### ✅ Correct"); err != nil {
+ return err
+ }
+ if err := writeLine(w, "```yaml"); err != nil {
+ return err
+ }
+ if err := writeLine(w, rule.GoodExample); err != nil {
+ return err
+ }
+ if err := writeLine(w, "```"); err != nil {
+ return err
+ }
+ if err := writeEmptyLine(w); err != nil {
+ return err
+ }
+ }
+
+ if len(rule.ConfigSchema) > 0 {
+ if err := writeLine(w, "#### Configuration"); err != nil {
+ return err
+ }
+ if err := writeEmptyLine(w); err != nil {
+ return err
+ }
+ if err := writeLine(w, "| Option | Type | Default | Description |"); err != nil {
+ return err
+ }
+ if err := writeLine(w, "|--------|------|---------|-------------|"); err != nil {
+ return err
+ }
+ // One row per option, sorted for deterministic output.
+ options := make([]string, 0, len(rule.ConfigSchema))
+ for option := range rule.ConfigSchema {
+ options = append(options, option)
+ }
+ sort.Strings(options)
+ for _, option := range options {
+ optType := ""
+ optDesc := ""
+ // Schema entries are JSON-Schema-like maps elsewhere in this package
+ // (e.g. {"type": "integer"}); fall back to empty cells otherwise.
+ if schema, ok := rule.ConfigSchema[option].(map[string]any); ok {
+ optType, _ = schema["type"].(string)
+ optDesc, _ = schema["description"].(string)
+ }
+ optDefault := ""
+ if def, ok := rule.ConfigDefaults[option]; ok {
+ optDefault = fmt.Sprintf("%v", def)
+ }
+ if err := writeF(w, "| %s | %s | %s | %s |\n", option, optType, optDefault, optDesc); err != nil {
+ return err
+ }
+ }
+ if err := writeEmptyLine(w); err != nil {
+ return err
+ }
+ }
+
+ if rule.Link != "" {
+ if err := writeF(w, "[Documentation →](%s)\n\n", rule.Link); err != nil {
+ return err
+ }
+ }
+
+ if err := writeLine(w, "---"); err != nil {
+ return err
+ }
+ if err := writeEmptyLine(w); err != nil {
+ return err
+ }
+
+ return nil
+}
+
+// writeLine writes text to w followed by a newline.
+func writeLine(w io.Writer, text string) error {
+ _, err := fmt.Fprintln(w, text)
+ return err
+}
+
+// writeEmptyLine writes a single blank line to w.
+func writeEmptyLine(w io.Writer) error {
+ _, err := fmt.Fprintln(w)
+ return err
+}
+
+// writeF writes printf-formatted text to w.
+func writeF(w io.Writer, format string, args ...any) error {
+ _, err := fmt.Fprintf(w, format, args...)
+ return err
+}
diff --git a/linter/doc_test.go b/linter/doc_test.go
new file mode 100644
index 00000000..5ebfb605
--- /dev/null
+++ b/linter/doc_test.go
@@ -0,0 +1,280 @@
+package linter_test
+
+import (
+ "bytes"
+ "encoding/json"
+ "testing"
+
+ "github.com/speakeasy-api/openapi/linter"
+ "github.com/speakeasy-api/openapi/validation"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+// TestDocGenerator_GenerateRuleDoc covers the three documentation tiers:
+// base metadata, the optional DocumentedRule interface (examples/rationale),
+// and the optional ConfigurableRule interface (schema/defaults).
+func TestDocGenerator_GenerateRuleDoc(t *testing.T) {
+ t.Parallel()
+
+ t.Run("basic rule documentation", func(t *testing.T) {
+ t.Parallel()
+
+ registry := linter.NewRegistry[*MockDoc]()
+ registry.Register(&mockRule{
+ id: "test-rule",
+ category: "style",
+ summary: "Test rule summary",
+ description: "Test rule description",
+ link: "https://example.com/rules/test-rule",
+ defaultSeverity: validation.SeverityError,
+ versions: []string{"3.1.0", "3.2.0"},
+ })
+
+ generator := linter.NewDocGenerator(registry)
+ rule, _ := registry.GetRule("test-rule")
+ doc := generator.GenerateRuleDoc(rule)
+
+ assert.Equal(t, "test-rule", doc.ID)
+ assert.Equal(t, "style", doc.Category)
+ assert.Equal(t, "Test rule summary", doc.Summary)
+ assert.Equal(t, "Test rule description", doc.Description)
+ assert.Equal(t, "https://example.com/rules/test-rule", doc.Link)
+ assert.Equal(t, "error", doc.DefaultSeverity)
+ assert.Equal(t, []string{"3.1.0", "3.2.0"}, doc.Versions)
+ assert.Contains(t, doc.Rulesets, "all")
+ })
+
+ t.Run("documented rule with examples", func(t *testing.T) {
+ t.Parallel()
+
+ registry := linter.NewRegistry[*MockDoc]()
+ registry.Register(&documentedMockRule{
+ mockRule: mockRule{
+ id: "documented-rule",
+ category: "style",
+ summary: "Documented rule summary",
+ description: "Rule with examples",
+ defaultSeverity: validation.SeverityWarning,
+ },
+ goodExample: "good:\n example: value",
+ badExample: "bad:\n example: value",
+ rationale: "This is why the rule exists",
+ fixAvailable: true,
+ })
+
+ generator := linter.NewDocGenerator(registry)
+ rule, _ := registry.GetRule("documented-rule")
+ doc := generator.GenerateRuleDoc(rule)
+
+ assert.Equal(t, "good:\n example: value", doc.GoodExample)
+ assert.Equal(t, "bad:\n example: value", doc.BadExample)
+ assert.Equal(t, "This is why the rule exists", doc.Rationale)
+ assert.True(t, doc.FixAvailable)
+ })
+
+ t.Run("configurable rule with schema", func(t *testing.T) {
+ t.Parallel()
+
+ registry := linter.NewRegistry[*MockDoc]()
+ registry.Register(&configurableMockRule{
+ mockRule: mockRule{
+ id: "configurable-rule",
+ category: "style",
+ summary: "Configurable rule summary",
+ description: "Configurable rule",
+ defaultSeverity: validation.SeverityError,
+ },
+ configSchema: map[string]any{
+ "maxLength": map[string]any{"type": "integer"},
+ },
+ configDefaults: map[string]any{
+ "maxLength": 100,
+ },
+ })
+
+ generator := linter.NewDocGenerator(registry)
+ rule, _ := registry.GetRule("configurable-rule")
+ doc := generator.GenerateRuleDoc(rule)
+
+ assert.NotNil(t, doc.ConfigSchema)
+ assert.Contains(t, doc.ConfigSchema, "maxLength")
+ assert.NotNil(t, doc.ConfigDefaults)
+ assert.Equal(t, 100, doc.ConfigDefaults["maxLength"])
+ })
+}
+
+// TestDocGenerator_GenerateAllRuleDocs verifies every registered rule gets a
+// doc; ElementsMatch is used because output order is registry-defined.
+func TestDocGenerator_GenerateAllRuleDocs(t *testing.T) {
+ t.Parallel()
+
+ registry := linter.NewRegistry[*MockDoc]()
+ registry.Register(&mockRule{id: "rule-1", category: "style", defaultSeverity: validation.SeverityError, description: "Rule 1"})
+ registry.Register(&mockRule{id: "rule-2", category: "security", defaultSeverity: validation.SeverityWarning, description: "Rule 2"})
+ registry.Register(&mockRule{id: "rule-3", category: "style", defaultSeverity: validation.SeverityHint, description: "Rule 3"})
+
+ generator := linter.NewDocGenerator(registry)
+ docs := generator.GenerateAllRuleDocs()
+
+ assert.Len(t, docs, 3)
+
+ // Verify all rules are documented
+ ids := make([]string, len(docs))
+ for i, doc := range docs {
+ ids[i] = doc.ID
+ }
+ assert.ElementsMatch(t, []string{"rule-1", "rule-2", "rule-3"}, ids)
+}
+
+// TestDocGenerator_GenerateCategoryDocs verifies rules are grouped under
+// their category key with no cross-category leakage.
+func TestDocGenerator_GenerateCategoryDocs(t *testing.T) {
+ t.Parallel()
+
+ registry := linter.NewRegistry[*MockDoc]()
+ registry.Register(&mockRule{id: "style-1", category: "style", defaultSeverity: validation.SeverityError, description: "Style 1"})
+ registry.Register(&mockRule{id: "style-2", category: "style", defaultSeverity: validation.SeverityError, description: "Style 2"})
+ registry.Register(&mockRule{id: "security-1", category: "security", defaultSeverity: validation.SeverityError, description: "Security 1"})
+
+ generator := linter.NewDocGenerator(registry)
+ categoryDocs := generator.GenerateCategoryDocs()
+
+ assert.Len(t, categoryDocs, 2)
+ assert.Len(t, categoryDocs["style"], 2)
+ assert.Len(t, categoryDocs["security"], 1)
+
+ // Verify correct grouping
+ styleIDs := []string{categoryDocs["style"][0].ID, categoryDocs["style"][1].ID}
+ assert.ElementsMatch(t, []string{"style-1", "style-2"}, styleIDs)
+ assert.Equal(t, "security-1", categoryDocs["security"][0].ID)
+}
+
+// TestDocGenerator_WriteJSON verifies the JSON output is valid, has the
+// expected top-level keys (rules/categories/rulesets) and carries rule fields.
+func TestDocGenerator_WriteJSON(t *testing.T) {
+ t.Parallel()
+
+ registry := linter.NewRegistry[*MockDoc]()
+ registry.Register(&mockRule{
+ id: "test-rule",
+ category: "style",
+ summary: "Test rule summary",
+ description: "Test description",
+ link: "https://example.com",
+ defaultSeverity: validation.SeverityError,
+ })
+ _ = registry.RegisterRuleset("recommended", []string{"test-rule"})
+
+ generator := linter.NewDocGenerator(registry)
+
+ var buf bytes.Buffer
+ err := generator.WriteJSON(&buf)
+ require.NoError(t, err)
+
+ // Verify valid JSON
+ var result map[string]any
+ err = json.Unmarshal(buf.Bytes(), &result)
+ require.NoError(t, err)
+
+ // Verify structure
+ assert.Contains(t, result, "rules")
+ assert.Contains(t, result, "categories")
+ assert.Contains(t, result, "rulesets")
+
+ // Verify rules array
+ rules, ok := result["rules"].([]any)
+ require.True(t, ok)
+ assert.Len(t, rules, 1)
+
+ // Verify rule details
+ ruleMap, ok := rules[0].(map[string]any)
+ require.True(t, ok)
+ assert.Equal(t, "test-rule", ruleMap["id"])
+ assert.Equal(t, "style", ruleMap["category"])
+}
+
+// TestDocGenerator_WriteMarkdown verifies the full Markdown section layout
+// for a documented rule: headings, metadata lines, examples, link, separator.
+func TestDocGenerator_WriteMarkdown(t *testing.T) {
+ t.Parallel()
+
+ registry := linter.NewRegistry[*MockDoc]()
+ registry.Register(&documentedMockRule{
+ mockRule: mockRule{
+ id: "test-rule",
+ category: "style",
+ summary: "Test rule summary",
+ description: "Test rule description",
+ link: "https://docs.example.com/rules/test-rule",
+ defaultSeverity: validation.SeverityError,
+ },
+ goodExample: "good:\n value: correct",
+ badExample: "bad:\n value: incorrect",
+ rationale: "This rule ensures consistency",
+ fixAvailable: true,
+ })
+
+ generator := linter.NewDocGenerator(registry)
+
+ var buf bytes.Buffer
+ err := generator.WriteMarkdown(&buf)
+ require.NoError(t, err)
+
+ output := buf.String()
+
+ // Verify markdown structure
+ assert.Contains(t, output, "# Lint Rules Reference")
+ assert.Contains(t, output, "## Categories")
+ assert.Contains(t, output, "## style") // Category header
+ assert.Contains(t, output, "### test-rule") // Rule header
+ assert.Contains(t, output, "**Severity:** error")
+ assert.Contains(t, output, "**Category:** style")
+ assert.Contains(t, output, "**Summary:** Test rule summary")
+ assert.Contains(t, output, "Test rule description")
+ assert.Contains(t, output, "#### Rationale")
+ assert.Contains(t, output, "This rule ensures consistency")
+ assert.Contains(t, output, "#### ❌ Incorrect")
+ assert.Contains(t, output, "bad:\n value: incorrect")
+ assert.Contains(t, output, "#### ✅ Correct")
+ assert.Contains(t, output, "good:\n value: correct")
+ assert.Contains(t, output, "**Auto-fix available:** Yes")
+ assert.Contains(t, output, "[Documentation →](https://docs.example.com/rules/test-rule)")
+ assert.Contains(t, output, "---") // Separator
+}
+
+// TestDocGenerator_WriteMarkdown_WithVersions verifies the "Applies to" line
+// is rendered when a rule restricts itself to specific spec versions.
+func TestDocGenerator_WriteMarkdown_WithVersions(t *testing.T) {
+ t.Parallel()
+
+ registry := linter.NewRegistry[*MockDoc]()
+ registry.Register(&mockRule{
+ id: "versioned-rule",
+ category: "validation",
+ summary: "Versioned rule summary",
+ description: "Version-specific rule",
+ defaultSeverity: validation.SeverityError,
+ versions: []string{"3.1.0", "3.2.0"},
+ })
+
+ generator := linter.NewDocGenerator(registry)
+
+ var buf bytes.Buffer
+ err := generator.WriteMarkdown(&buf)
+ require.NoError(t, err)
+
+ output := buf.String()
+ assert.Contains(t, output, "**Applies to:** 3.1.0, 3.2.0")
+}
+
+// documentedMockRule implements DocumentedRule interface by embedding
+// mockRule and adding the four optional documentation accessors.
+type documentedMockRule struct {
+ mockRule
+ goodExample string
+ badExample string
+ rationale string
+ fixAvailable bool
+}
+
+func (r *documentedMockRule) GoodExample() string { return r.goodExample }
+func (r *documentedMockRule) BadExample() string { return r.badExample }
+func (r *documentedMockRule) Rationale() string { return r.rationale }
+func (r *documentedMockRule) FixAvailable() bool { return r.fixAvailable }
+
+// configurableMockRule implements ConfigurableRule interface by embedding
+// mockRule and adding schema/defaults accessors.
+type configurableMockRule struct {
+ mockRule
+ configSchema map[string]any
+ configDefaults map[string]any
+}
+
+func (r *configurableMockRule) ConfigSchema() map[string]any { return r.configSchema }
+func (r *configurableMockRule) ConfigDefaults() map[string]any { return r.configDefaults }
diff --git a/linter/document.go b/linter/document.go
new file mode 100644
index 00000000..b4fe8c86
--- /dev/null
+++ b/linter/document.go
@@ -0,0 +1,48 @@
+package linter
+
+import (
+ "github.com/speakeasy-api/openapi/openapi"
+ "github.com/speakeasy-api/openapi/references"
+)
+
+// DocumentInfo contains a document and its metadata for linting
+type DocumentInfo[T any] struct {
+ // Document is the parsed document to lint
+ Document T
+
+ // Location is the absolute location (URL or file path) of the document
+ // This is used for resolving relative references
+ Location string
+
+ // Index contains an index of various nodes from the provided document.
+ // May be nil when constructed via NewDocumentInfo.
+ Index *openapi.Index
+}
+
+// NewDocumentInfo creates a new DocumentInfo with the given document and
+// location; Index is left nil.
+func NewDocumentInfo[T any](doc T, location string) *DocumentInfo[T] {
+ return &DocumentInfo[T]{
+ Document: doc,
+ Location: location,
+ }
+}
+
+// NewDocumentInfoWithIndex creates a new DocumentInfo with a pre-computed index
+func NewDocumentInfoWithIndex[T any](doc T, location string, index *openapi.Index) *DocumentInfo[T] {
+ return &DocumentInfo[T]{
+ Document: doc,
+ Location: location,
+ Index: index,
+ }
+}
+
+// LintOptions contains runtime options for linting
+type LintOptions struct {
+ // ResolveOptions contains options for reference resolution
+ // If nil, default options will be used
+ ResolveOptions *references.ResolveOptions
+
+ // VersionFilter is the document version (e.g., "3.0", "3.1")
+ // If set, only rules that apply to this version will be run
+ // Rules with nil/empty Versions() apply to all versions
+ VersionFilter *string
+}
diff --git a/linter/document_test.go b/linter/document_test.go
new file mode 100644
index 00000000..89b8d45d
--- /dev/null
+++ b/linter/document_test.go
@@ -0,0 +1,38 @@
+package linter_test
+
+import (
+ "testing"
+
+ "github.com/speakeasy-api/openapi/linter"
+ "github.com/speakeasy-api/openapi/openapi"
+ "github.com/stretchr/testify/assert"
+)
+
+// TestNewDocumentInfo verifies the basic constructor leaves Index nil.
+func TestNewDocumentInfo(t *testing.T) {
+ t.Parallel()
+
+ doc := &MockDoc{ID: "test-doc"}
+ location := "/path/to/openapi.yaml"
+
+ docInfo := linter.NewDocumentInfo(doc, location)
+
+ assert.NotNil(t, docInfo)
+ assert.Equal(t, doc, docInfo.Document)
+ assert.Equal(t, location, docInfo.Location)
+ assert.Nil(t, docInfo.Index)
+}
+
+// TestNewDocumentInfoWithIndex verifies the index-carrying constructor
+// stores the provided index unchanged.
+func TestNewDocumentInfoWithIndex(t *testing.T) {
+ t.Parallel()
+
+ doc := &MockDoc{ID: "test-doc"}
+ location := "/path/to/openapi.yaml"
+ index := &openapi.Index{}
+
+ docInfo := linter.NewDocumentInfoWithIndex(doc, location, index)
+
+ assert.NotNil(t, docInfo)
+ assert.Equal(t, doc, docInfo.Document)
+ assert.Equal(t, location, docInfo.Location)
+ assert.Equal(t, index, docInfo.Index)
+}
diff --git a/linter/format/format_test.go b/linter/format/format_test.go
new file mode 100644
index 00000000..766d7452
--- /dev/null
+++ b/linter/format/format_test.go
@@ -0,0 +1,140 @@
+package format_test
+
+import (
+ "errors"
+ "strings"
+ "testing"
+
+ "github.com/speakeasy-api/openapi/linter/format"
+ "github.com/speakeasy-api/openapi/validation"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+ "gopkg.in/yaml.v3"
+)
+
+// TestTextFormatter_Format is a table test over the text formatter:
+// empty input, single error, mixed severities, and an error with a node
+// carrying line/column information.
+// NOTE(review): parallel subtests capture the range variable tt — safe on
+// Go 1.22+ per-iteration loop variables; confirm the module's Go version.
+func TestTextFormatter_Format(t *testing.T) {
+ t.Parallel()
+
+ tests := []struct {
+ name string
+ errors []error
+ contains []string
+ }{
+ {
+ name: "empty errors",
+ errors: []error{},
+ contains: []string{},
+ },
+ {
+ name: "single error",
+ errors: []error{
+ validation.NewValidationError(validation.SeverityError, "test-rule", errors.New("test error message"), nil),
+ },
+ contains: []string{"error", "test-rule", "test error message"},
+ },
+ {
+ name: "multiple errors with different severities",
+ errors: []error{
+ validation.NewValidationError(validation.SeverityError, "error-rule", errors.New("error message"), nil),
+ validation.NewValidationError(validation.SeverityWarning, "warning-rule", errors.New("warning message"), nil),
+ validation.NewValidationError(validation.SeverityHint, "hint-rule", errors.New("hint message"), nil),
+ },
+ contains: []string{
+ "error", "error-rule", "error message",
+ "warning", "warning-rule", "warning message",
+ "hint", "hint-rule", "hint message",
+ },
+ },
+ {
+ name: "error with line number",
+ errors: []error{
+ &validation.Error{
+ UnderlyingError: errors.New("at specific location"),
+ Node: &yaml.Node{Line: 42, Column: 10},
+ Severity: validation.SeverityError,
+ Rule: "location-rule",
+ },
+ },
+ contains: []string{"42", "10", "location-rule"},
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ t.Parallel()
+
+ formatter := format.NewTextFormatter()
+ result, err := formatter.Format(tt.errors)
+ require.NoError(t, err)
+
+ for _, substr := range tt.contains {
+ assert.Contains(t, result, substr, "output should contain %q", substr)
+ }
+ })
+ }
+}
+
+// TestJSONFormatter_Format is a table test over the JSON formatter; it checks
+// the output is a JSON object (results + summary) containing expected fields.
+func TestJSONFormatter_Format(t *testing.T) {
+ t.Parallel()
+
+ tests := []struct {
+ name string
+ errors []error
+ contains []string
+ }{
+ {
+ name: "empty errors",
+ errors: []error{},
+ contains: []string{`"results"`, `"summary"`},
+ },
+ {
+ name: "single error",
+ errors: []error{
+ validation.NewValidationError(validation.SeverityError, "test-rule", errors.New("test error message"), nil),
+ },
+ contains: []string{`"error"`, `"test-rule"`, `"test error message"`},
+ },
+ {
+ name: "multiple errors",
+ errors: []error{
+ validation.NewValidationError(validation.SeverityError, "rule-1", errors.New("error 1"), nil),
+ validation.NewValidationError(validation.SeverityWarning, "rule-2", errors.New("error 2"), nil),
+ },
+ contains: []string{
+ `"rule-1"`, `"error 1"`,
+ `"rule-2"`, `"error 2"`,
+ `"warning"`,
+ },
+ },
+ {
+ name: "error with location",
+ errors: []error{
+ &validation.Error{
+ UnderlyingError: errors.New("located error"),
+ Node: &yaml.Node{Line: 15, Column: 25},
+ Severity: validation.SeverityError,
+ Rule: "location-rule",
+ },
+ },
+ contains: []string{`"line": 15`, `"column": 25`, `"location-rule"`},
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ t.Parallel()
+
+ formatter := format.NewJSONFormatter()
+ result, err := formatter.Format(tt.errors)
+ require.NoError(t, err)
+
+ // Verify it's valid JSON by checking structure (it's an object, not an array)
+ assert.True(t, strings.HasPrefix(strings.TrimSpace(result), "{"), "should start with {")
+ assert.True(t, strings.HasSuffix(strings.TrimSpace(result), "}"), "should end with }")
+
+ for _, substr := range tt.contains {
+ assert.Contains(t, result, substr, "JSON should contain %q", substr)
+ }
+ })
+ }
+}
diff --git a/linter/format/formatter.go b/linter/format/formatter.go
new file mode 100644
index 00000000..fac6c55a
--- /dev/null
+++ b/linter/format/formatter.go
@@ -0,0 +1,5 @@
+package format
+
+// Formatter renders a slice of lint results (typically *validation.Error
+// values) into a string representation such as plain text or JSON.
+type Formatter interface {
+ // Format renders results; it returns the rendered string and any
+ // serialization error.
+ Format(results []error) (string, error)
+}
diff --git a/linter/format/json.go b/linter/format/json.go
new file mode 100644
index 00000000..6a60e199
--- /dev/null
+++ b/linter/format/json.go
@@ -0,0 +1,113 @@
+package format
+
+import (
+ "encoding/json"
+ "errors"
+ "strings"
+
+ "github.com/speakeasy-api/openapi/validation"
+)
+
+// JSONFormatter renders lint results as an indented JSON object with
+// "results" and "summary" keys. It is stateless and safe to reuse.
+type JSONFormatter struct{}
+
+// NewJSONFormatter creates a new JSON formatter.
+func NewJSONFormatter() *JSONFormatter {
+ return &JSONFormatter{}
+}
+
+// jsonOutput is the top-level JSON document: all results plus a count summary.
+type jsonOutput struct {
+ Results []jsonResult `json:"results"`
+ Summary jsonSummary `json:"summary"`
+}
+
+// jsonResult is one lint finding in wire form.
+type jsonResult struct {
+ Rule string `json:"rule"`
+ Category string `json:"category"`
+ Severity string `json:"severity"`
+ Message string `json:"message"`
+ Location jsonLocation `json:"location"`
+ Document string `json:"document,omitempty"`
+ Fix *jsonFix `json:"fix,omitempty"`
+}
+
+// jsonLocation is a 1-based line/column position within the document.
+type jsonLocation struct {
+ Line int `json:"line"`
+ Column int `json:"column"`
+ Pointer string `json:"pointer,omitempty"` // TODO: Add pointer support
+}
+
+// jsonFix describes an available auto-fix for a finding.
+type jsonFix struct {
+ Description string `json:"description"`
+}
+
+// jsonSummary holds per-severity counts; Total is the overall result count.
+type jsonSummary struct {
+ Total int `json:"total"`
+ Errors int `json:"errors"`
+ Warnings int `json:"warnings"`
+ Hints int `json:"hints"`
+}
+
+// Format renders results as an indented JSON object.
+// validation.Error values are unpacked into structured results; any other
+// error is reported under the synthetic "internal" rule.
+func (f *JSONFormatter) Format(results []error) (string, error) {
+ output := jsonOutput{
+ Results: make([]jsonResult, 0, len(results)),
+ }
+
+ for _, err := range results {
+ var vErr *validation.Error
+ if errors.As(err, &vErr) {
+ // Heuristic: category is the rule-id prefix before the first "-"
+ // (e.g. "style-naming" -> "style"); "unknown" when there is no dash.
+ category := "unknown"
+ if idx := strings.Index(vErr.Rule, "-"); idx > 0 {
+ category = vErr.Rule[:idx]
+ }
+
+ result := jsonResult{
+ Rule: vErr.Rule,
+ Category: category,
+ Severity: vErr.Severity.String(),
+ Message: vErr.UnderlyingError.Error(),
+ Location: jsonLocation{
+ Line: vErr.GetLineNumber(),
+ Column: vErr.GetColumnNumber(),
+ },
+ }
+
+ if vErr.DocumentLocation != "" {
+ result.Document = vErr.DocumentLocation
+ }
+
+ if vErr.Fix != nil {
+ result.Fix = &jsonFix{
+ Description: vErr.Fix.FixDescription(),
+ }
+ }
+
+ output.Results = append(output.Results, result)
+
+ // NOTE(review): no default branch — a severity outside these three
+ // is counted in Total but in no bucket; confirm that is intended.
+ switch vErr.Severity {
+ case validation.SeverityError:
+ output.Summary.Errors++
+ case validation.SeverityWarning:
+ output.Summary.Warnings++
+ case validation.SeverityHint:
+ output.Summary.Hints++
+ }
+ } else {
+ // Non-validation error
+ output.Results = append(output.Results, jsonResult{
+ Rule: "internal",
+ Category: "internal",
+ Severity: "error",
+ Message: err.Error(),
+ })
+ output.Summary.Errors++
+ }
+ }
+
+ output.Summary.Total = len(results)
+
+ bytes, err := json.MarshalIndent(output, "", " ")
+ if err != nil {
+ return "", err
+ }
+
+ return string(bytes), nil
+}
diff --git a/linter/format/text.go b/linter/format/text.go
new file mode 100644
index 00000000..cdf30f8c
--- /dev/null
+++ b/linter/format/text.go
@@ -0,0 +1,59 @@
+package format
+
+import (
+ "errors"
+ "fmt"
+ "strings"
+
+ "github.com/speakeasy-api/openapi/validation"
+)
+
+// TextFormatter renders lint results as tab-separated lines
+// ("line:col<TAB>severity<TAB>rule<TAB>message") plus a trailing summary.
+// It is stateless and safe to reuse.
+type TextFormatter struct{}
+
+// NewTextFormatter creates a new text formatter.
+func NewTextFormatter() *TextFormatter {
+ return &TextFormatter{}
+}
+
+// Format renders results as human-readable text. validation.Error values are
+// shown with their position and rule id; other errors appear under the
+// synthetic "internal" rule. A "✖ N problems" summary line is appended when
+// there is at least one result.
+func (f *TextFormatter) Format(results []error) (string, error) {
+ var sb strings.Builder
+
+ errorCount := 0
+ warningCount := 0
+ hintCount := 0
+
+ for _, err := range results {
+ var vErr *validation.Error
+ if errors.As(err, &vErr) {
+ line := vErr.GetLineNumber()
+ col := vErr.GetColumnNumber()
+ severity := vErr.Severity
+ rule := vErr.Rule
+ msg := vErr.UnderlyingError.Error()
+ if vErr.DocumentLocation != "" {
+ msg = fmt.Sprintf("%s (document: %s)", msg, vErr.DocumentLocation)
+ }
+
+ sb.WriteString(fmt.Sprintf("%d:%d\t%s\t%s\t%s\n", line, col, severity, rule, msg))
+
+ // NOTE(review): no default branch — severities outside these three
+ // are included in the problem total but in no bucket; confirm intent.
+ switch severity {
+ case validation.SeverityError:
+ errorCount++
+ case validation.SeverityWarning:
+ warningCount++
+ case validation.SeverityHint:
+ hintCount++
+ }
+ } else {
+ // Non-validation error
+ sb.WriteString(fmt.Sprintf("-\t-\terror\tinternal\t%s\n", err.Error()))
+ errorCount++
+ }
+ }
+
+ if len(results) > 0 {
+ sb.WriteString("\n")
+ sb.WriteString(fmt.Sprintf("✖ %d problems (%d errors, %d warnings, %d hints)\n", len(results), errorCount, warningCount, hintCount))
+ }
+
+ return sb.String(), nil
+}
diff --git a/linter/linter.go b/linter/linter.go
new file mode 100644
index 00000000..8d067bda
--- /dev/null
+++ b/linter/linter.go
@@ -0,0 +1,407 @@
+package linter
+
+import (
+ "context"
+ "errors"
+ "regexp"
+ "sort"
+ "sync"
+
+ "github.com/speakeasy-api/openapi/linter/format"
+ "github.com/speakeasy-api/openapi/validation"
+)
+
+// Linter is the main linting engine
+type Linter[T any] struct {
+	config *Config // effective configuration: rulesets, categories, rule entries
+	registry *Registry[T] // registered rules and named rulesets
+}
+
+// ruleOverride is the aggregate effect of config rule entries (those
+// without a match pattern) for a single rule ID.
+type ruleOverride struct {
+	present bool // true when at least one entry referenced the rule
+	severity *validation.Severity // last severity override seen, if any
+	disabled *bool // last disabled flag seen, if any
+}
+
+// matchFilter is a compiled filter built from a config rule entry,
+// applied to individual errors after the rules have run.
+type matchFilter struct {
+	ruleID string // rule the filter targets; "" would match all rules, but buildMatchFilters never emits that
+	severity *validation.Severity // severity to apply when the filter matches
+	pattern *regexp.Regexp // optional message pattern; nil matches every message
+	disabled *bool // when true on a match, the error is dropped entirely
+}
+
+// NewLinter creates a new linter with the given configuration
+func NewLinter[T any](config *Config, registry *Registry[T]) *Linter[T] {
+	l := new(Linter[T])
+	l.config = config
+	l.registry = registry
+	return l
+}
+
+// Registry returns the rule registry for documentation generation
+func (l *Linter[T]) Registry() *Registry[T] { return l.registry }
+
+// Lint runs all configured rules against the document
+//
+// Pipeline: pre-existing errors (e.g. from earlier validation) are
+// combined with the findings produced by the enabled lint rules,
+// config severity overrides are applied, match filters drop or
+// re-classify individual errors, and the result is sorted by location
+// before being wrapped in an Output. The error return is currently
+// always nil.
+func (l *Linter[T]) Lint(ctx context.Context, docInfo *DocumentInfo[T], preExistingErrors []error, opts *LintOptions) (*Output, error) {
+	var allErrs []error
+
+	if len(preExistingErrors) > 0 {
+		allErrs = append(allErrs, preExistingErrors...)
+	}
+
+	// Run lint rules - these also return validation.Error instances
+	lintErrs := l.runRules(ctx, docInfo, opts)
+	allErrs = append(allErrs, lintErrs...)
+
+	// Apply severity overrides from config
+	allErrs = l.applySeverityOverrides(allErrs)
+
+	allErrs = l.FilterErrors(allErrs)
+
+	// Sort errors by location
+	validation.SortValidationErrors(allErrs)
+
+	// Format output
+	return l.formatOutput(allErrs), nil
+}
+
+// runRules executes every enabled rule against the document and returns
+// the combined findings. Rules run concurrently; results are appended
+// under a mutex, so the returned order is non-deterministic (Lint sorts
+// afterwards).
+func (l *Linter[T]) runRules(ctx context.Context, docInfo *DocumentInfo[T], opts *LintOptions) []error {
+	// Determine enabled rules
+	enabledRules := l.getEnabledRules()
+
+	// Run rules in parallel for better performance
+	var (
+		mu sync.Mutex
+		errs []error
+		wg sync.WaitGroup
+	)
+
+	for _, rule := range enabledRules {
+		ruleConfig := l.getRuleConfig(rule.ID())
+
+		// Skip if disabled (though getEnabledRules should handle this, double check)
+		if ruleConfig.Enabled != nil && !*ruleConfig.Enabled {
+			continue
+		}
+
+		// Filter rules based on version if VersionFilter is set
+		if opts != nil && opts.VersionFilter != nil && *opts.VersionFilter != "" {
+			ruleVersions := rule.Versions()
+			// If rule specifies versions, check if current version matches
+			if len(ruleVersions) > 0 {
+				versionMatches := false
+				for _, ruleVersion := range ruleVersions {
+					// Match against rule's supported versions
+					// Support both "3.1" and "3.1.0" formats
+					// (prefix match: filter "3.1.0" matches rule version "3.1")
+					if ruleVersion == *opts.VersionFilter ||
+						(len(*opts.VersionFilter) > len(ruleVersion) &&
+							(*opts.VersionFilter)[:len(ruleVersion)] == ruleVersion) {
+						versionMatches = true
+						break
+					}
+				}
+				if !versionMatches {
+					continue // Skip this rule - doesn't apply to this version
+				}
+			}
+			// If rule.Versions() is nil/empty, it applies to all versions
+		}
+
+		// Set resolve options if provided
+		if opts != nil && opts.ResolveOptions != nil {
+			// Copy so the per-rule mutation below cannot leak into opts.
+			resolveOpts := *opts.ResolveOptions
+			// Set document location as target location if not already set
+			if resolveOpts.TargetLocation == "" && docInfo.Location != "" {
+				resolveOpts.TargetLocation = docInfo.Location
+			}
+			ruleConfig.ResolveOptions = &resolveOpts
+		}
+
+		// Run rule in parallel; rule and ruleConfig are passed as
+		// arguments so each goroutine gets its own copies.
+		wg.Add(1)
+		go func(r RuleRunner[T], cfg RuleConfig) {
+			defer wg.Done()
+
+			ruleErrs := r.Run(ctx, docInfo, &cfg)
+
+			mu.Lock()
+			errs = append(errs, ruleErrs...)
+			mu.Unlock()
+		}(rule, ruleConfig)
+	}
+
+	wg.Wait()
+	return errs
+}
+
+// getEnabledRules resolves which rules should run, applying config in
+// increasing precedence: extended rulesets enable rules, category
+// config can enable/disable whole categories, and individual rule
+// entries (those without a match pattern) win over both. The result is
+// sorted by rule ID for deterministic scheduling.
+func (l *Linter[T]) getEnabledRules() []RuleRunner[T] {
+	// Start with all rules if "all" is extended (default)
+	// Or specific rulesets
+
+	// For now, simple implementation: check config for enabled rules
+	// If config.Extends contains "all", include all rules unless disabled
+
+	// Map to track enabled status: ruleID -> enabled
+	ruleStatus := make(map[string]bool)
+
+	// Apply rulesets
+	for _, ruleset := range l.config.Extends {
+		if ids, ok := l.registry.GetRuleset(ruleset); ok {
+			for _, id := range ids {
+				ruleStatus[id] = true
+			}
+		}
+	}
+
+	// Apply category config
+	// Category config overrides ruleset config but is overridden by individual rule config
+	for _, rule := range l.registry.AllRules() {
+		if catConfig, ok := l.config.Categories[rule.Category()]; ok {
+			if catConfig.Enabled != nil {
+				ruleStatus[rule.ID()] = *catConfig.Enabled
+			}
+		}
+	}
+
+	// Apply rule config from list entries without match
+	// (map iteration order is irrelevant here: each rule ID appears once)
+	for id, override := range l.ruleOverrides() {
+		if override.disabled != nil {
+			ruleStatus[id] = !*override.disabled
+			continue
+		}
+		if override.present {
+			ruleStatus[id] = true
+		}
+	}
+
+	var enabled []RuleRunner[T]
+	for id, enabledFlag := range ruleStatus {
+		if enabledFlag {
+			if rule, ok := l.registry.GetRule(id); ok {
+				enabled = append(enabled, rule)
+			}
+		}
+	}
+
+	// Sort for deterministic order
+	sort.Slice(enabled, func(i, j int) bool {
+		return enabled[i].ID() < enabled[j].ID()
+	})
+
+	return enabled
+}
+
+// getRuleConfig builds the effective per-rule configuration: category
+// severity is applied first, then rule-entry severity/disabled
+// overrides take precedence. ResolveOptions is populated later by
+// runRules, not here.
+func (l *Linter[T]) getRuleConfig(ruleID string) RuleConfig {
+	// Start with default config
+	config := RuleConfig{}
+
+	// Apply category config
+	if rule, ok := l.registry.GetRule(ruleID); ok {
+		if catConfig, ok := l.config.Categories[rule.Category()]; ok {
+			if catConfig.Severity != nil {
+				config.Severity = catConfig.Severity
+			}
+		}
+	}
+
+	// Apply rule config from list entries without match
+	// (rule-level settings override the category-level ones above)
+	if override, ok := l.ruleOverrides()[ruleID]; ok {
+		if override.severity != nil {
+			config.Severity = override.severity
+		}
+		if override.disabled != nil {
+			enabled := !*override.disabled
+			config.Enabled = &enabled
+		}
+	}
+
+	return config
+}
+
+// applySeverityOverrides rewrites each validation error's severity to
+// the configured override for its rule (rule-entry overrides beat
+// category overrides; see getRuleConfig). Non-validation errors are
+// left untouched.
+//
+// NOTE(review): the severity is mutated in place on the shared
+// *validation.Error, so callers' pre-existing error values are modified
+// too — confirm that is intended (FilterErrors copies before modifying).
+func (l *Linter[T]) applySeverityOverrides(errs []error) []error {
+	for _, err := range errs {
+		var vErr *validation.Error
+		if errors.As(err, &vErr) {
+			config := l.getRuleConfig(vErr.Rule)
+			if config.Severity != nil {
+				vErr.Severity = *config.Severity
+			}
+		}
+	}
+	return errs
+}
+
+// FilterErrors applies rule-level overrides and match filters to any errors.
+func (l *Linter[T]) FilterErrors(errs []error) []error {
+	filters := l.buildMatchFilters()
+
+	var filtered []error
+	for _, err := range errs {
+		var vErr *validation.Error
+		if !errors.As(err, &vErr) {
+			// Non-validation errors are never filtered.
+			filtered = append(filtered, err)
+			continue
+		}
+
+		updated, keep := applyMatchFilters(vErr, filters)
+		if !keep {
+			continue
+		}
+		filtered = append(filtered, updated)
+	}
+
+	return filtered
+}
+
+// formatOutput wraps the final error list in an Output carrying the
+// configured output format.
+func (l *Linter[T]) formatOutput(errs []error) *Output {
+	out := &Output{Format: l.config.OutputFormat}
+	out.Results = errs
+	return out
+}
+
+// ruleOverrides aggregates config rule entries that have no match
+// pattern, keyed by rule ID. When a rule appears in several entries,
+// the last severity/disabled values seen win.
+func (l *Linter[T]) ruleOverrides() map[string]ruleOverride {
+	overrides := make(map[string]ruleOverride)
+	for _, entry := range l.config.Rules {
+		// Entries with a match pattern are handled by buildMatchFilters.
+		if entry.Match != nil {
+			continue
+		}
+		if entry.ID == "" {
+			continue
+		}
+		override := overrides[entry.ID]
+		override.present = true
+		if entry.Severity != nil {
+			override.severity = entry.Severity
+		}
+		if entry.Disabled != nil {
+			override.disabled = entry.Disabled
+		}
+		overrides[entry.ID] = override
+	}
+	return overrides
+}
+
+// buildMatchFilters compiles the config's rule entries into matchFilter
+// values. Entries without an ID are ignored, as are match-less entries
+// that carry neither a severity nor a disabled flag (they have no
+// filtering effect).
+func (l *Linter[T]) buildMatchFilters() []matchFilter {
+	var filters []matchFilter
+	for _, entry := range l.config.Rules {
+		if entry.ID == "" {
+			continue
+		}
+		if entry.Match == nil && entry.Severity == nil && entry.Disabled == nil {
+			continue
+		}
+		filters = append(filters, matchFilter{
+			ruleID:   entry.ID,
+			pattern:  entry.Match,
+			severity: entry.Severity,
+			disabled: entry.Disabled,
+		})
+	}
+	return filters
+}
+
+// applyMatchFilters evaluates every filter against the error: a filter
+// matches when its rule ID equals the error's rule (empty matches any)
+// and, when a pattern is set, the pattern matches the underlying error
+// message. All matching filters are folded in order, so the last
+// matching entry wins. A matched disabled=true drops the error
+// entirely (second return false) and takes precedence over severity
+// changes; severity changes are applied to a copy, leaving the
+// original error untouched.
+func applyMatchFilters(vErr *validation.Error, filters []matchFilter) (*validation.Error, bool) {
+	var (
+		matched bool
+		severity *validation.Severity
+		disabled *bool
+	)
+
+	message := ""
+	if vErr.UnderlyingError != nil {
+		message = vErr.UnderlyingError.Error()
+	}
+
+	for _, filter := range filters {
+		if filter.ruleID != "" && filter.ruleID != vErr.Rule {
+			continue
+		}
+		if filter.pattern != nil && !filter.pattern.MatchString(message) {
+			continue
+		}
+
+		matched = true
+		if filter.severity != nil {
+			severity = filter.severity
+		}
+		if filter.disabled != nil {
+			disabled = filter.disabled
+		}
+	}
+
+	if !matched {
+		return vErr, true
+	}
+
+	if disabled != nil && *disabled {
+		return nil, false
+	}
+
+	if severity != nil {
+		// Copy-on-write: do not mutate the caller's error.
+		modifiedErr := *vErr
+		modifiedErr.Severity = *severity
+		return &modifiedErr, true
+	}
+
+	return vErr, true
+}
+
+// Output represents the result of linting
+type Output struct {
+	Results []error // all findings, sorted by location; a mix of *validation.Error and plain errors
+	Format OutputFormat // requested output format from the config
+}
+
+// HasErrors reports whether any result is an error-severity validation
+// error or a plain (non-validation) error.
+func (o *Output) HasErrors() bool {
+	for _, err := range o.Results {
+		var vErr *validation.Error
+		if !errors.As(err, &vErr) {
+			// Non-validation errors are treated as errors.
+			return true
+		}
+		if vErr.Severity == validation.SeverityError {
+			return true
+		}
+	}
+	return false
+}
+
+// ErrorCount returns how many results are error-severity validation
+// errors; plain non-validation errors are counted as errors too.
+func (o *Output) ErrorCount() int {
+	count := 0
+	for _, err := range o.Results {
+		var vErr *validation.Error
+		switch {
+		case !errors.As(err, &vErr):
+			count++ // non-validation errors are treated as errors
+		case vErr.Severity == validation.SeverityError:
+			count++
+		}
+	}
+	return count
+}
+
+// FormatText renders the results as plain text via TextFormatter.
+// The formatter's error return is deliberately ignored; TextFormatter
+// currently never returns a non-nil error (see format/text.go).
+func (o *Output) FormatText() string {
+	f := format.NewTextFormatter()
+	s, _ := f.Format(o.Results)
+	return s
+}
+
+// FormatJSON renders the results as JSON via JSONFormatter.
+// NOTE(review): a marshalling failure is swallowed here and yields an
+// empty string — confirm callers can tolerate that.
+func (o *Output) FormatJSON() string {
+	f := format.NewJSONFormatter()
+	s, _ := f.Format(o.Results)
+	return s
+}
diff --git a/linter/linter_test.go b/linter/linter_test.go
new file mode 100644
index 00000000..1259a808
--- /dev/null
+++ b/linter/linter_test.go
@@ -0,0 +1,701 @@
+package linter_test
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "regexp"
+ "testing"
+
+ "github.com/speakeasy-api/openapi/linter"
+ "github.com/speakeasy-api/openapi/pointer"
+ "github.com/speakeasy-api/openapi/validation"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+// MockDoc is a minimal document type used to exercise the generic
+// linter machinery in tests.
+type MockDoc struct {
+	ID string
+}
+
+// mockRule is a configurable test double for the linter's rule
+// interface; each field backs the correspondingly-named accessor.
+type mockRule struct {
+	id string
+	category string
+	description string
+	link string
+	defaultSeverity validation.Severity
+	versions []string
+	summary string
+	runFunc func(ctx context.Context, docInfo *linter.DocumentInfo[*MockDoc], config *linter.RuleConfig) []error // invoked by Run when non-nil
+}
+
+// Trivial accessors satisfying the rule interface.
+func (r *mockRule) ID() string { return r.id }
+func (r *mockRule) Category() string { return r.category }
+func (r *mockRule) Summary() string { return r.summary }
+func (r *mockRule) Description() string { return r.description }
+func (r *mockRule) Link() string { return r.link }
+func (r *mockRule) DefaultSeverity() validation.Severity { return r.defaultSeverity }
+func (r *mockRule) Versions() []string { return r.versions }
+
+// Run delegates to runFunc when set; otherwise it reports no findings.
+func (r *mockRule) Run(ctx context.Context, docInfo *linter.DocumentInfo[*MockDoc], config *linter.RuleConfig) []error {
+	if r.runFunc != nil {
+		return r.runFunc(ctx, docInfo, config)
+	}
+	return nil
+}
+
+// TestLinter_RuleSelection verifies which rules are selected to run:
+// "all" enables every registered rule, rule-entry Disabled suppresses a
+// rule, and a disabled category suppresses all rules in that category.
+func TestLinter_RuleSelection(t *testing.T) {
+	t.Parallel()
+
+	t.Run("extends all includes all rules", func(t *testing.T) {
+		t.Parallel()
+		ctx := t.Context()
+
+		registry := linter.NewRegistry[*MockDoc]()
+		registry.Register(&mockRule{
+			id:              "test-rule-1",
+			category:        "style",
+			defaultSeverity: validation.SeverityError,
+			runFunc: func(_ context.Context, _ *linter.DocumentInfo[*MockDoc], _ *linter.RuleConfig) []error {
+				return []error{validation.NewValidationError(validation.SeverityError, "test-rule-1", errors.New("test error"), nil)}
+			},
+		})
+		registry.Register(&mockRule{
+			id:              "test-rule-2",
+			category:        "security",
+			defaultSeverity: validation.SeverityWarning,
+			runFunc: func(_ context.Context, _ *linter.DocumentInfo[*MockDoc], _ *linter.RuleConfig) []error {
+				return []error{validation.NewValidationError(validation.SeverityWarning, "test-rule-2", errors.New("test warning"), nil)}
+			},
+		})
+
+		config := &linter.Config{
+			Extends: []string{"all"},
+		}
+
+		lntr := linter.NewLinter(config, registry)
+		docInfo := &linter.DocumentInfo[*MockDoc]{
+			Document: &MockDoc{ID: "test"},
+		}
+
+		output, err := lntr.Lint(ctx, docInfo, nil, nil)
+		require.NoError(t, err)
+
+		// Should have errors from both rules
+		assert.Len(t, output.Results, 2)
+	})
+
+	t.Run("disabled rule not executed", func(t *testing.T) {
+		t.Parallel()
+		ctx := t.Context()
+
+		registry := linter.NewRegistry[*MockDoc]()
+		registry.Register(&mockRule{
+			id:              "test-rule-1",
+			category:        "style",
+			defaultSeverity: validation.SeverityError,
+			runFunc: func(_ context.Context, _ *linter.DocumentInfo[*MockDoc], _ *linter.RuleConfig) []error {
+				return []error{validation.NewValidationError(validation.SeverityError, "test-rule-1", errors.New("test error"), nil)}
+			},
+		})
+
+		config := &linter.Config{
+			Extends: []string{"all"},
+			Rules: []linter.RuleEntry{
+				{
+					ID:       "test-rule-1",
+					Disabled: pointer.From(true),
+				},
+			},
+		}
+
+		lntr := linter.NewLinter(config, registry)
+		docInfo := &linter.DocumentInfo[*MockDoc]{
+			Document: &MockDoc{ID: "test"},
+		}
+
+		output, err := lntr.Lint(ctx, docInfo, nil, nil)
+		require.NoError(t, err)
+
+		// Should have no errors since rule is disabled
+		assert.Empty(t, output.Results)
+	})
+
+	t.Run("category disabled affects all rules in category", func(t *testing.T) {
+		t.Parallel()
+		ctx := t.Context()
+
+		registry := linter.NewRegistry[*MockDoc]()
+		registry.Register(&mockRule{
+			id:              "style-rule-1",
+			category:        "style",
+			defaultSeverity: validation.SeverityError,
+			runFunc: func(_ context.Context, _ *linter.DocumentInfo[*MockDoc], _ *linter.RuleConfig) []error {
+				return []error{validation.NewValidationError(validation.SeverityError, "style-rule-1", errors.New("style error 1"), nil)}
+			},
+		})
+		registry.Register(&mockRule{
+			id:              "style-rule-2",
+			category:        "style",
+			defaultSeverity: validation.SeverityError,
+			runFunc: func(_ context.Context, _ *linter.DocumentInfo[*MockDoc], _ *linter.RuleConfig) []error {
+				return []error{validation.NewValidationError(validation.SeverityError, "style-rule-2", errors.New("style error 2"), nil)}
+			},
+		})
+		registry.Register(&mockRule{
+			id:              "security-rule-1",
+			category:        "security",
+			defaultSeverity: validation.SeverityError,
+			runFunc: func(_ context.Context, _ *linter.DocumentInfo[*MockDoc], _ *linter.RuleConfig) []error {
+				return []error{validation.NewValidationError(validation.SeverityError, "security-rule-1", errors.New("security error"), nil)}
+			},
+		})
+
+		falseVal := false
+		config := &linter.Config{
+			Extends: []string{"all"},
+			Categories: map[string]linter.CategoryConfig{
+				"style": {
+					Enabled: &falseVal,
+				},
+			},
+		}
+
+		lntr := linter.NewLinter(config, registry)
+		docInfo := &linter.DocumentInfo[*MockDoc]{
+			Document: &MockDoc{ID: "test"},
+		}
+
+		output, err := lntr.Lint(ctx, docInfo, nil, nil)
+		require.NoError(t, err)
+
+		// Should only have security error, style rules disabled
+		require.Len(t, output.Results, 1)
+		assert.Contains(t, output.Results[0].Error(), "security-rule-1")
+	})
+}
+
+// TestLinter_SeverityOverrides verifies severity rewriting: rule-entry
+// overrides, category overrides, and rule-over-category precedence.
+func TestLinter_SeverityOverrides(t *testing.T) {
+	t.Parallel()
+
+	t.Run("rule severity override", func(t *testing.T) {
+		t.Parallel()
+		ctx := t.Context()
+
+		registry := linter.NewRegistry[*MockDoc]()
+		registry.Register(&mockRule{
+			id:              "test-rule",
+			category:        "style",
+			defaultSeverity: validation.SeverityError,
+			runFunc: func(_ context.Context, _ *linter.DocumentInfo[*MockDoc], _ *linter.RuleConfig) []error {
+				return []error{validation.NewValidationError(validation.SeverityError, "test-rule", errors.New("test error"), nil)}
+			},
+		})
+
+		warningSeverity := validation.SeverityWarning
+		config := &linter.Config{
+			Extends: []string{"all"},
+			Rules: []linter.RuleEntry{
+				{
+					ID:       "test-rule",
+					Severity: &warningSeverity,
+				},
+			},
+		}
+
+		lntr := linter.NewLinter(config, registry)
+		docInfo := &linter.DocumentInfo[*MockDoc]{
+			Document: &MockDoc{ID: "test"},
+		}
+
+		output, err := lntr.Lint(ctx, docInfo, nil, nil)
+		require.NoError(t, err)
+
+		require.Len(t, output.Results, 1)
+		var vErr *validation.Error
+		require.ErrorAs(t, output.Results[0], &vErr)
+		assert.Equal(t, validation.SeverityWarning, vErr.Severity)
+	})
+
+	t.Run("category severity override", func(t *testing.T) {
+		t.Parallel()
+		ctx := t.Context()
+
+		registry := linter.NewRegistry[*MockDoc]()
+		registry.Register(&mockRule{
+			id:              "style-rule",
+			category:        "style",
+			defaultSeverity: validation.SeverityError,
+			runFunc: func(_ context.Context, _ *linter.DocumentInfo[*MockDoc], _ *linter.RuleConfig) []error {
+				return []error{validation.NewValidationError(validation.SeverityError, "style-rule", errors.New("style error"), nil)}
+			},
+		})
+
+		warningSeverity := validation.SeverityWarning
+		config := &linter.Config{
+			Extends: []string{"all"},
+			Categories: map[string]linter.CategoryConfig{
+				"style": {
+					Severity: &warningSeverity,
+				},
+			},
+		}
+
+		lntr := linter.NewLinter(config, registry)
+		docInfo := &linter.DocumentInfo[*MockDoc]{
+			Document: &MockDoc{ID: "test"},
+		}
+
+		output, err := lntr.Lint(ctx, docInfo, nil, nil)
+		require.NoError(t, err)
+
+		require.Len(t, output.Results, 1)
+		var vErr *validation.Error
+		require.ErrorAs(t, output.Results[0], &vErr)
+		assert.Equal(t, validation.SeverityWarning, vErr.Severity)
+	})
+
+	t.Run("rule severity override takes precedence over category", func(t *testing.T) {
+		t.Parallel()
+		ctx := t.Context()
+
+		registry := linter.NewRegistry[*MockDoc]()
+		registry.Register(&mockRule{
+			id:              "style-rule",
+			category:        "style",
+			defaultSeverity: validation.SeverityError,
+			runFunc: func(_ context.Context, _ *linter.DocumentInfo[*MockDoc], _ *linter.RuleConfig) []error {
+				return []error{validation.NewValidationError(validation.SeverityError, "style-rule", errors.New("style error"), nil)}
+			},
+		})
+
+		warningSeverity := validation.SeverityWarning
+		hintSeverity := validation.SeverityHint
+		config := &linter.Config{
+			Extends: []string{"all"},
+			Categories: map[string]linter.CategoryConfig{
+				"style": {
+					Severity: &warningSeverity,
+				},
+			},
+			Rules: []linter.RuleEntry{
+				{
+					ID:       "style-rule",
+					Severity: &hintSeverity,
+				},
+			},
+		}
+
+		lntr := linter.NewLinter(config, registry)
+		docInfo := &linter.DocumentInfo[*MockDoc]{
+			Document: &MockDoc{ID: "test"},
+		}
+
+		output, err := lntr.Lint(ctx, docInfo, nil, nil)
+		require.NoError(t, err)
+
+		require.Len(t, output.Results, 1)
+		var vErr *validation.Error
+		require.ErrorAs(t, output.Results[0], &vErr)
+		// Rule severity should override category severity
+		assert.Equal(t, validation.SeverityHint, vErr.Severity)
+	})
+}
+
+// TestLinter_PreExistingErrors verifies that errors supplied to Lint are
+// merged with the findings produced by the rules themselves.
+func TestLinter_PreExistingErrors(t *testing.T) {
+	t.Parallel()
+	ctx := t.Context()
+
+	registry := linter.NewRegistry[*MockDoc]()
+	registry.Register(&mockRule{
+		id:              "test-rule",
+		category:        "style",
+		defaultSeverity: validation.SeverityError,
+		runFunc: func(_ context.Context, _ *linter.DocumentInfo[*MockDoc], _ *linter.RuleConfig) []error {
+			return []error{validation.NewValidationError(validation.SeverityError, "test-rule", errors.New("lint error"), nil)}
+		},
+	})
+
+	config := &linter.Config{
+		Extends: []string{"all"},
+	}
+
+	lntr := linter.NewLinter(config, registry)
+	docInfo := &linter.DocumentInfo[*MockDoc]{
+		Document: &MockDoc{ID: "test"},
+	}
+
+	preExistingErrs := []error{
+		validation.NewValidationError(validation.SeverityError, "validation-required", errors.New("validation error"), nil),
+	}
+
+	output, err := lntr.Lint(ctx, docInfo, preExistingErrs, nil)
+	require.NoError(t, err)
+
+	// Should include both pre-existing and lint errors
+	assert.Len(t, output.Results, 2)
+}
+
+// Verifies that a match-less rule entry with a severity re-classifies
+// errors passed directly through FilterErrors.
+func TestLinter_FilterErrors_RuleLevelOverride(t *testing.T) {
+	t.Parallel()
+
+	warningSeverity := validation.SeverityWarning
+	config := &linter.Config{
+		Extends: []string{"all"},
+		Rules: []linter.RuleEntry{
+			{
+				ID:       "validation-required",
+				Severity: &warningSeverity,
+			},
+		},
+	}
+
+	lntr := linter.NewLinter(config, linter.NewRegistry[*MockDoc]())
+	input := []error{
+		validation.NewValidationError(validation.SeverityError, "validation-required", errors.New("validation error"), nil),
+	}
+
+	filtered := lntr.FilterErrors(input)
+	require.Len(t, filtered, 1)
+
+	var vErr *validation.Error
+	require.ErrorAs(t, filtered[0], &vErr)
+	assert.Equal(t, validation.SeverityWarning, vErr.Severity)
+}
+
+// Verifies that a rule entry with no severity, match, or disabled flag
+// leaves errors for that rule completely unchanged.
+func TestLinter_FilterErrors_UnknownRuleNoMatch_Passthrough(t *testing.T) {
+	t.Parallel()
+
+	config := &linter.Config{
+		Extends: []string{"all"},
+		Rules: []linter.RuleEntry{
+			{
+				ID: "validation-required",
+			},
+		},
+	}
+
+	lntr := linter.NewLinter(config, linter.NewRegistry[*MockDoc]())
+	input := []error{
+		validation.NewValidationError(validation.SeverityError, "validation-required", errors.New("validation error"), nil),
+	}
+
+	filtered := lntr.FilterErrors(input)
+	require.Len(t, filtered, 1)
+
+	var vErr *validation.Error
+	require.ErrorAs(t, filtered[0], &vErr)
+	assert.Equal(t, validation.SeverityError, vErr.Severity)
+}
+
+// Verifies that when several match entries hit the same error, the last
+// entry's severity wins.
+func TestLinter_FilterErrors_MatchOrder_LastWins(t *testing.T) {
+	t.Parallel()
+
+	warningSeverity := validation.SeverityWarning
+	hintSeverity := validation.SeverityHint
+	config := &linter.Config{
+		Extends: []string{"all"},
+		Rules: []linter.RuleEntry{
+			{
+				ID:       "validation-required",
+				Match:    regexp.MustCompile(".*title.*"),
+				Severity: &warningSeverity,
+			},
+			{
+				ID:       "validation-required",
+				Match:    regexp.MustCompile(".*title.*"),
+				Severity: &hintSeverity,
+			},
+		},
+	}
+
+	lntr := linter.NewLinter(config, linter.NewRegistry[*MockDoc]())
+	input := []error{
+		validation.NewValidationError(validation.SeverityError, "validation-required", errors.New("info.title is required"), nil),
+	}
+
+	filtered := lntr.FilterErrors(input)
+	require.Len(t, filtered, 1)
+
+	var vErr *validation.Error
+	require.ErrorAs(t, filtered[0], &vErr)
+	assert.Equal(t, validation.SeverityHint, vErr.Severity)
+}
+
+// Verifies that a matching entry with Disabled=true drops the error
+// from the results entirely.
+func TestLinter_FilterErrors_MatchDisable(t *testing.T) {
+	t.Parallel()
+
+	disabled := true
+	config := &linter.Config{
+		Extends: []string{"all"},
+		Rules: []linter.RuleEntry{
+			{
+				ID:       "validation-required",
+				Match:    regexp.MustCompile(".*title.*"),
+				Disabled: &disabled,
+			},
+		},
+	}
+
+	lntr := linter.NewLinter(config, linter.NewRegistry[*MockDoc]())
+	input := []error{
+		validation.NewValidationError(validation.SeverityError, "validation-required", errors.New("info.title is required"), nil),
+	}
+
+	filtered := lntr.FilterErrors(input)
+	assert.Empty(t, filtered)
+}
+
+// Verifies that the results of all (concurrently executed) rules are
+// collected — every registered rule must contribute exactly one finding.
+func TestLinter_ParallelExecution(t *testing.T) {
+	t.Parallel()
+	ctx := t.Context()
+
+	registry := linter.NewRegistry[*MockDoc]()
+
+	// Create multiple rules that all run
+	for i := 0; i < 10; i++ {
+		// ruleID is declared inside the loop so each closure captures
+		// its own copy.
+		ruleID := fmt.Sprintf("test-rule-%d", i)
+		registry.Register(&mockRule{
+			id:              ruleID,
+			category:        "test",
+			defaultSeverity: validation.SeverityError,
+			runFunc: func(_ context.Context, _ *linter.DocumentInfo[*MockDoc], _ *linter.RuleConfig) []error {
+				return []error{validation.NewValidationError(validation.SeverityError, ruleID, fmt.Errorf("error from %s", ruleID), nil)}
+			},
+		})
+	}
+
+	config := &linter.Config{
+		Extends: []string{"all"},
+	}
+
+	lntr := linter.NewLinter(config, registry)
+	docInfo := &linter.DocumentInfo[*MockDoc]{
+		Document: &MockDoc{ID: "test"},
+	}
+
+	output, err := lntr.Lint(ctx, docInfo, nil, nil)
+	require.NoError(t, err)
+
+	// Should have errors from all 10 rules
+	assert.Len(t, output.Results, 10)
+
+	// Verify all rules executed
+	foundRules := make(map[string]bool)
+	for _, result := range output.Results {
+		var vErr *validation.Error
+		if errors.As(result, &vErr) {
+			foundRules[vErr.Rule] = true
+		}
+	}
+	assert.Len(t, foundRules, 10, "all rules should have executed")
+}
+
+// Table-driven check of Output.HasErrors: only error-severity
+// validation errors and plain errors count as errors.
+func TestOutput_HasErrors(t *testing.T) {
+	t.Parallel()
+
+	tests := []struct {
+		name      string
+		results   []error
+		hasErrors bool
+	}{
+		{
+			name:      "no errors",
+			results:   []error{},
+			hasErrors: false,
+		},
+		{
+			name: "only warnings",
+			results: []error{
+				validation.NewValidationError(validation.SeverityWarning, "test-rule", errors.New("warning"), nil),
+			},
+			hasErrors: false,
+		},
+		{
+			name: "only hints",
+			results: []error{
+				validation.NewValidationError(validation.SeverityHint, "test-rule", errors.New("hint"), nil),
+			},
+			hasErrors: false,
+		},
+		{
+			name: "has error severity",
+			results: []error{
+				validation.NewValidationError(validation.SeverityError, "test-rule", errors.New("error"), nil),
+			},
+			hasErrors: true,
+		},
+		{
+			name: "mixed severities with error",
+			results: []error{
+				validation.NewValidationError(validation.SeverityWarning, "test-rule", errors.New("warning"), nil),
+				validation.NewValidationError(validation.SeverityError, "test-rule", errors.New("error"), nil),
+			},
+			hasErrors: true,
+		},
+		{
+			name: "non-validation error treated as error",
+			results: []error{
+				errors.New("plain error"),
+			},
+			hasErrors: true,
+		},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			t.Parallel()
+
+			output := &linter.Output{
+				Results: tt.results,
+			}
+
+			assert.Equal(t, tt.hasErrors, output.HasErrors())
+		})
+	}
+}
+
+// Table-driven check of Output.ErrorCount: counts error-severity
+// validation errors plus plain non-validation errors.
+func TestOutput_ErrorCount(t *testing.T) {
+	t.Parallel()
+
+	tests := []struct {
+		name       string
+		results    []error
+		errorCount int
+	}{
+		{
+			name:       "no errors",
+			results:    []error{},
+			errorCount: 0,
+		},
+		{
+			name: "only warnings",
+			results: []error{
+				validation.NewValidationError(validation.SeverityWarning, "test-rule", errors.New("warning"), nil),
+			},
+			errorCount: 0,
+		},
+		{
+			name: "one error",
+			results: []error{
+				validation.NewValidationError(validation.SeverityError, "test-rule", errors.New("error"), nil),
+			},
+			errorCount: 1,
+		},
+		{
+			name: "mixed severities",
+			results: []error{
+				validation.NewValidationError(validation.SeverityWarning, "test-rule", errors.New("warning"), nil),
+				validation.NewValidationError(validation.SeverityError, "test-rule-1", errors.New("error 1"), nil),
+				validation.NewValidationError(validation.SeverityHint, "test-rule", errors.New("hint"), nil),
+				validation.NewValidationError(validation.SeverityError, "test-rule-2", errors.New("error 2"), nil),
+			},
+			errorCount: 2,
+		},
+		{
+			name: "non-validation errors counted",
+			results: []error{
+				errors.New("plain error 1"),
+				validation.NewValidationError(validation.SeverityWarning, "test-rule", errors.New("warning"), nil),
+				errors.New("plain error 2"),
+			},
+			errorCount: 2,
+		},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			t.Parallel()
+
+			output := &linter.Output{
+				Results: tt.results,
+			}
+
+			assert.Equal(t, tt.errorCount, output.ErrorCount())
+		})
+	}
+}
+
+// Smoke-tests FormatText and FormatJSON: both must produce non-empty
+// output containing the rule ID.
+func TestOutput_Formatting(t *testing.T) {
+	t.Parallel()
+
+	output := &linter.Output{
+		Results: []error{
+			validation.NewValidationError(validation.SeverityError, "test-rule", errors.New("test error"), nil),
+		},
+		Format: linter.OutputFormatText,
+	}
+
+	t.Run("format text non-empty", func(t *testing.T) {
+		t.Parallel()
+		text := output.FormatText()
+		assert.NotEmpty(t, text)
+		assert.Contains(t, text, "test-rule")
+	})
+
+	t.Run("format json non-empty", func(t *testing.T) {
+		t.Parallel()
+		json := output.FormatJSON()
+		assert.NotEmpty(t, json)
+		assert.Contains(t, json, "test-rule")
+	})
+}
+
+// Verifies that Lint passes the collected errors through
+// validation.SortValidationErrors without losing any.
+func TestLinter_ErrorSorting(t *testing.T) {
+	t.Parallel()
+	ctx := t.Context()
+
+	registry := linter.NewRegistry[*MockDoc]()
+	registry.Register(&mockRule{
+		id:              "test-rule",
+		category:        "style",
+		defaultSeverity: validation.SeverityError,
+		runFunc: func(_ context.Context, _ *linter.DocumentInfo[*MockDoc], _ *linter.RuleConfig) []error {
+			// Return errors in unsorted order
+			return []error{
+				validation.NewValidationError(validation.SeverityError, "test-rule", errors.New("error 3"), nil),
+				validation.NewValidationError(validation.SeverityError, "test-rule", errors.New("error 1"), nil),
+				validation.NewValidationError(validation.SeverityError, "test-rule", errors.New("error 2"), nil),
+			}
+		},
+	})
+
+	config := &linter.Config{
+		Extends: []string{"all"},
+	}
+
+	lntr := linter.NewLinter(config, registry)
+	docInfo := &linter.DocumentInfo[*MockDoc]{
+		Document: &MockDoc{ID: "test"},
+	}
+
+	output, err := lntr.Lint(ctx, docInfo, nil, nil)
+	require.NoError(t, err)
+
+	// Errors should be sorted by validation.SortValidationErrors
+	assert.Len(t, output.Results, 3)
+}
+
+// Verifies that the registry passed to NewLinter is exposed via
+// Linter.Registry for documentation generation.
+func TestLinter_Registry(t *testing.T) {
+	t.Parallel()
+
+	registry := linter.NewRegistry[*MockDoc]()
+	registry.Register(&mockRule{
+		id:              "test-rule",
+		category:        "style",
+		defaultSeverity: validation.SeverityError,
+	})
+
+	config := &linter.Config{}
+	lntr := linter.NewLinter(config, registry)
+
+	// Should be able to access registry for documentation
+	reg := lntr.Registry()
+	require.NotNil(t, reg)
+
+	rule, exists := reg.GetRule("test-rule")
+	assert.True(t, exists)
+	assert.Equal(t, "test-rule", rule.ID())
+}
diff --git a/linter/registry.go b/linter/registry.go
new file mode 100644
index 00000000..4366e8ca
--- /dev/null
+++ b/linter/registry.go
@@ -0,0 +1,125 @@
+package linter
+
+import (
+ "fmt"
+ "sort"
+)
+
+// Registry holds registered rules
+type Registry[T any] struct {
+	rules map[string]RuleRunner[T] // rule ID -> rule implementation
+	rulesets map[string][]string // ruleset name -> rule IDs
+}
+
+// NewRegistry creates a new rule registry
+func NewRegistry[T any]() *Registry[T] {
+	reg := new(Registry[T])
+	reg.rules = make(map[string]RuleRunner[T])
+	reg.rulesets = make(map[string][]string)
+	return reg
+}
+
+// Register registers a rule
+// NOTE(review): a duplicate rule ID silently replaces the earlier rule
+// (last registration wins), unlike RegisterRuleset which errors on
+// duplicates — confirm this asymmetry is intended.
+func (r *Registry[T]) Register(rule RuleRunner[T]) {
+	r.rules[rule.ID()] = rule
+}
+
+// RegisterRuleset registers a ruleset, failing if the name is already
+// taken or if any referenced rule has not been registered.
+func (r *Registry[T]) RegisterRuleset(name string, ruleIDs []string) error {
+	if _, dup := r.rulesets[name]; dup {
+		return fmt.Errorf("ruleset %q already registered", name)
+	}
+
+	// Every referenced rule must already be registered.
+	for _, ruleID := range ruleIDs {
+		if _, known := r.rules[ruleID]; !known {
+			return fmt.Errorf("rule %q in ruleset %q not found", ruleID, name)
+		}
+	}
+
+	r.rulesets[name] = ruleIDs
+	return nil
+}
+
+// GetRule returns a rule by ID
+func (r *Registry[T]) GetRule(id string) (RuleRunner[T], bool) {
+ rule, ok := r.rules[id]
+ return rule, ok
+}
+
+// GetRuleset returns rule IDs for a ruleset
+func (r *Registry[T]) GetRuleset(name string) ([]string, bool) {
+	// The implicit "all" ruleset always expands to every registered rule.
+	if name == "all" {
+		return r.AllRuleIDs(), true
+	}
+	ruleIDs, found := r.rulesets[name]
+	return ruleIDs, found
+}
+
+// AllRules returns all registered rules, sorted by rule ID for
+// deterministic order.
+func (r *Registry[T]) AllRules() []RuleRunner[T] {
+	rules := make([]RuleRunner[T], 0, len(r.rules))
+	// AllRuleIDs already returns the IDs sorted.
+	for _, id := range r.AllRuleIDs() {
+		rules = append(rules, r.rules[id])
+	}
+	return rules
+}
+
+// AllRuleIDs returns all registered rule IDs in sorted order.
+func (r *Registry[T]) AllRuleIDs() []string {
+	ids := make([]string, 0, len(r.rules))
+	for ruleID := range r.rules {
+		ids = append(ids, ruleID)
+	}
+	sort.Strings(ids)
+	return ids
+}
+
+// AllCategories returns all unique categories
+func (r *Registry[T]) AllCategories() []string {
+ categories := make(map[string]bool)
+ for _, rule := range r.rules {
+ categories[rule.Category()] = true
+ }
+
+ cats := make([]string, 0, len(categories))
+ for cat := range categories {
+ cats = append(cats, cat)
+ }
+ sort.Strings(cats)
+ return cats
+}
+
+// AllRulesets returns all registered ruleset names
+// The implicit "all" ruleset is always included.
+// NOTE(review): a ruleset literally named "all" would be listed twice
+// here (and shadowed by the implicit one in GetRuleset) — consider
+// rejecting that name in RegisterRuleset.
+func (r *Registry[T]) AllRulesets() []string {
+	names := make([]string, 0, len(r.rulesets)+1)
+	names = append(names, "all")
+	for name := range r.rulesets {
+		names = append(names, name)
+	}
+	sort.Strings(names)
+	return names
+}
+
+// RulesetsContaining returns names of rulesets that contain the given rule ID
+func (r *Registry[T]) RulesetsContaining(ruleID string) []string {
+ var sets []string
+
+ // "all" always contains everything
+ sets = append(sets, "all")
+
+ for name, ids := range r.rulesets {
+ for _, id := range ids {
+ if id == ruleID {
+ sets = append(sets, name)
+ break
+ }
+ }
+ }
+ sort.Strings(sets)
+ return sets
+}
diff --git a/linter/registry_test.go b/linter/registry_test.go
new file mode 100644
index 00000000..232123b6
--- /dev/null
+++ b/linter/registry_test.go
@@ -0,0 +1,127 @@
+package linter_test
+
+import (
+ "testing"
+
+ "github.com/speakeasy-api/openapi/linter"
+ "github.com/speakeasy-api/openapi/validation"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestRegistry_RegisterRuleset(t *testing.T) {
+ t.Parallel()
+
+ t.Run("successfully register ruleset", func(t *testing.T) {
+ t.Parallel()
+
+ registry := linter.NewRegistry[*MockDoc]()
+ registry.Register(&mockRule{id: "rule-1", category: "style", defaultSeverity: validation.SeverityError})
+ registry.Register(&mockRule{id: "rule-2", category: "style", defaultSeverity: validation.SeverityError})
+
+ err := registry.RegisterRuleset("recommended", []string{"rule-1", "rule-2"})
+ require.NoError(t, err)
+
+ ruleIDs, exists := registry.GetRuleset("recommended")
+ assert.True(t, exists)
+ assert.ElementsMatch(t, []string{"rule-1", "rule-2"}, ruleIDs) // order-insensitive: element order is not part of the contract asserted here
+ })
+
+ t.Run("error when rule not found", func(t *testing.T) {
+ t.Parallel()
+
+ registry := linter.NewRegistry[*MockDoc]()
+ registry.Register(&mockRule{id: "rule-1", category: "style", defaultSeverity: validation.SeverityError})
+
+ err := registry.RegisterRuleset("test", []string{"rule-1", "nonexistent"})
+ require.Error(t, err)
+ assert.Contains(t, err.Error(), "nonexistent") // error message must name the offending rule ID
+ assert.Contains(t, err.Error(), "not found")
+ })
+
+ t.Run("error when ruleset already registered", func(t *testing.T) {
+ t.Parallel()
+
+ registry := linter.NewRegistry[*MockDoc]()
+ registry.Register(&mockRule{id: "rule-1", category: "style", defaultSeverity: validation.SeverityError})
+
+ err := registry.RegisterRuleset("test", []string{"rule-1"})
+ require.NoError(t, err)
+
+ err = registry.RegisterRuleset("test", []string{"rule-1"}) // second registration under the same name must be rejected
+ require.Error(t, err)
+ assert.Contains(t, err.Error(), "already registered")
+ })
+}
+
+func TestRegistry_AllCategories(t *testing.T) {
+ t.Parallel()
+
+ registry := linter.NewRegistry[*MockDoc]()
+ registry.Register(&mockRule{id: "rule-1", category: "style", defaultSeverity: validation.SeverityError})
+ registry.Register(&mockRule{id: "rule-2", category: "style", defaultSeverity: validation.SeverityError}) // duplicate "style" category must be deduped
+ registry.Register(&mockRule{id: "rule-3", category: "security", defaultSeverity: validation.SeverityError})
+ registry.Register(&mockRule{id: "rule-4", category: "best-practices", defaultSeverity: validation.SeverityError})
+
+ categories := registry.AllCategories()
+ // Should be sorted
+ assert.Equal(t, []string{"best-practices", "security", "style"}, categories)
+}
+
+func TestRegistry_AllRulesets(t *testing.T) {
+ t.Parallel()
+
+ registry := linter.NewRegistry[*MockDoc]()
+ registry.Register(&mockRule{id: "rule-1", category: "style", defaultSeverity: validation.SeverityError})
+ require.NoError(t, registry.RegisterRuleset("recommended", []string{"rule-1"}))
+ require.NoError(t, registry.RegisterRuleset("strict", []string{"rule-1"}))
+
+ rulesets := registry.AllRulesets()
+ assert.Contains(t, rulesets, "all") // implicit built-in ruleset
+ assert.Contains(t, rulesets, "recommended")
+ assert.Contains(t, rulesets, "strict")
+ // Should be sorted
+ assert.Equal(t, "all", rulesets[0]) // "all" is lexicographically first of {"all", "recommended", "strict"}
+}
+
+func TestRegistry_RulesetsContaining(t *testing.T) {
+ t.Parallel()
+
+ registry := linter.NewRegistry[*MockDoc]() // shared by the parallel subtests below, which only read it
+ registry.Register(&mockRule{id: "rule-1", category: "style", defaultSeverity: validation.SeverityError})
+ registry.Register(&mockRule{id: "rule-2", category: "security", defaultSeverity: validation.SeverityError})
+ require.NoError(t, registry.RegisterRuleset("recommended", []string{"rule-1"}))
+ require.NoError(t, registry.RegisterRuleset("strict", []string{"rule-1", "rule-2"}))
+
+ t.Run("rule in multiple rulesets", func(t *testing.T) {
+ t.Parallel()
+ rulesets := registry.RulesetsContaining("rule-1")
+ assert.Contains(t, rulesets, "all")
+ assert.Contains(t, rulesets, "recommended")
+ assert.Contains(t, rulesets, "strict")
+ })
+
+ t.Run("rule in subset of rulesets", func(t *testing.T) {
+ t.Parallel()
+ rulesets := registry.RulesetsContaining("rule-2")
+ assert.Contains(t, rulesets, "all")
+ assert.Contains(t, rulesets, "strict")
+ assert.NotContains(t, rulesets, "recommended") // rule-2 was only added to "strict"
+ })
+}
+
+func TestRegistry_GetRuleset_UnknownReturnsFalse(t *testing.T) {
+ t.Parallel()
+
+ registry := linter.NewRegistry[*MockDoc]()
+ _, exists := registry.GetRuleset("nonexistent")
+ assert.False(t, exists) // unknown ruleset names report exists=false rather than an error
+}
+
+func TestRegistry_GetRule_UnknownReturnsFalse(t *testing.T) {
+ t.Parallel()
+
+ registry := linter.NewRegistry[*MockDoc]()
+ _, exists := registry.GetRule("nonexistent")
+ assert.False(t, exists) // unknown rule IDs report exists=false rather than an error
+}
diff --git a/linter/rule.go b/linter/rule.go
new file mode 100644
index 00000000..dd53c623
--- /dev/null
+++ b/linter/rule.go
@@ -0,0 +1,70 @@
+package linter
+
+import (
+ "context"
+
+ "github.com/speakeasy-api/openapi/validation"
+)
+
+// Rule represents a single linting rule
+type Rule interface {
+ // ID returns the unique identifier for this rule (e.g., "style-path-params")
+ ID() string
+
+ // Category returns the rule category (e.g., "style", "validation", "security")
+ Category() string
+
+ // Description returns a human-readable description of what the rule checks
+ Description() string
+
+ // Summary returns a short summary of what the rule checks
+ Summary() string
+
+ // Link returns an optional URL to documentation for this rule
+ Link() string // empty string when no documentation link exists
+
+ // DefaultSeverity returns the default severity level for this rule
+ DefaultSeverity() validation.Severity
+
+ // Versions returns the spec versions this rule applies to (nil = all versions)
+ Versions() []string // NOTE(review): version string format (e.g. "3.0" vs "3.0.0") is unspecified here — confirm against consumers
+}
+
+// RuleRunner is the interface rules must implement to execute their logic
+// This is separate from Rule to allow different runner types for different specs
+type RuleRunner[T any] interface {
+ Rule
+
+ // Run executes the rule against the provided document
+ // DocumentInfo provides both the document and its location for resolving external references
+ // Returns any issues found as validation errors
+ Run(ctx context.Context, docInfo *DocumentInfo[T], config *RuleConfig) []error // nil/empty result means the document passed this rule
+}
+
+// DocumentedRule provides extended documentation for a rule
+type DocumentedRule interface {
+ Rule
+
+ // GoodExample returns YAML showing correct usage
+ GoodExample() string
+
+ // BadExample returns YAML showing incorrect usage
+ BadExample() string
+
+ // Rationale explains why this rule exists
+ Rationale() string
+
+ // FixAvailable returns true if the rule provides auto-fix suggestions
+ FixAvailable() bool // informational only: the fix itself is not exposed on this interface
+}
+
+// ConfigurableRule indicates a rule has configurable options
+type ConfigurableRule interface {
+ Rule
+
+ // ConfigSchema returns JSON Schema for rule-specific options
+ ConfigSchema() map[string]any // schema as a generic map, not a typed schema object
+
+ // ConfigDefaults returns default values for options
+ ConfigDefaults() map[string]any // keys mirror the properties declared in ConfigSchema
+}
diff --git a/marshaller/model.go b/marshaller/model.go
index daee7cec..ec7feae7 100644
--- a/marshaller/model.go
+++ b/marshaller/model.go
@@ -49,8 +49,9 @@ type Model[T any] struct {
Valid bool
core T
- objectCache *sync.Map
- documentCache *sync.Map
+ objectCache *sync.Map
+ documentCache *sync.Map
+ externalDocumentCache *sync.Map
}
// GetCore will return the low level representation of the model.
@@ -119,37 +120,45 @@ func (m *Model[T]) GetRootNodeColumn() int {
return -1
}
-func (m *Model[T]) GetPropertyLine(prop string) int {
+func (m *Model[T]) GetPropertyNode(prop string) *yaml.Node { // returns the key node for a core field, or nil when unavailable
 // Use reflection to find the property in the core and then see if it is a marshaller.Node and if it is get the line of the key node if set
 if m == nil {
- return -1
+ return nil
 }
 // Get reflection value of the core
 coreValue := reflect.ValueOf(&m.core).Elem()
 if !coreValue.IsValid() {
- return -1
+ return nil
 }
 // Find the field by name
 fieldValue := coreValue.FieldByName(prop)
 if !fieldValue.IsValid() {
- return -1
+ return nil
 }
 // Check if the field implements the interface we need to get the key node
 // We need to check if it has a GetKeyNode method or if it's a Node type
 fieldInterface := fieldValue.Interface()
+ var keyNode *yaml.Node // stays nil when the field is not Node-like or has no key node
+
 // Try to cast to a Node-like interface that has GetKeyNode method
 if nodeWithKeyNode, ok := fieldInterface.(interface{ GetKeyNode() *yaml.Node }); ok {
- keyNode := nodeWithKeyNode.GetKeyNode()
- if keyNode != nil {
- return keyNode.Line
- }
+ keyNode = nodeWithKeyNode.GetKeyNode() // NOTE(review): stray blank line below — consider removing
+
 }
- return -1
+ return keyNode
+}
+
+func (m *Model[T]) GetPropertyLine(prop string) int { // thin wrapper over GetPropertyNode; -1 means "no line information"
+ node := m.GetPropertyNode(prop)
+ if node == nil {
+ return -1
+ }
+ return node.Line
 }
// SetCore implements CoreAccessor interface
@@ -195,6 +204,17 @@ func (m *Model[T]) StoreReferenceDocumentInCache(key string, doc []byte) {
m.documentCache.Store(key, doc)
}
+func (m *Model[T]) GetCachedExternalDocument(key string) (any, bool) { // returns (nil, false) on nil receiver or uninitialized cache
+ if m == nil || m.externalDocumentCache == nil {
+ return nil, false
+ }
+ return m.externalDocumentCache.Load(key)
+}
+
+func (m *Model[T]) StoreExternalDocumentInCache(key string, doc any) { // NOTE(review): panics if InitCache was not called (nil externalDocumentCache) — same contract as StoreReferenceDocumentInCache
+ m.externalDocumentCache.Store(key, doc)
+}
+
func (m *Model[T]) InitCache() {
if m.objectCache == nil {
m.objectCache = &sync.Map{}
@@ -202,4 +222,7 @@ func (m *Model[T]) InitCache() {
if m.documentCache == nil {
m.documentCache = &sync.Map{}
}
+ if m.externalDocumentCache == nil {
+ m.externalDocumentCache = &sync.Map{}
+ }
}
diff --git a/marshaller/model_test.go b/marshaller/model_test.go
index c96e4dee..1e1822c1 100644
--- a/marshaller/model_test.go
+++ b/marshaller/model_test.go
@@ -9,6 +9,153 @@ import (
"gopkg.in/yaml.v3"
)
+// TestModel_GetPropertyNode_Success tests the GetPropertyNode method with valid inputs
+func TestModel_GetPropertyNode_Success(t *testing.T) {
+ t.Parallel()
+
+ tests := []struct {
+ name string
+ setup func() *marshaller.Model[core.TestPrimitiveModel]
+ prop string
+ expected int // expected line number of the key node, or -1 for "no node"
+ }{
+ {
+ name: "property with key node returns line number",
+ setup: func() *marshaller.Model[core.TestPrimitiveModel] {
+ keyNode := &yaml.Node{Line: 42}
+ coreModel := core.TestPrimitiveModel{
+ StringField: marshaller.Node[string]{
+ KeyNode: keyNode,
+ Key: "stringField",
+ Value: "testValue",
+ Present: true,
+ },
+ }
+ model := &marshaller.Model[core.TestPrimitiveModel]{
+ Valid: true,
+ }
+ model.SetCore(&coreModel)
+ return model
+ },
+ prop: "StringField",
+ expected: 42,
+ },
+ {
+ name: "property with nil key node returns -1",
+ setup: func() *marshaller.Model[core.TestPrimitiveModel] {
+ coreModel := core.TestPrimitiveModel{
+ StringField: marshaller.Node[string]{
+ KeyNode: nil,
+ Key: "stringField",
+ Value: "testValue",
+ Present: true,
+ },
+ }
+ model := &marshaller.Model[core.TestPrimitiveModel]{
+ Valid: true,
+ }
+ model.SetCore(&coreModel)
+ return model
+ },
+ prop: "StringField",
+ expected: -1,
+ },
+ {
+ name: "bool field with key node returns line number",
+ setup: func() *marshaller.Model[core.TestPrimitiveModel] {
+ keyNode := &yaml.Node{Line: 15}
+ coreModel := core.TestPrimitiveModel{
+ BoolField: marshaller.Node[bool]{
+ KeyNode: keyNode,
+ Key: "boolField",
+ Value: true,
+ Present: true,
+ },
+ }
+ model := &marshaller.Model[core.TestPrimitiveModel]{
+ Valid: true,
+ }
+ model.SetCore(&coreModel)
+ return model
+ },
+ prop: "BoolField",
+ expected: 15,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ t.Parallel()
+
+ model := tt.setup()
+ actual := model.GetPropertyNode(tt.prop)
+ line := -1 // normalize a nil node to -1 so the table can express "no node" cases
+ if actual != nil {
+ line = actual.Line
+ }
+ assert.Equal(t, tt.expected, line, "line number should match expected value")
+ })
+ }
+}
+
+// TestModel_GetPropertyNode_Error tests the GetPropertyNode method with error conditions
+func TestModel_GetPropertyNode_Error(t *testing.T) {
+ t.Parallel()
+
+ tests := []struct {
+ name string
+ setup func() *marshaller.Model[core.TestPrimitiveModel]
+ prop string
+ expected int
+ }{
+ {
+ name: "nil model returns -1", // exercises the nil-receiver guard in GetPropertyNode
+ setup: func() *marshaller.Model[core.TestPrimitiveModel] {
+ return nil
+ },
+ prop: "StringField",
+ expected: -1,
+ },
+ {
+ name: "non-existent property returns -1",
+ setup: func() *marshaller.Model[core.TestPrimitiveModel] {
+ return &marshaller.Model[core.TestPrimitiveModel]{}
+ },
+ prop: "NonExistentField",
+ expected: -1,
+ },
+ {
+ name: "property that is not a Node returns -1",
+ setup: func() *marshaller.Model[core.TestPrimitiveModel] {
+ coreModel := core.TestPrimitiveModel{
+ CoreModel: marshaller.CoreModel{}, // This field doesn't implement GetKeyNode
+ }
+ model := &marshaller.Model[core.TestPrimitiveModel]{
+ Valid: true,
+ }
+ model.SetCore(&coreModel)
+ return model
+ },
+ prop: "CoreModel",
+ expected: -1,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ t.Parallel()
+
+ model := tt.setup()
+ actual := model.GetPropertyNode(tt.prop)
+ if actual == nil {
+ assert.Equal(t, tt.expected, -1, "should return -1 for error conditions")
+ } else {
+ assert.Equal(t, tt.expected, actual.Line, "line number should match expected value")
+ }
+ })
+ }
+}
+
// TestModel_GetPropertyLine_Success tests the GetPropertyLine method with valid inputs
func TestModel_GetPropertyLine_Success(t *testing.T) {
t.Parallel()
diff --git a/marshaller/nodecollector.go b/marshaller/nodecollector.go
new file mode 100644
index 00000000..39d1b5e2
--- /dev/null
+++ b/marshaller/nodecollector.go
@@ -0,0 +1,238 @@
+package marshaller
+
+import (
+ "reflect"
+
+ "gopkg.in/yaml.v3"
+)
+
+// NodeCollector provides utilities for collecting yaml.Node pointers from core models.
+// This is useful for features that need to map nodes to contexts (like operation tracking).
+
+// CollectLeafNodes extracts all KeyNode and ValueNode pointers from marshaller.Node fields
+// within a core model. It only returns nodes for "leaf" fields - those whose values are
+// primitive types or slices/maps of primitives, not nested core models (which get visited
+// separately by the walk).
+//
+// The returned nodes can be used for features like node-to-operation mapping where you
+// need to track all yaml.Nodes within a model's scope.
+func CollectLeafNodes(core any) []*yaml.Node {
+ if core == nil {
+ return nil
+ }
+
+ var nodes []*yaml.Node
+ collectLeafNodesRecursive(reflect.ValueOf(core), &nodes, make(map[uintptr]bool)) // visited set guards against cyclic structs
+ return nodes
+}
+
+// collectLeafNodesRecursive traverses the struct using reflection to find marshaller.Node fields
+func collectLeafNodesRecursive(v reflect.Value, nodes *[]*yaml.Node, visited map[uintptr]bool) {
+ // Handle pointers and interfaces
+ for v.Kind() == reflect.Ptr || v.Kind() == reflect.Interface {
+ if v.IsNil() {
+ return
+ }
+ v = v.Elem() // unwrap until we reach a concrete value
+ }
+
+ // Only process structs
+ if v.Kind() != reflect.Struct {
+ return
+ }
+
+ // Check for cycles (using pointer address of the struct)
+ if v.CanAddr() { // unaddressable values (e.g. passed by value) cannot cycle via this path
+ ptr := v.Addr().Pointer()
+ if visited[ptr] {
+ return
+ }
+ visited[ptr] = true
+ }
+
+ t := v.Type()
+
+ // Iterate through all fields
+ for i := 0; i < v.NumField(); i++ {
+ field := v.Field(i)
+ fieldType := t.Field(i)
+
+ // Skip unexported fields
+ if !fieldType.IsExported() {
+ continue
+ }
+
+ // Check if it's a marshaller.Node type by looking for KeyNode/ValueNode fields
+ if isNodeType(fieldType.Type) {
+ collectFromNodeField(field, nodes)
+ continue
+ }
+
+ // Recurse into embedded structs (like CoreModel)
+ if fieldType.Anonymous { // named non-Node struct fields are intentionally ignored: nested core models get walked separately
+ collectLeafNodesRecursive(field, nodes, visited)
+ }
+ }
+}
+
+// isNodeType checks if a type is marshaller.Node[T] by looking for characteristic fields
+func isNodeType(t reflect.Type) bool {
+ // Handle pointers
+ for t.Kind() == reflect.Ptr {
+ t = t.Elem()
+ }
+
+ if t.Kind() != reflect.Struct {
+ return false
+ }
+
+ // Check for the characteristic fields of marshaller.Node
+ hasKeyNode := false
+ hasValueNode := false
+ hasPresent := false
+
+ for i := 0; i < t.NumField(); i++ {
+ field := t.Field(i)
+ switch field.Name {
+ case "KeyNode":
+ if field.Type == reflect.TypeOf((*yaml.Node)(nil)) { // field must be exactly *yaml.Node
+ hasKeyNode = true
+ }
+ case "ValueNode":
+ if field.Type == reflect.TypeOf((*yaml.Node)(nil)) {
+ hasValueNode = true
+ }
+ case "Present":
+ if field.Type.Kind() == reflect.Bool {
+ hasPresent = true
+ }
+ }
+ }
+
+ return hasKeyNode && hasValueNode && hasPresent // structural (duck-typed) check rather than a concrete type match
+}
+
+// collectFromNodeField extracts nodes from a marshaller.Node field
+func collectFromNodeField(field reflect.Value, nodes *[]*yaml.Node) {
+ // Handle pointers
+ for field.Kind() == reflect.Ptr {
+ if field.IsNil() {
+ return
+ }
+ field = field.Elem()
+ }
+
+ if field.Kind() != reflect.Struct {
+ return
+ }
+
+ // Get KeyNode and ValueNode fields
+ keyNodeField := field.FieldByName("KeyNode")
+ valueNodeField := field.FieldByName("ValueNode")
+ presentField := field.FieldByName("Present")
+ valueField := field.FieldByName("Value")
+
+ // Only collect if present
+ if presentField.IsValid() && !presentField.Bool() { // fields absent from the YAML contribute no nodes
+ return
+ }
+
+ // Add KeyNode if not nil
+ if keyNodeField.IsValid() && !keyNodeField.IsNil() {
+ if node, ok := keyNodeField.Interface().(*yaml.Node); ok && node != nil {
+ *nodes = append(*nodes, node)
+ }
+ }
+
+ // Add ValueNode if not nil
+ if valueNodeField.IsValid() && !valueNodeField.IsNil() {
+ if node, ok := valueNodeField.Interface().(*yaml.Node); ok && node != nil {
+ *nodes = append(*nodes, node)
+
+ // If the Value is a primitive type (or slice/map of primitives),
+ // also collect child nodes from the ValueNode
+ if valueField.IsValid() && isLeafValueType(valueField.Type()) {
+ collectYAMLNodeChildren(node, nodes)
+ }
+ }
+ }
+}
+
+// isLeafValueType returns true if the type represents a leaf value (primitive or container of primitives)
+// rather than a core model that will be walked separately
+func isLeafValueType(t reflect.Type) bool {
+ // Handle pointers
+ for t.Kind() == reflect.Ptr {
+ t = t.Elem()
+ }
+
+ switch t.Kind() {
+ case reflect.Bool, reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64,
+ reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64,
+ reflect.Float32, reflect.Float64, reflect.String:
+ return true
+
+ case reflect.Slice:
+ elemType := t.Elem()
+ // Slices of primitives are leaf types
+ // Slices of core models are not (they get walked)
+ return isLeafValueType(elemType)
+
+ case reflect.Map:
+ // Maps with primitive keys and values are leaf types
+ return isLeafValueType(t.Key()) && isLeafValueType(t.Elem())
+
+ case reflect.Struct:
+ // Check if it's a CoreModeler (has GetRootNode method)
+ // If so, it's not a leaf - it will be walked separately
+ if hasCoreModelerMethod(t) {
+ return false
+ }
+ // Check if it's a marshaller.Node type
+ if isNodeType(t) {
+ // Get the inner value type and check that
+ valueField, found := t.FieldByName("Value")
+ if found {
+ return isLeafValueType(valueField.Type) // leaf-ness of Node[T] is decided by T
+ }
+ }
+ // Other structs might be leaf types (like custom value types)
+ return true // NOTE(review): defaults to leaf for unknown structs — confirm that is safe for all custom value types
+
+ case reflect.Interface:
+ // Can't determine at compile time - assume not leaf
+ return false
+
+ default:
+ return false
+ }
+}
+
+// hasCoreModelerMethod checks if a type implements GetRootNode() *yaml.Node
+func hasCoreModelerMethod(t reflect.Type) bool {
+ // Check both value and pointer receiver
+ _, hasMethod := t.MethodByName("GetRootNode")
+ if hasMethod {
+ return true
+ }
+ if t.Kind() != reflect.Ptr { // pointer-receiver methods only appear in the pointer type's method set
+ ptrType := reflect.PointerTo(t)
+ _, hasMethod = ptrType.MethodByName("GetRootNode")
+ }
+ return hasMethod
+}
+
+// collectYAMLNodeChildren adds all direct children of a YAML node to the nodes slice
+// This is used for simple values like slices of strings where the individual items
+// aren't core models but we still want to track their nodes
+func collectYAMLNodeChildren(node *yaml.Node, nodes *[]*yaml.Node) {
+ if node == nil || node.Content == nil {
+ return
+ }
+
+ for _, child := range node.Content { // direct children only; no recursion into nested sequences/mappings
+ if child != nil {
+ *nodes = append(*nodes, child)
+ }
+ }
+}
diff --git a/marshaller/nodecollector_test.go b/marshaller/nodecollector_test.go
new file mode 100644
index 00000000..edc24445
--- /dev/null
+++ b/marshaller/nodecollector_test.go
@@ -0,0 +1,412 @@
+package marshaller_test
+
+import (
+ "testing"
+
+ "github.com/speakeasy-api/openapi/marshaller"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+ "gopkg.in/yaml.v3"
+)
+
+// Test models to verify CollectLeafNodes behavior
+
+// SimpleModel has only primitive leaf fields - all should be collected
+type SimpleModel struct {
+ marshaller.CoreModel
+
+ StringField marshaller.Node[*string] `key:"stringField"`
+ IntField marshaller.Node[*int] `key:"intField"`
+ BoolField marshaller.Node[*bool] `key:"boolField"`
+}
+
+// ModelWithSlice has a slice of primitives - all items should be collected
+type ModelWithSlice struct {
+ marshaller.CoreModel
+
+ Items marshaller.Node[[]string] `key:"items"`
+}
+
+// ModelWithNodeSlice has a slice of Node[string] - all items should be collected
+type ModelWithNodeSlice struct { // NOTE(review): not exercised by any test in this file — add a covering test or remove
+ marshaller.CoreModel
+
+ Tags marshaller.Node[[]marshaller.Node[string]] `key:"tags"`
+}
+
+// NestedCoreModel represents a model that would be walked separately
+type NestedCoreModel struct {
+ marshaller.CoreModel
+
+ Name marshaller.Node[*string] `key:"name"`
+}
+
+func (n *NestedCoreModel) GetRootNode() *yaml.Node { // satisfies the hasCoreModelerMethod check, marking this type as non-leaf
+ return n.RootNode
+}
+
+// ModelWithNestedCore has a nested core model - the nested model's nodes should NOT be collected
+type ModelWithNestedCore struct {
+ marshaller.CoreModel
+
+ Title marshaller.Node[*string] `key:"title"`
+ Nested marshaller.Node[*NestedCoreModel] `key:"nested"`
+}
+
+// ModelWithSliceOfCoreModels has a slice of core models - those nodes should NOT be collected
+type ModelWithSliceOfCoreModels struct {
+ marshaller.CoreModel
+
+ Description marshaller.Node[*string] `key:"description"`
+ Children marshaller.Node[[]*NestedCoreModel] `key:"children"`
+}
+
+func TestCollectLeafNodes_NilInput_Success(t *testing.T) {
+ t.Parallel()
+
+ nodes := marshaller.CollectLeafNodes(nil)
+ assert.Nil(t, nodes, "should return nil for nil input") // nil, not an empty slice
+}
+
+func TestCollectLeafNodes_SimpleModel_CollectsAllNodes(t *testing.T) {
+ t.Parallel()
+
+ // Create YAML nodes
+ stringKeyNode := &yaml.Node{Kind: yaml.ScalarNode, Value: "stringField"}
+ stringValueNode := &yaml.Node{Kind: yaml.ScalarNode, Value: "hello"}
+ intKeyNode := &yaml.Node{Kind: yaml.ScalarNode, Value: "intField"}
+ intValueNode := &yaml.Node{Kind: yaml.ScalarNode, Value: "42"}
+ boolKeyNode := &yaml.Node{Kind: yaml.ScalarNode, Value: "boolField"}
+ boolValueNode := &yaml.Node{Kind: yaml.ScalarNode, Value: "true"}
+
+ str := "hello"
+ intVal := 42
+ boolVal := true
+
+ model := &SimpleModel{
+ StringField: marshaller.Node[*string]{
+ KeyNode: stringKeyNode,
+ ValueNode: stringValueNode,
+ Value: &str,
+ Present: true,
+ },
+ IntField: marshaller.Node[*int]{
+ KeyNode: intKeyNode,
+ ValueNode: intValueNode,
+ Value: &intVal,
+ Present: true,
+ },
+ BoolField: marshaller.Node[*bool]{
+ KeyNode: boolKeyNode,
+ ValueNode: boolValueNode,
+ Value: &boolVal,
+ Present: true,
+ },
+ }
+
+ nodes := marshaller.CollectLeafNodes(model)
+
+ // Should have 6 nodes (KeyNode + ValueNode for each of 3 fields)
+ require.Len(t, nodes, 6, "should collect all key and value nodes")
+
+ // Verify all nodes are collected
+ nodeSet := make(map[*yaml.Node]bool) // identity set: compares node pointers, not node contents
+ for _, n := range nodes {
+ nodeSet[n] = true
+ }
+
+ assert.True(t, nodeSet[stringKeyNode], "should include stringField key node")
+ assert.True(t, nodeSet[stringValueNode], "should include stringField value node")
+ assert.True(t, nodeSet[intKeyNode], "should include intField key node")
+ assert.True(t, nodeSet[intValueNode], "should include intField value node")
+ assert.True(t, nodeSet[boolKeyNode], "should include boolField key node")
+ assert.True(t, nodeSet[boolValueNode], "should include boolField value node")
+}
+
+func TestCollectLeafNodes_NotPresent_SkipsField(t *testing.T) {
+ t.Parallel()
+
+ stringKeyNode := &yaml.Node{Kind: yaml.ScalarNode, Value: "stringField"}
+ stringValueNode := &yaml.Node{Kind: yaml.ScalarNode, Value: "hello"}
+ intKeyNode := &yaml.Node{Kind: yaml.ScalarNode, Value: "intField"}
+ intValueNode := &yaml.Node{Kind: yaml.ScalarNode, Value: "42"}
+
+ str := "hello"
+
+ model := &SimpleModel{
+ StringField: marshaller.Node[*string]{
+ KeyNode: stringKeyNode,
+ ValueNode: stringValueNode,
+ Value: &str,
+ Present: true,
+ },
+ IntField: marshaller.Node[*int]{
+ KeyNode: intKeyNode,
+ ValueNode: intValueNode,
+ Value: nil,
+ Present: false, // Not present - should be skipped
+ },
+ }
+
+ nodes := marshaller.CollectLeafNodes(model)
+
+ // Should have 2 nodes (only StringField)
+ require.Len(t, nodes, 2, "should only collect present fields") // Present=false wins even though IntField has non-nil nodes
+
+ nodeSet := make(map[*yaml.Node]bool)
+ for _, n := range nodes {
+ nodeSet[n] = true
+ }
+
+ assert.True(t, nodeSet[stringKeyNode], "should include present field key node")
+ assert.True(t, nodeSet[stringValueNode], "should include present field value node")
+ assert.False(t, nodeSet[intKeyNode], "should not include non-present field key node")
+ assert.False(t, nodeSet[intValueNode], "should not include non-present field value node")
+}
+
+func TestCollectLeafNodes_SliceOfPrimitives_CollectsChildren(t *testing.T) {
+ t.Parallel()
+
+ itemsKeyNode := &yaml.Node{Kind: yaml.ScalarNode, Value: "items"}
+ item1Node := &yaml.Node{Kind: yaml.ScalarNode, Value: "item1"}
+ item2Node := &yaml.Node{Kind: yaml.ScalarNode, Value: "item2"}
+ item3Node := &yaml.Node{Kind: yaml.ScalarNode, Value: "item3"}
+ itemsValueNode := &yaml.Node{
+ Kind: yaml.SequenceNode,
+ Content: []*yaml.Node{item1Node, item2Node, item3Node}, // children reachable via collectYAMLNodeChildren
+ }
+
+ model := &ModelWithSlice{
+ Items: marshaller.Node[[]string]{
+ KeyNode: itemsKeyNode,
+ ValueNode: itemsValueNode,
+ Value: []string{"item1", "item2", "item3"},
+ Present: true,
+ },
+ }
+
+ nodes := marshaller.CollectLeafNodes(model)
+
+ // Should have: keyNode + valueNode + 3 child nodes = 5
+ require.Len(t, nodes, 5, "should collect key, value, and child nodes")
+
+ nodeSet := make(map[*yaml.Node]bool)
+ for _, n := range nodes {
+ nodeSet[n] = true
+ }
+
+ assert.True(t, nodeSet[itemsKeyNode], "should include items key node")
+ assert.True(t, nodeSet[itemsValueNode], "should include items value node")
+ assert.True(t, nodeSet[item1Node], "should include item1 node")
+ assert.True(t, nodeSet[item2Node], "should include item2 node")
+ assert.True(t, nodeSet[item3Node], "should include item3 node")
+}
+
+func TestCollectLeafNodes_NestedCoreModel_DoesNotCollectNestedNodes(t *testing.T) {
+ t.Parallel()
+
+ // Parent's leaf field
+ titleKeyNode := &yaml.Node{Kind: yaml.ScalarNode, Value: "title"}
+ titleValueNode := &yaml.Node{Kind: yaml.ScalarNode, Value: "My Title"}
+
+ // Nested model's field - should NOT be collected
+ nestedNameKeyNode := &yaml.Node{Kind: yaml.ScalarNode, Value: "name"}
+ nestedNameValueNode := &yaml.Node{Kind: yaml.ScalarNode, Value: "Nested Name"}
+ nestedKeyNode := &yaml.Node{Kind: yaml.ScalarNode, Value: "nested"}
+ nestedValueNode := &yaml.Node{
+ Kind: yaml.MappingNode,
+ Content: []*yaml.Node{
+ nestedNameKeyNode,
+ nestedNameValueNode,
+ },
+ }
+
+ nestedName := "Nested Name"
+ title := "My Title"
+
+ nestedCore := &NestedCoreModel{
+ Name: marshaller.Node[*string]{
+ KeyNode: nestedNameKeyNode,
+ ValueNode: nestedNameValueNode,
+ Value: &nestedName,
+ Present: true,
+ },
+ }
+ nestedCore.RootNode = nestedValueNode // mirrors what a real unmarshal would produce
+
+ model := &ModelWithNestedCore{
+ Title: marshaller.Node[*string]{
+ KeyNode: titleKeyNode,
+ ValueNode: titleValueNode,
+ Value: &title,
+ Present: true,
+ },
+ Nested: marshaller.Node[*NestedCoreModel]{
+ KeyNode: nestedKeyNode,
+ ValueNode: nestedValueNode,
+ Value: nestedCore,
+ Present: true,
+ },
+ }
+
+ nodes := marshaller.CollectLeafNodes(model)
+
+ nodeSet := make(map[*yaml.Node]bool)
+ for _, n := range nodes {
+ nodeSet[n] = true
+ }
+
+ // Should collect Title field nodes (leaf)
+ assert.True(t, nodeSet[titleKeyNode], "should include title key node")
+ assert.True(t, nodeSet[titleValueNode], "should include title value node")
+
+ // Should NOT collect nested model's internal field nodes
+ // (the nested model itself will be walked separately)
+ assert.False(t, nodeSet[nestedNameKeyNode], "should NOT include nested model's internal key node")
+ assert.False(t, nodeSet[nestedNameValueNode], "should NOT include nested model's internal value node")
+}
+
+func TestCollectLeafNodes_SliceOfCoreModels_DoesNotCollectNestedNodes(t *testing.T) {
+ t.Parallel()
+
+ // Parent's leaf field
+ descKeyNode := &yaml.Node{Kind: yaml.ScalarNode, Value: "description"}
+ descValueNode := &yaml.Node{Kind: yaml.ScalarNode, Value: "A description"}
+
+ // Child 1 - should NOT be collected
+ child1NameKeyNode := &yaml.Node{Kind: yaml.ScalarNode, Value: "name"}
+ child1NameValueNode := &yaml.Node{Kind: yaml.ScalarNode, Value: "Child 1"}
+ child1RootNode := &yaml.Node{
+ Kind: yaml.MappingNode,
+ Content: []*yaml.Node{
+ child1NameKeyNode,
+ child1NameValueNode,
+ },
+ }
+
+ // Child 2 - should NOT be collected
+ child2NameKeyNode := &yaml.Node{Kind: yaml.ScalarNode, Value: "name"}
+ child2NameValueNode := &yaml.Node{Kind: yaml.ScalarNode, Value: "Child 2"}
+ child2RootNode := &yaml.Node{
+ Kind: yaml.MappingNode,
+ Content: []*yaml.Node{
+ child2NameKeyNode,
+ child2NameValueNode,
+ },
+ }
+
+ childrenKeyNode := &yaml.Node{Kind: yaml.ScalarNode, Value: "children"}
+ childrenValueNode := &yaml.Node{
+ Kind: yaml.SequenceNode,
+ Content: []*yaml.Node{child1RootNode, child2RootNode},
+ }
+
+ desc := "A description"
+ child1Name := "Child 1"
+ child2Name := "Child 2"
+
+ child1 := &NestedCoreModel{
+ Name: marshaller.Node[*string]{
+ KeyNode: child1NameKeyNode,
+ ValueNode: child1NameValueNode,
+ Value: &child1Name,
+ Present: true,
+ },
+ }
+ child1.RootNode = child1RootNode
+
+ child2 := &NestedCoreModel{
+ Name: marshaller.Node[*string]{
+ KeyNode: child2NameKeyNode,
+ ValueNode: child2NameValueNode,
+ Value: &child2Name,
+ Present: true,
+ },
+ }
+ child2.RootNode = child2RootNode
+
+ model := &ModelWithSliceOfCoreModels{
+ Description: marshaller.Node[*string]{
+ KeyNode: descKeyNode,
+ ValueNode: descValueNode,
+ Value: &desc,
+ Present: true,
+ },
+ Children: marshaller.Node[[]*NestedCoreModel]{
+ KeyNode: childrenKeyNode,
+ ValueNode: childrenValueNode,
+ Value: []*NestedCoreModel{child1, child2}, // slice of core models — isLeafValueType reports non-leaf here
+ Present: true,
+ },
+ }
+
+ nodes := marshaller.CollectLeafNodes(model)
+
+ nodeSet := make(map[*yaml.Node]bool)
+ for _, n := range nodes {
+ nodeSet[n] = true
+ }
+
+ // Should collect Description field nodes (leaf)
+ assert.True(t, nodeSet[descKeyNode], "should include description key node")
+ assert.True(t, nodeSet[descValueNode], "should include description value node")
+
+ // Should NOT collect Children array's child model nodes
+ // (they will be walked separately)
+ assert.False(t, nodeSet[child1NameKeyNode], "should NOT include child1's name key node")
+ assert.False(t, nodeSet[child1NameValueNode], "should NOT include child1's name value node")
+ assert.False(t, nodeSet[child2NameKeyNode], "should NOT include child2's name key node")
+ assert.False(t, nodeSet[child2NameValueNode], "should NOT include child2's name value node")
+}
+
+func TestCollectLeafNodes_NilKeyNode_SkipsKeyNode(t *testing.T) {
+ t.Parallel()
+
+ valueNode := &yaml.Node{Kind: yaml.ScalarNode, Value: "hello"}
+ str := "hello"
+
+ model := &SimpleModel{
+ StringField: marshaller.Node[*string]{
+ KeyNode: nil, // No key node
+ ValueNode: valueNode,
+ Value: &str,
+ Present: true,
+ },
+ }
+
+ nodes := marshaller.CollectLeafNodes(model)
+
+ require.Len(t, nodes, 1, "should only collect value node") // nil key node is skipped, not treated as an error
+ assert.Equal(t, valueNode, nodes[0], "should collect value node")
+}
+
+func TestCollectLeafNodes_NilValueNode_SkipsValueNode(t *testing.T) {
+ t.Parallel()
+
+ keyNode := &yaml.Node{Kind: yaml.ScalarNode, Value: "stringField"}
+ str := "hello"
+
+ model := &SimpleModel{
+ StringField: marshaller.Node[*string]{
+ KeyNode: keyNode,
+ ValueNode: nil, // No value node
+ Value: &str,
+ Present: true,
+ },
+ }
+
+ nodes := marshaller.CollectLeafNodes(model)
+
+ require.Len(t, nodes, 1, "should only collect key node") // nil value node also skips child collection
+ assert.Equal(t, keyNode, nodes[0], "should collect key node")
+}
+
+func TestCollectLeafNodes_EmptyModel_ReturnsEmpty(t *testing.T) {
+ t.Parallel()
+
+ model := &SimpleModel{} // zero-value Nodes have Present=false, so nothing is collected
+
+ nodes := marshaller.CollectLeafNodes(model)
+
+ assert.Empty(t, nodes, "should return empty for model with no present fields")
+}
diff --git a/marshaller/populator.go b/marshaller/populator.go
index 5d94a752..d5b75eb5 100644
--- a/marshaller/populator.go
+++ b/marshaller/populator.go
@@ -100,7 +100,7 @@ func PopulateModelWithContext(source any, target any, ctx *PopulationContext) er
}
if s.Kind() != reflect.Struct {
- return fmt.Errorf("expected struct, got %s", s.Kind())
+ return fmt.Errorf("expected `struct`, got `%s`", s.Kind())
}
sType := s.Type()
diff --git a/marshaller/sequencedmap.go b/marshaller/sequencedmap.go
index bf55c1a8..0bbb583c 100644
--- a/marshaller/sequencedmap.go
+++ b/marshaller/sequencedmap.go
@@ -30,7 +30,7 @@ func unmarshalSequencedMap(ctx context.Context, parentName string, node *yaml.No
// Check if the node is actually a mapping node
if resolvedNode.Kind != yaml.MappingNode {
validationErr := validation.NewTypeMismatchError(parentName, "expected mapping node for sequenced map, got %v", resolvedNode.Kind)
- return []error{validation.NewValidationError(validationErr, resolvedNode)}, nil
+ return []error{validation.NewValidationError(validation.SeverityError, validation.RuleValidationTypeMismatch, validationErr, resolvedNode)}, nil
}
target.Init()
@@ -57,7 +57,9 @@ func unmarshalSequencedMap(ctx context.Context, parentName string, node *yaml.No
indicesToSkip[existing.lastIndex] = true
// Create validation error for the earlier occurrence
duplicateKeyErrs = append(duplicateKeyErrs, validation.NewValidationError(
- validation.NewValueValidationError("mapping key %q at line %d is a duplicate; previous definition at line %d", key, keyNode.Line, existing.firstLine),
+ validation.SeverityWarning,
+ validation.RuleValidationDuplicateKey,
+ fmt.Errorf("mapping key %q at line %d is a duplicate; previous definition at line %d", key, keyNode.Line, existing.firstLine),
keyNode,
))
// Update to point to current (last) occurrence
diff --git a/marshaller/syncer.go b/marshaller/syncer.go
index a9b622fb..ea529340 100644
--- a/marshaller/syncer.go
+++ b/marshaller/syncer.go
@@ -128,7 +128,7 @@ func syncChanges(ctx context.Context, source any, target any, valueNode *yaml.No
t = getUnderlyingValue(t)
if sUnderlying.Kind() != reflect.Struct {
- return nil, fmt.Errorf("syncChanges expected struct, got %s", s.Type())
+ return nil, fmt.Errorf("syncChanges expected `struct`, got `%s`", s.Type())
}
valid := true
diff --git a/marshaller/unmarshaller.go b/marshaller/unmarshaller.go
index c066e88b..42c6faeb 100644
--- a/marshaller/unmarshaller.go
+++ b/marshaller/unmarshaller.go
@@ -85,7 +85,7 @@ func UnmarshalCore(ctx context.Context, parentName string, node *yaml.Node, out
var documentNode *yaml.Node
if node.Kind == yaml.DocumentNode {
if len(node.Content) != 1 {
- return nil, fmt.Errorf("expected 1 node, got %d at line %d, column %d", len(node.Content), node.Line, node.Column)
+ return nil, fmt.Errorf("expected 1 node, got `%d` at line `%d`, column `%d`", len(node.Content), node.Line, node.Column)
}
// Save the document node for potential use by CoreModeler implementations
@@ -168,7 +168,7 @@ func unmarshal(ctx context.Context, parentName string, node *yaml.Node, out refl
nodeMutator, ok := out.Interface().(NodeMutator)
if !ok {
- return nil, fmt.Errorf("expected NodeMutator, got %s at line %d, column %d", out.Type(), resolvedNode.Line, resolvedNode.Column)
+ return nil, fmt.Errorf("expected NodeMutator, got `%s` at line `%d`, column `%d`", out.Type(), resolvedNode.Line, resolvedNode.Column)
}
return nodeMutator.Unmarshal(ctx, parentName, nil, node)
@@ -189,10 +189,21 @@ func unmarshal(ctx context.Context, parentName string, node *yaml.Node, out refl
unmarshallable, ok := out.Interface().(Unmarshallable)
if !ok {
- return nil, fmt.Errorf("expected Unmarshallable, got %s at line %d, column %d", out.Type(), resolvedNode.Line, resolvedNode.Column)
+ return nil, fmt.Errorf("expected Unmarshallable, got `%s` at line `%d`, column `%d`", out.Type(), resolvedNode.Line, resolvedNode.Column)
}
- return unmarshallable.Unmarshal(ctx, parentName, node)
+ validationErrs, err := unmarshallable.Unmarshal(ctx, parentName, node)
+ if err != nil {
+ return nil, err
+ }
+
+ if implementsInterface(out, coreModelerType) {
+ if coreModeler, ok := out.Interface().(CoreModeler); ok {
+ coreModeler.SetRootNode(node)
+ }
+ }
+
+ return validationErrs, nil
}
if implementsInterface(out, sequencedMapType) {
@@ -206,7 +217,7 @@ func unmarshal(ctx context.Context, parentName string, node *yaml.Node, out refl
seqMapInterface, ok := out.Interface().(interfaces.SequencedMapInterface)
if !ok {
- return nil, fmt.Errorf("expected sequencedMapInterface, got %s at line %d, column %d", out.Type(), resolvedNode.Line, resolvedNode.Column)
+ return nil, fmt.Errorf("expected sequencedMapInterface, got `%s` at line `%d`, column `%d`", out.Type(), resolvedNode.Line, resolvedNode.Column)
}
return unmarshalSequencedMap(ctx, parentName, node, seqMapInterface)
@@ -268,9 +279,9 @@ func unmarshalMapping(ctx context.Context, parentName string, node *yaml.Node, o
return unmarshalStruct(ctx, parentName, node, out.Addr().Interface())
}
case out.Kind() == reflect.Map:
- return nil, fmt.Errorf("currently unsupported out kind: %v (type: %s) at line %d, column %d", out.Kind(), out.Type(), resolvedNode.Line, resolvedNode.Column)
+ return nil, fmt.Errorf("currently unsupported out kind: `%v` (type: `%s`) at line `%d`, column `%d`", out.Kind(), out.Type(), resolvedNode.Line, resolvedNode.Column)
default:
- return nil, fmt.Errorf("expected struct or map, got %s (type: %s) at line %d, column %d", out.Kind(), out.Type(), resolvedNode.Line, resolvedNode.Column)
+ return nil, fmt.Errorf("expected struct or map, got `%s` (type: `%s`) at line `%d`, column `%d`", out.Kind(), out.Type(), resolvedNode.Line, resolvedNode.Column)
}
}
@@ -287,27 +298,27 @@ func unmarshalModel(ctx context.Context, parentName string, node *yaml.Node, str
}
if out.Kind() != reflect.Struct {
- return nil, fmt.Errorf("expected a struct, got %s (type: %s) at line %d, column %d", out.Kind(), out.Type(), resolvedNode.Line, resolvedNode.Column)
+ return nil, fmt.Errorf("expected a struct, got `%s` (type: `%s`) at line `%d`, column `%d`", out.Kind(), out.Type(), resolvedNode.Line, resolvedNode.Column)
}
structType := out.Type()
// Get the "model" tag value from the embedded CoreModel field which should be the first field always
if structType.NumField() < 1 {
- return nil, fmt.Errorf("expected embedded CoreModel field, got %s at line %d, column %d", out.Type(), resolvedNode.Line, resolvedNode.Column)
+ return nil, fmt.Errorf("expected embedded CoreModel field, got `%s` at line `%d`, column `%d`", out.Type(), resolvedNode.Line, resolvedNode.Column)
}
field := structType.Field(0)
if field.Type != reflect.TypeOf(CoreModel{}) {
- return nil, fmt.Errorf("expected embedded CoreModel field to be of type CoreModel, got %s at line %d, column %d", out.Type(), resolvedNode.Line, resolvedNode.Column)
+ return nil, fmt.Errorf("expected embedded CoreModel field to be of type CoreModel, got `%s` at line `%d`, column `%d`", out.Type(), resolvedNode.Line, resolvedNode.Column)
}
modelTag := field.Tag.Get("model")
if modelTag == "" {
- return nil, fmt.Errorf("expected embedded CoreModel field to have a 'model' tag, got %s at line %d, column %d", out.Type(), resolvedNode.Line, resolvedNode.Column)
+ return nil, fmt.Errorf("expected embedded CoreModel field to have a 'model' tag, got `%s` at line `%d`, column `%d`", out.Type(), resolvedNode.Line, resolvedNode.Column)
}
if resolvedNode.Kind != yaml.MappingNode {
return []error{
- validation.NewValidationError(validation.NewTypeMismatchError(parentName, "expected object, got %s", yml.NodeKindToString(resolvedNode.Kind)), resolvedNode),
+ validation.NewValidationError(validation.SeverityError, validation.RuleValidationTypeMismatch, validation.NewTypeMismatchError(parentName, "expected `object`, got `%s`", yml.NodeKindToString(resolvedNode.Kind)), resolvedNode),
}, nil
}
@@ -318,10 +329,10 @@ func unmarshalModel(ctx context.Context, parentName string, node *yaml.Node, str
var ok bool
unmarshallable, ok = out.Addr().Interface().(CoreModeler)
if !ok {
- return nil, fmt.Errorf("expected CoreModeler, got %s at line %d, column %d", out.Type(), resolvedNode.Line, resolvedNode.Column)
+ return nil, fmt.Errorf("expected CoreModeler, got `%s` at line `%d`, column `%d`", out.Type(), resolvedNode.Line, resolvedNode.Column)
}
} else {
- return nil, fmt.Errorf("expected struct to implement CoreModeler, got %s at line %d, column %d", out.Type(), resolvedNode.Line, resolvedNode.Column)
+ return nil, fmt.Errorf("expected struct to implement CoreModeler, got `%s` at line `%d`, column `%d`", out.Type(), resolvedNode.Line, resolvedNode.Column)
}
unmarshallable.SetRootNode(node)
@@ -368,7 +379,9 @@ func unmarshalModel(ctx context.Context, parentName string, node *yaml.Node, str
indicesToSkip[info.lastIndex] = true
// Create validation error for the earlier occurrence
duplicateKeyErrs = append(duplicateKeyErrs, validation.NewValidationError(
- validation.NewValueValidationError("mapping key %q at line %d is a duplicate; previous definition at line %d", key, keyNode.Line, info.firstLine),
+ validation.SeverityWarning,
+ validation.RuleValidationDuplicateKey,
+ fmt.Errorf("mapping key `%q` at line `%d` is a duplicate; previous definition at line `%d`", key, keyNode.Line, info.firstLine),
keyNode,
))
// Update to track this as the new last occurrence
@@ -460,7 +473,7 @@ func unmarshalModel(ctx context.Context, parentName string, node *yaml.Node, str
foundRequiredFields.Store(key, true)
}
} else {
- return fmt.Errorf("expected field '%s' to be marshaller.Node, got %s at line %d, column %d (key: %s)", cachedField.Name, fieldVal.Type(), keyNode.Line, keyNode.Column, key)
+ return fmt.Errorf("expected field `%s` to be marshaller.Node, got `%s` at line `%d`, column `%d` (key: `%s`)", cachedField.Name, fieldVal.Type(), keyNode.Line, keyNode.Column, key)
}
}
@@ -489,7 +502,7 @@ func unmarshalModel(ctx context.Context, parentName string, node *yaml.Node, str
// Check for missing required fields using cached required field info
for tag := range fieldMap.RequiredFields {
if _, ok := foundRequiredFields.Load(tag); !ok {
- validationErrs = append(validationErrs, validation.NewValidationError(validation.NewMissingFieldError("%s.%s is missing", modelTag, tag), resolvedNode))
+ validationErrs = append(validationErrs, validation.NewValidationError(validation.SeverityError, validation.RuleValidationRequiredField, fmt.Errorf("`%s.%s` is required", modelTag, tag), resolvedNode))
}
}
@@ -532,7 +545,7 @@ func decodeNode(_ context.Context, parentName string, node *yaml.Node, out any)
// Check if this is a type mismatch error
if yamlTypeErr := asTypeMismatchError(err); yamlTypeErr != nil {
// Convert type mismatch to validation error
- validationErr := validation.NewValidationError(validation.NewTypeMismatchError(parentName, strings.Join(yamlTypeErr.Errors, ", ")), resolvedNode)
+ validationErr := validation.NewValidationError(validation.SeverityError, validation.RuleValidationTypeMismatch, validation.NewTypeMismatchError(parentName, strings.Join(yamlTypeErr.Errors, ", ")), resolvedNode)
return []error{validationErr}, nil //nolint:nilerr
}
@@ -547,7 +560,7 @@ func unmarshalSequence(ctx context.Context, parentName string, node *yaml.Node,
}
if out.Kind() != reflect.Slice {
- return nil, fmt.Errorf("expected slice, got %s (type: %s) at line %d, column %d", out.Kind(), out.Type(), resolvedNode.Line, resolvedNode.Column)
+ return nil, fmt.Errorf("expected `slice`, got `%s` (type: `%s`) at line `%d`, column `%d`", out.Kind(), out.Type(), resolvedNode.Line, resolvedNode.Column)
}
out.Set(reflect.MakeSlice(out.Type(), len(resolvedNode.Content), len(resolvedNode.Content)))
@@ -604,13 +617,13 @@ func unmarshalNode(ctx context.Context, parentName string, keyNode, valueNode *y
out.Set(reflect.New(out.Type().Elem()))
ref = out.Elem().Addr()
} else {
- return nil, fmt.Errorf("field %s is a nil pointer and cannot be set at line %d, column %d", fieldName, resolvedKeyNode.Line, resolvedKeyNode.Column)
+ return nil, fmt.Errorf("field `%s` is a nil pointer and cannot be set at line `%d`, column `%d`", fieldName, resolvedKeyNode.Line, resolvedKeyNode.Column)
}
}
unmarshallable, ok := ref.Interface().(NodeMutator)
if !ok {
- return nil, fmt.Errorf("expected field '%s' to be marshaller.Node, got %s at line %d, column %d", fieldName, ref.Type(), resolvedKeyNode.Line, resolvedKeyNode.Column)
+ return nil, fmt.Errorf("expected field `%s` to be marshaller.Node, got `%s` at line `%d`, column `%d`", fieldName, ref.Type(), resolvedKeyNode.Line, resolvedKeyNode.Column)
}
validationErrs, err := unmarshallable.Unmarshal(ctx, parentName, keyNode, valueNode)
@@ -678,7 +691,7 @@ func isMapType(out reflect.Value) bool {
// validateNodeKind checks if the node kind matches the expected kind and returns appropriate error
func validateNodeKind(resolvedNode *yaml.Node, expectedKind yaml.Kind, parentName string, reflectType reflect.Type, expectedType string) error {
if resolvedNode == nil {
- return validation.NewValidationError(validation.NewTypeMismatchError(parentName, "expected %s, got nil", yml.NodeKindToString(expectedKind)), nil)
+ return validation.NewValidationError(validation.SeverityError, validation.RuleValidationTypeMismatch, validation.NewTypeMismatchError(parentName, "expected `%s`, got nil", yml.NodeKindToString(expectedKind)), nil)
}
// Check if the node kind matches
@@ -723,13 +736,15 @@ func validateNodeKind(resolvedNode *yaml.Node, expectedKind yaml.Kind, parentNam
value = value[:maxLen] + "..."
}
actualKindStr = fmt.Sprintf("`%s`", value)
+ } else {
+ actualKindStr = fmt.Sprintf("`%s`", actualKindStr)
}
- return validation.NewValidationError(validation.NewTypeMismatchError(parentName, "expected %s, got %s", expectedType, actualKindStr), resolvedNode)
+ return validation.NewValidationError(validation.SeverityError, validation.RuleValidationTypeMismatch, validation.NewTypeMismatchError(parentName, "expected `%s`, got %s", expectedType, actualKindStr), resolvedNode)
}
if !tagMatches {
- return validation.NewValidationError(validation.NewTypeMismatchError(parentName, "expected %s, got %s", expectedType, yml.NodeTagToString(resolvedNode.Tag)), resolvedNode)
+ return validation.NewValidationError(validation.SeverityError, validation.RuleValidationTypeMismatch, validation.NewTypeMismatchError(parentName, "expected `%s`, got `%s`", expectedType, yml.NodeTagToString(resolvedNode.Tag)), resolvedNode)
}
return nil
}
diff --git a/marshaller/unmarshalling_test.go b/marshaller/unmarshalling_test.go
index aa0ab3de..fc64e1f5 100644
--- a/marshaller/unmarshalling_test.go
+++ b/marshaller/unmarshalling_test.go
@@ -153,10 +153,10 @@ func TestUnmarshal_PrimitiveTypes_Error(t *testing.T) {
stringPtrField: "optional field"
`,
wantErrs: []string{
- "[2:1] testPrimitiveModel.boolField is missing",
- "[2:1] testPrimitiveModel.float64Field is missing",
- "[2:1] testPrimitiveModel.intField is missing",
- "[2:1] testPrimitiveModel.stringField is missing",
+ "[2:1] error validation-required-field `testPrimitiveModel.boolField` is required",
+ "[2:1] error validation-required-field `testPrimitiveModel.float64Field` is required",
+ "[2:1] error validation-required-field `testPrimitiveModel.intField` is required",
+ "[2:1] error validation-required-field `testPrimitiveModel.stringField` is required",
},
},
{
@@ -167,7 +167,7 @@ boolField: true
intField: 42
float64Field: 3.14
`,
- wantErrs: []string{"[2:14] testPrimitiveModel.stringField expected string, got sequence"},
+ wantErrs: []string{"[2:14] error validation-type-mismatch testPrimitiveModel.stringField expected `string`, got `sequence`"},
},
{
name: "type mismatch - bool field gets string",
@@ -177,7 +177,7 @@ boolField: "not a bool"
intField: 42
float64Field: 3.14
`,
- wantErrs: []string{"[3:12] testPrimitiveModel.boolField line 3: cannot unmarshal !!str `not a bool` into bool"},
+ wantErrs: []string{"[3:12] error validation-type-mismatch testPrimitiveModel.boolField line 3: cannot unmarshal !!str `not a bool` into bool"},
},
{
name: "type mismatch - int field gets string",
@@ -187,7 +187,7 @@ boolField: true
intField: "not an int"
float64Field: 3.14
`,
- wantErrs: []string{"[4:11] testPrimitiveModel.intField line 4: cannot unmarshal !!str `not an int` into int"},
+ wantErrs: []string{"[4:11] error validation-type-mismatch testPrimitiveModel.intField line 4: cannot unmarshal !!str `not an int` into int"},
},
{
name: "type mismatch - float field gets string",
@@ -197,7 +197,7 @@ boolField: true
intField: 42
float64Field: "not a float"
`,
- wantErrs: []string{"[5:15] testPrimitiveModel.float64Field line 5: cannot unmarshal !!str `not a f...` into float64"},
+ wantErrs: []string{"[5:15] error validation-type-mismatch testPrimitiveModel.float64Field line 5: cannot unmarshal !!str `not a f...` into float64"},
},
{
name: "multiple validation errors",
@@ -206,10 +206,10 @@ boolField: "not a bool"
intField: "not an int"
`,
wantErrs: []string{
- "[2:1] testPrimitiveModel.float64Field is missing",
- "[2:1] testPrimitiveModel.stringField is missing",
- "[2:12] testPrimitiveModel.boolField line 2: cannot unmarshal !!str `not a bool` into bool",
- "[3:11] testPrimitiveModel.intField line 3: cannot unmarshal !!str `not an int` into int",
+ "[2:1] error validation-required-field `testPrimitiveModel.float64Field` is required",
+ "[2:1] error validation-required-field `testPrimitiveModel.stringField` is required",
+ "[2:12] error validation-type-mismatch testPrimitiveModel.boolField line 2: cannot unmarshal !!str `not a bool` into bool",
+ "[3:11] error validation-type-mismatch testPrimitiveModel.intField line 3: cannot unmarshal !!str `not an int` into int",
},
},
}
@@ -358,9 +358,9 @@ nestedModel:
# missing required stringField, boolField, float64Field
`,
wantErrs: []string{
- "[8:3] testPrimitiveModel.stringField is missing",
- "[8:3] testPrimitiveModel.boolField is missing",
- "[8:3] testPrimitiveModel.float64Field is missing",
+ "[8:3] error validation-required-field `testPrimitiveModel.stringField` is required",
+ "[8:3] error validation-required-field `testPrimitiveModel.boolField` is required",
+ "[8:3] error validation-required-field `testPrimitiveModel.float64Field` is required",
},
},
{
@@ -374,7 +374,7 @@ nestedModelValue:
nestedModel:
- "this should be an object"
`,
- wantErrs: []string{"[8:3] testComplexModel.nestedModel expected object, got sequence"},
+ wantErrs: []string{"[8:3] error validation-type-mismatch testComplexModel.nestedModel expected `object`, got `sequence`"},
},
{
name: "type mismatch - array field gets object",
@@ -387,7 +387,7 @@ nestedModelValue:
arrayField:
key: "this should be an array"
`,
- wantErrs: []string{"[8:3] testComplexModel.arrayField expected sequence, got object"},
+ wantErrs: []string{"[8:3] error validation-type-mismatch testComplexModel.arrayField expected `sequence`, got `object`"},
},
{
name: "deeply nested validation error",
@@ -407,7 +407,7 @@ structArrayField:
float64Field: 4.56
# missing required stringField in second element
`,
- wantErrs: []string{"[12:5] testPrimitiveModel.stringField is missing"},
+ wantErrs: []string{"[12:5] error validation-required-field `testPrimitiveModel.stringField` is required"},
},
}
@@ -658,7 +658,7 @@ func TestUnmarshal_RequiredPointer_Error(t *testing.T) {
yml: `
optionalPtr: "only optional set"
`,
- wantErrs: []string{"[2:1] testRequiredPointerModel.requiredPtr is missing"},
+ wantErrs: []string{"[2:1] error validation-required-field `testRequiredPointerModel.requiredPtr` is required"},
},
{
name: "required pointer field with null value should be valid",
@@ -768,12 +768,12 @@ func TestUnmarshal_RequiredNilableTypes_Error(t *testing.T) {
optionalPtr: "only optional set"
`,
wantErrs: []string{
- "[2:1] testRequiredNilableModel.requiredEither is missing",
- "[2:1] testRequiredNilableModel.requiredMap is missing",
- "[2:1] testRequiredNilableModel.requiredPtr is missing",
- "[2:1] testRequiredNilableModel.requiredRawNode is missing",
- "[2:1] testRequiredNilableModel.requiredSlice is missing",
- "[2:1] testRequiredNilableModel.requiredStruct is missing",
+ "[2:1] error validation-required-field `testRequiredNilableModel.requiredEither` is required",
+ "[2:1] error validation-required-field `testRequiredNilableModel.requiredMap` is required",
+ "[2:1] error validation-required-field `testRequiredNilableModel.requiredPtr` is required",
+ "[2:1] error validation-required-field `testRequiredNilableModel.requiredRawNode` is required",
+ "[2:1] error validation-required-field `testRequiredNilableModel.requiredSlice` is required",
+ "[2:1] error validation-required-field `testRequiredNilableModel.requiredStruct` is required",
},
},
{
@@ -784,10 +784,10 @@ requiredSlice: ["item1"]
# missing requiredMap, requiredStruct, requiredEither, requiredRawNode
`,
wantErrs: []string{
- "[2:1] testRequiredNilableModel.requiredEither is missing",
- "[2:1] testRequiredNilableModel.requiredMap is missing",
- "[2:1] testRequiredNilableModel.requiredRawNode is missing",
- "[2:1] testRequiredNilableModel.requiredStruct is missing",
+ "[2:1] error validation-required-field `testRequiredNilableModel.requiredEither` is required",
+ "[2:1] error validation-required-field `testRequiredNilableModel.requiredMap` is required",
+ "[2:1] error validation-required-field `testRequiredNilableModel.requiredRawNode` is required",
+ "[2:1] error validation-required-field `testRequiredNilableModel.requiredStruct` is required",
},
},
{
@@ -804,10 +804,10 @@ requiredEither: "string value"
requiredRawNode: "raw value"
`,
wantErrs: []string{
- "[8:3] testPrimitiveModel.boolField is missing",
- "[8:3] testPrimitiveModel.float64Field is missing",
- "[8:3] testPrimitiveModel.intField is missing",
- "[8:3] testPrimitiveModel.stringField is missing",
+ "[8:3] error validation-required-field `testPrimitiveModel.boolField` is required",
+ "[8:3] error validation-required-field `testPrimitiveModel.float64Field` is required",
+ "[8:3] error validation-required-field `testPrimitiveModel.intField` is required",
+ "[8:3] error validation-required-field `testPrimitiveModel.stringField` is required",
},
},
}
diff --git a/mise-tasks/test b/mise-tasks/test
index 8dee87e7..357c46c1 100755
--- a/mise-tasks/test
+++ b/mise-tasks/test
@@ -19,6 +19,8 @@ else
echo "🧪 Running tests in separate modules..."
(cd jsonschema/oas3/tests && GOWORK=off gotestsum --format testname -- -race ./...)
+ (cd openapi/linter/customrules && GOWORK=off gotestsum --format testname -- -race ./...)
+ (cd openapi/linter/converter/tests && GOWORK=off gotestsum --format testname -- -race ./...)
fi
echo "✅ All tests passed!"
\ No newline at end of file
diff --git a/mise-tasks/update-lint-docs b/mise-tasks/update-lint-docs
new file mode 100755
index 00000000..d32e476c
--- /dev/null
+++ b/mise-tasks/update-lint-docs
@@ -0,0 +1,18 @@
+#!/usr/bin/env bash
+set -euo pipefail
+
+# Update Lint Docs - Automatically update lint rule documentation in READMEs
+# This script uses a Go program to generate a rules table from registered linter rules
+# and updates the corresponding README.md files between the lint rules tags.
+
+echo "🔄 Updating lint rules in README files..."
+
+echo "🚀 Running update-lint-docs tool..."
+go run ./cmd/update-lint-docs
+
+echo ""
+echo "📋 Summary:"
+echo " • Updated openapi/linter/README.md with rules from the OpenAPI linter registry"
+echo ""
+echo "💡 Rule documentation is automatically generated from the Rule interface methods."
+echo " To update the docs, modify the rule's Description(), Link(), etc. methods and re-run this task."
diff --git a/openapi/bundle.go b/openapi/bundle.go
index 84be0452..abce65de 100644
--- a/openapi/bundle.go
+++ b/openapi/bundle.go
@@ -299,7 +299,7 @@ func bundleSchema(ctx context.Context, schema *oas3.JSONSchema[oas3.Referenceabl
if err := bundleObject(ctx, resolvedRefSchema, namingStrategy, references.ResolveOptions{
RootDocument: opts.RootDocument,
TargetDocument: targetDocInfo.ResolvedDocument,
- TargetLocation: targetDocInfo.AbsoluteReference,
+ TargetLocation: targetDocInfo.AbsoluteDocumentPath,
}, componentStorage); err != nil {
return fmt.Errorf("failed to bundle nested references in %s: %w", ref, err)
}
@@ -702,12 +702,12 @@ func bundleGenericReference[T any, V interfaces.Validator[T], C marshaller.CoreM
if targetDocInfo == nil {
return fmt.Errorf("failed to get resolution info for %s reference %s", componentType, refStr)
}
- componentStorage.componentLocations[componentType+"/"+componentName] = targetDocInfo.AbsoluteReference
+ componentStorage.componentLocations[componentType+"/"+componentName] = targetDocInfo.AbsoluteDocumentPath
if err := bundleObject(ctx, bundledRef, namingStrategy, references.ResolveOptions{
RootDocument: opts.RootDocument,
TargetDocument: targetDocInfo.ResolvedDocument,
- TargetLocation: targetDocInfo.AbsoluteReference,
+ TargetLocation: targetDocInfo.AbsoluteDocumentPath,
}, componentStorage); err != nil {
return fmt.Errorf("failed to bundle nested references in %s: %w", ref.GetReference(), err)
}
@@ -736,7 +736,7 @@ func getFinalAbsoluteRef[T any, V interfaces.Validator[T], C marshaller.CoreMode
nextRefInfo := resInfo.Object.GetReferenceResolutionInfo()
if nextRefInfo != nil {
// Build the absolute reference from the final resolution
- finalRef := nextRefInfo.AbsoluteReference
+ finalRef := nextRefInfo.AbsoluteDocumentPath
if nextRefInfo.Object != nil && nextRefInfo.Object.Reference != nil {
// Add the fragment from the chained reference
fragment := string(nextRefInfo.Object.Reference.GetJSONPointer())
diff --git a/openapi/callbacks.go b/openapi/callbacks.go
index c6b770cf..b17bb62f 100644
--- a/openapi/callbacks.go
+++ b/openapi/callbacks.go
@@ -2,6 +2,7 @@ package openapi
import (
"context"
+ "fmt"
"github.com/speakeasy-api/openapi/expression"
"github.com/speakeasy-api/openapi/extensions"
@@ -64,7 +65,7 @@ func (c *Callback) Validate(ctx context.Context, opts ...validation.Option) []er
}
}
- errs = append(errs, validation.NewValidationError(validation.NewValueValidationError("callback expression is invalid: %s", err.Error()), node))
+ errs = append(errs, validation.NewValidationError(validation.SeverityWarning, validation.RuleValidationInvalidFormat, fmt.Errorf("callback expression is invalid: %w", err), node))
}
errs = append(errs, pathItem.Validate(ctx, opts...)...)
diff --git a/openapi/callbacks_validate_test.go b/openapi/callbacks_validate_test.go
index bb5e8161..41d5ee90 100644
--- a/openapi/callbacks_validate_test.go
+++ b/openapi/callbacks_validate_test.go
@@ -117,7 +117,7 @@ func TestCallback_Validate_Error(t *testing.T) {
'200':
description: Webhook received
`,
- wantErrs: []string{"[2:1] callback expression is invalid: expression is not valid, must begin with $: request.body#/webhookUrl"},
+ wantErrs: []string{"[2:1] warning validation-invalid-format callback expression is invalid: expression is not valid, must begin with $: request.body#/webhookUrl"},
},
{
name: "invalid_expression_unknown_type",
@@ -129,7 +129,7 @@ func TestCallback_Validate_Error(t *testing.T) {
'200':
description: Webhook received
`,
- wantErrs: []string{"[2:1] callback expression is invalid: expression is not valid, must begin with one of [url, method, statusCode, request, response, inputs, outputs, steps, workflows, sourceDescriptions, components]: {$unknown.body#/webhookUrl}"},
+ wantErrs: []string{"[2:1] warning validation-invalid-format callback expression is invalid: expression is not valid, must begin with one of [url, method, statusCode, request, response, inputs, outputs, steps, workflows, sourceDescriptions, components]: {$unknown.body#/webhookUrl}"},
},
{
name: "invalid_expression_url_with_extra_parts",
@@ -141,7 +141,7 @@ func TestCallback_Validate_Error(t *testing.T) {
'200':
description: Webhook received
`,
- wantErrs: []string{"[2:1] callback expression is invalid: expression is not valid, extra characters after $url: {$url.extra}"},
+ wantErrs: []string{"[2:1] warning validation-invalid-format callback expression is invalid: expression is not valid, extra characters after $url: {$url.extra}"},
},
{
name: "invalid_expression_request_without_reference",
@@ -153,7 +153,7 @@ func TestCallback_Validate_Error(t *testing.T) {
'200':
description: Webhook received
`,
- wantErrs: []string{"[2:1] callback expression is invalid: expression is not valid, expected one of [header, query, path, body] after $request: {$request}"},
+ wantErrs: []string{"[2:1] warning validation-invalid-format callback expression is invalid: expression is not valid, expected one of [header, query, path, body] after $request: {$request}"},
},
{
name: "invalid_expression_request_unknown_reference",
@@ -165,7 +165,7 @@ func TestCallback_Validate_Error(t *testing.T) {
'200':
description: Webhook received
`,
- wantErrs: []string{"[2:1] callback expression is invalid: expression is not valid, expected one of [header, query, path, body] after $request: {$request.unknown}"},
+ wantErrs: []string{"[2:1] warning validation-invalid-format callback expression is invalid: expression is not valid, expected one of [header, query, path, body] after $request: {$request.unknown}"},
},
{
name: "invalid_expression_request_header_missing_token",
@@ -177,7 +177,7 @@ func TestCallback_Validate_Error(t *testing.T) {
'200':
description: Webhook received
`,
- wantErrs: []string{"[2:1] callback expression is invalid: expression is not valid, expected token after $request.header: {$request.header}"},
+ wantErrs: []string{"[2:1] warning validation-invalid-format callback expression is invalid: expression is not valid, expected token after $request.header: {$request.header}"},
},
{
name: "invalid_expression_request_header_invalid_token",
@@ -189,7 +189,7 @@ func TestCallback_Validate_Error(t *testing.T) {
'200':
description: Webhook received
`,
- wantErrs: []string{"[2:1] callback expression is invalid: header reference must be a valid token [^[!#$%&'*+\\-.^_`|~\\dA-Za-z]+$]: {$request.header.some@header}"},
+ wantErrs: []string{"[2:1] warning validation-invalid-format callback expression is invalid: header reference must be a valid token [^[!#$%&'*+\\-.^_`|~\\dA-Za-z]+$]: {$request.header.some@header}"},
},
{
name: "invalid_expression_request_query_missing_name",
@@ -201,7 +201,7 @@ func TestCallback_Validate_Error(t *testing.T) {
'200':
description: Webhook received
`,
- wantErrs: []string{"[2:1] callback expression is invalid: expression is not valid, expected name after $request.query: {$request.query}"},
+ wantErrs: []string{"[2:1] warning validation-invalid-format callback expression is invalid: expression is not valid, expected name after $request.query: {$request.query}"},
},
{
name: "invalid_expression_request_path_missing_name",
@@ -213,7 +213,7 @@ func TestCallback_Validate_Error(t *testing.T) {
'200':
description: Webhook received
`,
- wantErrs: []string{"[2:1] callback expression is invalid: expression is not valid, expected name after $request.path: {$request.path}"},
+ wantErrs: []string{"[2:1] warning validation-invalid-format callback expression is invalid: expression is not valid, expected name after $request.path: {$request.path}"},
},
{
name: "invalid_expression_request_body_with_extra_parts",
@@ -225,7 +225,7 @@ func TestCallback_Validate_Error(t *testing.T) {
'200':
description: Webhook received
`,
- wantErrs: []string{"[2:1] callback expression is invalid: expression is not valid, only json pointers are allowed after $request.body: {$request.body.extra}"},
+ wantErrs: []string{"[2:1] warning validation-invalid-format callback expression is invalid: expression is not valid, only json pointers are allowed after $request.body: {$request.body.extra}"},
},
{
name: "invalid_expression_invalid_json_pointer",
@@ -237,7 +237,7 @@ func TestCallback_Validate_Error(t *testing.T) {
'200':
description: Webhook received
`,
- wantErrs: []string{"[2:1] callback expression is invalid: validation error -- jsonpointer must start with /: some/path}"},
+ wantErrs: []string{"[2:1] warning validation-invalid-format callback expression is invalid: validation error -- jsonpointer must start with /: some/path}"},
},
{
name: "invalid_nested_pathitem_invalid_server",
@@ -251,7 +251,7 @@ func TestCallback_Validate_Error(t *testing.T) {
'200':
description: Webhook received
`,
- wantErrs: []string{"[4:7] server.url is missing"},
+ wantErrs: []string{"[4:7] error validation-required-field `server.url` is required"},
},
}
diff --git a/openapi/components_validate_test.go b/openapi/components_validate_test.go
index 01bffe8a..390fc5ee 100644
--- a/openapi/components_validate_test.go
+++ b/openapi/components_validate_test.go
@@ -255,7 +255,7 @@ securitySchemes:
InvalidScheme:
description: Some scheme
`,
- wantErrs: []string{"[4:5] securityScheme.type is missing"},
+ wantErrs: []string{"[4:5] error validation-required-field `securityScheme.type` is required"},
},
}
diff --git a/openapi/core/reference.go b/openapi/core/reference.go
index 988e63a7..ee5d2882 100644
--- a/openapi/core/reference.go
+++ b/openapi/core/reference.go
@@ -34,7 +34,7 @@ func (r *Reference[T]) Unmarshal(ctx context.Context, parentName string, node *y
if resolvedNode.Kind != yaml.MappingNode {
r.SetValid(false, false)
- return []error{validation.NewValidationError(validation.NewTypeMismatchError(parentName, "reference expected object, got %s", yml.NodeKindToString(resolvedNode.Kind)), resolvedNode)}, nil
+ return []error{validation.NewValidationError(validation.SeverityError, validation.RuleValidationTypeMismatch, validation.NewTypeMismatchError(parentName, "reference expected `object`, got %s", yml.NodeKindToString(resolvedNode.Kind)), resolvedNode)}, nil
}
if _, _, ok := yml.GetMapElementNodes(ctx, resolvedNode, "$ref"); ok {
diff --git a/openapi/encoding.go b/openapi/encoding.go
index 50b000f1..ca1fdc9b 100644
--- a/openapi/encoding.go
+++ b/openapi/encoding.go
@@ -128,7 +128,7 @@ func (e *Encoding) Validate(ctx context.Context, opts ...validation.Option) []er
for _, mediaType := range mediaTypes {
_, _, err := mime.ParseMediaType(mediaType)
if err != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError(fmt.Sprintf("encoding.contentType %s is not a valid media type: %s", mediaType, err)), core, core.ContentType))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("encoding.contentType %s is not a valid media type: %w", mediaType, err), core, core.ContentType))
}
}
}
@@ -140,7 +140,7 @@ func (e *Encoding) Validate(ctx context.Context, opts ...validation.Option) []er
if core.Style.Present {
allowedStyles := []string{string(SerializationStyleForm), string(SerializationStyleSpaceDelimited), string(SerializationStylePipeDelimited), string(SerializationStyleDeepObject)}
if !slices.Contains(allowedStyles, string(*e.Style)) {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError(fmt.Sprintf("encoding.style must be one of [%s]", strings.Join(allowedStyles, ", "))), core, core.Style))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationAllowedValues, fmt.Errorf("encoding.style must be one of [`%s`]", strings.Join(allowedStyles, ", ")), core, core.Style))
}
}
diff --git a/openapi/encoding_validate_test.go b/openapi/encoding_validate_test.go
index ea99cad0..5e16f4e1 100644
--- a/openapi/encoding_validate_test.go
+++ b/openapi/encoding_validate_test.go
@@ -146,7 +146,7 @@ func TestEncoding_Validate_Error(t *testing.T) {
yml: `
style: invalidStyle
`,
- expectedErr: "style must be one of [form, spaceDelimited, pipeDelimited, deepObject]",
+ expectedErr: "style must be one of [`form, spaceDelimited, pipeDelimited, deepObject`]",
},
}
diff --git a/openapi/examples.go b/openapi/examples.go
index f2bc01ba..2e8c064f 100644
--- a/openapi/examples.go
+++ b/openapi/examples.go
@@ -2,6 +2,7 @@ package openapi
import (
"context"
+ "errors"
"fmt"
"net/url"
@@ -104,27 +105,27 @@ func (e *Example) Validate(ctx context.Context, opts ...validation.Option) []err
// Check mutual exclusivity: value and externalValue
if core.Value.Present && core.ExternalValue.Present {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("example.value and externalValue are mutually exclusive"), core, core.Value))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationMutuallyExclusiveFields, errors.New("example.value and example.externalValue are mutually exclusive"), core, core.Value))
}
// Check mutual exclusivity: dataValue and value
if core.DataValue.Present && core.Value.Present {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("example.dataValue and value are mutually exclusive"), core, core.DataValue))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationMutuallyExclusiveFields, errors.New("example.dataValue and example.value are mutually exclusive"), core, core.DataValue))
}
// Check mutual exclusivity: serializedValue and value
if core.SerializedValue.Present && core.Value.Present {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("example.serializedValue and value are mutually exclusive"), core, core.SerializedValue))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationMutuallyExclusiveFields, errors.New("example.serializedValue and example.value are mutually exclusive"), core, core.SerializedValue))
}
// Check mutual exclusivity: serializedValue and externalValue
if core.SerializedValue.Present && core.ExternalValue.Present {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("example.serializedValue and externalValue are mutually exclusive"), core, core.SerializedValue))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationMutuallyExclusiveFields, errors.New("example.serializedValue and example.externalValue are mutually exclusive"), core, core.SerializedValue))
}
if core.ExternalValue.Present {
if _, err := url.Parse(*e.ExternalValue); err != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError(fmt.Sprintf("example.externalValue is not a valid uri: %s", err)), core, core.ExternalValue))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("example.externalValue is not a valid uri: %w", err), core, core.ExternalValue))
}
}
diff --git a/openapi/examples_validate_test.go b/openapi/examples_validate_test.go
index dd1ac9a6..7a7c670a 100644
--- a/openapi/examples_validate_test.go
+++ b/openapi/examples_validate_test.go
@@ -157,14 +157,14 @@ func TestExample_Validate_Error(t *testing.T) {
summary: Example with invalid URL
externalValue: ":invalid"
`,
- wantErrs: []string{"[3:16] example.externalValue is not a valid uri: parse \":invalid\": missing protocol scheme"},
+ wantErrs: []string{"[3:16] error validation-invalid-format example.externalValue is not a valid uri: parse \":invalid\": missing protocol scheme"},
},
{
name: "invalid external value URL with spaces",
yml: `
externalValue: ":invalid url"
`,
- wantErrs: []string{"[2:16] example.externalValue is not a valid uri: parse \":invalid url\": missing protocol scheme"},
+ wantErrs: []string{"[2:16] error validation-invalid-format example.externalValue is not a valid uri: parse \":invalid url\": missing protocol scheme"},
},
{
name: "both value and external value provided",
@@ -173,7 +173,7 @@ summary: Invalid example
value: "test"
externalValue: "https://example.com/test.json"
`,
- wantErrs: []string{"[3:8] example.value and externalValue are mutually exclusive"},
+ wantErrs: []string{"[3:8] error validation-mutually-exclusive-fields example.value and example.externalValue are mutually exclusive"},
},
{
name: "multiple validation errors",
@@ -182,8 +182,8 @@ value: "test"
externalValue: ":invalid"
`,
wantErrs: []string{
- "[2:8] example.value and externalValue are mutually exclusive",
- "[3:16] example.externalValue is not a valid uri: parse \":invalid\": missing protocol scheme",
+ "[2:8] error validation-mutually-exclusive-fields example.value and example.externalValue are mutually exclusive",
+ "[3:16] error validation-invalid-format example.externalValue is not a valid uri: parse \":invalid\": missing protocol scheme",
},
},
{
@@ -194,7 +194,7 @@ dataValue:
id: 123
value: "test"
`,
- wantErrs: []string{"example.dataValue and value are mutually exclusive"},
+ wantErrs: []string{"error validation-mutually-exclusive-fields example.dataValue and example.value are mutually exclusive"},
},
{
name: "serializedValue and value are mutually exclusive",
@@ -203,7 +203,7 @@ summary: Invalid example
serializedValue: "test=123"
value: "test"
`,
- wantErrs: []string{"example.serializedValue and value are mutually exclusive"},
+ wantErrs: []string{"error validation-mutually-exclusive-fields example.serializedValue and example.value are mutually exclusive"},
},
{
name: "serializedValue and externalValue are mutually exclusive",
@@ -212,23 +212,23 @@ summary: Invalid example
serializedValue: "test=123"
externalValue: https://example.com/test.json
`,
- wantErrs: []string{"example.serializedValue and externalValue are mutually exclusive"},
+ wantErrs: []string{"error validation-mutually-exclusive-fields example.serializedValue and example.externalValue are mutually exclusive"},
},
{
name: "multiple mutual exclusivity violations",
yml: `
summary: Invalid example
dataValue:
- id: 123
+ id: 123
value: "test"
serializedValue: "test=123"
externalValue: https://example.com/test.json
`,
wantErrs: []string{
- "example.value and externalValue are mutually exclusive",
- "example.dataValue and value are mutually exclusive",
- "example.serializedValue and value are mutually exclusive",
- "example.serializedValue and externalValue are mutually exclusive",
+ "error validation-mutually-exclusive-fields example.value and example.externalValue are mutually exclusive",
+ "error validation-mutually-exclusive-fields example.dataValue and example.value are mutually exclusive",
+ "error validation-mutually-exclusive-fields example.serializedValue and example.value are mutually exclusive",
+ "error validation-mutually-exclusive-fields example.serializedValue and example.externalValue are mutually exclusive",
},
},
}
diff --git a/openapi/header.go b/openapi/header.go
index 36591af4..bcae281b 100644
--- a/openapi/header.go
+++ b/openapi/header.go
@@ -2,6 +2,7 @@ package openapi
import (
"context"
+ "fmt"
"slices"
"strings"
@@ -131,7 +132,7 @@ func (h *Header) Validate(ctx context.Context, opts ...validation.Option) []erro
if core.Style.Present {
allowedStyles := []string{string(SerializationStyleSimple)}
if !slices.Contains(allowedStyles, string(*h.Style)) {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("header.style must be one of [%s]", strings.Join(allowedStyles, ", ")), core, core.Style))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationAllowedValues, fmt.Errorf("header.style must be one of [`%s`]", strings.Join(allowedStyles, ", ")), core, core.Style))
}
}
diff --git a/openapi/header_validate_test.go b/openapi/header_validate_test.go
index 4e229c64..b5174a05 100644
--- a/openapi/header_validate_test.go
+++ b/openapi/header_validate_test.go
@@ -128,8 +128,8 @@ schema:
description: Header with invalid schema
`,
wantErrs: []string{
- "[3:9] schema.type value must be one of 'array', 'boolean', 'integer', 'null', 'number', 'object', 'string'",
- "[3:9] schema.type expected array, got string",
+ "[3:9] error validation-invalid-schema schema.type value must be one of 'array', 'boolean', 'integer', 'null', 'number', 'object', 'string'",
+ "[3:9] error validation-type-mismatch schema.type expected `array`, got `string`",
},
},
}
diff --git a/openapi/index.go b/openapi/index.go
new file mode 100644
index 00000000..c8a1eb87
--- /dev/null
+++ b/openapi/index.go
@@ -0,0 +1,2528 @@
+package openapi
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "strings"
+
+ "github.com/speakeasy-api/openapi/internal/interfaces"
+ "github.com/speakeasy-api/openapi/jsonschema/oas3"
+ "github.com/speakeasy-api/openapi/marshaller"
+ "github.com/speakeasy-api/openapi/pointer"
+ "github.com/speakeasy-api/openapi/references"
+ "github.com/speakeasy-api/openapi/validation"
+ "gopkg.in/yaml.v3"
+)
+
+// CircularClassification represents the classification of a circular reference.
+type CircularClassification int
+
+const (
+	// CircularUnclassified means the circular reference has not been classified yet.
+	CircularUnclassified CircularClassification = iota
+	// CircularValid means the circular reference is valid (has a termination point).
+	CircularValid
+	// CircularInvalid means the circular reference is invalid (no termination point).
+	CircularInvalid
+	// CircularPending means the circular reference is part of a polymorphic
+	// composition (oneOf/anyOf/allOf) and needs post-processing.
+	CircularPending
+)
+
+// CircularPathSegment represents a segment of the path through the schema tree.
+// It captures constraint information needed to determine if a circular reference can terminate.
+type CircularPathSegment struct {
+	Field string // e.g., "properties", "items", "allOf", "oneOf", "anyOf", "additionalProperties"
+	PropertyName string // Set if Field == "properties"
+	IsRequired bool // Set if this property is in parent's Required array
+	ArrayMinItems int64 // Parent's MinItems value (0 means empty array terminates)
+	MinProperties int64 // Parent's MinProperties value (0 means empty object terminates)
+	BranchIndex int // Index in oneOf/anyOf/allOf array
+	IsNullable bool // True if this schema allows null (termination point)
+	ParentSchema *oas3.JSONSchemaReferenceable // The parent schema (for polymorphic cases)
+}
+
+// SchemaVisitInfo tracks the visitation state of a schema during indexing.
+type SchemaVisitInfo struct {
+	Location Locations // Location where first seen
+	InCurrentPath bool // True while actively walking this schema's children
+	CircularType CircularClassification // Classification result
+}
+
+// PolymorphicCircularRef tracks a polymorphic schema with recursive branches.
+// Used for post-processing to determine if all branches recurse.
+type PolymorphicCircularRef struct {
+	ParentSchema *oas3.JSONSchemaReferenceable // The parent with oneOf/anyOf/allOf
+	ParentLocation Locations // Location of the parent
+	Field string // "oneOf", "anyOf", or "allOf"
+	BranchResults map[int]CircularClassification // Index -> classification per branch
+	TotalBranches int // Total number of branches
+}
+
+// referenceStackEntry tracks a schema in the active reference resolution chain.
+// Uses JSON pointer strings for identity to handle type differences.
+type referenceStackEntry struct {
+	refTarget string // The $ref target (JSON pointer or URI)
+	location Locations // Where this reference was encountered
+}
+
+// Descriptioner is implemented by indexed nodes that expose a description field.
+type Descriptioner interface {
+	GetDescription() string
+}
+
+// Summarizer is implemented by indexed nodes that expose a summary field.
+type Summarizer interface {
+	GetSummary() string
+}
+
+// DescriptionAndSummary is implemented by indexed nodes that expose both a
+// description and a summary field.
+type DescriptionAndSummary interface {
+	GetDescription() string
+	GetSummary() string
+}
+
+// currentDocumentPath reports the path of the document currently being
+// walked: the top entry of currentDocumentStack. It returns "" when the
+// index is nil or the stack is empty.
+func (i *Index) currentDocumentPath() string {
+	if i == nil || len(i.currentDocumentStack) == 0 {
+		return ""
+	}
+	stack := i.currentDocumentStack
+	return stack[len(stack)-1]
+}
+
+// Index represents a pre-computed index of an OpenAPI document.
+// It provides efficient access to document elements without repeated full traversals.
+// Instances are produced by BuildIndex; the zero value is not usable because
+// the internal tracking maps are only allocated there.
+type Index struct {
+	// Doc is the root document this index was built from.
+	Doc *OpenAPI
+
+	ExternalDocumentation []*IndexNode[*oas3.ExternalDocumentation] // All external documentation nodes
+
+	Tags []*IndexNode[*Tag] // All tags defined in the document
+
+	Servers []*IndexNode[*Server] // All servers defined in the document
+	ServerVariables []*IndexNode[*ServerVariable] // All server variables from all servers
+
+	BooleanSchemas []*IndexNode[*oas3.JSONSchemaReferenceable] // Boolean schema values (true/false)
+	InlineSchemas []*IndexNode[*oas3.JSONSchemaReferenceable] // Schemas defined inline (properties, items, etc.)
+	ComponentSchemas []*IndexNode[*oas3.JSONSchemaReferenceable] // Schemas in /components/schemas/ of main document
+	ExternalSchemas []*IndexNode[*oas3.JSONSchemaReferenceable] // Top-level schemas in external documents
+	SchemaReferences []*IndexNode[*oas3.JSONSchemaReferenceable] // All $ref pointers
+
+	InlinePathItems []*IndexNode[*ReferencedPathItem] // PathItems defined inline (in paths map)
+	ComponentPathItems []*IndexNode[*ReferencedPathItem] // PathItems in /components/pathItems/
+	ExternalPathItems []*IndexNode[*ReferencedPathItem] // Top-level PathItems in external documents
+	PathItemReferences []*IndexNode[*ReferencedPathItem] // All PathItem $ref pointers
+
+	Operations []*IndexNode[*Operation] // All operations (GET, POST, PUT, DELETE, PATCH, HEAD, OPTIONS, TRACE, etc.)
+
+	InlineParameters []*IndexNode[*ReferencedParameter] // Parameters defined inline in operations/path items
+	ComponentParameters []*IndexNode[*ReferencedParameter] // Parameters in /components/parameters/
+	ExternalParameters []*IndexNode[*ReferencedParameter] // Top-level Parameters in external documents
+	ParameterReferences []*IndexNode[*ReferencedParameter] // All Parameter $ref pointers
+
+	Responses []*IndexNode[*Responses] // All Responses containers (operation.responses)
+
+	InlineResponses []*IndexNode[*ReferencedResponse] // Responses defined inline in operations
+	ComponentResponses []*IndexNode[*ReferencedResponse] // Responses in /components/responses/
+	ExternalResponses []*IndexNode[*ReferencedResponse] // Top-level Responses in external documents
+	ResponseReferences []*IndexNode[*ReferencedResponse] // All Response $ref pointers
+
+	InlineRequestBodies []*IndexNode[*ReferencedRequestBody] // RequestBodies defined inline in operations
+	ComponentRequestBodies []*IndexNode[*ReferencedRequestBody] // RequestBodies in /components/requestBodies/
+	ExternalRequestBodies []*IndexNode[*ReferencedRequestBody] // Top-level RequestBodies in external documents
+	RequestBodyReferences []*IndexNode[*ReferencedRequestBody] // All RequestBody $ref pointers
+
+	InlineHeaders []*IndexNode[*ReferencedHeader] // Headers defined inline
+	ComponentHeaders []*IndexNode[*ReferencedHeader] // Headers in /components/headers/
+	ExternalHeaders []*IndexNode[*ReferencedHeader] // Top-level Headers in external documents
+	HeaderReferences []*IndexNode[*ReferencedHeader] // All Header $ref pointers
+
+	InlineExamples []*IndexNode[*ReferencedExample] // Examples defined inline
+	ComponentExamples []*IndexNode[*ReferencedExample] // Examples in /components/examples/
+	ExternalExamples []*IndexNode[*ReferencedExample] // Top-level Examples in external documents
+	ExampleReferences []*IndexNode[*ReferencedExample] // All Example $ref pointers
+
+	InlineLinks []*IndexNode[*ReferencedLink] // Links defined inline in responses
+	ComponentLinks []*IndexNode[*ReferencedLink] // Links in /components/links/
+	ExternalLinks []*IndexNode[*ReferencedLink] // Top-level Links in external documents
+	LinkReferences []*IndexNode[*ReferencedLink] // All Link $ref pointers
+
+	InlineCallbacks []*IndexNode[*ReferencedCallback] // Callbacks defined inline in operations
+	ComponentCallbacks []*IndexNode[*ReferencedCallback] // Callbacks in /components/callbacks/
+	ExternalCallbacks []*IndexNode[*ReferencedCallback] // Top-level Callbacks in external documents
+	CallbackReferences []*IndexNode[*ReferencedCallback] // All Callback $ref pointers
+
+	ComponentSecuritySchemes []*IndexNode[*ReferencedSecurityScheme] // SecuritySchemes in /components/securitySchemes/
+	SecuritySchemeReferences []*IndexNode[*ReferencedSecurityScheme] // All SecurityScheme $ref pointers
+	SecurityRequirements []*IndexNode[*SecurityRequirement] // All security requirement objects
+
+	Discriminators []*IndexNode[*oas3.Discriminator] // All discriminator objects in schemas
+	XMLs []*IndexNode[*oas3.XML] // All XML metadata in schemas
+	MediaTypes []*IndexNode[*MediaType] // All media types in request/response bodies
+	Encodings []*IndexNode[*Encoding] // All encoding objects in media types
+	OAuthFlows []*IndexNode[*OAuthFlows] // All OAuth flows containers
+	OAuthFlowItems []*IndexNode[*OAuthFlow] // Individual OAuth flow objects (implicit, password, clientCredentials, authorizationCode)
+
+	DescriptionNodes []*IndexNode[Descriptioner] // All nodes that have a Description field
+	SummaryNodes []*IndexNode[Summarizer] // All nodes that have a Summary field
+	DescriptionAndSummaryNodes []*IndexNode[DescriptionAndSummary] // All nodes that have both Description and Summary fields
+
+	// NodeToOperations maps yaml.Node pointers to the operations that reference them.
+	// A node may be referenced by multiple operations (e.g., shared schemas via $ref).
+	// This is only populated when BuildIndex is called with WithNodeOperationMap().
+	// nil when the feature is disabled.
+	NodeToOperations map[*yaml.Node][]*IndexNode[*Operation]
+
+	// Errors accumulated while building the index, grouped by category.
+	validationErrs []error
+	resolutionErrs []error
+	circularErrs []error
+
+	validCircularRefs int // Count of valid (terminating) circular references
+	invalidCircularRefs int // Count of invalid (non-terminating) circular references
+
+	// resolveOpts are the reference-resolution options supplied to BuildIndex.
+	resolveOpts references.ResolveOptions
+
+	// Circular reference tracking (internal)
+	indexedSchemas map[*oas3.JSONSchemaReferenceable]bool // Tracks which schemas have been fully indexed
+	indexedParameters map[*Parameter]bool // Tracks which parameters have been fully indexed
+	indexedResponses map[*Response]bool // Tracks which responses have been fully indexed
+	indexedRequestBodies map[*RequestBody]bool // Tracks which request bodies have been fully indexed
+	indexedHeaders map[*Header]bool // Tracks which headers have been fully indexed
+	indexedExamples map[*Example]bool // Tracks which examples have been fully indexed
+	indexedLinks map[*Link]bool // Tracks which links have been fully indexed
+	indexedCallbacks map[*Callback]bool // Tracks which callbacks have been fully indexed
+	indexedPathItems map[*PathItem]bool // Tracks which path items have been fully indexed
+	referenceStack []referenceStackEntry // Active reference resolution chain (by ref target)
+	polymorphicRefs []*PolymorphicCircularRef // Pending polymorphic circulars
+	visitedRefs map[string]bool // Tracks visited ref targets to avoid duplicates
+	indexedReferences map[any]bool // Tracks indexed reference objects to ensure each $ref appears once
+	reportedUnknownProps map[marshaller.CoreModeler]map[string]bool // Tracks which unknown properties have been reported per core model
+	currentDocumentStack []string // Stack of document paths being walked (for determining external vs main)
+	buildNodeOperationMap bool // Whether to build the node-to-operation map
+	currentOperation *IndexNode[*Operation] // Current operation being walked (for node-to-operation mapping)
+	operationLocationDepth int // Location depth when we entered the current operation
+}
+
+// IndexNode wraps a node with its location in the document.
+type IndexNode[T any] struct {
+	// Node is the indexed value itself.
+	Node T
+
+	// Location records where the node was found in the document tree.
+	Location Locations
+}
+
+// IndexOptions configures optional features when building the index.
+type IndexOptions struct {
+	// BuildNodeOperationMap enables building the NodeToOperations map
+	// which tracks which operations reference each yaml.Node.
+	// This is disabled by default as it adds overhead.
+	// Enable this when you need to determine which operations are affected
+	// by issues found on specific nodes (e.g., for validity tracking).
+	BuildNodeOperationMap bool
+}
+
+// IndexOption is a function that configures IndexOptions.
+// Options are applied in order by BuildIndex.
+type IndexOption func(*IndexOptions)
+
+// WithNodeOperationMap enables building the node-to-operation mapping.
+func WithNodeOperationMap() IndexOption {
+	return func(o *IndexOptions) {
+		o.BuildNodeOperationMap = true
+	}
+}
+
+// IsWebhookLocation returns true if this location is within the webhooks section,
+// i.e. any segment of the location has ParentField == "webhooks".
+func IsWebhookLocation(loc Locations) bool {
+	for idx := range loc {
+		if loc[idx].ParentField == "webhooks" {
+			return true
+		}
+	}
+	return false
+}
+
+// ExtractOperationInfo extracts path/webhook name, method, and whether it's a webhook
+// from a location. Works for any location within an operation's subtree.
+//
+// NOTE(review): the loop walks the location deepest-first and keeps overwriting
+// path/method without breaking, so the last match processed — the outermost
+// Paths/PathItem segment — wins. This looks intentional (e.g. to report the
+// enclosing operation for nodes nested under callbacks), but confirm against
+// callers before relying on it.
+func ExtractOperationInfo(loc Locations) (path, method string, isWebhook bool) {
+	for i := len(loc) - 1; i >= 0; i-- {
+		l := loc[i]
+		parentType := GetParentType(l)
+
+		switch parentType {
+		case "Paths":
+			// Paths map key is the URL path (e.g. "/pets/{id}").
+			if l.ParentKey != nil {
+				path = *l.ParentKey
+			}
+		case "PathItem", "ReferencedPathItem":
+			// PathItem map key is the HTTP method (e.g. "get").
+			if l.ParentKey != nil {
+				method = *l.ParentKey
+			}
+		}
+
+		// Webhooks are keyed by name rather than URL path; reuse the path
+		// return value to carry the webhook name.
+		if l.ParentField == "webhooks" {
+			isWebhook = true
+			if l.ParentKey != nil {
+				path = *l.ParentKey
+			}
+		}
+	}
+	return
+}
+
+// BuildIndex creates a new Index by walking the entire OpenAPI document.
+// It resolves references and detects circular reference patterns.
+// Requires resolveOpts to have RootDocument, TargetDocument, and TargetLocation set.
+// Optional features can be enabled via IndexOption functions.
+//
+// Missing required resolve options panic rather than return an error, since
+// that is a programming mistake at the call site, not a document problem.
+func BuildIndex(ctx context.Context, doc *OpenAPI, resolveOpts references.ResolveOptions, opts ...IndexOption) *Index {
+	if resolveOpts.RootDocument == nil {
+		panic("BuildIndex: resolveOpts.RootDocument is required")
+	}
+	if resolveOpts.TargetDocument == nil {
+		panic("BuildIndex: resolveOpts.TargetDocument is required")
+	}
+	if resolveOpts.TargetLocation == "" {
+		panic("BuildIndex: resolveOpts.TargetLocation is required")
+	}
+
+	// Apply options
+	var options IndexOptions
+	for _, opt := range opts {
+		opt(&options)
+	}
+
+	// Allocate every internal tracking map up front so the walk can use them
+	// without nil checks.
+	idx := &Index{
+		Doc: doc,
+		resolveOpts: resolveOpts,
+		indexedSchemas: make(map[*oas3.JSONSchemaReferenceable]bool),
+		indexedParameters: make(map[*Parameter]bool),
+		indexedResponses: make(map[*Response]bool),
+		indexedRequestBodies: make(map[*RequestBody]bool),
+		indexedHeaders: make(map[*Header]bool),
+		indexedExamples: make(map[*Example]bool),
+		indexedLinks: make(map[*Link]bool),
+		indexedCallbacks: make(map[*Callback]bool),
+		indexedPathItems: make(map[*PathItem]bool),
+		referenceStack: make([]referenceStackEntry, 0),
+		polymorphicRefs: make([]*PolymorphicCircularRef, 0),
+		visitedRefs: make(map[string]bool),
+		indexedReferences: make(map[any]bool),
+		reportedUnknownProps: make(map[marshaller.CoreModeler]map[string]bool),
+		currentDocumentStack: []string{resolveOpts.TargetLocation}, // Start with main document
+		buildNodeOperationMap: options.BuildNodeOperationMap,
+	}
+
+	// Initialize the node-to-operation map if enabled
+	if options.BuildNodeOperationMap {
+		idx.NodeToOperations = make(map[*yaml.Node][]*IndexNode[*Operation])
+	}
+
+	// Phase 1: Walk and index everything
+	// NOTE(review): the walk's error return is deliberately discarded here;
+	// errors appear to accumulate on the Index's err fields instead — confirm
+	// against buildIndex.
+	_ = buildIndex(ctx, idx, doc)
+
+	// Phase 2: Post-process polymorphic circular refs
+	idx.finalizePolymorphicCirculars()
+
+	return idx
+}
+
+// GetAllSchemas returns all schemas in the index (boolean, inline, component,
+// and external). Note: SchemaReferences ($ref pointers) are intentionally NOT
+// included here — use the SchemaReferences field or GetAllReferences for those.
+func (i *Index) GetAllSchemas() []*IndexNode[*oas3.JSONSchemaReferenceable] {
+	if i == nil {
+		return nil
+	}
+
+	allSchemas := make([]*IndexNode[*oas3.JSONSchemaReferenceable], 0, len(i.BooleanSchemas)+
+		len(i.InlineSchemas)+
+		len(i.ComponentSchemas)+
+		len(i.ExternalSchemas),
+	)
+	allSchemas = append(allSchemas, i.BooleanSchemas...)
+	allSchemas = append(allSchemas, i.InlineSchemas...)
+	allSchemas = append(allSchemas, i.ComponentSchemas...)
+	allSchemas = append(allSchemas, i.ExternalSchemas...)
+	return allSchemas
+}
+
+// GetAllPathItems returns all path items in the index (inline, component, and external).
+func (i *Index) GetAllPathItems() []*IndexNode[*ReferencedPathItem] {
+	if i == nil {
+		return nil
+	}
+
+	groups := [][]*IndexNode[*ReferencedPathItem]{i.InlinePathItems, i.ComponentPathItems, i.ExternalPathItems}
+	total := 0
+	for _, g := range groups {
+		total += len(g)
+	}
+	out := make([]*IndexNode[*ReferencedPathItem], 0, total)
+	for _, g := range groups {
+		out = append(out, g...)
+	}
+	return out
+}
+
+// GetAllParameters returns all parameters in the index (inline, component, and external).
+func (i *Index) GetAllParameters() []*IndexNode[*ReferencedParameter] {
+	if i == nil {
+		return nil
+	}
+
+	groups := [][]*IndexNode[*ReferencedParameter]{i.InlineParameters, i.ComponentParameters, i.ExternalParameters}
+	total := 0
+	for _, g := range groups {
+		total += len(g)
+	}
+	out := make([]*IndexNode[*ReferencedParameter], 0, total)
+	for _, g := range groups {
+		out = append(out, g...)
+	}
+	return out
+}
+
+// GetAllResponses returns all responses in the index (inline, component, and external).
+func (i *Index) GetAllResponses() []*IndexNode[*ReferencedResponse] {
+	if i == nil {
+		return nil
+	}
+
+	groups := [][]*IndexNode[*ReferencedResponse]{i.InlineResponses, i.ComponentResponses, i.ExternalResponses}
+	total := 0
+	for _, g := range groups {
+		total += len(g)
+	}
+	out := make([]*IndexNode[*ReferencedResponse], 0, total)
+	for _, g := range groups {
+		out = append(out, g...)
+	}
+	return out
+}
+
+// GetAllRequestBodies returns all request bodies in the index (inline, component, and external).
+func (i *Index) GetAllRequestBodies() []*IndexNode[*ReferencedRequestBody] {
+	if i == nil {
+		return nil
+	}
+
+	groups := [][]*IndexNode[*ReferencedRequestBody]{i.InlineRequestBodies, i.ComponentRequestBodies, i.ExternalRequestBodies}
+	total := 0
+	for _, g := range groups {
+		total += len(g)
+	}
+	out := make([]*IndexNode[*ReferencedRequestBody], 0, total)
+	for _, g := range groups {
+		out = append(out, g...)
+	}
+	return out
+}
+
+// GetAllHeaders returns all headers in the index (inline, component, and external).
+func (i *Index) GetAllHeaders() []*IndexNode[*ReferencedHeader] {
+	if i == nil {
+		return nil
+	}
+
+	groups := [][]*IndexNode[*ReferencedHeader]{i.InlineHeaders, i.ComponentHeaders, i.ExternalHeaders}
+	total := 0
+	for _, g := range groups {
+		total += len(g)
+	}
+	out := make([]*IndexNode[*ReferencedHeader], 0, total)
+	for _, g := range groups {
+		out = append(out, g...)
+	}
+	return out
+}
+
+// GetAllExamples returns all examples in the index (inline, component, and external).
+func (i *Index) GetAllExamples() []*IndexNode[*ReferencedExample] {
+	if i == nil {
+		return nil
+	}
+
+	groups := [][]*IndexNode[*ReferencedExample]{i.InlineExamples, i.ComponentExamples, i.ExternalExamples}
+	total := 0
+	for _, g := range groups {
+		total += len(g)
+	}
+	out := make([]*IndexNode[*ReferencedExample], 0, total)
+	for _, g := range groups {
+		out = append(out, g...)
+	}
+	return out
+}
+
+// GetAllLinks returns all links in the index (inline, component, and external).
+func (i *Index) GetAllLinks() []*IndexNode[*ReferencedLink] {
+	if i == nil {
+		return nil
+	}
+
+	groups := [][]*IndexNode[*ReferencedLink]{i.InlineLinks, i.ComponentLinks, i.ExternalLinks}
+	total := 0
+	for _, g := range groups {
+		total += len(g)
+	}
+	out := make([]*IndexNode[*ReferencedLink], 0, total)
+	for _, g := range groups {
+		out = append(out, g...)
+	}
+	return out
+}
+
+// GetAllCallbacks returns all callbacks in the index (inline, component, and external).
+func (i *Index) GetAllCallbacks() []*IndexNode[*ReferencedCallback] {
+	if i == nil {
+		return nil
+	}
+
+	groups := [][]*IndexNode[*ReferencedCallback]{i.InlineCallbacks, i.ComponentCallbacks, i.ExternalCallbacks}
+	total := 0
+	for _, g := range groups {
+		total += len(g)
+	}
+	out := make([]*IndexNode[*ReferencedCallback], 0, total)
+	for _, g := range groups {
+		out = append(out, g...)
+	}
+	return out
+}
+
+// ReferenceNode represents any node that can be a reference in an OpenAPI document.
+// This interface is satisfied by both Reference[T, V, C] types (PathItem, Parameter, Response, etc.)
+// and JSONSchemaReferenceable.
+type ReferenceNode interface {
+	// GetReference returns the reference target (meaningful when IsReference is true).
+	GetReference() references.Reference
+	// IsReference reports whether this node is a $ref rather than an inline value.
+	IsReference() bool
+	// GetRootNode returns the underlying yaml node the value was parsed from.
+	GetRootNode() *yaml.Node
+}
+
+// GetAllReferences returns all reference nodes in the index across all reference types.
+// This includes SchemaReferences, PathItemReferences, ParameterReferences, ResponseReferences,
+// RequestBodyReferences, HeaderReferences, ExampleReferences, LinkReferences, CallbackReferences,
+// and SecuritySchemeReferences.
+func (i *Index) GetAllReferences() []*IndexNode[ReferenceNode] {
+ if i == nil {
+ return nil
+ }
+
+ totalCount := len(i.SchemaReferences) +
+ len(i.PathItemReferences) +
+ len(i.ParameterReferences) +
+ len(i.ResponseReferences) +
+ len(i.RequestBodyReferences) +
+ len(i.HeaderReferences) +
+ len(i.ExampleReferences) +
+ len(i.LinkReferences) +
+ len(i.CallbackReferences) +
+ len(i.SecuritySchemeReferences)
+
+ allReferences := make([]*IndexNode[ReferenceNode], 0, totalCount)
+
+ // Add schema references
+ for _, ref := range i.SchemaReferences {
+ allReferences = append(allReferences, &IndexNode[ReferenceNode]{
+ Node: ref.Node,
+ Location: ref.Location,
+ })
+ }
+
+ // Add path item references
+ for _, ref := range i.PathItemReferences {
+ allReferences = append(allReferences, &IndexNode[ReferenceNode]{
+ Node: ref.Node,
+ Location: ref.Location,
+ })
+ }
+
+ // Add parameter references
+ for _, ref := range i.ParameterReferences {
+ allReferences = append(allReferences, &IndexNode[ReferenceNode]{
+ Node: ref.Node,
+ Location: ref.Location,
+ })
+ }
+
+ // Add response references
+ for _, ref := range i.ResponseReferences {
+ allReferences = append(allReferences, &IndexNode[ReferenceNode]{
+ Node: ref.Node,
+ Location: ref.Location,
+ })
+ }
+
+ // Add request body references
+ for _, ref := range i.RequestBodyReferences {
+ allReferences = append(allReferences, &IndexNode[ReferenceNode]{
+ Node: ref.Node,
+ Location: ref.Location,
+ })
+ }
+
+ // Add header references
+ for _, ref := range i.HeaderReferences {
+ allReferences = append(allReferences, &IndexNode[ReferenceNode]{
+ Node: ref.Node,
+ Location: ref.Location,
+ })
+ }
+
+ // Add example references
+ for _, ref := range i.ExampleReferences {
+ allReferences = append(allReferences, &IndexNode[ReferenceNode]{
+ Node: ref.Node,
+ Location: ref.Location,
+ })
+ }
+
+ // Add link references
+ for _, ref := range i.LinkReferences {
+ allReferences = append(allReferences, &IndexNode[ReferenceNode]{
+ Node: ref.Node,
+ Location: ref.Location,
+ })
+ }
+
+ // Add callback references
+ for _, ref := range i.CallbackReferences {
+ allReferences = append(allReferences, &IndexNode[ReferenceNode]{
+ Node: ref.Node,
+ Location: ref.Location,
+ })
+ }
+
+ // Add security scheme references
+ for _, ref := range i.SecuritySchemeReferences {
+ allReferences = append(allReferences, &IndexNode[ReferenceNode]{
+ Node: ref.Node,
+ Location: ref.Location,
+ })
+ }
+
+ return allReferences
+}
+
+// GetValidationErrors returns validation errors from resolution operations.
+func (i *Index) GetValidationErrors() []error {
+ if i == nil {
+ return nil
+ }
+ return i.validationErrs
+}
+
+// GetResolutionErrors returns errors from failed reference resolution.
+func (i *Index) GetResolutionErrors() []error {
+ if i == nil {
+ return nil
+ }
+ return i.resolutionErrs
+}
+
+// GetCircularReferenceErrors returns invalid (non-terminating) circular reference errors.
+func (i *Index) GetCircularReferenceErrors() []error {
+ if i == nil {
+ return nil
+ }
+ return i.circularErrs
+}
+
+// GetAllErrors returns all errors collected during indexing.
+func (i *Index) GetAllErrors() []error {
+ if i == nil {
+ return nil
+ }
+ all := make([]error, 0, len(i.validationErrs)+len(i.resolutionErrs)+len(i.circularErrs))
+ all = append(all, i.validationErrs...)
+ all = append(all, i.resolutionErrs...)
+ all = append(all, i.circularErrs...)
+ return all
+}
+
+// HasErrors returns true if any errors were collected during indexing.
+func (i *Index) HasErrors() bool {
+ if i == nil {
+ return false
+ }
+ return len(i.validationErrs) > 0 || len(i.resolutionErrs) > 0 || len(i.circularErrs) > 0
+}
+
+// GetValidCircularRefCount returns the count of valid (terminating) circular references found during indexing.
+func (i *Index) GetValidCircularRefCount() int {
+ if i == nil {
+ return 0
+ }
+ return i.validCircularRefs
+}
+
+// GetInvalidCircularRefCount returns the count of invalid (non-terminating) circular references found during indexing.
+func (i *Index) GetInvalidCircularRefCount() int {
+ if i == nil {
+ return 0
+ }
+ return i.invalidCircularRefs
+}
+
+// GetNodeOperations returns the operations that reference a given yaml.Node.
+// Returns nil if the node was not found or if the node-to-operation mapping was not enabled.
+// Enable this feature by passing WithNodeOperationMap() to BuildIndex.
+func (i *Index) GetNodeOperations(node *yaml.Node) []*IndexNode[*Operation] {
+ if i == nil || i.NodeToOperations == nil || node == nil {
+ return nil
+ }
+ return i.NodeToOperations[node]
+}
+
+// registerNodeWithOperation adds a node-operation mapping, avoiding duplicates.
+func (i *Index) registerNodeWithOperation(node *yaml.Node, op *IndexNode[*Operation]) {
+ if node == nil || op == nil || i.NodeToOperations == nil {
+ return
+ }
+ // Check for duplicates
+ existing := i.NodeToOperations[node]
+ for _, existingOp := range existing {
+ if existingOp == op {
+ return
+ }
+ }
+ i.NodeToOperations[node] = append(existing, op)
+}
+
// buildIndex walks obj and dispatches every visited item to the matching
// index* handler on index. Walking stops and the error is returned as soon
// as any handler fails; otherwise nil is returned after the walk completes.
func buildIndex[T any](ctx context.Context, index *Index, obj *T) error {
	for item := range Walk(ctx, obj) {
		if err := item.Match(Matcher{
			ExternalDocs: func(ed *oas3.ExternalDocumentation) error {
				index.indexExternalDocs(ctx, item.Location, ed)
				return nil
			},
			Tag: func(t *Tag) error { index.indexTag(ctx, item.Location, t); return nil },
			Server: func(s *Server) error { index.indexServer(ctx, item.Location, s); return nil },
			ServerVariable: func(sv *ServerVariable) error { index.indexServerVariable(ctx, item.Location, sv); return nil },
			ReferencedPathItem: func(rpi *ReferencedPathItem) error {
				index.indexReferencedPathItem(ctx, item.Location, rpi)
				return nil
			},
			ReferencedParameter: func(rp *ReferencedParameter) error {
				index.indexReferencedParameter(ctx, item.Location, rp)
				return nil
			},
			// indexSchema is the only handler that can itself fail (resolution
			// walks nested documents), so its error is propagated directly.
			Schema: func(j *oas3.JSONSchemaReferenceable) error {
				return index.indexSchema(ctx, item.Location, j)
			},
			Discriminator: func(d *oas3.Discriminator) error {
				index.indexDiscriminator(ctx, item.Location, d)
				return nil
			},
			XML: func(x *oas3.XML) error {
				index.indexXML(ctx, item.Location, x)
				return nil
			},
			MediaType: func(mt *MediaType) error {
				index.indexMediaType(ctx, item.Location, mt)
				return nil
			},
			Encoding: func(enc *Encoding) error {
				index.indexEncoding(ctx, item.Location, enc)
				return nil
			},
			ReferencedHeader: func(rh *ReferencedHeader) error {
				index.indexReferencedHeader(ctx, item.Location, rh)
				return nil
			},
			ReferencedExample: func(re *ReferencedExample) error {
				index.indexReferencedExample(ctx, item.Location, re)
				return nil
			},
			Operation: func(op *Operation) error {
				index.indexOperation(ctx, item.Location, op)
				return nil
			},
			ReferencedRequestBody: func(rb *ReferencedRequestBody) error {
				index.indexReferencedRequestBody(ctx, item.Location, rb)
				return nil
			},
			Responses: func(r *Responses) error {
				index.indexResponses(ctx, item.Location, r)
				return nil
			},
			ReferencedResponse: func(rr *ReferencedResponse) error {
				index.indexReferencedResponse(ctx, item.Location, rr)
				return nil
			},
			ReferencedLink: func(rl *ReferencedLink) error {
				index.indexReferencedLink(ctx, item.Location, rl)
				return nil
			},
			ReferencedCallback: func(rc *ReferencedCallback) error {
				index.indexReferencedCallback(ctx, item.Location, rc)
				return nil
			},
			ReferencedSecurityScheme: func(rss *ReferencedSecurityScheme) error {
				index.indexReferencedSecurityScheme(ctx, item.Location, rss)
				return nil
			},
			Security: func(req *SecurityRequirement) error {
				index.indexSecurityRequirement(ctx, item.Location, req)
				return nil
			},
			OAuthFlows: func(of *OAuthFlows) error {
				index.indexOAuthFlows(ctx, item.Location, of)
				return nil
			},
			OAuthFlow: func(of *OAuthFlow) error {
				index.indexOAuthFlow(ctx, item.Location, of)
				return nil
			},
			// Any fires for every walked item and carries the cross-cutting
			// bookkeeping: operation attribution, unknown-property checks,
			// and description/summary indexing.
			Any: func(a any) error {
				// Node-to-operation mapping: check if we've exited the current operation's subtree
				// Only check location depth when NOT in a reference walk
				// During reference walks (len(referenceStack) > 0), location depth resets for the resolved target
				// but we should continue associating nodes with the current operation
				if index.buildNodeOperationMap && index.currentOperation != nil && len(index.referenceStack) == 0 {
					if len(item.Location) < index.operationLocationDepth {
						// We've moved to a shallower level - no longer under the operation
						index.currentOperation = nil
					}
				}

				// Register nodes with current operation if applicable
				if index.buildNodeOperationMap && index.currentOperation != nil {
					// Register the root node
					if rootNode := getRootNodeFromAny(a); rootNode != nil {
						index.registerNodeWithOperation(rootNode, index.currentOperation)
					}
					// Also register all leaf nodes from the core model
					// This ensures scalar values (like items: true) are also mapped
					if core := getCoreModelFromAny(a); core != nil {
						for _, node := range marshaller.CollectLeafNodes(core) {
							index.registerNodeWithOperation(node, index.currentOperation)
						}
					}
				}

				// Check for unknown properties on any model with a core
				if core := getCoreModelFromAny(a); core != nil {
					if coreModeler, ok := core.(marshaller.CoreModeler); ok {
						index.checkUnknownProperties(ctx, coreModeler)
					}
				}

				if d, ok := a.(Descriptioner); ok {
					index.indexDescriptionNode(ctx, item.Location, d)
				}
				if s, ok := a.(Summarizer); ok {
					index.indexSummaryNode(ctx, item.Location, s)
				}
				if ds, ok := a.(DescriptionAndSummary); ok {
					index.indexDescriptionAndSummaryNode(ctx, item.Location, ds)
				}
				return nil
			},
		}); err != nil {
			return err
		}
	}

	return nil
}
+
// indexSchema resolves (if needed) and categorizes a single schema node into
// boolean, reference, component, external, or inline buckets. Reference
// schemas are walked recursively; the reference stack detects circular
// references, which are classified before the walk would repeat. Resolution
// failures are recorded as resolution errors rather than returned.
func (i *Index) indexSchema(ctx context.Context, loc Locations, schema *oas3.JSONSchemaReferenceable) error {
	// Resolve if needed (do this first to get the resolved schema for tracking)
	if !schema.IsResolved() {
		vErrs, err := schema.Resolve(ctx, i.getCurrentResolveOptions())
		if err != nil {
			i.resolutionErrs = append(i.resolutionErrs, validation.NewValidationErrorWithDocumentLocation(
				validation.SeverityError,
				"resolution-json-schema",
				err,
				getSchemaErrorNode(schema),
				i.documentPathForSchema(schema),
			))
			// Resolution failure is recorded, not propagated — indexing continues.
			return nil
		}
		i.validationErrs = append(i.validationErrs, i.applyDocumentLocation(vErrs, i.documentPathForSchema(schema))...)
		if resolved := schema.GetResolvedSchema(); resolved != nil && i.Doc != nil {
			opts := i.referenceValidationOptions()
			schemaErrs := resolved.Validate(ctx, opts...)
			i.validationErrs = append(i.validationErrs, i.applyDocumentLocation(schemaErrs, i.documentPathForSchema(schema))...)
		}
	}

	// Index the schema based on its type
	if schema.IsBool() {
		if !i.indexedSchemas[schema] {
			i.BooleanSchemas = append(i.BooleanSchemas, &IndexNode[*oas3.JSONSchemaReferenceable]{
				Node:     schema,
				Location: loc,
			})
			i.indexedSchemas[schema] = true
		}
		return nil
	}

	if schema.IsReference() {
		// Add to references list only if this exact schema object hasn't been indexed yet
		// This ensures each $ref in the source document is indexed exactly once
		if !i.indexedSchemas[schema] {
			i.SchemaReferences = append(i.SchemaReferences, &IndexNode[*oas3.JSONSchemaReferenceable]{
				Node:     schema,
				Location: loc,
			})
			i.indexedSchemas[schema] = true
		}

		// Get the $ref target for tracking
		refTarget := getRefTarget(schema)
		if refTarget == "" {
			return nil // Can't track without a ref target
		}

		// IMPORTANT: Check circular reference BEFORE walking
		// A schema might be visited AND currently in the reference stack (circular case)
		for stackIdx, entry := range i.referenceStack {
			if entry.refTarget == refTarget {
				// CIRCULAR REFERENCE DETECTED - this is the SECOND+ encounter
				// Build path segments from first occurrence to current
				pathSegments := i.buildPathSegmentsFromStack(stackIdx, loc)
				externalDocumentPath := ""
				currentDocPath := i.currentDocumentPath()
				if currentDocPath != i.resolveOpts.TargetLocation {
					externalDocumentPath = currentDocPath
				}
				circularChain := i.buildCircularReferenceChain(stackIdx, refTarget)

				// Classify the circular reference
				classification, polymorphicInfo := i.classifyCircularPath(schema, pathSegments, loc)

				switch classification {
				case CircularInvalid:
					i.invalidCircularRefs++
					err := fmt.Errorf("non-terminating circular reference detected: %s", joinReferenceChainWithArrows(circularChain))
					i.circularErrs = append(i.circularErrs, validation.NewValidationErrorWithDocumentLocation(
						validation.SeverityError,
						"circular-reference-invalid",
						err,
						getSchemaErrorNode(schema),
						externalDocumentPath,
					))
				case CircularPending:
					if polymorphicInfo != nil {
						i.recordPolymorphicBranch(polymorphicInfo)
					}
				case CircularValid:
					i.validCircularRefs++
				case CircularUnclassified:
					// No action needed for unclassified circulars
				}

				// Stop processing this branch - don't walk the same schema again
				return nil
			}
		}

		// Get the document path for the resolved schema
		info := schema.GetReferenceResolutionInfo()
		var docPath string
		if info != nil {
			docPath = info.AbsoluteDocumentPath
		}

		// Push ref target onto reference stack
		i.referenceStack = append(i.referenceStack, referenceStackEntry{
			refTarget: refTarget,
			location:  copyLocations(loc),
		})

		// Push document path onto document stack BEFORE walking
		// This allows nested resolved documents (including returning to main) to
		// attribute errors to the correct document.
		currentDoc := ""
		if len(i.currentDocumentStack) > 0 {
			currentDoc = i.currentDocumentStack[len(i.currentDocumentStack)-1]
		}
		if docPath != "" && docPath != currentDoc {
			i.currentDocumentStack = append(i.currentDocumentStack, docPath)
			defer func() {
				// Pop from document stack
				if len(i.currentDocumentStack) > 1 {
					i.currentDocumentStack = i.currentDocumentStack[:len(i.currentDocumentStack)-1]
				}
			}()
		}

		// Get the resolved schema and recursively walk it
		// Walk API doesn't walk resolved references automatically - we must walk them
		resolved := schema.GetResolvedSchema()
		if resolved != nil {
			// Convert Concrete to Referenceable for walking
			refableResolved := oas3.ConcreteToReferenceable(resolved)
			if err := buildIndex(ctx, i, refableResolved); err != nil {
				// Pop the reference stack before propagating so it stays balanced.
				i.referenceStack = i.referenceStack[:len(i.referenceStack)-1]
				return err
			}
		}

		// Pop from reference stack
		i.referenceStack = i.referenceStack[:len(i.referenceStack)-1]

		return nil
	}

	// Non-reference schema (component, external, or inline)
	// Note: We don't use indexedSchemas check here because schemas can be referenced
	// from multiple paths and should be indexed for each occurrence

	// Check if this is a top-level component in the main document
	if isTopLevelComponent(loc, "schemas") {
		if !i.indexedSchemas[schema] {
			i.ComponentSchemas = append(i.ComponentSchemas, &IndexNode[*oas3.JSONSchemaReferenceable]{
				Node:     schema,
				Location: loc,
			})
			i.indexedSchemas[schema] = true
		}
		return nil
	}

	// Check if this is a top-level schema in an external document
	// Important: Only mark as external if it's NOT from the main document
	if isTopLevelExternalSchema(loc) {
		if !i.isFromMainDocument() && !i.indexedSchemas[schema] {
			i.ExternalSchemas = append(i.ExternalSchemas, &IndexNode[*oas3.JSONSchemaReferenceable]{
				Node:     schema,
				Location: loc,
			})
			i.indexedSchemas[schema] = true
		}
		return nil
	}

	// Everything else is an inline schema
	// Inline schemas can appear multiple times (e.g., same property type in different schemas)
	// but we only index each unique schema object once
	if !i.indexedSchemas[schema] {
		i.InlineSchemas = append(i.InlineSchemas, &IndexNode[*oas3.JSONSchemaReferenceable]{
			Node:     schema,
			Location: loc,
		})
		i.indexedSchemas[schema] = true
	}

	return nil
}
+
+// isTopLevelExternalSchema checks if the location represents a top-level schema
+// in an external document (i.e., at the root of an external document, not under /components/).
+func isTopLevelExternalSchema(loc Locations) bool {
+ // Top-level external schemas appear at location "/" (root of external doc)
+ // They have 0 location contexts (empty Locations slice)
+ if len(loc) == 0 {
+ return true
+ }
+
+ // Single context with no ParentField (or empty ParentField) also indicates root
+ if len(loc) == 1 && loc[0].ParentField == "" {
+ return true
+ }
+
+ return false
+}
+
+// isFromMainDocument checks if we're currently walking the main document
+// by checking the current document stack.
+func (i *Index) isFromMainDocument() bool {
+ if len(i.currentDocumentStack) == 0 {
+ return true // Safety fallback - assume main document
+ }
+
+ currentDoc := i.currentDocumentStack[len(i.currentDocumentStack)-1]
+ mainDoc := i.resolveOpts.TargetLocation
+
+ return currentDoc == mainDoc
+}
+
+// buildPathSegmentsFromStack builds path segments from a point in the reference stack to current location.
+func (i *Index) buildPathSegmentsFromStack(startStackIdx int, currentLoc Locations) []CircularPathSegment {
+ // Collect only the segments WITHIN the circular loop.
+ // Skip referenceStack[startStackIdx].location because it contains the path
+ // leading TO the circular loop start (outside the loop), not the path within it.
+ // Only include entries after startStackIdx (intermediate refs in the loop) plus currentLoc.
+ var segments []CircularPathSegment
+
+ for stackIdx := startStackIdx + 1; stackIdx < len(i.referenceStack); stackIdx++ {
+ entry := i.referenceStack[stackIdx]
+ for _, locCtx := range entry.location {
+ segments = append(segments, buildPathSegment(locCtx))
+ }
+ }
+
+ // Add segments from current location
+ for _, locCtx := range currentLoc {
+ segments = append(segments, buildPathSegment(locCtx))
+ }
+
+ return segments
+}
+
+func (i *Index) buildCircularReferenceChain(startStackIdx int, refTarget string) []string {
+ chain := make([]string, 0, len(i.referenceStack)-startStackIdx+1)
+ for stackIdx := startStackIdx; stackIdx < len(i.referenceStack); stackIdx++ {
+ chain = append(chain, i.referenceStack[stackIdx].refTarget)
+ }
+ chain = append(chain, refTarget)
+ return chain
+}
+
+// checkUnknownProperties checks for unknown properties in a core model and adds warnings.
+func (i *Index) checkUnknownProperties(_ context.Context, core marshaller.CoreModeler) {
+ if core == nil {
+ return
+ }
+
+ unknownProps := core.GetUnknownProperties()
+ if len(unknownProps) == 0 {
+ return
+ }
+
+ // Initialize the map for this core model if not already present
+ if i.reportedUnknownProps[core] == nil {
+ i.reportedUnknownProps[core] = make(map[string]bool)
+ }
+
+ docPath := ""
+ if len(i.currentDocumentStack) > 0 {
+ currentDoc := i.currentDocumentStack[len(i.currentDocumentStack)-1]
+ if currentDoc != i.resolveOpts.TargetLocation {
+ docPath = currentDoc
+ }
+ }
+
+ for _, prop := range unknownProps {
+ // Skip if this property has already been reported for this core model
+ if i.reportedUnknownProps[core][prop] {
+ continue
+ }
+
+ // Mark as reported
+ i.reportedUnknownProps[core][prop] = true
+
+ err := fmt.Errorf("unknown property `%s` found", prop)
+ i.validationErrs = append(i.validationErrs, validation.NewValidationErrorWithDocumentLocation(
+ validation.SeverityWarning,
+ "validation-unknown-properties",
+ err,
+ core.GetRootNode(),
+ docPath,
+ ))
+ }
+}
+
+func (i *Index) indexExternalDocs(_ context.Context, loc Locations, ed *oas3.ExternalDocumentation) {
+ i.ExternalDocumentation = append(i.ExternalDocumentation, &IndexNode[*oas3.ExternalDocumentation]{
+ Node: ed,
+ Location: loc,
+ })
+}
+
+func (i *Index) indexTag(_ context.Context, loc Locations, tag *Tag) {
+ i.Tags = append(i.Tags, &IndexNode[*Tag]{
+ Node: tag,
+ Location: loc,
+ })
+}
+
+func (i *Index) indexServer(_ context.Context, loc Locations, server *Server) {
+ i.Servers = append(i.Servers, &IndexNode[*Server]{
+ Node: server,
+ Location: loc,
+ })
+}
+
+func (i *Index) indexServerVariable(_ context.Context, loc Locations, serverVariable *ServerVariable) {
+ i.ServerVariables = append(i.ServerVariables, &IndexNode[*ServerVariable]{
+ Node: serverVariable,
+ Location: loc,
+ })
+}
+
// indexReferencedPathItem resolves (if needed) and categorizes a path item into
// reference, component, external, or inline buckets. Resolved reference targets
// are walked explicitly so their contents get indexed, with the document stack
// tracking which document errors should be attributed to.
func (i *Index) indexReferencedPathItem(ctx context.Context, loc Locations, pathItem *ReferencedPathItem) {
	if pathItem == nil {
		return
	}

	if pathItem.IsReference() && !pathItem.IsResolved() {
		resolveAndValidateReference(i, ctx, pathItem)
	}

	// Index description and summary if both are present
	// For PathItems wrapped in References, we need to get the underlying PathItem
	obj := pathItem.GetObject()
	if obj != nil {
		desc := obj.GetDescription()
		summary := obj.GetSummary()

		if desc != "" {
			i.indexDescriptionNode(ctx, loc, obj)
		}
		if summary != "" {
			i.indexSummaryNode(ctx, loc, obj)
		}
		if desc != "" && summary != "" {
			i.indexDescriptionAndSummaryNode(ctx, loc, obj)
		}
	}

	// Categorize path items similarly to schemas
	if pathItem.IsReference() {
		// Add to references list only if this exact reference object hasn't been indexed
		if !i.indexedReferences[pathItem] {
			i.PathItemReferences = append(i.PathItemReferences, &IndexNode[*ReferencedPathItem]{
				Node:     pathItem,
				Location: loc,
			})
			i.indexedReferences[pathItem] = true
		}

		// Get the document path for the resolved path item
		info := pathItem.GetReferenceResolutionInfo()
		var docPath string
		if info != nil {
			docPath = info.AbsoluteDocumentPath
		}

		// Push document path onto document stack BEFORE walking
		currentDoc := ""
		if len(i.currentDocumentStack) > 0 {
			currentDoc = i.currentDocumentStack[len(i.currentDocumentStack)-1]
		}
		if docPath != "" && docPath != currentDoc {
			i.currentDocumentStack = append(i.currentDocumentStack, docPath)
			defer func() {
				// Pop from document stack
				if len(i.currentDocumentStack) > 1 {
					i.currentDocumentStack = i.currentDocumentStack[:len(i.currentDocumentStack)-1]
				}
			}()
		}

		// If resolved, explicitly walk the resolved content (similar to how schemas are handled)
		resolved := pathItem.GetObject()
		if resolved != nil {
			// Wrap the resolved PathItem back into a ReferencedPathItem for walking
			wrapped := &ReferencedPathItem{Object: resolved}
			// NOTE(review): errors from walking the resolved target are discarded
			// here — confirm this is intended (indexSchema propagates them).
			_ = buildIndex(ctx, i, wrapped)
		}
		return
	}

	if obj == nil {
		return
	}

	// Check if this is a component path item
	if isTopLevelComponent(loc, "pathItems") {
		if !i.indexedPathItems[obj] {
			i.ComponentPathItems = append(i.ComponentPathItems, &IndexNode[*ReferencedPathItem]{
				Node:     pathItem,
				Location: loc,
			})
			i.indexedPathItems[obj] = true
		}
		return
	}

	// Check if this is a top-level path item in an external document
	// External path items appear at location "/" (root of external doc)
	if isTopLevelExternalSchema(loc) {
		if !i.indexedPathItems[obj] {
			i.ExternalPathItems = append(i.ExternalPathItems, &IndexNode[*ReferencedPathItem]{
				Node:     pathItem,
				Location: loc,
			})
			i.indexedPathItems[obj] = true
		}
		return
	}

	// Everything else is an inline path item
	if !i.indexedPathItems[obj] {
		i.InlinePathItems = append(i.InlinePathItems, &IndexNode[*ReferencedPathItem]{
			Node:     pathItem,
			Location: loc,
		})
		i.indexedPathItems[obj] = true
	}
}
+
+func (i *Index) indexOperation(_ context.Context, loc Locations, operation *Operation) {
+ if operation == nil {
+ return
+ }
+
+ indexNode := &IndexNode[*Operation]{
+ Node: operation,
+ Location: loc,
+ }
+ i.Operations = append(i.Operations, indexNode)
+
+ // Track current operation for node-to-operation mapping
+ if i.buildNodeOperationMap {
+ i.currentOperation = indexNode
+ i.operationLocationDepth = len(loc)
+ }
+}
+
// indexReferencedParameter resolves (if needed) and categorizes a parameter
// into reference, component, external, or inline buckets. Resolved reference
// targets are walked explicitly, with the document stack tracking error
// attribution for external documents.
func (i *Index) indexReferencedParameter(ctx context.Context, loc Locations, param *ReferencedParameter) {
	if param == nil {
		return
	}

	if param.IsReference() && !param.IsResolved() {
		resolveAndValidateReference(i, ctx, param)
	}

	if param.IsReference() {
		// Add to references list only if this exact reference object hasn't been indexed
		if !i.indexedReferences[param] {
			i.ParameterReferences = append(i.ParameterReferences, &IndexNode[*ReferencedParameter]{
				Node:     param,
				Location: loc,
			})
			i.indexedReferences[param] = true
		}

		// Get the document path for the resolved parameter
		info := param.GetReferenceResolutionInfo()
		var docPath string
		if info != nil {
			docPath = info.AbsoluteDocumentPath
		}

		// Push document path onto document stack BEFORE walking
		currentDoc := ""
		if len(i.currentDocumentStack) > 0 {
			currentDoc = i.currentDocumentStack[len(i.currentDocumentStack)-1]
		}
		if docPath != "" && docPath != currentDoc {
			i.currentDocumentStack = append(i.currentDocumentStack, docPath)
			defer func() {
				// Pop from document stack
				if len(i.currentDocumentStack) > 1 {
					i.currentDocumentStack = i.currentDocumentStack[:len(i.currentDocumentStack)-1]
				}
			}()
		}

		// If resolved, explicitly walk the resolved content
		resolved := param.GetObject()
		if resolved != nil {
			wrapped := &ReferencedParameter{Object: resolved}
			// NOTE(review): walk errors are discarded here — confirm intended.
			_ = buildIndex(ctx, i, wrapped)
		}
		return
	}

	obj := param.GetObject()
	if obj == nil {
		return
	}

	if isTopLevelComponent(loc, "parameters") {
		if !i.indexedParameters[obj] {
			i.ComponentParameters = append(i.ComponentParameters, &IndexNode[*ReferencedParameter]{
				Node:     param,
				Location: loc,
			})
			i.indexedParameters[obj] = true
		}
		return
	}

	// Check if this is a top-level parameter in an external document
	// Important: Only mark as external if it's NOT from the main document
	if isTopLevelExternalSchema(loc) {
		if !i.isFromMainDocument() && !i.indexedParameters[obj] {
			i.ExternalParameters = append(i.ExternalParameters, &IndexNode[*ReferencedParameter]{
				Node:     param,
				Location: loc,
			})
			i.indexedParameters[obj] = true
		}
		return
	}

	// Everything else is an inline parameter
	if !i.indexedParameters[obj] {
		i.InlineParameters = append(i.InlineParameters, &IndexNode[*ReferencedParameter]{
			Node:     param,
			Location: loc,
		})
		i.indexedParameters[obj] = true
	}
}
+
+func (i *Index) indexResponses(_ context.Context, loc Locations, responses *Responses) {
+ if responses == nil {
+ return
+ }
+ i.Responses = append(i.Responses, &IndexNode[*Responses]{
+ Node: responses,
+ Location: loc,
+ })
+}
+
// indexReferencedResponse resolves (if needed) and categorizes a response into
// reference, component, external, or inline buckets. Resolved reference targets
// are walked explicitly, with the document stack tracking error attribution.
func (i *Index) indexReferencedResponse(ctx context.Context, loc Locations, resp *ReferencedResponse) {
	if resp == nil {
		return
	}

	if resp.IsReference() && !resp.IsResolved() {
		resolveAndValidateReference(i, ctx, resp)
	}

	if resp.IsReference() {
		// Add to references list only if this exact reference object hasn't been indexed
		if !i.indexedReferences[resp] {
			i.ResponseReferences = append(i.ResponseReferences, &IndexNode[*ReferencedResponse]{
				Node:     resp,
				Location: loc,
			})
			i.indexedReferences[resp] = true
		}

		// Get the document path for the resolved response
		info := resp.GetReferenceResolutionInfo()
		var docPath string
		if info != nil {
			docPath = info.AbsoluteDocumentPath
		}

		// Push document path onto document stack BEFORE walking
		currentDoc := ""
		if len(i.currentDocumentStack) > 0 {
			currentDoc = i.currentDocumentStack[len(i.currentDocumentStack)-1]
		}
		if docPath != "" && docPath != currentDoc {
			i.currentDocumentStack = append(i.currentDocumentStack, docPath)
			defer func() {
				// Pop from document stack
				if len(i.currentDocumentStack) > 1 {
					i.currentDocumentStack = i.currentDocumentStack[:len(i.currentDocumentStack)-1]
				}
			}()
		}

		// If resolved, explicitly walk the resolved content
		resolved := resp.GetObject()
		if resolved != nil {
			wrapped := &ReferencedResponse{Object: resolved}
			// NOTE(review): walk errors are discarded here — confirm intended.
			_ = buildIndex(ctx, i, wrapped)
		}
		return
	}

	obj := resp.GetObject()
	if obj == nil {
		return
	}

	if isTopLevelComponent(loc, "responses") {
		if !i.indexedResponses[obj] {
			i.ComponentResponses = append(i.ComponentResponses, &IndexNode[*ReferencedResponse]{
				Node:     resp,
				Location: loc,
			})
			i.indexedResponses[obj] = true
		}
		return
	}

	// Check if this is a top-level response in an external document
	// Important: Only mark as external if it's NOT from the main document
	if isTopLevelExternalSchema(loc) {
		if !i.isFromMainDocument() && !i.indexedResponses[obj] {
			i.ExternalResponses = append(i.ExternalResponses, &IndexNode[*ReferencedResponse]{
				Node:     resp,
				Location: loc,
			})
			i.indexedResponses[obj] = true
		}
		return
	}

	// Everything else is an inline response
	if !i.indexedResponses[obj] {
		i.InlineResponses = append(i.InlineResponses, &IndexNode[*ReferencedResponse]{
			Node:     resp,
			Location: loc,
		})
		i.indexedResponses[obj] = true
	}
}
+
// indexReferencedRequestBody resolves (if needed) and categorizes a request
// body into reference, component, external, or inline buckets. Resolved
// reference targets are walked explicitly, with the document stack tracking
// error attribution.
func (i *Index) indexReferencedRequestBody(ctx context.Context, loc Locations, rb *ReferencedRequestBody) {
	if rb == nil {
		return
	}

	if rb.IsReference() && !rb.IsResolved() {
		resolveAndValidateReference(i, ctx, rb)
	}

	if rb.IsReference() {
		// Add to references list only if this exact reference object hasn't been indexed
		if !i.indexedReferences[rb] {
			i.RequestBodyReferences = append(i.RequestBodyReferences, &IndexNode[*ReferencedRequestBody]{
				Node:     rb,
				Location: loc,
			})
			i.indexedReferences[rb] = true
		}

		// Get the document path for the resolved request body
		info := rb.GetReferenceResolutionInfo()
		var docPath string
		if info != nil {
			docPath = info.AbsoluteDocumentPath
		}

		// Push document path onto document stack BEFORE walking
		currentDoc := ""
		if len(i.currentDocumentStack) > 0 {
			currentDoc = i.currentDocumentStack[len(i.currentDocumentStack)-1]
		}
		if docPath != "" && docPath != currentDoc {
			i.currentDocumentStack = append(i.currentDocumentStack, docPath)
			defer func() {
				// Pop from document stack
				if len(i.currentDocumentStack) > 1 {
					i.currentDocumentStack = i.currentDocumentStack[:len(i.currentDocumentStack)-1]
				}
			}()
		}

		// If resolved, explicitly walk the resolved content
		resolved := rb.GetObject()
		if resolved != nil {
			wrapped := &ReferencedRequestBody{Object: resolved}
			// NOTE(review): walk errors are discarded here — confirm intended.
			_ = buildIndex(ctx, i, wrapped)
		}
		return
	}

	obj := rb.GetObject()
	if obj == nil {
		return
	}

	if isTopLevelComponent(loc, "requestBodies") {
		if !i.indexedRequestBodies[obj] {
			i.ComponentRequestBodies = append(i.ComponentRequestBodies, &IndexNode[*ReferencedRequestBody]{
				Node:     rb,
				Location: loc,
			})
			i.indexedRequestBodies[obj] = true
		}
		return
	}

	// Check if this is a top-level request body in an external document
	// Important: Only mark as external if it's NOT from the main document
	if isTopLevelExternalSchema(loc) {
		if !i.isFromMainDocument() && !i.indexedRequestBodies[obj] {
			i.ExternalRequestBodies = append(i.ExternalRequestBodies, &IndexNode[*ReferencedRequestBody]{
				Node:     rb,
				Location: loc,
			})
			i.indexedRequestBodies[obj] = true
		}
		return
	}

	// Everything else is an inline request body
	if !i.indexedRequestBodies[obj] {
		i.InlineRequestBodies = append(i.InlineRequestBodies, &IndexNode[*ReferencedRequestBody]{
			Node:     rb,
			Location: loc,
		})
		i.indexedRequestBodies[obj] = true
	}
}
+
+// indexReferencedHeader records a header node in the index, classifying it as
+// a reference, a top-level component, a top-level external definition, or an
+// inline header. For references it also resolves the target (collecting any
+// resolution/validation errors on the Index) and walks the resolved content.
+func (i *Index) indexReferencedHeader(ctx context.Context, loc Locations, header *ReferencedHeader) {
+	if header == nil {
+		return
+	}
+
+	// Resolve lazily: only references that have not been resolved yet.
+	if header.IsReference() && !header.IsResolved() {
+		resolveAndValidateReference(i, ctx, header)
+	}
+
+	if header.IsReference() {
+		// Add to references list only if this exact reference object hasn't been indexed
+		if !i.indexedReferences[header] {
+			i.HeaderReferences = append(i.HeaderReferences, &IndexNode[*ReferencedHeader]{
+				Node:     header,
+				Location: loc,
+			})
+			i.indexedReferences[header] = true
+		}
+
+		// Get the document path for the resolved header
+		info := header.GetReferenceResolutionInfo()
+		var docPath string
+		if info != nil {
+			docPath = info.AbsoluteDocumentPath
+		}
+
+		// Push document path onto document stack BEFORE walking
+		currentDoc := ""
+		if len(i.currentDocumentStack) > 0 {
+			currentDoc = i.currentDocumentStack[len(i.currentDocumentStack)-1]
+		}
+		if docPath != "" && docPath != currentDoc {
+			i.currentDocumentStack = append(i.currentDocumentStack, docPath)
+			defer func() {
+				// Pop from document stack
+				// NOTE(review): pops only when len > 1, so if the stack was
+				// empty before this push the entry is left behind — assumes the
+				// main document path is always on the stack; verify in buildIndex.
+				if len(i.currentDocumentStack) > 1 {
+					i.currentDocumentStack = i.currentDocumentStack[:len(i.currentDocumentStack)-1]
+				}
+			}()
+		}
+
+		// If resolved, explicitly walk the resolved content
+		resolved := header.GetObject()
+		if resolved != nil {
+			// Wrap so the walker sees a plain (non-reference) header; the
+			// nested walk's error is intentionally discarded here.
+			wrapped := &ReferencedHeader{Object: resolved}
+			_ = buildIndex(ctx, i, wrapped)
+		}
+		return
+	}
+
+	obj := header.GetObject()
+	if obj == nil {
+		return
+	}
+
+	// Top-level /components/headers/{name} entry in the main document.
+	if isTopLevelComponent(loc, "headers") {
+		if !i.indexedHeaders[obj] {
+			i.ComponentHeaders = append(i.ComponentHeaders, &IndexNode[*ReferencedHeader]{
+				Node:     header,
+				Location: loc,
+			})
+			i.indexedHeaders[obj] = true
+		}
+		return
+	}
+
+	// Check if this is a top-level header in an external document
+	// Important: Only mark as external if it's NOT from the main document
+	if isTopLevelExternalSchema(loc) {
+		if !i.isFromMainDocument() && !i.indexedHeaders[obj] {
+			i.ExternalHeaders = append(i.ExternalHeaders, &IndexNode[*ReferencedHeader]{
+				Node:     header,
+				Location: loc,
+			})
+			i.indexedHeaders[obj] = true
+		}
+		return
+	}
+
+	// Everything else is an inline header
+	if !i.indexedHeaders[obj] {
+		i.InlineHeaders = append(i.InlineHeaders, &IndexNode[*ReferencedHeader]{
+			Node:     header,
+			Location: loc,
+		})
+		i.indexedHeaders[obj] = true
+	}
+}
+
+// indexReferencedExample records an example node in the index, classifying it
+// as a reference, a top-level component, a top-level external definition, or
+// an inline example. For references it also resolves the target (collecting
+// any resolution/validation errors on the Index) and walks the resolved content.
+func (i *Index) indexReferencedExample(ctx context.Context, loc Locations, example *ReferencedExample) {
+	if example == nil {
+		return
+	}
+
+	// Resolve lazily: only references that have not been resolved yet.
+	if example.IsReference() && !example.IsResolved() {
+		resolveAndValidateReference(i, ctx, example)
+	}
+
+	if example.IsReference() {
+		// Add to references list only if this exact reference object hasn't been indexed
+		if !i.indexedReferences[example] {
+			i.ExampleReferences = append(i.ExampleReferences, &IndexNode[*ReferencedExample]{
+				Node:     example,
+				Location: loc,
+			})
+			i.indexedReferences[example] = true
+		}
+
+		// Get the document path for the resolved example
+		info := example.GetReferenceResolutionInfo()
+		var docPath string
+		if info != nil {
+			docPath = info.AbsoluteDocumentPath
+		}
+
+		// Push document path onto document stack BEFORE walking
+		currentDoc := ""
+		if len(i.currentDocumentStack) > 0 {
+			currentDoc = i.currentDocumentStack[len(i.currentDocumentStack)-1]
+		}
+		if docPath != "" && docPath != currentDoc {
+			i.currentDocumentStack = append(i.currentDocumentStack, docPath)
+			defer func() {
+				// Pop from document stack
+				// NOTE(review): pops only when len > 1 — assumes the main
+				// document path is always on the stack; verify in buildIndex.
+				if len(i.currentDocumentStack) > 1 {
+					i.currentDocumentStack = i.currentDocumentStack[:len(i.currentDocumentStack)-1]
+				}
+			}()
+		}
+
+		// If resolved, explicitly walk the resolved content
+		resolved := example.GetObject()
+		if resolved != nil {
+			// Wrap so the walker sees a plain (non-reference) example; the
+			// nested walk's error is intentionally discarded here.
+			wrapped := &ReferencedExample{Object: resolved}
+			_ = buildIndex(ctx, i, wrapped)
+		}
+		return
+	}
+
+	obj := example.GetObject()
+	if obj == nil {
+		return
+	}
+
+	// Top-level /components/examples/{name} entry in the main document.
+	if isTopLevelComponent(loc, "examples") {
+		if !i.indexedExamples[obj] {
+			i.ComponentExamples = append(i.ComponentExamples, &IndexNode[*ReferencedExample]{
+				Node:     example,
+				Location: loc,
+			})
+			i.indexedExamples[obj] = true
+		}
+		return
+	}
+
+	// Check if this is a top-level example in an external document
+	// Important: Only mark as external if it's NOT from the main document
+	if isTopLevelExternalSchema(loc) {
+		if !i.isFromMainDocument() && !i.indexedExamples[obj] {
+			i.ExternalExamples = append(i.ExternalExamples, &IndexNode[*ReferencedExample]{
+				Node:     example,
+				Location: loc,
+			})
+			i.indexedExamples[obj] = true
+		}
+		return
+	}
+
+	// Everything else is an inline example
+	if !i.indexedExamples[obj] {
+		i.InlineExamples = append(i.InlineExamples, &IndexNode[*ReferencedExample]{
+			Node:     example,
+			Location: loc,
+		})
+		i.indexedExamples[obj] = true
+	}
+}
+
+// indexReferencedLink records a link node in the index, classifying it as a
+// reference, a top-level component, a top-level external definition, or an
+// inline link. For references it also resolves the target (collecting any
+// resolution/validation errors on the Index) and walks the resolved content.
+func (i *Index) indexReferencedLink(ctx context.Context, loc Locations, link *ReferencedLink) {
+	if link == nil {
+		return
+	}
+
+	// Resolve lazily: only references that have not been resolved yet.
+	if link.IsReference() && !link.IsResolved() {
+		resolveAndValidateReference(i, ctx, link)
+	}
+
+	if link.IsReference() {
+		// Add to references list only if this exact reference object hasn't been indexed
+		if !i.indexedReferences[link] {
+			i.LinkReferences = append(i.LinkReferences, &IndexNode[*ReferencedLink]{
+				Node:     link,
+				Location: loc,
+			})
+			i.indexedReferences[link] = true
+		}
+
+		// Get the document path for the resolved link
+		info := link.GetReferenceResolutionInfo()
+		var docPath string
+		if info != nil {
+			docPath = info.AbsoluteDocumentPath
+		}
+
+		// Push document path onto document stack BEFORE walking
+		currentDoc := ""
+		if len(i.currentDocumentStack) > 0 {
+			currentDoc = i.currentDocumentStack[len(i.currentDocumentStack)-1]
+		}
+		if docPath != "" && docPath != currentDoc {
+			i.currentDocumentStack = append(i.currentDocumentStack, docPath)
+			defer func() {
+				// Pop from document stack
+				// NOTE(review): pops only when len > 1 — assumes the main
+				// document path is always on the stack; verify in buildIndex.
+				if len(i.currentDocumentStack) > 1 {
+					i.currentDocumentStack = i.currentDocumentStack[:len(i.currentDocumentStack)-1]
+				}
+			}()
+		}
+
+		// If resolved, explicitly walk the resolved content
+		resolved := link.GetObject()
+		if resolved != nil {
+			// Wrap so the walker sees a plain (non-reference) link; the
+			// nested walk's error is intentionally discarded here.
+			wrapped := &ReferencedLink{Object: resolved}
+			_ = buildIndex(ctx, i, wrapped)
+		}
+		return
+	}
+
+	obj := link.GetObject()
+	if obj == nil {
+		return
+	}
+
+	// Top-level /components/links/{name} entry in the main document.
+	if isTopLevelComponent(loc, "links") {
+		if !i.indexedLinks[obj] {
+			i.ComponentLinks = append(i.ComponentLinks, &IndexNode[*ReferencedLink]{
+				Node:     link,
+				Location: loc,
+			})
+			i.indexedLinks[obj] = true
+		}
+		return
+	}
+
+	// Check if this is a top-level link in an external document
+	// Important: Only mark as external if it's NOT from the main document
+	if isTopLevelExternalSchema(loc) {
+		if !i.isFromMainDocument() && !i.indexedLinks[obj] {
+			i.ExternalLinks = append(i.ExternalLinks, &IndexNode[*ReferencedLink]{
+				Node:     link,
+				Location: loc,
+			})
+			i.indexedLinks[obj] = true
+		}
+		return
+	}
+
+	// Everything else is an inline link
+	if !i.indexedLinks[obj] {
+		i.InlineLinks = append(i.InlineLinks, &IndexNode[*ReferencedLink]{
+			Node:     link,
+			Location: loc,
+		})
+		i.indexedLinks[obj] = true
+	}
+}
+
+// indexReferencedCallback records a callback node in the index, classifying it
+// as a reference, a top-level component, a top-level external definition, or
+// an inline callback. For references it also resolves the target (collecting
+// any resolution/validation errors on the Index) and walks the resolved content.
+func (i *Index) indexReferencedCallback(ctx context.Context, loc Locations, callback *ReferencedCallback) {
+	if callback == nil {
+		return
+	}
+
+	// Resolve lazily: only references that have not been resolved yet.
+	if callback.IsReference() && !callback.IsResolved() {
+		resolveAndValidateReference(i, ctx, callback)
+	}
+
+	if callback.IsReference() {
+		// Add to references list only if this exact reference object hasn't been indexed
+		if !i.indexedReferences[callback] {
+			i.CallbackReferences = append(i.CallbackReferences, &IndexNode[*ReferencedCallback]{
+				Node:     callback,
+				Location: loc,
+			})
+			i.indexedReferences[callback] = true
+		}
+
+		// Get the document path for the resolved callback
+		info := callback.GetReferenceResolutionInfo()
+		var docPath string
+		if info != nil {
+			docPath = info.AbsoluteDocumentPath
+		}
+
+		// Push document path onto document stack BEFORE walking
+		currentDoc := ""
+		if len(i.currentDocumentStack) > 0 {
+			currentDoc = i.currentDocumentStack[len(i.currentDocumentStack)-1]
+		}
+		if docPath != "" && docPath != currentDoc {
+			i.currentDocumentStack = append(i.currentDocumentStack, docPath)
+			defer func() {
+				// Pop from document stack
+				// NOTE(review): pops only when len > 1 — assumes the main
+				// document path is always on the stack; verify in buildIndex.
+				if len(i.currentDocumentStack) > 1 {
+					i.currentDocumentStack = i.currentDocumentStack[:len(i.currentDocumentStack)-1]
+				}
+			}()
+		}
+
+		// If resolved, explicitly walk the resolved content
+		resolved := callback.GetObject()
+		if resolved != nil {
+			// Wrap so the walker sees a plain (non-reference) callback; the
+			// nested walk's error is intentionally discarded here.
+			wrapped := &ReferencedCallback{Object: resolved}
+			_ = buildIndex(ctx, i, wrapped)
+		}
+		return
+	}
+
+	obj := callback.GetObject()
+	if obj == nil {
+		return
+	}
+
+	// Top-level /components/callbacks/{name} entry in the main document.
+	if isTopLevelComponent(loc, "callbacks") {
+		if !i.indexedCallbacks[obj] {
+			i.ComponentCallbacks = append(i.ComponentCallbacks, &IndexNode[*ReferencedCallback]{
+				Node:     callback,
+				Location: loc,
+			})
+			i.indexedCallbacks[obj] = true
+		}
+		return
+	}
+
+	// Check if this is a top-level callback in an external document
+	// Important: Only mark as external if it's NOT from the main document
+	if isTopLevelExternalSchema(loc) {
+		if !i.isFromMainDocument() && !i.indexedCallbacks[obj] {
+			i.ExternalCallbacks = append(i.ExternalCallbacks, &IndexNode[*ReferencedCallback]{
+				Node:     callback,
+				Location: loc,
+			})
+			i.indexedCallbacks[obj] = true
+		}
+		return
+	}
+
+	// Everything else is an inline callback
+	if !i.indexedCallbacks[obj] {
+		i.InlineCallbacks = append(i.InlineCallbacks, &IndexNode[*ReferencedCallback]{
+			Node:     callback,
+			Location: loc,
+		})
+		i.indexedCallbacks[obj] = true
+	}
+}
+
+// indexReferencedSecurityScheme records a security scheme in the index.
+// Unlike the other indexers there is no inline/external classification:
+// every non-reference security scheme is treated as a component.
+func (i *Index) indexReferencedSecurityScheme(ctx context.Context, loc Locations, ss *ReferencedSecurityScheme) {
+	if ss == nil {
+		return
+	}
+
+	// Resolve lazily: only references that have not been resolved yet.
+	if ss.IsReference() && !ss.IsResolved() {
+		resolveAndValidateReference(i, ctx, ss)
+	}
+
+	if ss.IsReference() {
+		// Add to references list only if this exact reference object hasn't been indexed
+		if !i.indexedReferences[ss] {
+			i.SecuritySchemeReferences = append(i.SecuritySchemeReferences, &IndexNode[*ReferencedSecurityScheme]{
+				Node:     ss,
+				Location: loc,
+			})
+			i.indexedReferences[ss] = true
+		}
+		return
+	}
+
+	// SecuritySchemes are always components (no inline security schemes)
+	// NOTE(review): no dedup map is used here (cf. indexedHeaders/indexedLinks),
+	// so the same scheme could be appended twice if visited twice — confirm the
+	// walker visits component security schemes exactly once.
+	i.ComponentSecuritySchemes = append(i.ComponentSecuritySchemes, &IndexNode[*ReferencedSecurityScheme]{
+		Node:     ss,
+		Location: loc,
+	})
+}
+
+// indexSecurityRequirement appends a non-nil security requirement to the index.
+func (i *Index) indexSecurityRequirement(_ context.Context, loc Locations, req *SecurityRequirement) {
+	if req == nil {
+		return
+	}
+	node := &IndexNode[*SecurityRequirement]{Node: req, Location: loc}
+	i.SecurityRequirements = append(i.SecurityRequirements, node)
+}
+
+// indexDiscriminator appends a non-nil discriminator to the index.
+func (i *Index) indexDiscriminator(_ context.Context, loc Locations, discriminator *oas3.Discriminator) {
+	if discriminator == nil {
+		return
+	}
+	node := &IndexNode[*oas3.Discriminator]{Node: discriminator, Location: loc}
+	i.Discriminators = append(i.Discriminators, node)
+}
+
+// indexXML appends a non-nil XML metadata object to the index.
+func (i *Index) indexXML(_ context.Context, loc Locations, xml *oas3.XML) {
+	if xml == nil {
+		return
+	}
+	node := &IndexNode[*oas3.XML]{Node: xml, Location: loc}
+	i.XMLs = append(i.XMLs, node)
+}
+
+// indexMediaType appends a non-nil media type to the index.
+func (i *Index) indexMediaType(_ context.Context, loc Locations, mediaType *MediaType) {
+	if mediaType == nil {
+		return
+	}
+	node := &IndexNode[*MediaType]{Node: mediaType, Location: loc}
+	i.MediaTypes = append(i.MediaTypes, node)
+}
+
+// indexEncoding appends a non-nil encoding to the index.
+func (i *Index) indexEncoding(_ context.Context, loc Locations, encoding *Encoding) {
+	if encoding == nil {
+		return
+	}
+	node := &IndexNode[*Encoding]{Node: encoding, Location: loc}
+	i.Encodings = append(i.Encodings, node)
+}
+
+// indexOAuthFlows appends a non-nil OAuth flows container to the index.
+func (i *Index) indexOAuthFlows(_ context.Context, loc Locations, flows *OAuthFlows) {
+	if flows == nil {
+		return
+	}
+	node := &IndexNode[*OAuthFlows]{Node: flows, Location: loc}
+	i.OAuthFlows = append(i.OAuthFlows, node)
+}
+
+// indexOAuthFlow appends a non-nil individual OAuth flow to the index.
+func (i *Index) indexOAuthFlow(_ context.Context, loc Locations, flow *OAuthFlow) {
+	if flow == nil {
+		return
+	}
+	node := &IndexNode[*OAuthFlow]{Node: flow, Location: loc}
+	i.OAuthFlowItems = append(i.OAuthFlowItems, node)
+}
+
+// indexDescriptionNode appends a non-nil description-bearing node to the index.
+func (i *Index) indexDescriptionNode(_ context.Context, loc Locations, d Descriptioner) {
+	if d == nil {
+		return
+	}
+	node := &IndexNode[Descriptioner]{Node: d, Location: loc}
+	i.DescriptionNodes = append(i.DescriptionNodes, node)
+}
+
+// indexSummaryNode appends a non-nil summary-bearing node to the index.
+func (i *Index) indexSummaryNode(_ context.Context, loc Locations, s Summarizer) {
+	if s == nil {
+		return
+	}
+	node := &IndexNode[Summarizer]{Node: s, Location: loc}
+	i.SummaryNodes = append(i.SummaryNodes, node)
+}
+
+// indexDescriptionAndSummaryNode appends a non-nil node that carries both a
+// description and a summary to the index.
+func (i *Index) indexDescriptionAndSummaryNode(_ context.Context, loc Locations, ds DescriptionAndSummary) {
+	if ds == nil {
+		return
+	}
+	node := &IndexNode[DescriptionAndSummary]{Node: ds, Location: loc}
+	i.DescriptionAndSummaryNodes = append(i.DescriptionAndSummaryNodes, node)
+}
+
+// documentPathForSchema reports which external document a schema came from,
+// or "" when the schema belongs to the main (target) document.
+func (i *Index) documentPathForSchema(schema *oas3.JSONSchemaReferenceable) string {
+	if i == nil || schema == nil {
+		return ""
+	}
+
+	target := i.resolveOpts.TargetLocation
+
+	// Prefer recorded resolution info when it points outside the target doc.
+	if info := schema.GetReferenceResolutionInfo(); info != nil && info.AbsoluteDocumentPath != target {
+		return info.AbsoluteDocumentPath
+	}
+
+	// Otherwise fall back to the document currently being walked, if any.
+	if n := len(i.currentDocumentStack); n > 0 {
+		if current := i.currentDocumentStack[n-1]; current != target {
+			return current
+		}
+	}
+
+	return ""
+}
+
+// applyDocumentLocation stamps documentPath onto any *validation.Error in errs
+// that does not already carry a document location. Nil errors are dropped;
+// non-validation errors pass through unchanged.
+func (i *Index) applyDocumentLocation(errs []error, documentPath string) []error {
+	if len(errs) == 0 || documentPath == "" {
+		return errs
+	}
+
+	out := make([]error, 0, len(errs))
+	for _, err := range errs {
+		if err == nil {
+			continue
+		}
+		var vErr *validation.Error
+		if !errors.As(err, &vErr) || vErr == nil {
+			out = append(out, err)
+			continue
+		}
+		if vErr.DocumentLocation == "" {
+			vErr.DocumentLocation = documentPath
+		}
+		out = append(out, vErr)
+	}
+
+	return out
+}
+
+// referenceValidationOptions builds the validation options used when
+// validating resolved reference targets: the root document as a context
+// object plus its declared OpenAPI version.
+func (i *Index) referenceValidationOptions() []validation.Option {
+	if i == nil || i.Doc == nil {
+		return nil
+	}
+
+	version := &oas3.ParentDocumentVersion{OpenAPI: pointer.From(i.Doc.OpenAPI)}
+	return []validation.Option{
+		validation.WithContextObject(i.Doc),
+		validation.WithContextObject(version),
+	}
+}
+
+// getCurrentResolveOptions returns ResolveOptions appropriate for the current document context.
+// CRITICAL FIX for multi-file specs: When processing schemas/references in external files,
+// this ensures they resolve internal references against the external file's YAML structure,
+// not the main document. Without this, references like #/components/schemas/... in external
+// files would fail with "source is nil" errors.
+//
+// Returns a copy of i.resolveOpts; the stored options are never mutated.
+func (i *Index) getCurrentResolveOptions() references.ResolveOptions {
+	// resolveOpts is a value copy, so the overrides below are local.
+	resolveOpts := i.resolveOpts
+
+	if len(i.currentDocumentStack) > 0 {
+		currentDoc := i.currentDocumentStack[len(i.currentDocumentStack)-1]
+		// If we're in a different document than the original target, use that document's context
+		if currentDoc != i.resolveOpts.TargetLocation {
+			// Check if we have a cached parsed YAML node for this external document
+			// (if not cached, we silently keep the main document's options).
+			if cachedDoc, ok := i.resolveOpts.RootDocument.GetCachedExternalDocument(currentDoc); ok {
+				// Use the cached YAML node as the TargetDocument for this resolution
+				// This allows internal references to navigate through the external file's structure
+				resolveOpts.TargetDocument = cachedDoc
+				resolveOpts.TargetLocation = currentDoc
+			}
+		}
+	}
+
+	return resolveOpts
+}
+
+// documentPathForReference reports the external document a resolved reference
+// came from, or "" when it resolved within the main (target) document or has
+// no resolution info at all.
+func documentPathForReference[T any, V interfaces.Validator[T], C marshaller.CoreModeler](i *Index, ref *Reference[T, V, C]) string {
+	if i == nil || ref == nil {
+		return ""
+	}
+
+	info := ref.GetReferenceResolutionInfo()
+	if info == nil {
+		return ""
+	}
+	if path := info.AbsoluteDocumentPath; path != i.resolveOpts.TargetLocation {
+		return path
+	}
+	return ""
+}
+
+// resolveAndValidateReference resolves ref using the Index's current document
+// context, then validates the resolved object. Resolution failures are
+// appended to i.resolutionErrs and validation failures to i.validationErrs;
+// the function itself never returns an error.
+func resolveAndValidateReference[T any, V interfaces.Validator[T], C marshaller.CoreModeler](i *Index, ctx context.Context, ref *Reference[T, V, C]) {
+	if i == nil || ref == nil {
+		return
+	}
+
+	// Resolution errors are recorded with the document the reference came from.
+	if _, err := ref.Resolve(ctx, i.getCurrentResolveOptions()); err != nil {
+		i.resolutionErrs = append(i.resolutionErrs, validation.NewValidationErrorWithDocumentLocation(
+			validation.SeverityError,
+			"resolution-openapi-reference",
+			err,
+			nil,
+			documentPathForReference(i, ref),
+		))
+		return
+	}
+
+	obj := ref.GetObject()
+	if obj == nil || i.Doc == nil {
+		return
+	}
+
+	// Validate only when the resolved object actually implements V; objects
+	// that don't are silently skipped.
+	var validator V
+	if v, ok := any(obj).(V); ok {
+		validator = v
+		validationErrs := validator.Validate(ctx, i.referenceValidationOptions()...)
+		i.validationErrs = append(i.validationErrs, i.applyDocumentLocation(validationErrs, documentPathForReference(i, ref))...)
+	}
+}
+
+// isTopLevelComponent checks if the location represents a top-level component
+// definition, i.e. the exact path /components/{componentType}/{name}.
+func isTopLevelComponent(loc Locations, componentType string) bool {
+	// Exactly two location contexts: "components", then "{componentType}/{name}"
+	// with a named key.
+	return len(loc) == 2 &&
+		loc[0].ParentField == "components" &&
+		loc[1].ParentField == componentType &&
+		loc[1].ParentKey != nil
+}
+
+// getParentSchema extracts the parent schema from a LocationContext using the ParentMatchFunc.
+func getParentSchema(loc LocationContext) *oas3.Schema {
+ var parentSchema *oas3.Schema
+
+ // Use the ParentMatchFunc to capture the parent node
+ _ = loc.ParentMatchFunc(Matcher{
+ Schema: func(s *oas3.JSONSchemaReferenceable) error {
+ if s == nil {
+ return nil
+ }
+ if !s.IsBool() && !s.IsReference() {
+ parentSchema = s.GetSchema()
+ } else if s.IsReference() {
+ // For references, get the resolved schema
+ if resolved := s.GetResolvedSchema(); resolved != nil && !resolved.IsBool() {
+ parentSchema = resolved.GetSchema()
+ }
+ }
+ return nil
+ },
+ })
+
+ return parentSchema
+}
+
+// buildPathSegment creates a CircularPathSegment with constraint info from the parent schema.
+func buildPathSegment(loc LocationContext) CircularPathSegment {
+ segment := CircularPathSegment{
+ Field: loc.ParentField,
+ }
+
+ if loc.ParentKey != nil {
+ segment.PropertyName = *loc.ParentKey
+ }
+ if loc.ParentIndex != nil {
+ segment.BranchIndex = *loc.ParentIndex
+ }
+
+ // Get the parent schema for this segment
+ var parentSchemaRef *oas3.JSONSchemaReferenceable
+ _ = loc.ParentMatchFunc(Matcher{
+ Schema: func(s *oas3.JSONSchemaReferenceable) error {
+ parentSchemaRef = s
+ return nil
+ },
+ })
+ segment.ParentSchema = parentSchemaRef
+
+ parent := getParentSchema(loc)
+ if parent == nil {
+ return segment
+ }
+
+ // Check if parent schema is nullable (termination point)
+ segment.IsNullable = isNullable(parent)
+
+ // Extract constraints based on field type
+ switch loc.ParentField {
+ case "properties":
+ if loc.ParentKey != nil {
+ // Check if property is required
+ for _, req := range parent.GetRequired() {
+ if req == *loc.ParentKey {
+ segment.IsRequired = true
+ break
+ }
+ }
+ }
+ case "items":
+ segment.ArrayMinItems = parent.GetMinItems() // Returns 0 if nil (default)
+ case "additionalProperties":
+ if minProps := parent.GetMinProperties(); minProps != nil {
+ segment.MinProperties = *minProps
+ }
+ // Default is 0 (empty object allowed)
+ }
+
+ return segment
+}
+
+// isNullable checks if a schema allows null values (termination point for circular refs).
+func isNullable(schema *oas3.Schema) bool {
+ if schema == nil {
+ return false
+ }
+
+ // OAS 3.0 style: nullable: true
+ if schema.GetNullable() {
+ return true
+ }
+
+ // OAS 3.1 style: type includes "null"
+ types := schema.GetType()
+ for _, t := range types {
+ if t == oas3.SchemaTypeNull {
+ return true
+ }
+ }
+
+ return false
+}
+
+// classifyCircularPath determines if the path allows termination.
+// Returns (classification, polymorphicInfo) where polymorphicInfo is set if pending.
+func (i *Index) classifyCircularPath(schema *oas3.JSONSchemaReferenceable, segments []CircularPathSegment, loc Locations) (CircularClassification, *PolymorphicCircularRef) {
+ // Check if any segment allows termination
+ for segIdx, segment := range segments {
+ // Check nullable at any point in the path
+ if segment.IsNullable {
+ return CircularValid, nil
+ }
+
+ switch segment.Field {
+ case "properties":
+ // Optional property = valid termination
+ if !segment.IsRequired {
+ return CircularValid, nil
+ }
+
+ case "items":
+ // Empty array terminates if minItems == 0 (default)
+ if segment.ArrayMinItems == 0 {
+ return CircularValid, nil
+ }
+
+ case "additionalProperties":
+ // Empty object terminates if minProperties == 0 (default)
+ if segment.MinProperties == 0 {
+ return CircularValid, nil
+ }
+
+ case "oneOf", "anyOf":
+ // Mark for post-processing - need to check ALL branches
+ // Create polymorphic tracking info
+ parentLocLen := len(loc) - len(segments) + segIdx
+ if parentLocLen < 0 {
+ parentLocLen = 0
+ }
+ parentLoc := copyLocations(loc[:parentLocLen])
+
+ // Use the ParentSchema from the segment (which has the oneOf/anyOf)
+ // instead of the schema parameter (which is the $ref)
+ parentSchema := segment.ParentSchema
+ if parentSchema == nil {
+ parentSchema = schema // Fallback to old behavior if ParentSchema not set
+ }
+
+ totalBranches := countPolymorphicBranches(parentSchema, segment.Field)
+ polymorphicInfo := &PolymorphicCircularRef{
+ ParentSchema: parentSchema,
+ ParentLocation: parentLoc,
+ Field: segment.Field,
+ BranchResults: make(map[int]CircularClassification),
+ TotalBranches: totalBranches,
+ }
+ // Record this branch as potentially invalid (recurses)
+ polymorphicInfo.BranchResults[segment.BranchIndex] = CircularInvalid
+ return CircularPending, polymorphicInfo
+
+ case "allOf":
+ // For allOf, if ANY branch has invalid circular ref, the whole thing is invalid
+ // because ALL branches must be satisfied
+ // Check if rest of path allows termination
+ remaining := segments[segIdx+1:]
+ if !pathAllowsTermination(remaining) {
+ return CircularInvalid, nil
+ }
+ }
+ }
+
+ // No termination point found in non-polymorphic path
+ return CircularInvalid, nil
+}
+
+// countPolymorphicBranches counts the number of branches in a oneOf/anyOf schema.
+func countPolymorphicBranches(schema *oas3.JSONSchemaReferenceable, field string) int {
+ if schema == nil || schema.IsBool() {
+ return 0
+ }
+
+ innerSchema := schema.GetSchema()
+ if innerSchema == nil {
+ return 0
+ }
+
+ switch field {
+ case "oneOf":
+ if oneOf := innerSchema.GetOneOf(); oneOf != nil {
+ return len(oneOf)
+ }
+ case "anyOf":
+ if anyOf := innerSchema.GetAnyOf(); anyOf != nil {
+ return len(anyOf)
+ }
+ case "allOf":
+ if allOf := innerSchema.GetAllOf(); allOf != nil {
+ return len(allOf)
+ }
+ }
+
+ return 0
+}
+
+// pathAllowsTermination checks if any segment in the remaining path allows termination.
+func pathAllowsTermination(segments []CircularPathSegment) bool {
+ for _, seg := range segments {
+ if seg.IsNullable {
+ return true
+ }
+
+ switch seg.Field {
+ case "properties":
+ if !seg.IsRequired {
+ return true
+ }
+ case "items":
+ if seg.ArrayMinItems == 0 {
+ return true
+ }
+ case "additionalProperties":
+ if seg.MinProperties == 0 {
+ return true
+ }
+ case "oneOf", "anyOf":
+ // Assume polymorphic branches might provide termination
+ return true
+ }
+ }
+ return false
+}
+
+func joinReferenceChainWithArrows(chain []string) string {
+ if len(chain) == 0 {
+ return ""
+ }
+ if len(chain) == 1 {
+ return chain[0]
+ }
+
+ var result strings.Builder
+ result.WriteString(chain[0])
+ for i := 1; i < len(chain); i++ {
+ result.WriteString(" -> ")
+ result.WriteString(chain[i])
+ }
+ return result.String()
+}
+
+// recordPolymorphicBranch records a polymorphic branch for post-processing.
+func (i *Index) recordPolymorphicBranch(info *PolymorphicCircularRef) {
+ if info == nil {
+ return
+ }
+ i.polymorphicRefs = append(i.polymorphicRefs, info)
+}
+
+// finalizePolymorphicCirculars is called after all walking completes.
+// It analyzes polymorphic schemas to determine if ALL branches recurse.
+func (i *Index) finalizePolymorphicCirculars() {
+ // Group by parent schema
+ grouped := make(map[*oas3.JSONSchemaReferenceable]*PolymorphicCircularRef)
+
+ for _, ref := range i.polymorphicRefs {
+ existing, found := grouped[ref.ParentSchema]
+ if found {
+ // Merge branch results
+ for idx, classification := range ref.BranchResults {
+ existing.BranchResults[idx] = classification
+ }
+ } else {
+ grouped[ref.ParentSchema] = ref
+ }
+ }
+
+ // Analyze each polymorphic schema
+ for _, ref := range grouped {
+ switch ref.Field {
+ case "oneOf", "anyOf":
+ // Invalid only if ALL branches have invalid circular refs
+ allInvalid := true
+ for branchIdx := 0; branchIdx < ref.TotalBranches; branchIdx++ {
+ classification, found := ref.BranchResults[branchIdx]
+ if !found || classification != CircularInvalid {
+ // This branch either doesn't recurse or has valid termination
+ allInvalid = false
+ break
+ }
+ }
+
+ if allInvalid && ref.TotalBranches > 0 {
+ i.invalidCircularRefs++
+ i.circularErrs = append(i.circularErrs, validation.NewValidationErrorWithDocumentLocation(
+ validation.SeverityError,
+ "circular-reference-invalid",
+ fmt.Errorf("non-terminating circular reference: all %s branches recurse with no base case", ref.Field),
+ getSchemaErrorNode(ref.ParentSchema),
+ i.documentPathForSchema(ref.ParentSchema),
+ ))
+ } else if !allInvalid && ref.TotalBranches > 0 {
+ // At least one branch allows termination - this is a valid circular ref
+ i.validCircularRefs++
+ }
+
+ case "allOf":
+ // Invalid if ANY branch has invalid circular ref (already handled inline in classifyCircularPath)
+ // This case is here for completeness if we need cross-branch tracking
+ }
+ }
+}
+
+// copyLocations creates a copy of the Locations slice.
+func copyLocations(loc Locations) Locations {
+ if loc == nil {
+ return nil
+ }
+ result := make(Locations, len(loc))
+ copy(result, loc)
+ return result
+}
+
+// getRefTarget extracts the absolute $ref target string from a schema reference.
+// Uses the resolved AbsoluteReference from resolution cache for normalization.
+func getRefTarget(schema *oas3.JSONSchemaReferenceable) string {
+ if schema == nil || !schema.IsReference() {
+ return ""
+ }
+
+ if !schema.IsResolved() {
+ panic("getRefTarget called on unresolved schema reference")
+ }
+
+ info := schema.GetReferenceResolutionInfo()
+ if info == nil {
+ return ""
+ }
+
+ return info.AbsoluteReference.String()
+}
+
+// getSchemaErrorNode returns an appropriate YAML node for error reporting.
+func getSchemaErrorNode(schema *oas3.JSONSchemaReferenceable) *yaml.Node {
+ if schema == nil {
+ return nil
+ }
+ if schema.IsBool() {
+ return nil
+ }
+ innerSchema := schema.GetSchema()
+ if innerSchema == nil {
+ return nil
+ }
+ // Try to get the $ref node if it's a reference
+ if core := innerSchema.GetCore(); core != nil && core.Ref.Present {
+ return core.Ref.GetKeyNodeOrRoot(innerSchema.GetRootNode())
+ }
+ return innerSchema.GetRootNode()
+}
diff --git a/openapi/index_external_test.go b/openapi/index_external_test.go
new file mode 100644
index 00000000..75a6f762
--- /dev/null
+++ b/openapi/index_external_test.go
@@ -0,0 +1,903 @@
+package openapi_test
+
+import (
+ "errors"
+ "fmt"
+ "io"
+ "io/fs"
+ "net/http"
+ "strings"
+ "testing"
+
+ "github.com/speakeasy-api/openapi/openapi"
+ "github.com/speakeasy-api/openapi/references"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+// MockVirtualFS implements system.VirtualFS for testing external file references
+type MockVirtualFS struct {
+	files map[string]string // path -> file contents
+}
+
+// NewMockVirtualFS returns an empty in-memory filesystem.
+func NewMockVirtualFS() *MockVirtualFS {
+	return &MockVirtualFS{files: map[string]string{}}
+}
+
+// AddFile registers content under path, replacing any previous entry.
+func (m *MockVirtualFS) AddFile(path, content string) {
+	m.files[path] = content
+}
+
+// Open returns the registered file for name, or an error when unknown.
+func (m *MockVirtualFS) Open(name string) (fs.File, error) {
+	if content, ok := m.files[name]; ok {
+		return &MockFile{content: content}, nil
+	}
+	return nil, fmt.Errorf("file not found: %s", name)
+}
+
+// MockFile implements fs.File over an in-memory string.
+type MockFile struct {
+	content string // full file contents
+	pos     int    // read cursor into content
+}
+
+// Read copies bytes from the current position into p, advancing the cursor,
+// and returns io.EOF once the content is exhausted.
+func (m *MockFile) Read(p []byte) (n int, err error) {
+	remaining := m.content[m.pos:]
+	if len(remaining) == 0 {
+		return 0, io.EOF
+	}
+	n = copy(p, remaining)
+	m.pos += n
+	return n, nil
+}
+
+// Close is a no-op.
+func (m *MockFile) Close() error {
+	return nil
+}
+
+// Stat is not supported by this mock.
+func (m *MockFile) Stat() (fs.FileInfo, error) {
+	return nil, errors.New("not implemented")
+}
+
+// MockHTTPClient implements system.Client for testing external HTTP references
+type MockHTTPClient struct {
+	responses map[string]string // URL -> response body
+}
+
+// NewMockHTTPClient returns a client with no configured responses.
+func NewMockHTTPClient() *MockHTTPClient {
+	return &MockHTTPClient{responses: map[string]string{}}
+}
+
+// AddResponse registers body as the 200 OK response for url.
+func (m *MockHTTPClient) AddResponse(url, body string) {
+	m.responses[url] = body
+}
+
+// Do serves the registered body for the request URL, or errors when none is
+// configured.
+func (m *MockHTTPClient) Do(req *http.Request) (*http.Response, error) {
+	url := req.URL.String()
+	body, ok := m.responses[url]
+	if !ok {
+		return nil, fmt.Errorf("no response configured for URL: %s", url)
+	}
+	resp := &http.Response{
+		StatusCode: http.StatusOK,
+		Body:       io.NopCloser(strings.NewReader(body)),
+		Header:     make(http.Header),
+	}
+	return resp, nil
+}
+
+// setupComprehensiveExternalRefs creates a complete test environment with:
+// - File-based external references
+// - HTTP-based external references
+// - Valid and invalid circular references
+// - Referenced and unreferenced schemas
+//
+// It returns the built index together with the mock filesystem and HTTP
+// client so individual tests can add fixtures or inspect requests.
+func setupComprehensiveExternalRefs(t *testing.T) (*openapi.Index, *MockVirtualFS, *MockHTTPClient) {
+	t.Helper()
+	ctx := t.Context()
+
+	vfs := NewMockVirtualFS()
+	httpClient := NewMockHTTPClient()
+
+	// Expected index counts (verified by tests):
+	// ExternalDocumentation: 2 (main doc + users tag)
+	// Tags: 2 (users, products)
+	// Servers: 2 (production, staging)
+	// ServerVariables: 1 (version variable)
+	// BooleanSchemas: 2 (true, false from additionalProperties)
+	// InlineSchemas: 10 (9 from external + 1 from LocalSchema.id property)
+	// ComponentSchemas: 2 (LocalSchema, AnotherLocal)
+	// ExternalSchemas: 6 (UserResponse, User, Address, Product, Category, TreeNode)
+	// SchemaReferences: 9 (all $ref pointers including circulars)
+	// CircularErrors: 1 (Product<->Category invalid circular)
+
+	// TODO: PathItems indexing (currently marked TODO in buildIndex)
+
+	// Main API document
+	vfs.AddFile("/api/openapi.yaml", `
+openapi: "3.1.0"
+info:
+  title: Comprehensive API
+  version: 1.0.0
+externalDocs:
+  url: https://docs.example.com
+  description: Main API Documentation
+tags:
+  - name: users
+    description: User operations
+    externalDocs:
+      url: https://docs.example.com/users
+  - name: products
+    description: Product operations
+servers:
+  - url: https://api.example.com/{version}
+    description: Production server
+    variables:
+      version:
+        default: v1
+        enum: [v1, v2]
+  - url: https://staging.example.com
+    description: Staging server
+paths:
+  /users:
+    get:
+      operationId: getUsers
+      responses:
+        "200":
+          description: Users response
+          content:
+            application/json:
+              schema:
+                $ref: 'schemas/user.yaml#/UserResponse'
+  /products:
+    get:
+      operationId: getProducts
+      responses:
+        "200":
+          description: Products response
+          content:
+            application/json:
+              schema:
+                $ref: 'https://schemas.example.com/product.yaml#/Product'
+  /trees:
+    get:
+      operationId: getTrees
+      responses:
+        "200":
+          description: Trees response
+          content:
+            application/json:
+              schema:
+                $ref: 'schemas/tree.yaml#/TreeNode'
+components:
+  schemas:
+    LocalSchema:
+      type: object
+      additionalProperties: true
+      properties:
+        id:
+          type: integer
+    AnotherLocal:
+      type: object
+      additionalProperties: false
+`)
+
+	// External file: User schemas with valid circular (optional property)
+	vfs.AddFile("/api/schemas/user.yaml", `
+UserResponse:
+  type: object
+  properties:
+    user:
+      $ref: '#/User'
+User:
+  type: object
+  required: [id, name]
+  properties:
+    id:
+      type: integer
+    name:
+      type: string
+    address:
+      $ref: '#/Address'
+Address:
+  type: object
+  properties:
+    street:
+      type: string
+    user:
+      $ref: '#/User'
+# Unreferenced schema in external file
+UnreferencedUser:
+  type: object
+  properties:
+    neverUsed:
+      type: string
+`)
+
+	// External file: Tree with valid self-reference (array with minItems=0)
+	vfs.AddFile("/api/schemas/tree.yaml", `
+TreeNode:
+  type: object
+  properties:
+    value:
+      type: string
+    children:
+      type: array
+      items:
+        $ref: '#/TreeNode'
+# Another unreferenced schema
+UnusedTreeType:
+  type: object
+  properties:
+    unusedProp:
+      type: boolean
+`)
+
+	// Unreferenced file - nothing from here should appear in index
+	vfs.AddFile("/api/schemas/completely-unreferenced.yaml", `
+TotallyUnused:
+  type: object
+  properties:
+    shouldNotAppear:
+      type: string
+`)
+
+	// External HTTP: Product with invalid circular (required + minItems)
+	httpClient.AddResponse("https://schemas.example.com/product.yaml", `
+Product:
+  type: object
+  required: [id, category]
+  properties:
+    id:
+      type: integer
+    name:
+      type: string
+    category:
+      $ref: '#/Category'
+Category:
+  type: object
+  required: [products]
+  properties:
+    name:
+      type: string
+    products:
+      type: array
+      minItems: 1
+      items:
+        $ref: '#/Product'
+# Unreferenced in HTTP document
+UnreferencedCategory:
+  type: object
+  properties:
+    alsoNeverUsed:
+      type: integer
+`)
+
+	// Unmarshal and build index
+	// (the main document must parse cleanly before indexing).
+	doc, validationErrs, err := openapi.Unmarshal(ctx, strings.NewReader(vfs.files["/api/openapi.yaml"]))
+	require.NoError(t, err)
+	require.Empty(t, validationErrs)
+
+	// External references resolve through the mock FS and HTTP client.
+	resolveOpts := references.ResolveOptions{
+		TargetLocation: "/api/openapi.yaml",
+		RootDocument:   doc,
+		TargetDocument: doc,
+		VirtualFS:      vfs,
+		HTTPClient:     httpClient,
+	}
+	idx := openapi.BuildIndex(ctx, doc, resolveOpts)
+	require.NotNil(t, idx)
+
+	return idx, vfs, httpClient
+}
+
+func TestBuildIndex_ExternalReferences_Comprehensive(t *testing.T) {
+ t.Parallel()
+
+ idx, _, _ := setupComprehensiveExternalRefs(t)
+
+ tests := []struct {
+ name string
+ assertion func(t *testing.T, idx *openapi.Index)
+ }{
+ {
+ name: "external schemas count correct",
+ assertion: func(t *testing.T, idx *openapi.Index) {
+ t.Helper()
+ // External schemas: UserResponse, User, Address, Product, Category, TreeNode (6)
+ assert.Len(t, idx.ExternalSchemas, 6, "should have exactly 6 external schemas")
+ },
+ },
+ {
+ name: "external documentation count correct",
+ assertion: func(t *testing.T, idx *openapi.Index) {
+ t.Helper()
+ // ExternalDocs: main doc + users tag
+ assert.Len(t, idx.ExternalDocumentation, 2, "should have exactly 2 external documentation")
+ },
+ },
+ {
+ name: "tags count correct",
+ assertion: func(t *testing.T, idx *openapi.Index) {
+ t.Helper()
+ // Tags: users, products
+ assert.Len(t, idx.Tags, 2, "should have exactly 2 tags")
+ },
+ },
+ {
+ name: "servers count correct",
+ assertion: func(t *testing.T, idx *openapi.Index) {
+ t.Helper()
+ // Servers: production, staging
+ assert.Len(t, idx.Servers, 2, "should have exactly 2 servers")
+ },
+ },
+ {
+ name: "server variables count correct",
+ assertion: func(t *testing.T, idx *openapi.Index) {
+ t.Helper()
+ // ServerVariables: version
+ assert.Len(t, idx.ServerVariables, 1, "should have exactly 1 server variable")
+ },
+ },
+ {
+ name: "boolean schemas count correct",
+ assertion: func(t *testing.T, idx *openapi.Index) {
+ t.Helper()
+ // BooleanSchemas: true, false from additionalProperties
+ assert.Len(t, idx.BooleanSchemas, 2, "should have exactly 2 boolean schemas")
+ },
+ },
+ {
+ name: "component schemas count correct",
+ assertion: func(t *testing.T, idx *openapi.Index) {
+ t.Helper()
+ // ComponentSchemas: LocalSchema, AnotherLocal
+ assert.Len(t, idx.ComponentSchemas, 2, "should have exactly 2 component schemas")
+ },
+ },
+ {
+ name: "schema references count correct",
+ assertion: func(t *testing.T, idx *openapi.Index) {
+ t.Helper()
+ // Schema references: 9 $ref pointers total
+ assert.Len(t, idx.SchemaReferences, 9, "should have exactly 9 schema references")
+ },
+ },
+ {
+ name: "inline property schemas count correct",
+ assertion: func(t *testing.T, idx *openapi.Index) {
+ t.Helper()
+ // Inline schemas: 9 from external + 1 from LocalSchema.id
+ assert.Len(t, idx.InlineSchemas, 10, "should have exactly 10 inline schemas")
+ },
+ },
+ {
+ name: "inline path items count correct",
+ assertion: func(t *testing.T, idx *openapi.Index) {
+ t.Helper()
+ // InlinePathItems: /users, /products, /trees
+ assert.Len(t, idx.InlinePathItems, 3, "should have exactly 3 inline path items")
+ },
+ },
+ {
+ name: "operations count correct",
+ assertion: func(t *testing.T, idx *openapi.Index) {
+ t.Helper()
+ // Operations: getUsers, getProducts, getTrees
+ assert.Len(t, idx.Operations, 3, "should have exactly 3 operations")
+ },
+ },
+ {
+ name: "inline responses count correct",
+ assertion: func(t *testing.T, idx *openapi.Index) {
+ t.Helper()
+ // InlineResponses: 200 response for each operation
+ assert.Len(t, idx.InlineResponses, 3, "should have exactly 3 inline responses")
+ },
+ },
+ {
+ name: "circular error count correct",
+ assertion: func(t *testing.T, idx *openapi.Index) {
+ t.Helper()
+ // Should detect 1 invalid circular: Product<->Category
+ assert.Len(t, idx.GetCircularReferenceErrors(), 1, "should have exactly 1 circular error")
+ },
+ },
+ {
+ name: "no errors for valid references",
+ assertion: func(t *testing.T, idx *openapi.Index) {
+ t.Helper()
+ // Should have no resolution errors
+ assert.Empty(t, idx.GetResolutionErrors(), "should have no resolution errors")
+ assert.Empty(t, idx.GetValidationErrors(), "should have no validation errors")
+ },
+ },
+ {
+ name: "unreferenced schemas in external files not indexed",
+ assertion: func(t *testing.T, idx *openapi.Index) {
+ t.Helper()
+ for _, schema := range idx.GetAllSchemas() {
+ loc := string(schema.Location.ToJSONPointer())
+ assert.NotContains(t, loc, "UnreferencedUser", "UnreferencedUser should not be indexed")
+ assert.NotContains(t, loc, "UnusedTreeType", "UnusedTreeType should not be indexed")
+ assert.NotContains(t, loc, "TotallyUnused", "TotallyUnused should not be indexed")
+ assert.NotContains(t, loc, "UnreferencedCategory", "UnreferencedCategory should not be indexed")
+ }
+ },
+ },
+ {
+ name: "valid circular reference via optional property",
+ assertion: func(t *testing.T, idx *openapi.Index) {
+ t.Helper()
+ circularErrs := idx.GetCircularReferenceErrors()
+ for _, err := range circularErrs {
+ errStr := err.Error()
+ // User<->Address should not have circular error (address is optional)
+ if strings.Contains(errStr, "User") && strings.Contains(errStr, "Address") {
+ t.Errorf("User<->Address circular via optional property should be valid, got error: %v", err)
+ }
+ }
+ },
+ },
+ {
+ name: "valid circular reference via array minItems=0",
+ assertion: func(t *testing.T, idx *openapi.Index) {
+ t.Helper()
+ circularErrs := idx.GetCircularReferenceErrors()
+ for _, err := range circularErrs {
+ errStr := err.Error()
+ // TreeNode self-reference should not have circular error
+ if strings.Contains(errStr, "TreeNode") {
+ t.Errorf("TreeNode self-reference via array should be valid, got error: %v", err)
+ }
+ }
+ },
+ },
+ {
+ name: "schema references tracked with locations",
+ assertion: func(t *testing.T, idx *openapi.Index) {
+ t.Helper()
+ assert.NotEmpty(t, idx.SchemaReferences, "should have schema references")
+ for _, ref := range idx.SchemaReferences {
+ assert.NotNil(t, ref.Location, "reference should have location")
+ assert.NotNil(t, ref.Node, "reference should have node")
+ }
+ },
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ t.Parallel()
+ tt.assertion(t, idx)
+ })
+ }
+}
+
+// TestExternalPathItemReferencesWithOperations verifies that:
+// 1. External path item references are resolved correctly
+// 2. Operations within external path items are indexed
+// 3. Walk descends into resolved external path items
+func TestExternalPathItemReferencesWithOperations(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ // Create external file with path items containing operations
+ externalSpec := `
+a:
+ get:
+ operationId: op-a
+ responses:
+ '200':
+ description: OK
+ post:
+ operationId: op-a-post
+ responses:
+ '201':
+ description: Created
+b:
+ get:
+ operationId: op-b
+ responses:
+ '200':
+ description: OK
+`
+
+ // Create main spec that references the external path items
+ mainSpec := `
+openapi: 3.1.0
+info:
+ title: Test
+ version: 1.0.0
+paths:
+ /a:
+ $ref: "./external.yaml#/a"
+ /b:
+ $ref: "./external.yaml#/b"
+`
+
+ // Parse main document
+ doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(mainSpec))
+ require.NoError(t, err)
+ require.NotNil(t, doc)
+
+ // Setup virtual filesystem with external file
+ // Use absolute path and matching reference in spec
+ vfs := NewMockVirtualFS()
+ vfs.AddFile("/test/external.yaml", externalSpec)
+
+ // Build index with external reference resolution
+ resolveOpts := references.ResolveOptions{
+ RootDocument: doc,
+ TargetDocument: doc,
+ TargetLocation: "/test/main.yaml", // Absolute path so relative refs resolve correctly
+ VirtualFS: vfs,
+ }
+
+ idx := openapi.BuildIndex(ctx, doc, resolveOpts)
+ require.NotNil(t, idx)
+
+ // Verify external path item references were resolved
+ assert.Len(t, idx.PathItemReferences, 2, "should have 2 external path item references")
+
+ // Verify operations from external path items are indexed
+ assert.Len(t, idx.Operations, 3, "should have 3 operations (2 from /a, 1 from /b)")
+
+ // Verify operation IDs are correct
+ operationIDs := make([]string, len(idx.Operations))
+ for i, op := range idx.Operations {
+ operationIDs[i] = op.Node.GetOperationID()
+ }
+ assert.Contains(t, operationIDs, "op-a", "should contain op-a")
+ assert.Contains(t, operationIDs, "op-a-post", "should contain op-a-post")
+ assert.Contains(t, operationIDs, "op-b", "should contain op-b")
+
+ // Verify no resolution errors
+ assert.Empty(t, idx.GetResolutionErrors(), "should have no resolution errors")
+}
+
+// TestExternalReferencedComponentsWithinOperations verifies that:
+// 1. External parameter, requestBody, response, header, and example references are resolved
+// 2. Walk descends into resolved external references within operations
+// 3. Referenced components are properly indexed
+func TestExternalReferencedComponentsWithinOperations(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ // Create external file with reusable components
+ componentsSpec := `
+UserParam:
+ name: userId
+ in: path
+ required: true
+ schema:
+ type: string
+
+CreateRequest:
+ required: true
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ name:
+ type: string
+
+SuccessResponse:
+ description: Success
+ headers:
+ X-Request-ID:
+ description: Request ID header
+ schema:
+ type: string
+ content:
+ application/json:
+ schema:
+ type: object
+ examples:
+ example1:
+ value:
+ status: success
+`
+
+ // Create main spec with operations that reference external components
+ mainSpec := `
+openapi: 3.1.0
+info:
+ title: Test API
+ version: 1.0.0
+paths:
+ /users/{userId}:
+ parameters:
+ - $ref: "./components.yaml#/UserParam"
+ post:
+ operationId: createUser
+ requestBody:
+ $ref: "./components.yaml#/CreateRequest"
+ responses:
+ '200':
+ $ref: "./components.yaml#/SuccessResponse"
+`
+
+ // Parse main document
+ doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(mainSpec))
+ require.NoError(t, err)
+ require.NotNil(t, doc)
+
+ // Setup virtual filesystem
+ vfs := NewMockVirtualFS()
+ vfs.AddFile("/test/components.yaml", componentsSpec)
+
+ // Build index
+ resolveOpts := references.ResolveOptions{
+ RootDocument: doc,
+ TargetDocument: doc,
+ TargetLocation: "/test/main.yaml",
+ VirtualFS: vfs,
+ }
+
+ idx := openapi.BuildIndex(ctx, doc, resolveOpts)
+ require.NotNil(t, idx)
+
+ // Verify operations were indexed
+ assert.Len(t, idx.Operations, 1, "should have 1 operation")
+
+ // Verify external parameter reference was resolved and indexed
+ assert.NotEmpty(t, idx.ParameterReferences, "should have parameter references")
+
+ // Verify external request body reference was resolved
+ assert.NotEmpty(t, idx.RequestBodyReferences, "should have request body references")
+
+ // Verify external response reference was resolved
+ assert.NotEmpty(t, idx.ResponseReferences, "should have response references")
+
+ // Verify headers within resolved response are indexed (inline headers, not references)
+ assert.NotEmpty(t, idx.InlineHeaders, "should have inline headers from resolved response")
+
+ // Verify examples within resolved response are indexed (inline examples, not references)
+ assert.NotEmpty(t, idx.InlineExamples, "should have inline examples from resolved response")
+
+ // Verify no resolution errors
+ assert.Empty(t, idx.GetResolutionErrors(), "should have no resolution errors")
+}
+func TestBuildIndex_ExternalReferencesForAllTypes_Success(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ vfs := NewMockVirtualFS()
+
+ // Main API document with references to external components
+ vfs.AddFile("/api/openapi.yaml", `
+openapi: "3.1.0"
+info:
+ title: External Components Test API
+ version: 1.0.0
+paths:
+ /users:
+ get:
+ operationId: getUsers
+ parameters:
+ - $ref: 'components.yaml#/PageSize'
+ responses:
+ "200":
+ $ref: 'components.yaml#/UsersResponse'
+ callbacks:
+ onUpdate:
+ $ref: 'components.yaml#/UpdateCallback'
+ post:
+ operationId: createUser
+ requestBody:
+ $ref: 'components.yaml#/UserRequestBody'
+ responses:
+ "201":
+ description: Created
+`)
+
+ // External components file with all types at top level
+ vfs.AddFile("/api/components.yaml", `
+PageSize:
+ name: pageSize
+ in: query
+ schema:
+ type: integer
+
+UsersResponse:
+ description: Users response
+ headers:
+ X-Total-Count:
+ $ref: '#/TotalCountHeader'
+ content:
+ application/json:
+ schema:
+ type: array
+ items:
+ type: object
+ examples:
+ singleUser:
+ $ref: '#/SingleUserExample'
+ links:
+ GetUserById:
+ $ref: '#/UserLink'
+
+UserRequestBody:
+ description: User request body
+ content:
+ application/json:
+ schema:
+ type: object
+
+TotalCountHeader:
+ description: Total count header
+ schema:
+ type: integer
+
+SingleUserExample:
+ value:
+ id: 1
+ name: John Doe
+
+UserLink:
+ operationId: getUsers
+ description: Link to get users
+
+UpdateCallback:
+ '{$request.body#/callbackUrl}':
+ post:
+ requestBody:
+ description: Update notification
+ content:
+ application/json:
+ schema:
+ type: object
+ responses:
+ "200":
+ description: OK
+`)
+
+ // Unmarshal and build index
+ doc, validationErrs, err := openapi.Unmarshal(ctx, strings.NewReader(vfs.files["/api/openapi.yaml"]))
+ require.NoError(t, err)
+ require.Empty(t, validationErrs)
+
+ resolveOpts := references.ResolveOptions{
+ TargetLocation: "/api/openapi.yaml",
+ RootDocument: doc,
+ TargetDocument: doc,
+ VirtualFS: vfs,
+ }
+ idx := openapi.BuildIndex(ctx, doc, resolveOpts)
+ require.NotNil(t, idx)
+
+ // Test External Parameters
+ assert.Len(t, idx.ExternalParameters, 1, "should have 1 external parameter (PageSize)")
+ assert.Len(t, idx.ParameterReferences, 1, "should have 1 parameter reference")
+ assert.Empty(t, idx.ComponentParameters, "should have 0 component parameters (PageSize is external)")
+ assert.Empty(t, idx.InlineParameters, "should have 0 inline parameters")
+
+ // Test External Responses
+ assert.Len(t, idx.ExternalResponses, 1, "should have 1 external response (UsersResponse)")
+ assert.Len(t, idx.ResponseReferences, 1, "should have 1 response reference")
+ assert.Empty(t, idx.ComponentResponses, "should have 0 component responses (UsersResponse is external)")
+	assert.Len(t, idx.InlineResponses, 2, "should have 2 inline responses (201 Created + 200 OK from callback)")
+
+ // Test External RequestBodies
+ assert.Len(t, idx.ExternalRequestBodies, 1, "should have 1 external request body (UserRequestBody)")
+ assert.Len(t, idx.RequestBodyReferences, 1, "should have 1 request body reference")
+ assert.Empty(t, idx.ComponentRequestBodies, "should have 0 component request bodies")
+ assert.Len(t, idx.InlineRequestBodies, 1, "should have 1 inline request body (from callback)")
+
+ // Test External Headers
+ // FIXED: Header references inside external files CAN now be resolved!
+ assert.Len(t, idx.ExternalHeaders, 1, "should have 1 external header (TotalCountHeader)")
+ assert.Len(t, idx.HeaderReferences, 1, "should have 1 header reference")
+ assert.Empty(t, idx.ComponentHeaders, "should have 0 component headers")
+ assert.Empty(t, idx.InlineHeaders, "should have 0 inline headers")
+
+ // Test External Examples
+ // FIXED: Example references inside external files CAN now be resolved!
+ assert.Len(t, idx.ExternalExamples, 1, "should have 1 external example (SingleUserExample)")
+ assert.Len(t, idx.ExampleReferences, 1, "should have 1 example reference")
+ assert.Empty(t, idx.ComponentExamples, "should have 0 component examples")
+ assert.Empty(t, idx.InlineExamples, "should have 0 inline examples")
+
+ // Test External Links
+ // FIXED: Link references inside external files CAN now be resolved!
+ assert.Len(t, idx.ExternalLinks, 1, "should have 1 external link (UserLink)")
+ assert.Len(t, idx.LinkReferences, 1, "should have 1 link reference")
+ assert.Empty(t, idx.ComponentLinks, "should have 0 component links")
+ assert.Empty(t, idx.InlineLinks, "should have 0 inline links")
+
+ // Test External Callbacks
+ assert.Len(t, idx.ExternalCallbacks, 1, "should have 1 external callback (UpdateCallback)")
+ assert.Len(t, idx.CallbackReferences, 1, "should have 1 callback reference")
+ assert.Empty(t, idx.ComponentCallbacks, "should have 0 component callbacks")
+ assert.Empty(t, idx.InlineCallbacks, "should have 0 inline callbacks")
+
+ // Test GetAll* methods include external items (but not references)
+ allParameters := idx.GetAllParameters()
+ assert.Len(t, allParameters, 1, "GetAllParameters should return external (not reference)")
+
+ allResponses := idx.GetAllResponses()
+ assert.Len(t, allResponses, 3, "GetAllResponses should return external + 2 inline (not reference)")
+
+ allRequestBodies := idx.GetAllRequestBodies()
+ assert.Len(t, allRequestBodies, 2, "GetAllRequestBodies should return external + inline (not reference)")
+
+ allHeaders := idx.GetAllHeaders()
+ assert.Len(t, allHeaders, 1, "GetAllHeaders should have 1 (TotalCountHeader - internal refs now work!)")
+
+ allExamples := idx.GetAllExamples()
+ assert.Len(t, allExamples, 1, "GetAllExamples should have 1 (SingleUserExample - internal refs now work!)")
+
+ allLinks := idx.GetAllLinks()
+ assert.Len(t, allLinks, 1, "GetAllLinks should have 1 (UserLink - internal refs now work!)")
+
+ allCallbacks := idx.GetAllCallbacks()
+ assert.Len(t, allCallbacks, 1, "GetAllCallbacks should return external (not reference)")
+
+ // FIXED: No more resolution errors! Internal references in external files now work correctly
+ assert.False(t, idx.HasErrors(), "should have no errors after multi-file reference fix")
+ assert.Empty(t, idx.GetResolutionErrors(), "should have 0 resolution errors (bug is fixed!)")
+}
+func TestDebugExternalParameter(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ vfs := NewMockVirtualFS()
+
+ // Main document
+ vfs.AddFile("/api/main.yaml", `
+openapi: "3.1.0"
+info:
+ title: Test
+ version: 1.0.0
+paths:
+ /test:
+ get:
+ operationId: test
+ parameters:
+ - $ref: 'external.yaml#/PageSize'
+ responses:
+ "200":
+ description: OK
+`)
+
+ // External parameter
+ vfs.AddFile("/api/external.yaml", `
+PageSize:
+ name: pageSize
+ in: query
+ schema:
+ type: integer
+`)
+
+ doc, _, err := openapi.Unmarshal(ctx, strings.NewReader(vfs.files["/api/main.yaml"]))
+ require.NoError(t, err)
+
+ idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+ TargetLocation: "/api/main.yaml",
+ RootDocument: doc,
+ TargetDocument: doc,
+ VirtualFS: vfs,
+ })
+
+ t.Logf("ComponentParameters: %d", len(idx.ComponentParameters))
+ t.Logf("ExternalParameters: %d", len(idx.ExternalParameters))
+ t.Logf("InlineParameters: %d", len(idx.InlineParameters))
+ t.Logf("ParameterReferences: %d", len(idx.ParameterReferences))
+
+ if len(idx.ExternalParameters) > 0 {
+ t.Logf("External parameter location: %s", idx.ExternalParameters[0].Location.ToJSONPointer())
+ }
+ if len(idx.InlineParameters) > 0 {
+ t.Logf("Inline parameter location: %s", idx.InlineParameters[0].Location.ToJSONPointer())
+ }
+
+ t.Logf("Errors: %v", idx.HasErrors())
+ for _, err := range idx.GetAllErrors() {
+ t.Logf("Error: %v", err)
+ }
+}
diff --git a/openapi/index_node_operation_test.go b/openapi/index_node_operation_test.go
new file mode 100644
index 00000000..c539f661
--- /dev/null
+++ b/openapi/index_node_operation_test.go
@@ -0,0 +1,796 @@
+package openapi_test
+
+import (
+ "strings"
+ "testing"
+
+ "github.com/speakeasy-api/openapi/openapi"
+ "github.com/speakeasy-api/openapi/references"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+ "gopkg.in/yaml.v3"
+)
+
+func TestBuildIndex_NodeToOperations_WithOption_Success(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ yml := `
+openapi: "3.1.0"
+info:
+ title: Test API
+ version: 1.0.0
+paths:
+ /users:
+ get:
+ operationId: getUsers
+ responses:
+ "200":
+ description: Success
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/User'
+ post:
+ operationId: createUser
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/User'
+ responses:
+ "201":
+ description: Created
+components:
+ schemas:
+ User:
+ type: object
+ properties:
+ id:
+ type: integer
+ name:
+ type: string
+`
+ doc := unmarshalOpenAPI(t, ctx, yml)
+ idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+ RootDocument: doc,
+ TargetDocument: doc,
+ TargetLocation: "test.yaml",
+ }, openapi.WithNodeOperationMap())
+
+ require.NotNil(t, idx, "index should not be nil")
+ assert.False(t, idx.HasErrors(), "should have no errors")
+
+ // Map should be populated
+ require.NotNil(t, idx.NodeToOperations, "NodeToOperations map should be initialized")
+ assert.NotEmpty(t, idx.NodeToOperations, "NodeToOperations should have entries when enabled")
+
+ // Should have operations indexed
+ assert.Len(t, idx.Operations, 2, "should have 2 operations")
+}
+
+func TestBuildIndex_NodeToOperations_Disabled_Success(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ yml := `
+openapi: "3.1.0"
+info:
+ title: Test API
+ version: 1.0.0
+paths:
+ /users:
+ get:
+ operationId: getUsers
+ responses:
+ "200":
+ description: Success
+`
+ doc := unmarshalOpenAPI(t, ctx, yml)
+ // Don't pass WithNodeOperationMap() option
+ idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+ RootDocument: doc,
+ TargetDocument: doc,
+ TargetLocation: "test.yaml",
+ })
+
+ require.NotNil(t, idx, "index should not be nil")
+ assert.False(t, idx.HasErrors(), "should have no errors")
+
+ // Map should be nil when disabled (default)
+ assert.Nil(t, idx.NodeToOperations, "NodeToOperations should be nil when disabled")
+
+ // GetNodeOperations should return nil for any node
+ assert.Nil(t, idx.GetNodeOperations(nil), "GetNodeOperations should return nil when disabled")
+}
+
+func TestBuildIndex_NodeToOperations_SharedSchema_Success(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ yml := `
+openapi: "3.1.0"
+info:
+ title: Test API
+ version: 1.0.0
+paths:
+ /users:
+ get:
+ operationId: getUsers
+ responses:
+ "200":
+ description: Success
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/User'
+ /admin/users:
+ get:
+ operationId: getAdminUsers
+ responses:
+ "200":
+ description: Success
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/User'
+components:
+ schemas:
+ User:
+ type: object
+ properties:
+ id:
+ type: integer
+`
+ doc := unmarshalOpenAPI(t, ctx, yml)
+ idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+ RootDocument: doc,
+ TargetDocument: doc,
+ TargetLocation: "test.yaml",
+ }, openapi.WithNodeOperationMap())
+
+ require.NotNil(t, idx, "index should not be nil")
+ assert.False(t, idx.HasErrors(), "should have no errors")
+
+ // Should have 2 operations
+ assert.Len(t, idx.Operations, 2, "should have 2 operations")
+
+ // The User schema is referenced by both operations
+ // Get the User schema node
+ require.Len(t, idx.ComponentSchemas, 1, "should have 1 component schema")
+ userSchema := idx.ComponentSchemas[0]
+ require.NotNil(t, userSchema, "User schema should exist")
+
+ userNode := userSchema.Node.GetRootNode()
+ require.NotNil(t, userNode, "User schema should have a root node")
+
+ // Check that the User schema is mapped to both operations
+ ops := idx.GetNodeOperations(userNode)
+ assert.Len(t, ops, 2, "User schema should be referenced by 2 operations")
+}
+
+func TestBuildIndex_NodeToOperations_Webhooks_Success(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ yml := `
+openapi: "3.1.0"
+info:
+ title: Test API
+ version: 1.0.0
+paths: {}
+webhooks:
+ newUser:
+ post:
+ operationId: userCreatedWebhook
+ requestBody:
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ userId:
+ type: string
+ responses:
+ "200":
+ description: OK
+`
+ doc := unmarshalOpenAPI(t, ctx, yml)
+ idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+ RootDocument: doc,
+ TargetDocument: doc,
+ TargetLocation: "test.yaml",
+ }, openapi.WithNodeOperationMap())
+
+ require.NotNil(t, idx, "index should not be nil")
+ assert.False(t, idx.HasErrors(), "should have no errors")
+
+ // Should have 1 webhook operation
+ assert.Len(t, idx.Operations, 1, "should have 1 operation from webhook")
+
+ // Check that nodes are mapped to the webhook operation
+ assert.NotEmpty(t, idx.NodeToOperations, "NodeToOperations should have entries")
+
+ // Verify the operation location indicates it's a webhook
+ op := idx.Operations[0]
+ require.NotNil(t, op, "operation should exist")
+ assert.True(t, openapi.IsWebhookLocation(op.Location), "operation should be identified as webhook")
+}
+
+func TestBuildIndex_GetNodeOperations_NilCases_Success(t *testing.T) {
+ t.Parallel()
+
+ tests := []struct {
+ name string
+ test func(t *testing.T)
+ }{
+ {
+ name: "nil index",
+ test: func(t *testing.T) {
+ t.Helper()
+ var idx *openapi.Index
+ result := idx.GetNodeOperations(nil)
+ assert.Nil(t, result, "should return nil for nil index")
+ },
+ },
+ {
+ name: "nil node",
+ test: func(t *testing.T) {
+ t.Helper()
+ ctx := t.Context()
+ yml := `
+openapi: "3.1.0"
+info:
+ title: Test
+ version: 1.0.0
+paths: {}
+`
+ doc := unmarshalOpenAPI(t, ctx, yml)
+ idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+ RootDocument: doc,
+ TargetDocument: doc,
+ TargetLocation: "test.yaml",
+ }, openapi.WithNodeOperationMap())
+
+ result := idx.GetNodeOperations(nil)
+ assert.Nil(t, result, "should return nil for nil node")
+ },
+ },
+ {
+ name: "node not found",
+ test: func(t *testing.T) {
+ t.Helper()
+ ctx := t.Context()
+ yml := `
+openapi: "3.1.0"
+info:
+ title: Test
+ version: 1.0.0
+paths: {}
+`
+ doc := unmarshalOpenAPI(t, ctx, yml)
+ idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+ RootDocument: doc,
+ TargetDocument: doc,
+ TargetLocation: "test.yaml",
+ }, openapi.WithNodeOperationMap())
+
+ // Create a node that's not in the document
+ unknownNode := &yaml.Node{Kind: yaml.ScalarNode, Value: "unknown"}
+ result := idx.GetNodeOperations(unknownNode)
+ assert.Nil(t, result, "should return nil for unknown node")
+ },
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ t.Parallel()
+ tt.test(t)
+ })
+ }
+}
+
+func TestIsWebhookLocation_Success(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ tests := []struct {
+ name string
+ yml string
+ isWebhook bool
+ opId string
+ }{
+ {
+ name: "path operation is not webhook",
+ yml: `
+openapi: "3.1.0"
+info:
+ title: Test
+ version: 1.0.0
+paths:
+ /users:
+ get:
+ operationId: getUsers
+ responses:
+ "200":
+ description: OK
+`,
+ isWebhook: false,
+ opId: "getUsers",
+ },
+ {
+ name: "webhook operation is webhook",
+ yml: `
+openapi: "3.1.0"
+info:
+ title: Test
+ version: 1.0.0
+paths: {}
+webhooks:
+ userCreated:
+ post:
+ operationId: userCreatedWebhook
+ responses:
+ "200":
+ description: OK
+`,
+ isWebhook: true,
+ opId: "userCreatedWebhook",
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ t.Parallel()
+
+ doc := unmarshalOpenAPI(t, ctx, tt.yml)
+ idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+ RootDocument: doc,
+ TargetDocument: doc,
+ TargetLocation: "test.yaml",
+ })
+
+ require.NotNil(t, idx, "index should not be nil")
+ require.Len(t, idx.Operations, 1, "should have 1 operation")
+
+ op := idx.Operations[0]
+ assert.Equal(t, tt.isWebhook, openapi.IsWebhookLocation(op.Location),
+ "IsWebhookLocation should return %v for %s", tt.isWebhook, tt.opId)
+ })
+ }
+}
+
+func TestExtractOperationInfo_Success(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ tests := []struct {
+ name string
+ yml string
+ expectedPath string
+ expectedMethod string
+ isWebhook bool
+ }{
+ {
+ name: "path operation",
+ yml: `
+openapi: "3.1.0"
+info:
+ title: Test
+ version: 1.0.0
+paths:
+ /users/{id}:
+ get:
+ operationId: getUser
+ responses:
+ "200":
+ description: OK
+`,
+ expectedPath: "/users/{id}",
+ expectedMethod: "get",
+ isWebhook: false,
+ },
+ {
+ name: "webhook operation",
+ yml: `
+openapi: "3.1.0"
+info:
+ title: Test
+ version: 1.0.0
+paths: {}
+webhooks:
+ orderCreated:
+ post:
+ operationId: orderWebhook
+ responses:
+ "200":
+ description: OK
+`,
+ expectedPath: "orderCreated",
+ expectedMethod: "post",
+ isWebhook: true,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ t.Parallel()
+
+ doc := unmarshalOpenAPI(t, ctx, tt.yml)
+ idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+ RootDocument: doc,
+ TargetDocument: doc,
+ TargetLocation: "test.yaml",
+ })
+
+ require.NotNil(t, idx, "index should not be nil")
+ require.Len(t, idx.Operations, 1, "should have 1 operation")
+
+ op := idx.Operations[0]
+ path, method, isWebhook := openapi.ExtractOperationInfo(op.Location)
+
+ assert.Equal(t, tt.expectedPath, path, "path should match")
+ assert.Equal(t, tt.expectedMethod, method, "method should match")
+ assert.Equal(t, tt.isWebhook, isWebhook, "isWebhook should match")
+ })
+ }
+}
+
+// TestBuildIndex_NodeToOperations_ComponentsNotMapped_Success verifies that
+// component schemas referenced by an operation appear in the
+// NodeToOperations map, while unreferenced component schemas do not.
+func TestBuildIndex_NodeToOperations_ComponentsNotMapped_Success(t *testing.T) {
+	t.Parallel()
+	ctx := t.Context()
+
+	// This test verifies that schemas defined in components but not referenced
+	// by any operation are NOT in the NodeToOperations map
+	yml := `
+openapi: "3.1.0"
+info:
+  title: Test API
+  version: 1.0.0
+paths:
+  /users:
+    get:
+      operationId: getUsers
+      responses:
+        "200":
+          description: Success
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/User'
+components:
+  schemas:
+    User:
+      type: object
+    UnusedSchema:
+      type: object
+      properties:
+        unused:
+          type: string
+`
+	doc := unmarshalOpenAPI(t, ctx, yml)
+	idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+		RootDocument:   doc,
+		TargetDocument: doc,
+		TargetLocation: "test.yaml",
+	}, openapi.WithNodeOperationMap())
+
+	require.NotNil(t, idx, "index should not be nil")
+	assert.False(t, idx.HasErrors(), "should have no errors")
+
+	// Get the User schema - should be mapped to the operation
+	require.Len(t, idx.ComponentSchemas, 2, "should have 2 component schemas")
+
+	// Identify each component schema by the exact final segment of its JSON
+	// pointer. Substring matching (e.g. Contains "User") is fragile because
+	// one schema name can be a substring of another.
+	var userOps, unusedOps []*openapi.IndexNode[*openapi.Operation]
+	for _, schema := range idx.ComponentSchemas {
+		if schema == nil || schema.Node == nil {
+			continue
+		}
+		node := schema.Node.GetRootNode()
+		if node == nil {
+			continue
+		}
+		ops := idx.GetNodeOperations(node)
+		jp := schema.Location.ToJSONPointer().String()
+		switch {
+		case strings.HasSuffix(jp, "/User"):
+			userOps = ops
+		case strings.HasSuffix(jp, "/UnusedSchema"):
+			unusedOps = ops
+		}
+	}
+
+	// User should be mapped to 1 operation
+	assert.Len(t, userOps, 1, "User schema should be mapped to 1 operation")
+
+	// UnusedSchema should NOT be mapped to any operations
+	// (it's after paths in the walk order, so currentOperation is nil)
+	assert.Empty(t, unusedOps, "UnusedSchema should not be mapped to any operations")
+}
+
+// TestBuildIndex_NodeToOperations_NestedSchemaNodes_Success verifies that
+// nodes nested inside a referenced component schema (not only its root node)
+// are mapped back to the operations that reference the parent schema.
+func TestBuildIndex_NodeToOperations_NestedSchemaNodes_Success(t *testing.T) {
+	t.Parallel()
+	ctx := t.Context()
+
+	// This test verifies that nested nodes WITHIN a component schema
+	// are also mapped to operations that reference the parent schema via $ref
+	yml := `
+openapi: "3.1.0"
+info:
+  title: Test API
+  version: 1.0.0
+paths:
+  /test:
+    get:
+      operationId: getTest
+      responses:
+        "200":
+          description: OK
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/MySchema'
+components:
+  schemas:
+    MySchema:
+      type: array
+      items:
+        type: object
+        properties:
+          id:
+            type: integer
+`
+	doc := unmarshalOpenAPI(t, ctx, yml)
+	idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+		RootDocument:   doc,
+		TargetDocument: doc,
+		TargetLocation: "test.yaml",
+	}, openapi.WithNodeOperationMap())
+
+	require.NotNil(t, idx, "index should not be nil")
+	assert.False(t, idx.HasErrors(), "should have no errors")
+
+	// Should have 1 operation
+	require.Len(t, idx.Operations, 1, "should have 1 operation")
+
+	// Get the component schema (MySchema)
+	require.Len(t, idx.ComponentSchemas, 1, "should have 1 component schema")
+	mySchema := idx.ComponentSchemas[0]
+	require.NotNil(t, mySchema, "MySchema should exist")
+
+	// The root node of MySchema should be mapped to the operation
+	mySchemaNode := mySchema.Node.GetRootNode()
+	require.NotNil(t, mySchemaNode, "MySchema should have a root node")
+
+	rootOps := idx.GetNodeOperations(mySchemaNode)
+	require.Len(t, rootOps, 1, "MySchema root should be mapped to 1 operation")
+	assert.Equal(t, "getTest", *rootOps[0].Node.OperationID, "should be getTest operation")
+
+	// Now check nested nodes - the items schema should also be mapped
+	// Find an inline schema that's within MySchema (like the items schema)
+	var itemsSchemaOps []*openapi.IndexNode[*openapi.Operation]
+	for _, schema := range idx.InlineSchemas {
+		if schema == nil || schema.Node == nil {
+			continue
+		}
+		node := schema.Node.GetRootNode()
+		if node == nil {
+			continue
+		}
+		ops := idx.GetNodeOperations(node)
+		if len(ops) > 0 {
+			// This is an inline schema that's mapped to operations
+			itemsSchemaOps = ops
+			break
+		}
+	}
+
+	// At least one inline schema (like items or id property) should be mapped
+	assert.NotEmpty(t, itemsSchemaOps, "nested inline schemas should be mapped to operations")
+}
+
+// TestBuildIndex_NodeToOperations_BooleanSchema_Success verifies that a
+// boolean schema (`items: true`) inside a $ref'd component schema is mapped
+// to the operation that references the parent schema.
+func TestBuildIndex_NodeToOperations_BooleanSchema_Success(t *testing.T) {
+	t.Parallel()
+	ctx := t.Context()
+
+	// Scenario: an operation references a component schema via $ref and that
+	// schema contains `items: true`; the boolean node must be attributed to
+	// the referencing operation.
+	yml := `
+openapi: "3.1.0"
+info:
+  title: Test
+  version: 1.0.0
+paths:
+  /test:
+    get:
+      operationId: getTest
+      responses:
+        "200":
+          description: OK
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/MySchema'
+components:
+  schemas:
+    MySchema:
+      type: array
+      items: true
+`
+	doc := unmarshalOpenAPI(t, ctx, yml)
+	resolveOpts := references.ResolveOptions{
+		RootDocument:   doc,
+		TargetDocument: doc,
+		TargetLocation: "test.yaml",
+	}
+	idx := openapi.BuildIndex(ctx, doc, resolveOpts, openapi.WithNodeOperationMap())
+
+	require.NotNil(t, idx, "index should not be nil")
+	assert.False(t, idx.HasErrors(), "should have no errors")
+	require.Len(t, idx.Operations, 1, "should have 1 operation")
+	require.NotEmpty(t, idx.BooleanSchemas, "should have boolean schemas")
+
+	// Scan the boolean schemas for the first one with an operation mapping.
+	var boolSchemaOps []*openapi.IndexNode[*openapi.Operation]
+	for _, candidate := range idx.BooleanSchemas {
+		if candidate == nil || candidate.Node == nil {
+			continue
+		}
+		root := candidate.Node.GetRootNode()
+		if root == nil {
+			continue
+		}
+		if mapped := idx.GetNodeOperations(root); len(mapped) > 0 {
+			boolSchemaOps = mapped
+			break
+		}
+	}
+
+	// Exactly one operation (getTest) should own the boolean schema node.
+	require.Len(t, boolSchemaOps, 1, "boolean schema should be mapped to 1 operation")
+	assert.Equal(t, "getTest", *boolSchemaOps[0].Node.OperationID, "should be getTest operation")
+}
+
+// TestBuildIndex_NodeToOperations_LeafValueNode_Success verifies that
+// GetNodeOperations resolves leaf VALUE nodes (e.g. the `true` scalar in
+// `items: true`), not just schema root nodes.
+func TestBuildIndex_NodeToOperations_LeafValueNode_Success(t *testing.T) {
+	t.Parallel()
+	ctx := t.Context()
+
+	// This test verifies that GetNodeOperations works for leaf VALUE nodes,
+	// not just root nodes. For example, when a linter finds an issue on
+	// the `true` value node in `items: true`, GetNodeOperations should
+	// return the operations that reference the parent schema.
+	yml := `
+openapi: "3.1.0"
+info:
+  title: Test
+  version: 1.0.0
+paths:
+  /test:
+    get:
+      operationId: getTest
+      responses:
+        "200":
+          description: OK
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/MyArray'
+components:
+  schemas:
+    MyArray:
+      type: array
+      items: true
+`
+	doc := unmarshalOpenAPI(t, ctx, yml)
+	idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+		RootDocument:   doc,
+		TargetDocument: doc,
+		TargetLocation: "test.yaml",
+	}, openapi.WithNodeOperationMap())
+
+	require.NotNil(t, idx, "index should not be nil")
+	assert.False(t, idx.HasErrors(), "should have no errors")
+	require.Len(t, idx.Operations, 1, "should have 1 operation")
+
+	// Find the boolean schema (representing items: true)
+	// This is the scenario where a linter gets a value node
+	require.NotEmpty(t, idx.BooleanSchemas, "should have boolean schemas")
+
+	// The boolean schema's root node is the actual value node (`true`)
+	boolSchema := idx.BooleanSchemas[0]
+	require.NotNil(t, boolSchema, "boolean schema should exist")
+	require.NotNil(t, boolSchema.Node, "boolean schema node should not be nil")
+
+	// Get the boolean value node - this is what a linter would get
+	// when it finds an issue on `items: true`
+	boolValueNode := boolSchema.Node.GetRootNode()
+	require.NotNil(t, boolValueNode, "boolean value node should not be nil")
+
+	// Verify this is actually the `true` value node
+	assert.Equal(t, yaml.ScalarNode, boolValueNode.Kind, "should be a scalar node")
+	assert.Equal(t, "true", boolValueNode.Value, "should have value 'true'")
+
+	// Now verify GetNodeOperations works for this leaf value node
+	ops := idx.GetNodeOperations(boolValueNode)
+	require.Len(t, ops, 1, "leaf value node should be mapped to 1 operation")
+	assert.Equal(t, "getTest", *ops[0].Node.OperationID, "should be getTest operation")
+}
+
+// TestBuildIndex_NodeToOperations_LeafKeyNode_Success verifies that
+// GetNodeOperations resolves leaf KEY nodes (e.g. the `type` key of a
+// schema), not just root or value nodes.
+func TestBuildIndex_NodeToOperations_LeafKeyNode_Success(t *testing.T) {
+	t.Parallel()
+	ctx := t.Context()
+
+	// This test verifies that GetNodeOperations works for leaf KEY nodes.
+	// For example, when a linter reports an issue on the key `type` in
+	// a schema, GetNodeOperations should return the associated operations.
+	yml := `
+openapi: "3.1.0"
+info:
+  title: Test
+  version: 1.0.0
+paths:
+  /pets:
+    get:
+      operationId: getPets
+      responses:
+        "200":
+          description: OK
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/Pet'
+components:
+  schemas:
+    Pet:
+      type: object
+      properties:
+        name:
+          type: string
+`
+	doc := unmarshalOpenAPI(t, ctx, yml)
+	idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+		RootDocument:   doc,
+		TargetDocument: doc,
+		TargetLocation: "test.yaml",
+	}, openapi.WithNodeOperationMap())
+
+	require.NotNil(t, idx, "index should not be nil")
+	assert.False(t, idx.HasErrors(), "should have no errors")
+	require.Len(t, idx.Operations, 1, "should have 1 operation")
+
+	// Find the component schema (Pet)
+	require.Len(t, idx.ComponentSchemas, 1, "should have 1 component schema")
+	petSchema := idx.ComponentSchemas[0]
+	require.NotNil(t, petSchema, "Pet schema should exist")
+
+	// Get the actual schema to access the core model's Type field
+	schema := petSchema.Node.GetSchema()
+	require.NotNil(t, schema, "schema should not be nil")
+
+	core := schema.GetCore()
+	require.NotNil(t, core, "core should not be nil")
+
+	// Guard against the test passing vacuously: at least one of the Type
+	// field's key/value nodes must be present and checked below.
+	checked := false
+
+	// Access the Type field's key node directly
+	// This tests that leaf key nodes are registered
+	typeKeyNode := core.Type.KeyNode
+	if typeKeyNode != nil {
+		checked = true
+		ops := idx.GetNodeOperations(typeKeyNode)
+		require.Len(t, ops, 1, "type key node should be mapped to 1 operation")
+		assert.Equal(t, "getPets", *ops[0].Node.OperationID, "should be getPets operation")
+	}
+
+	// Also test the value node of the Type field
+	typeValueNode := core.Type.ValueNode
+	if typeValueNode != nil {
+		checked = true
+		ops := idx.GetNodeOperations(typeValueNode)
+		require.Len(t, ops, 1, "type value node should be mapped to 1 operation")
+		assert.Equal(t, "getPets", *ops[0].Node.OperationID, "should be getPets operation")
+	}
+
+	require.True(t, checked, "expected at least one of the Type key/value nodes to be present")
+}
diff --git a/openapi/index_test.go b/openapi/index_test.go
new file mode 100644
index 00000000..f148972c
--- /dev/null
+++ b/openapi/index_test.go
@@ -0,0 +1,2207 @@
+package openapi_test
+
+import (
+ "context"
+ "errors"
+ "strings"
+ "testing"
+
+ "github.com/speakeasy-api/openapi/openapi"
+ "github.com/speakeasy-api/openapi/references"
+ "github.com/speakeasy-api/openapi/validation"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+// unmarshalOpenAPI parses the given OpenAPI document source and fails the
+// calling test immediately on any unmarshal error or validation error.
+// The parameter is named yml (not yaml) to avoid shadowing the conventional
+// yaml package identifier, matching the convention used in sibling tests.
+func unmarshalOpenAPI(t *testing.T, ctx context.Context, yml string) *openapi.OpenAPI {
+	t.Helper()
+	o, validationErrs, err := openapi.Unmarshal(ctx, strings.NewReader(yml))
+	require.NoError(t, err, "unmarshal should succeed")
+	require.Empty(t, validationErrs, "should have no validation errors")
+	return o
+}
+
+// TestBuildIndex_EmptyDoc_Success checks that indexing a document with no
+// paths or components yields an empty, error-free index.
+func TestBuildIndex_EmptyDoc_Success(t *testing.T) {
+	t.Parallel()
+	ctx := t.Context()
+
+	yml := `
+openapi: "3.1.0"
+info:
+  title: Empty API
+  version: 1.0.0
+paths: {}
+`
+	doc := unmarshalOpenAPI(t, ctx, yml)
+	resolveOpts := references.ResolveOptions{
+		RootDocument:   doc,
+		TargetDocument: doc,
+		TargetLocation: "test.yaml",
+	}
+	idx := openapi.BuildIndex(ctx, doc, resolveOpts)
+
+	require.NotNil(t, idx, "index should not be nil")
+	assert.False(t, idx.HasErrors(), "should have no errors")
+	assert.Empty(t, idx.GetAllSchemas(), "should have no schemas")
+	assert.Empty(t, idx.GetAllPathItems(), "should have no path items")
+}
+
+// TestBuildIndex_ComponentSchemas_Success checks that named component
+// schemas and their inline property sub-schemas are counted separately.
+func TestBuildIndex_ComponentSchemas_Success(t *testing.T) {
+	t.Parallel()
+	ctx := t.Context()
+
+	yml := `
+openapi: "3.1.0"
+info:
+  title: Test API
+  version: 1.0.0
+paths: {}
+components:
+  schemas:
+    User:
+      type: object
+      properties:
+        id:
+          type: integer
+        name:
+          type: string
+    Pet:
+      type: object
+      properties:
+        name:
+          type: string
+`
+	doc := unmarshalOpenAPI(t, ctx, yml)
+	resolveOpts := references.ResolveOptions{
+		RootDocument:   doc,
+		TargetDocument: doc,
+		TargetLocation: "test.yaml",
+	}
+	idx := openapi.BuildIndex(ctx, doc, resolveOpts)
+
+	require.NotNil(t, idx, "index should not be nil")
+	assert.False(t, idx.HasErrors(), "should have no errors")
+
+	// User and Pet are component schemas; their three property schemas
+	// (User.id, User.name, Pet.name) are indexed as inline schemas.
+	assert.Len(t, idx.ComponentSchemas, 2, "should have 2 component schemas")
+	assert.Len(t, idx.InlineSchemas, 3, "should have 3 inline schemas from properties")
+}
+
+// TestBuildIndex_InlineSchemas_Success verifies that schemas declared
+// directly under an operation (no $ref, no components) are indexed as
+// inline schemas only.
+func TestBuildIndex_InlineSchemas_Success(t *testing.T) {
+	t.Parallel()
+	ctx := t.Context()
+
+	yaml := `
+openapi: "3.1.0"
+info:
+  title: Test API
+  version: 1.0.0
+paths:
+  /users:
+    get:
+      operationId: getUsers
+      responses:
+        "200":
+          description: Success
+          content:
+            application/json:
+              schema:
+                type: array
+                items:
+                  type: object
+                  properties:
+                    id:
+                      type: integer
+`
+	doc := unmarshalOpenAPI(t, ctx, yaml)
+	idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+		RootDocument:   doc,
+		TargetDocument: doc,
+		TargetLocation: "test.yaml",
+	})
+
+	require.NotNil(t, idx, "index should not be nil")
+	assert.False(t, idx.HasErrors(), "should have no errors")
+
+	// Should have inline schemas: array, object (items), integer (id property)
+	assert.Len(t, idx.InlineSchemas, 3, "should have 3 inline schemas")
+	assert.Empty(t, idx.ComponentSchemas, "should have no component schemas")
+	assert.Empty(t, idx.SchemaReferences, "should have no schema references")
+}
+
+// TestBuildIndex_SchemaReferences_Success checks that a $ref from an
+// operation to a component schema is indexed as a schema reference,
+// distinct from the component schema itself and its inline sub-schemas.
+func TestBuildIndex_SchemaReferences_Success(t *testing.T) {
+	t.Parallel()
+	ctx := t.Context()
+
+	yml := `
+openapi: "3.1.0"
+info:
+  title: Test API
+  version: 1.0.0
+paths:
+  /users:
+    get:
+      operationId: getUsers
+      responses:
+        "200":
+          description: Success
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/User'
+components:
+  schemas:
+    User:
+      type: object
+      properties:
+        id:
+          type: integer
+`
+	doc := unmarshalOpenAPI(t, ctx, yml)
+	resolveOpts := references.ResolveOptions{
+		RootDocument:   doc,
+		TargetDocument: doc,
+		TargetLocation: "test.yaml",
+	}
+	idx := openapi.BuildIndex(ctx, doc, resolveOpts)
+
+	require.NotNil(t, idx, "index should not be nil")
+	assert.False(t, idx.HasErrors(), "should have no errors")
+
+	// One $ref (to User), one component schema (User),
+	// one inline schema (the id property).
+	assert.Len(t, idx.SchemaReferences, 1, "should have 1 schema reference")
+	assert.Len(t, idx.ComponentSchemas, 1, "should have 1 component schema")
+	assert.Len(t, idx.InlineSchemas, 1, "should have 1 inline schema")
+}
+
+// TestBuildIndex_BooleanSchemas_Success checks that boolean-valued
+// additionalProperties (both true and false) are indexed as boolean schemas.
+func TestBuildIndex_BooleanSchemas_Success(t *testing.T) {
+	t.Parallel()
+	ctx := t.Context()
+
+	yml := `
+openapi: "3.1.0"
+info:
+  title: Test API
+  version: 1.0.0
+paths: {}
+components:
+  schemas:
+    AnyValue:
+      type: object
+      additionalProperties: true
+    NoAdditional:
+      type: object
+      additionalProperties: false
+`
+	doc := unmarshalOpenAPI(t, ctx, yml)
+	resolveOpts := references.ResolveOptions{
+		RootDocument:   doc,
+		TargetDocument: doc,
+		TargetLocation: "test.yaml",
+	}
+	idx := openapi.BuildIndex(ctx, doc, resolveOpts)
+
+	require.NotNil(t, idx, "index should not be nil")
+	assert.False(t, idx.HasErrors(), "should have no errors")
+
+	// Two booleans (additionalProperties true/false) and the two named
+	// component schemas; nothing qualifies as an inline schema here.
+	assert.Len(t, idx.BooleanSchemas, 2, "should have 2 boolean schemas")
+	assert.Len(t, idx.ComponentSchemas, 2, "should have 2 component schemas")
+	assert.Empty(t, idx.InlineSchemas, "should have no inline schemas")
+}
+
+// TestBuildIndex_Servers_Success checks that top-level servers and their
+// variables are indexed.
+func TestBuildIndex_Servers_Success(t *testing.T) {
+	t.Parallel()
+	ctx := t.Context()
+
+	yml := `
+openapi: "3.1.0"
+info:
+  title: Test API
+  version: 1.0.0
+servers:
+  - url: https://api.example.com
+    description: Production
+    variables:
+      version:
+        default: v1
+        enum: [v1, v2]
+  - url: https://staging.example.com
+    description: Staging
+paths: {}
+`
+	doc := unmarshalOpenAPI(t, ctx, yml)
+	resolveOpts := references.ResolveOptions{
+		RootDocument:   doc,
+		TargetDocument: doc,
+		TargetLocation: "test.yaml",
+	}
+	idx := openapi.BuildIndex(ctx, doc, resolveOpts)
+
+	require.NotNil(t, idx, "index should not be nil")
+	assert.False(t, idx.HasErrors(), "should have no errors")
+
+	assert.Len(t, idx.Servers, 2, "should have 2 servers")
+	assert.Len(t, idx.ServerVariables, 1, "should have 1 server variable")
+}
+
+// TestBuildIndex_Tags_Success checks that top-level tags are indexed.
+func TestBuildIndex_Tags_Success(t *testing.T) {
+	t.Parallel()
+	ctx := t.Context()
+
+	yml := `
+openapi: "3.1.0"
+info:
+  title: Test API
+  version: 1.0.0
+tags:
+  - name: users
+    description: User operations
+  - name: pets
+    description: Pet operations
+paths: {}
+`
+	doc := unmarshalOpenAPI(t, ctx, yml)
+	resolveOpts := references.ResolveOptions{
+		RootDocument:   doc,
+		TargetDocument: doc,
+		TargetLocation: "test.yaml",
+	}
+	idx := openapi.BuildIndex(ctx, doc, resolveOpts)
+
+	require.NotNil(t, idx, "index should not be nil")
+	assert.False(t, idx.HasErrors(), "should have no errors")
+	assert.Len(t, idx.Tags, 2, "should have 2 tags")
+}
+
+// TestBuildIndex_ExternalDocs_Success checks that externalDocs entries are
+// indexed at both the document level and the tag level.
+func TestBuildIndex_ExternalDocs_Success(t *testing.T) {
+	t.Parallel()
+	ctx := t.Context()
+
+	yml := `
+openapi: "3.1.0"
+info:
+  title: Test API
+  version: 1.0.0
+externalDocs:
+  url: https://docs.example.com
+  description: API Documentation
+tags:
+  - name: users
+    externalDocs:
+      url: https://docs.example.com/users
+paths: {}
+`
+	doc := unmarshalOpenAPI(t, ctx, yml)
+	resolveOpts := references.ResolveOptions{
+		RootDocument:   doc,
+		TargetDocument: doc,
+		TargetLocation: "test.yaml",
+	}
+	idx := openapi.BuildIndex(ctx, doc, resolveOpts)
+
+	require.NotNil(t, idx, "index should not be nil")
+	assert.False(t, idx.HasErrors(), "should have no errors")
+	assert.Len(t, idx.ExternalDocumentation, 2, "should have 2 external docs")
+}
+
+// TestBuildIndex_GetAllSchemas_Success verifies that GetAllSchemas returns
+// the union of boolean, inline, component, and external schemas, and
+// excludes schema references.
+func TestBuildIndex_GetAllSchemas_Success(t *testing.T) {
+	t.Parallel()
+	ctx := t.Context()
+
+	yaml := `
+openapi: "3.1.0"
+info:
+  title: Test API
+  version: 1.0.0
+paths:
+  /users:
+    get:
+      operationId: getUsers
+      responses:
+        "200":
+          description: Success
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/User'
+components:
+  schemas:
+    User:
+      type: object
+      additionalProperties: true
+      properties:
+        id:
+          type: integer
+`
+	doc := unmarshalOpenAPI(t, ctx, yaml)
+	idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+		RootDocument:   doc,
+		TargetDocument: doc,
+		TargetLocation: "test.yaml",
+	})
+
+	require.NotNil(t, idx, "index should not be nil")
+
+	allSchemas := idx.GetAllSchemas()
+	assert.NotEmpty(t, allSchemas, "should have schemas")
+
+	// Should include boolean, inline, component, and external schemas (not references)
+	totalExpected := len(idx.BooleanSchemas) + len(idx.InlineSchemas) +
+		len(idx.ComponentSchemas) + len(idx.ExternalSchemas)
+	assert.Len(t, allSchemas, totalExpected, "GetAllSchemas should return all schema types")
+}
+
+// TestBuildIndex_GetAllParameters_Success verifies that GetAllParameters
+// returns the union of inline, component, and external parameters, and
+// excludes parameter references.
+func TestBuildIndex_GetAllParameters_Success(t *testing.T) {
+	t.Parallel()
+	ctx := t.Context()
+
+	yaml := `
+openapi: "3.1.0"
+info:
+  title: Test API
+  version: 1.0.0
+paths:
+  /users/{id}:
+    parameters:
+      - name: id
+        in: path
+        required: true
+        schema:
+          type: integer
+    get:
+      operationId: getUser
+      parameters:
+        - $ref: '#/components/parameters/PageSize'
+      responses:
+        "200":
+          description: Success
+components:
+  parameters:
+    PageSize:
+      name: pageSize
+      in: query
+      schema:
+        type: integer
+`
+	doc := unmarshalOpenAPI(t, ctx, yaml)
+	idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+		RootDocument:   doc,
+		TargetDocument: doc,
+		TargetLocation: "test.yaml",
+	})
+
+	require.NotNil(t, idx, "index should not be nil")
+
+	allParameters := idx.GetAllParameters()
+	assert.NotEmpty(t, allParameters, "should have parameters")
+
+	// Should include inline, component, and external parameters (not references)
+	totalExpected := len(idx.InlineParameters) + len(idx.ComponentParameters) +
+		len(idx.ExternalParameters)
+	assert.Len(t, allParameters, totalExpected, "GetAllParameters should return all parameter types")
+}
+
+// TestBuildIndex_GetAllResponses_Success verifies that GetAllResponses
+// returns the union of inline, component, and external responses, and
+// excludes response references.
+func TestBuildIndex_GetAllResponses_Success(t *testing.T) {
+	t.Parallel()
+	ctx := t.Context()
+
+	yaml := `
+openapi: "3.1.0"
+info:
+  title: Test API
+  version: 1.0.0
+paths:
+  /users:
+    get:
+      operationId: getUsers
+      responses:
+        "200":
+          description: Success
+        "404":
+          $ref: '#/components/responses/NotFound'
+components:
+  responses:
+    NotFound:
+      description: Not found
+`
+	doc := unmarshalOpenAPI(t, ctx, yaml)
+	idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+		RootDocument:   doc,
+		TargetDocument: doc,
+		TargetLocation: "test.yaml",
+	})
+
+	require.NotNil(t, idx, "index should not be nil")
+
+	allResponses := idx.GetAllResponses()
+	assert.NotEmpty(t, allResponses, "should have responses")
+
+	// Should include inline, component, and external responses (not references)
+	totalExpected := len(idx.InlineResponses) + len(idx.ComponentResponses) +
+		len(idx.ExternalResponses)
+	assert.Len(t, allResponses, totalExpected, "GetAllResponses should return all response types")
+}
+
+// TestBuildIndex_GetAllRequestBodies_Success verifies that
+// GetAllRequestBodies returns the union of inline, component, and external
+// request bodies, and excludes request-body references.
+func TestBuildIndex_GetAllRequestBodies_Success(t *testing.T) {
+	t.Parallel()
+	ctx := t.Context()
+
+	yaml := `
+openapi: "3.1.0"
+info:
+  title: Test API
+  version: 1.0.0
+paths:
+  /users:
+    post:
+      operationId: createUser
+      requestBody:
+        description: User to create
+        content:
+          application/json:
+            schema:
+              type: object
+      responses:
+        "201":
+          description: Created
+    put:
+      operationId: updateUser
+      requestBody:
+        $ref: '#/components/requestBodies/UserBody'
+      responses:
+        "200":
+          description: Updated
+components:
+  requestBodies:
+    UserBody:
+      description: User body
+      content:
+        application/json:
+          schema:
+            type: object
+`
+	doc := unmarshalOpenAPI(t, ctx, yaml)
+	idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+		RootDocument:   doc,
+		TargetDocument: doc,
+		TargetLocation: "test.yaml",
+	})
+
+	require.NotNil(t, idx, "index should not be nil")
+
+	allRequestBodies := idx.GetAllRequestBodies()
+	assert.NotEmpty(t, allRequestBodies, "should have request bodies")
+
+	// Should include inline, component, and external request bodies (not references)
+	totalExpected := len(idx.InlineRequestBodies) + len(idx.ComponentRequestBodies) +
+		len(idx.ExternalRequestBodies)
+	assert.Len(t, allRequestBodies, totalExpected, "GetAllRequestBodies should return all request body types")
+}
+
+// TestBuildIndex_GetAllHeaders_Success verifies that GetAllHeaders returns
+// the union of inline, component, and external headers, and excludes
+// header references.
+func TestBuildIndex_GetAllHeaders_Success(t *testing.T) {
+	t.Parallel()
+	ctx := t.Context()
+
+	yaml := `
+openapi: "3.1.0"
+info:
+  title: Test API
+  version: 1.0.0
+paths:
+  /users:
+    get:
+      operationId: getUsers
+      responses:
+        "200":
+          description: Success
+          headers:
+            X-Rate-Limit:
+              description: Rate limit
+              schema:
+                type: integer
+            X-Custom:
+              $ref: '#/components/headers/CustomHeader'
+components:
+  headers:
+    CustomHeader:
+      description: Custom header
+      schema:
+        type: string
+`
+	doc := unmarshalOpenAPI(t, ctx, yaml)
+	idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+		RootDocument:   doc,
+		TargetDocument: doc,
+		TargetLocation: "test.yaml",
+	})
+
+	require.NotNil(t, idx, "index should not be nil")
+
+	allHeaders := idx.GetAllHeaders()
+	assert.NotEmpty(t, allHeaders, "should have headers")
+
+	// Should include inline, component, and external headers (not references)
+	totalExpected := len(idx.InlineHeaders) + len(idx.ComponentHeaders) +
+		len(idx.ExternalHeaders)
+	assert.Len(t, allHeaders, totalExpected, "GetAllHeaders should return all header types")
+}
+
+// TestBuildIndex_GetAllExamples_Success verifies that GetAllExamples
+// returns the union of inline, component, and external examples, and
+// excludes example references.
+func TestBuildIndex_GetAllExamples_Success(t *testing.T) {
+	t.Parallel()
+	ctx := t.Context()
+
+	yaml := `
+openapi: "3.1.0"
+info:
+  title: Test API
+  version: 1.0.0
+paths:
+  /users:
+    get:
+      operationId: getUsers
+      responses:
+        "200":
+          description: Success
+          content:
+            application/json:
+              examples:
+                inline:
+                  value: { id: 1 }
+                referenced:
+                  $ref: '#/components/examples/UserExample'
+components:
+  examples:
+    UserExample:
+      value: { id: 2 }
+`
+	doc := unmarshalOpenAPI(t, ctx, yaml)
+	idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+		RootDocument:   doc,
+		TargetDocument: doc,
+		TargetLocation: "test.yaml",
+	})
+
+	require.NotNil(t, idx, "index should not be nil")
+
+	allExamples := idx.GetAllExamples()
+	assert.NotEmpty(t, allExamples, "should have examples")
+
+	// Should include inline, component, and external examples (not references)
+	totalExpected := len(idx.InlineExamples) + len(idx.ComponentExamples) +
+		len(idx.ExternalExamples)
+	assert.Len(t, allExamples, totalExpected, "GetAllExamples should return all example types")
+}
+
+// TestBuildIndex_GetAllLinks_Success verifies that GetAllLinks returns the
+// union of inline, component, and external links, and excludes link
+// references.
+func TestBuildIndex_GetAllLinks_Success(t *testing.T) {
+	t.Parallel()
+	ctx := t.Context()
+
+	yaml := `
+openapi: "3.1.0"
+info:
+  title: Test API
+  version: 1.0.0
+paths:
+  /users:
+    get:
+      operationId: getUsers
+      responses:
+        "200":
+          description: Success
+          links:
+            GetUserById:
+              operationId: getUsers
+            ReferencedLink:
+              $ref: '#/components/links/CustomLink'
+  /products:
+    get:
+      operationId: getProducts
+      responses:
+        "200":
+          description: Success
+components:
+  links:
+    CustomLink:
+      operationId: getProducts
+`
+	doc := unmarshalOpenAPI(t, ctx, yaml)
+	idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+		RootDocument:   doc,
+		TargetDocument: doc,
+		TargetLocation: "test.yaml",
+	})
+
+	require.NotNil(t, idx, "index should not be nil")
+
+	allLinks := idx.GetAllLinks()
+	assert.NotEmpty(t, allLinks, "should have links")
+
+	// Should include inline, component, and external links (not references)
+	totalExpected := len(idx.InlineLinks) + len(idx.ComponentLinks) +
+		len(idx.ExternalLinks)
+	assert.Len(t, allLinks, totalExpected, "GetAllLinks should return all link types")
+}
+
+// TestBuildIndex_GetAllCallbacks_Success verifies that GetAllCallbacks
+// returns the union of inline, component, and external callbacks, and
+// excludes callback references.
+func TestBuildIndex_GetAllCallbacks_Success(t *testing.T) {
+	t.Parallel()
+	ctx := t.Context()
+
+	yaml := `
+openapi: "3.1.0"
+info:
+  title: Test API
+  version: 1.0.0
+paths:
+  /subscribe:
+    post:
+      operationId: subscribe
+      callbacks:
+        onData:
+          '{$request.body#/callbackUrl}':
+            post:
+              requestBody:
+                description: Callback
+                content:
+                  application/json:
+                    schema:
+                      type: object
+              responses:
+                "200":
+                  description: OK
+        onComplete:
+          $ref: '#/components/callbacks/CompleteCallback'
+      responses:
+        "201":
+          description: Created
+components:
+  callbacks:
+    CompleteCallback:
+      '{$request.body#/callbackUrl}':
+        post:
+          requestBody:
+            description: Complete callback
+            content:
+              application/json:
+                schema:
+                  type: object
+          responses:
+            "200":
+              description: OK
+`
+	doc := unmarshalOpenAPI(t, ctx, yaml)
+	idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+		RootDocument:   doc,
+		TargetDocument: doc,
+		TargetLocation: "test.yaml",
+	})
+
+	require.NotNil(t, idx, "index should not be nil")
+
+	allCallbacks := idx.GetAllCallbacks()
+	assert.NotEmpty(t, allCallbacks, "should have callbacks")
+
+	// Should include inline, component, and external callbacks (not references)
+	totalExpected := len(idx.InlineCallbacks) + len(idx.ComponentCallbacks) +
+		len(idx.ExternalCallbacks)
+	assert.Len(t, allCallbacks, totalExpected, "GetAllCallbacks should return all callback types")
+}
+
+// TestBuildIndex_NilIndex_Methods_Success confirms that every accessor on a
+// nil *Index is safe to call and returns its zero result.
+func TestBuildIndex_NilIndex_Methods_Success(t *testing.T) {
+	t.Parallel()
+
+	var idx *openapi.Index
+
+	assert.False(t, idx.HasErrors(), "nil index HasErrors should return false")
+	assert.Nil(t, idx.GetAllSchemas(), "nil index GetAllSchemas should return nil")
+	assert.Nil(t, idx.GetAllPathItems(), "nil index GetAllPathItems should return nil")
+	assert.Nil(t, idx.GetValidationErrors(), "nil index GetValidationErrors should return nil")
+	assert.Nil(t, idx.GetResolutionErrors(), "nil index GetResolutionErrors should return nil")
+	assert.Nil(t, idx.GetCircularReferenceErrors(), "nil index GetCircularReferenceErrors should return nil")
+	assert.Nil(t, idx.GetAllErrors(), "nil index GetAllErrors should return nil")
+}
+
+// Tests for circular reference detection
+
+// TestBuildIndex_CircularRef_OptionalProperty_Valid checks that a schema
+// which recurses through an optional property is accepted: since the
+// property is not required, {} is a valid instance and terminates the cycle.
+func TestBuildIndex_CircularRef_OptionalProperty_Valid(t *testing.T) {
+	t.Parallel()
+	ctx := t.Context()
+
+	yml := `
+openapi: "3.1.0"
+info:
+  title: Test API
+  version: 1.0.0
+paths: {}
+components:
+  schemas:
+    Node:
+      type: object
+      properties:
+        next:
+          $ref: '#/components/schemas/Node'
+`
+	doc := unmarshalOpenAPI(t, ctx, yml)
+	resolveOpts := references.ResolveOptions{
+		RootDocument:   doc,
+		TargetDocument: doc,
+		TargetLocation: "test.yaml",
+	}
+	idx := openapi.BuildIndex(ctx, doc, resolveOpts)
+
+	require.NotNil(t, idx, "index should not be nil")
+
+	// No circular-reference errors expected for optional-property recursion.
+	assert.Empty(t, idx.GetCircularReferenceErrors(), "optional property circular ref should be valid (no error)")
+}
+
+// TestBuildIndex_CircularRef_RequiredProperty_Invalid verifies that mutual
+// recursion through required properties is reported as an invalid circular
+// reference (no instance can terminate the cycle).
+func TestBuildIndex_CircularRef_RequiredProperty_Invalid(t *testing.T) {
+	t.Parallel()
+	ctx := t.Context()
+
+	// Required property recursion - INVALID (no base case)
+	yaml := `
+openapi: "3.1.0"
+info:
+  title: Test API
+  version: 1.0.0
+paths: {}
+components:
+  schemas:
+    A:
+      type: object
+      required: [b]
+      properties:
+        b:
+          $ref: '#/components/schemas/B'
+    B:
+      type: object
+      required: [a]
+      properties:
+        a:
+          $ref: '#/components/schemas/A'
+`
+	doc := unmarshalOpenAPI(t, ctx, yaml)
+	idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+		RootDocument:   doc,
+		TargetDocument: doc,
+		TargetLocation: "test.yaml",
+	})
+
+	require.NotNil(t, idx, "index should not be nil")
+
+	// Required property circular refs should be INVALID
+	circularErrs := idx.GetCircularReferenceErrors()
+	assert.NotEmpty(t, circularErrs, "required property circular ref should be invalid")
+}
+
+// TestBuildIndex_CircularRef_ArrayMinItemsZero_Valid verifies that recursion
+// through an array with the default minItems (0) is accepted: an empty
+// array terminates the cycle.
+func TestBuildIndex_CircularRef_ArrayMinItemsZero_Valid(t *testing.T) {
+	t.Parallel()
+	ctx := t.Context()
+
+	// Array with default minItems (0) - VALID (empty array terminates)
+	yaml := `
+openapi: "3.1.0"
+info:
+  title: Test API
+  version: 1.0.0
+paths: {}
+components:
+  schemas:
+    Category:
+      type: object
+      required: [children]
+      properties:
+        children:
+          type: array
+          items:
+            $ref: '#/components/schemas/Category'
+`
+	doc := unmarshalOpenAPI(t, ctx, yaml)
+	idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+		RootDocument:   doc,
+		TargetDocument: doc,
+		TargetLocation: "test.yaml",
+	})
+
+	require.NotNil(t, idx, "index should not be nil")
+
+	// Array with minItems=0 circular refs should be VALID
+	circularErrs := idx.GetCircularReferenceErrors()
+	assert.Empty(t, circularErrs, "array with minItems=0 circular ref should be valid")
+}
+
+// TestBuildIndex_CircularRef_ArrayMinItemsOne_Invalid verifies that
+// recursion through a required array with minItems >= 1 is reported as
+// invalid: an empty array cannot terminate the cycle.
+func TestBuildIndex_CircularRef_ArrayMinItemsOne_Invalid(t *testing.T) {
+	t.Parallel()
+	ctx := t.Context()
+
+	// Array with minItems=1 - INVALID (can't have empty array)
+	yaml := `
+openapi: "3.1.0"
+info:
+  title: Test API
+  version: 1.0.0
+paths: {}
+components:
+  schemas:
+    Node:
+      type: object
+      required: [children]
+      properties:
+        children:
+          type: array
+          minItems: 1
+          items:
+            $ref: '#/components/schemas/Node'
+`
+	doc := unmarshalOpenAPI(t, ctx, yaml)
+	idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+		RootDocument:   doc,
+		TargetDocument: doc,
+		TargetLocation: "test.yaml",
+	})
+
+	require.NotNil(t, idx, "index should not be nil")
+
+	// Array with minItems>=1 circular refs should be INVALID
+	circularErrs := idx.GetCircularReferenceErrors()
+	assert.NotEmpty(t, circularErrs, "array with minItems>=1 circular ref should be invalid")
+}
+
+// TestBuildIndex_CircularRef_Nullable_Valid verifies that recursion through
+// a required but nullable property is accepted: null is a base case that
+// terminates the cycle.
+func TestBuildIndex_CircularRef_Nullable_Valid(t *testing.T) {
+	t.Parallel()
+	ctx := t.Context()
+
+	// Nullable type union - VALID (null is a base case)
+	yaml := `
+openapi: "3.1.0"
+info:
+  title: Test API
+  version: 1.0.0
+paths: {}
+components:
+  schemas:
+    Node:
+      type: [object, "null"]
+      required: [next]
+      properties:
+        next:
+          $ref: '#/components/schemas/Node'
+`
+	doc := unmarshalOpenAPI(t, ctx, yaml)
+	idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+		RootDocument:   doc,
+		TargetDocument: doc,
+		TargetLocation: "test.yaml",
+	})
+
+	require.NotNil(t, idx, "index should not be nil")
+
+	// Nullable circular refs should be VALID
+	circularErrs := idx.GetCircularReferenceErrors()
+	assert.Empty(t, circularErrs, "nullable circular ref should be valid")
+}
+
+// TestBuildIndex_CircularRef_AdditionalPropertiesMinZero_Valid verifies that
+// recursion through additionalProperties with the default minProperties (0)
+// is accepted: an empty object terminates the cycle.
+func TestBuildIndex_CircularRef_AdditionalPropertiesMinZero_Valid(t *testing.T) {
+	t.Parallel()
+	ctx := t.Context()
+
+	// AdditionalProperties with default minProperties (0) - VALID
+	yaml := `
+openapi: "3.1.0"
+info:
+  title: Test API
+  version: 1.0.0
+paths: {}
+components:
+  schemas:
+    TrieNode:
+      type: object
+      required: [children]
+      properties:
+        children:
+          type: object
+          additionalProperties:
+            $ref: '#/components/schemas/TrieNode'
+`
+	doc := unmarshalOpenAPI(t, ctx, yaml)
+	idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+		RootDocument:   doc,
+		TargetDocument: doc,
+		TargetLocation: "test.yaml",
+	})
+
+	require.NotNil(t, idx, "index should not be nil")
+
+	// AdditionalProperties with minProperties=0 should be VALID
+	circularErrs := idx.GetCircularReferenceErrors()
+	assert.Empty(t, circularErrs, "additionalProperties with minProperties=0 should be valid")
+}
+
+// TestBuildIndex_CircularRef_AdditionalPropertiesMinOne_Invalid verifies
+// that recursion through additionalProperties with minProperties >= 1 is
+// reported as invalid: an empty object cannot terminate the cycle.
+func TestBuildIndex_CircularRef_AdditionalPropertiesMinOne_Invalid(t *testing.T) {
+	t.Parallel()
+	ctx := t.Context()
+
+	// AdditionalProperties with minProperties>=1 - INVALID
+	yaml := `
+openapi: "3.1.0"
+info:
+  title: Test API
+  version: 1.0.0
+paths: {}
+components:
+  schemas:
+    Node:
+      type: object
+      required: [children]
+      properties:
+        children:
+          type: object
+          minProperties: 1
+          additionalProperties:
+            $ref: '#/components/schemas/Node'
+`
+	doc := unmarshalOpenAPI(t, ctx, yaml)
+	idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+		RootDocument:   doc,
+		TargetDocument: doc,
+		TargetLocation: "test.yaml",
+	})
+
+	require.NotNil(t, idx, "index should not be nil")
+
+	// AdditionalProperties with minProperties>=1 should be INVALID
+	circularErrs := idx.GetCircularReferenceErrors()
+	assert.NotEmpty(t, circularErrs, "additionalProperties with minProperties>=1 should be invalid")
+}
+
+func TestBuildIndex_CircularRef_OneOfWithNonRecursiveBranch_Valid(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ // oneOf with at least one non-recursive branch - VALID
+ yaml := `
+openapi: "3.1.0"
+info:
+ title: Test API
+ version: 1.0.0
+paths: {}
+components:
+ schemas:
+ Expr:
+ oneOf:
+ - $ref: '#/components/schemas/Literal'
+ - $ref: '#/components/schemas/BinaryExpr'
+ Literal:
+ type: object
+ properties:
+ value:
+ type: string
+ BinaryExpr:
+ type: object
+ required: [left, right]
+ properties:
+ left:
+ $ref: '#/components/schemas/Expr'
+ right:
+ $ref: '#/components/schemas/Expr'
+`
+ doc := unmarshalOpenAPI(t, ctx, yaml)
+ idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+ RootDocument: doc,
+ TargetDocument: doc,
+ TargetLocation: "test.yaml",
+ })
+
+ require.NotNil(t, idx, "index should not be nil")
+
+ // oneOf with a non-recursive branch should be VALID
+ circularErrs := idx.GetCircularReferenceErrors()
+ assert.Empty(t, circularErrs, "oneOf with non-recursive branch should be valid")
+}
+
+func TestBuildIndex_CircularRef_DirectSelfRef_Optional_Valid(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ // Direct self-reference through optional property - VALID
+ yaml := `
+openapi: "3.1.0"
+info:
+ title: Test API
+ version: 1.0.0
+paths: {}
+components:
+ schemas:
+ LinkedNode:
+ type: object
+ properties:
+ value:
+ type: string
+ next:
+ $ref: '#/components/schemas/LinkedNode'
+`
+ doc := unmarshalOpenAPI(t, ctx, yaml)
+ idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+ RootDocument: doc,
+ TargetDocument: doc,
+ TargetLocation: "test.yaml",
+ })
+
+ require.NotNil(t, idx, "index should not be nil")
+
+ circularErrs := idx.GetCircularReferenceErrors()
+ assert.Empty(t, circularErrs, "direct self-ref through optional should be valid")
+}
+
+func TestBuildIndex_CircularRef_DirectSelfRef_Required_Invalid(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ // Direct self-reference through required property - INVALID
+ yaml := `
+openapi: "3.1.0"
+info:
+ title: Test API
+ version: 1.0.0
+paths: {}
+components:
+ schemas:
+ InfiniteNode:
+ type: object
+ required: [self]
+ properties:
+ self:
+ $ref: '#/components/schemas/InfiniteNode'
+`
+ doc := unmarshalOpenAPI(t, ctx, yaml)
+ idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+ RootDocument: doc,
+ TargetDocument: doc,
+ TargetLocation: "test.yaml",
+ })
+
+ require.NotNil(t, idx, "index should not be nil")
+
+ circularErrs := idx.GetCircularReferenceErrors()
+ assert.NotEmpty(t, circularErrs, "direct self-ref through required should be invalid")
+}
+
+func TestBuildIndex_NoCircularRef_Success(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ // No circular reference - just regular refs
+ yaml := `
+openapi: "3.1.0"
+info:
+ title: Test API
+ version: 1.0.0
+paths:
+ /users:
+ get:
+ operationId: getUsers
+ responses:
+ "200":
+ description: Success
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/User'
+components:
+ schemas:
+ User:
+ type: object
+ properties:
+ id:
+ type: integer
+ address:
+ $ref: '#/components/schemas/Address'
+ Address:
+ type: object
+ properties:
+ street:
+ type: string
+ city:
+ type: string
+`
+ doc := unmarshalOpenAPI(t, ctx, yaml)
+ idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+ RootDocument: doc,
+ TargetDocument: doc,
+ TargetLocation: "test.yaml",
+ })
+
+ require.NotNil(t, idx, "index should not be nil")
+ assert.False(t, idx.HasErrors(), "should have no errors")
+ assert.Empty(t, idx.GetCircularReferenceErrors(), "should have no circular reference errors")
+}
+
+func TestBuildIndex_LocationInfo_Success(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ yaml := `
+openapi: "3.1.0"
+info:
+ title: Test API
+ version: 1.0.0
+paths: {}
+components:
+ schemas:
+ User:
+ type: object
+ properties:
+ id:
+ type: integer
+`
+ doc := unmarshalOpenAPI(t, ctx, yaml)
+ idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+ RootDocument: doc,
+ TargetDocument: doc,
+ TargetLocation: "test.yaml",
+ })
+
+ require.NotNil(t, idx, "index should not be nil")
+
+ // Check that schemas have location information
+ for _, schema := range idx.ComponentSchemas {
+ assert.NotNil(t, schema.Location, "schema should have location")
+ jp := schema.Location.ToJSONPointer()
+ assert.NotEmpty(t, jp, "location should produce JSON pointer")
+ }
+}
+
+func TestBuildIndex_Operations_Success(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ yaml := `
+openapi: "3.1.0"
+info:
+ title: Test API
+ version: 1.0.0
+paths:
+ /users:
+ get:
+ operationId: getUsers
+ summary: Get users
+ responses:
+ "200":
+ description: Success
+ post:
+ operationId: createUser
+ summary: Create user
+ responses:
+ "201":
+ description: Created
+ /products:
+ get:
+ operationId: getProducts
+ summary: Get products
+ responses:
+ "200":
+ description: Success
+`
+ doc := unmarshalOpenAPI(t, ctx, yaml)
+ idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+ RootDocument: doc,
+ TargetDocument: doc,
+ TargetLocation: "test.yaml",
+ })
+
+ require.NotNil(t, idx, "index should not be nil")
+ assert.False(t, idx.HasErrors(), "should have no errors")
+
+ // Should have 3 operations indexed
+ assert.Len(t, idx.Operations, 3, "should have 3 operations")
+ // Should have 2 inline path items
+ assert.Len(t, idx.InlinePathItems, 2, "should have 2 inline path items")
+}
+
+func TestBuildIndex_Parameters_Success(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ yaml := `
+openapi: "3.1.0"
+info:
+ title: Test API
+ version: 1.0.0
+paths:
+ /users/{id}:
+ parameters:
+ - name: id
+ in: path
+ required: true
+ schema:
+ type: integer
+ get:
+ operationId: getUser
+ responses:
+ "200":
+ description: Success
+ parameters:
+ - $ref: '#/components/parameters/PageSize'
+components:
+ parameters:
+ PageSize:
+ name: pageSize
+ in: query
+ schema:
+ type: integer
+`
+ doc := unmarshalOpenAPI(t, ctx, yaml)
+ idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+ RootDocument: doc,
+ TargetDocument: doc,
+ TargetLocation: "test.yaml",
+ })
+
+ require.NotNil(t, idx, "index should not be nil")
+ assert.False(t, idx.HasErrors(), "should have no errors")
+
+ // Should have 1 component parameter (PageSize)
+ assert.Len(t, idx.ComponentParameters, 1, "should have 1 component parameter")
+ // Should have 1 inline parameter (id in path)
+ assert.Len(t, idx.InlineParameters, 1, "should have 1 inline parameter")
+ // Should have 1 parameter reference ($ref to PageSize)
+ assert.Len(t, idx.ParameterReferences, 1, "should have 1 parameter reference")
+}
+
+func TestBuildIndex_Responses_Success(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ yaml := `
+openapi: "3.1.0"
+info:
+ title: Test API
+ version: 1.0.0
+paths:
+ /users:
+ get:
+ operationId: getUsers
+ responses:
+ "200":
+ description: Success
+ content:
+ application/json:
+ schema:
+ type: array
+ "404":
+ $ref: '#/components/responses/NotFound'
+components:
+ responses:
+ NotFound:
+ description: Not found
+`
+ doc := unmarshalOpenAPI(t, ctx, yaml)
+ idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+ RootDocument: doc,
+ TargetDocument: doc,
+ TargetLocation: "test.yaml",
+ })
+
+ require.NotNil(t, idx, "index should not be nil")
+ assert.False(t, idx.HasErrors(), "should have no errors")
+
+ // Should have 1 component response (NotFound)
+ assert.Len(t, idx.ComponentResponses, 1, "should have 1 component response")
+ // Should have 1 inline response (200)
+ assert.Len(t, idx.InlineResponses, 1, "should have 1 inline response")
+ // Should have 1 response reference ($ref to NotFound)
+ assert.Len(t, idx.ResponseReferences, 1, "should have 1 response reference")
+}
+
+func TestBuildIndex_RequestBodies_Success(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ yaml := `
+openapi: "3.1.0"
+info:
+ title: Test API
+ version: 1.0.0
+paths:
+ /users:
+ post:
+ operationId: createUser
+ requestBody:
+ description: User to create
+ content:
+ application/json:
+ schema:
+ type: object
+ responses:
+ "201":
+ description: Created
+ put:
+ operationId: updateUser
+ requestBody:
+ $ref: '#/components/requestBodies/UserBody'
+ responses:
+ "200":
+ description: Updated
+components:
+ requestBodies:
+ UserBody:
+ description: User body
+ content:
+ application/json:
+ schema:
+ type: object
+`
+ doc := unmarshalOpenAPI(t, ctx, yaml)
+ idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+ RootDocument: doc,
+ TargetDocument: doc,
+ TargetLocation: "test.yaml",
+ })
+
+ require.NotNil(t, idx, "index should not be nil")
+ assert.False(t, idx.HasErrors(), "should have no errors")
+
+ // Should have 1 component request body (UserBody)
+ assert.Len(t, idx.ComponentRequestBodies, 1, "should have 1 component request body")
+ // Should have 1 inline request body (POST)
+ assert.Len(t, idx.InlineRequestBodies, 1, "should have 1 inline request body")
+ // Should have 1 request body reference ($ref to UserBody)
+ assert.Len(t, idx.RequestBodyReferences, 1, "should have 1 request body reference")
+}
+
+func TestBuildIndex_MediaTypes_Success(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ yaml := `
+openapi: "3.1.0"
+info:
+ title: Test API
+ version: 1.0.0
+paths:
+ /users:
+ post:
+ operationId: createUser
+ requestBody:
+ content:
+ application/json:
+ schema:
+ type: object
+ application/xml:
+ schema:
+ type: object
+ responses:
+ "200":
+ description: Success
+ content:
+ application/json:
+ schema:
+ type: object
+`
+ doc := unmarshalOpenAPI(t, ctx, yaml)
+ idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+ RootDocument: doc,
+ TargetDocument: doc,
+ TargetLocation: "test.yaml",
+ })
+
+ require.NotNil(t, idx, "index should not be nil")
+ assert.False(t, idx.HasErrors(), "should have no errors")
+
+ // Should have 3 media types (2 in request, 1 in response)
+ assert.Len(t, idx.MediaTypes, 3, "should have 3 media types")
+}
+
+func TestBuildIndex_Discriminator_Success(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ yaml := `
+openapi: "3.1.0"
+info:
+ title: Test API
+ version: 1.0.0
+paths: {}
+components:
+ schemas:
+ Pet:
+ type: object
+ discriminator:
+ propertyName: petType
+ mapping:
+ dog: '#/components/schemas/Dog'
+ cat: '#/components/schemas/Cat'
+ properties:
+ petType:
+ type: string
+ Dog:
+ type: object
+ Cat:
+ type: object
+`
+ doc := unmarshalOpenAPI(t, ctx, yaml)
+ idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+ RootDocument: doc,
+ TargetDocument: doc,
+ TargetLocation: "test.yaml",
+ })
+
+ require.NotNil(t, idx, "index should not be nil")
+ assert.False(t, idx.HasErrors(), "should have no errors")
+
+ // Should have 1 discriminator
+ assert.Len(t, idx.Discriminators, 1, "should have 1 discriminator")
+}
+
+func TestBuildIndex_SecuritySchemes_Success(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ yaml := `
+openapi: "3.1.0"
+info:
+ title: Test API
+ version: 1.0.0
+paths: {}
+components:
+ securitySchemes:
+ apiKey:
+ type: apiKey
+ in: header
+ name: X-API-Key
+ oauth2:
+ type: oauth2
+ flows:
+ implicit:
+ authorizationUrl: https://example.com/oauth/authorize
+ scopes:
+ read: Read access
+ write: Write access
+`
+ doc := unmarshalOpenAPI(t, ctx, yaml)
+ idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+ RootDocument: doc,
+ TargetDocument: doc,
+ TargetLocation: "test.yaml",
+ })
+
+ require.NotNil(t, idx, "index should not be nil")
+ assert.False(t, idx.HasErrors(), "should have no errors")
+
+ // Should have 2 component security schemes
+ assert.Len(t, idx.ComponentSecuritySchemes, 2, "should have 2 component security schemes")
+ // Should have 1 OAuth flows container
+ assert.Len(t, idx.OAuthFlows, 1, "should have 1 OAuth flows")
+ // Should have 1 OAuth flow item (implicit)
+ assert.Len(t, idx.OAuthFlowItems, 1, "should have 1 OAuth flow item")
+}
+
+func TestBuildIndex_UnknownProperties_DetectedAsWarnings(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ tests := []struct {
+ name string
+ yaml string
+ expectedWarningCount int
+ expectedWarningSubstr string
+ }{
+ {
+ name: "MediaType with $ref property in OpenAPI 3.1",
+ yaml: `
+openapi: "3.1.0"
+info:
+ title: Test API
+ version: 1.0.0
+paths:
+ /vehicles:
+ get:
+ responses:
+ "200":
+ description: Success
+ content:
+ application/json:
+ $ref: '#/components/schemas/VehiclesResponse'
+components:
+ schemas:
+ VehiclesResponse:
+ type: object
+ properties:
+ vehicles:
+ type: array
+`,
+ expectedWarningCount: 1,
+ expectedWarningSubstr: "unknown property `$ref`",
+ },
+ {
+ name: "MediaType with schema property (valid)",
+ yaml: `
+openapi: "3.1.0"
+info:
+ title: Test API
+ version: 1.0.0
+paths:
+ /vehicles:
+ get:
+ responses:
+ "200":
+ description: Success
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/VehiclesResponse'
+components:
+ schemas:
+ VehiclesResponse:
+ type: object
+`,
+ expectedWarningCount: 0,
+ expectedWarningSubstr: "",
+ },
+ {
+ name: "Operation with unknown property",
+ yaml: `
+openapi: "3.1.0"
+info:
+ title: Test API
+ version: 1.0.0
+paths:
+ /test:
+ get:
+ unknownField: value
+ responses:
+ "200":
+ description: Success
+`,
+ expectedWarningCount: 1,
+ expectedWarningSubstr: "unknown property `unknownField`",
+ },
+ {
+ name: "Schema property with unknown keyword",
+ yaml: `
+openapi: "3.1.0"
+info:
+ title: Test API
+ version: 1.0.0
+paths:
+ /test:
+ post:
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ type: object
+ example: foobar
+ properties:
+ test:
+ type: string
+ description: Test
+ name: foo
+ responses:
+ "204":
+ description: No content
+`,
+ expectedWarningCount: 1,
+ expectedWarningSubstr: "unknown property `name`",
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ t.Parallel()
+
+ doc := unmarshalOpenAPI(t, ctx, tt.yaml)
+ idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+ RootDocument: doc,
+ TargetDocument: doc,
+ TargetLocation: "test.yaml",
+ })
+
+ require.NotNil(t, idx, "index should not be nil")
+
+ allErrors := idx.GetAllErrors()
+ warnings := []error{}
+ for _, err := range allErrors {
+ var vErr *validation.Error
+ if errors.As(err, &vErr) && vErr.Severity == validation.SeverityWarning {
+ warnings = append(warnings, err)
+ }
+ }
+
+ assert.Len(t, warnings, tt.expectedWarningCount, "should have expected number of warnings")
+
+ if tt.expectedWarningCount > 0 {
+ found := false
+ for _, w := range warnings {
+ if strings.Contains(w.Error(), tt.expectedWarningSubstr) {
+ found = true
+ break
+ }
+ }
+ assert.True(t, found, "should have warning containing '%s'", tt.expectedWarningSubstr)
+ }
+ })
+ }
+}
+
+func TestBuildIndex_UnknownProperties_Deduplicated_WhenComponentReferencedMultipleTimes(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ // Create a schema with an unknown property that is referenced from multiple operations
+ yaml := `
+openapi: "3.1.0"
+info:
+ title: Test API
+ version: 1.0.0
+paths:
+ /users:
+ get:
+ responses:
+ "200":
+ description: Get users
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/User'
+ post:
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/User'
+ responses:
+ "201":
+ description: Created
+ /admin/users:
+ get:
+ responses:
+ "200":
+ description: Get admin users
+ content:
+ application/json:
+ schema:
+ type: array
+ items:
+ $ref: '#/components/schemas/User'
+components:
+ schemas:
+ User:
+ type: object
+ unknownField: this-should-trigger-warning
+ properties:
+ id:
+ type: string
+ name:
+ type: string
+`
+
+ doc := unmarshalOpenAPI(t, ctx, yaml)
+ idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+ RootDocument: doc,
+ TargetDocument: doc,
+ TargetLocation: "test.yaml",
+ })
+
+ require.NotNil(t, idx, "index should not be nil")
+
+ // Get all warnings
+ allErrors := idx.GetAllErrors()
+ unknownPropWarnings := []error{}
+ for _, err := range allErrors {
+ var vErr *validation.Error
+ if errors.As(err, &vErr) && vErr.Severity == validation.SeverityWarning {
+ if strings.Contains(err.Error(), "unknown property `unknownField`") {
+ unknownPropWarnings = append(unknownPropWarnings, err)
+ }
+ }
+ }
+
+ // Despite the User schema being referenced 3 times (in 3 different operations),
+ // we should only get 1 warning for the unknown property
+ assert.Len(t, unknownPropWarnings, 1, "should only have 1 warning for unknownField despite multiple references")
+ assert.Contains(t, unknownPropWarnings[0].Error(), "unknown property `unknownField`", "warning should mention the unknown field")
+}
+
+func TestBuildIndex_CircularReferenceCounts_ValidCircular_Success(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ yaml := `
+openapi: "3.1.0"
+info:
+ title: Test API
+ version: 1.0.0
+paths: {}
+components:
+ schemas:
+ Node:
+ type: object
+ properties:
+ value:
+ type: string
+ next:
+ $ref: '#/components/schemas/Node'
+`
+ doc := unmarshalOpenAPI(t, ctx, yaml)
+ idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+ RootDocument: doc,
+ TargetDocument: doc,
+ TargetLocation: "test.yaml",
+ })
+
+ require.NotNil(t, idx, "index should not be nil")
+ assert.Equal(t, 1, idx.GetValidCircularRefCount(), "should have 1 valid circular reference")
+ assert.Equal(t, 0, idx.GetInvalidCircularRefCount(), "should have 0 invalid circular references")
+ assert.Empty(t, idx.GetCircularReferenceErrors(), "should have no circular reference errors")
+}
+
+func TestBuildIndex_CircularReferenceCounts_InvalidCircular_Success(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ yaml := `
+openapi: "3.1.0"
+info:
+ title: Test API
+ version: 1.0.0
+paths: {}
+components:
+ schemas:
+ BadNode:
+ type: object
+ required:
+ - next
+ properties:
+ next:
+ $ref: '#/components/schemas/BadNode'
+`
+ doc := unmarshalOpenAPI(t, ctx, yaml)
+ idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+ RootDocument: doc,
+ TargetDocument: doc,
+ TargetLocation: "test.yaml",
+ })
+
+ require.NotNil(t, idx, "index should not be nil")
+ assert.Equal(t, 0, idx.GetValidCircularRefCount(), "should have 0 valid circular references")
+ assert.Equal(t, 1, idx.GetInvalidCircularRefCount(), "should have 1 invalid circular reference")
+ assert.Len(t, idx.GetCircularReferenceErrors(), 1, "should have 1 circular reference error")
+}
+
+func TestBuildIndex_CircularReferenceCounts_MixedCirculars_Success(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ yaml := `
+openapi: "3.1.0"
+info:
+ title: Test API
+ version: 1.0.0
+paths: {}
+components:
+ schemas:
+ GoodNode:
+ type: object
+ properties:
+ value:
+ type: string
+ next:
+ $ref: '#/components/schemas/GoodNode'
+ BadNode:
+ type: object
+ required:
+ - next
+ properties:
+ next:
+ $ref: '#/components/schemas/BadNode'
+`
+ doc := unmarshalOpenAPI(t, ctx, yaml)
+ idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+ RootDocument: doc,
+ TargetDocument: doc,
+ TargetLocation: "test.yaml",
+ })
+
+ require.NotNil(t, idx, "index should not be nil")
+ assert.Equal(t, 1, idx.GetValidCircularRefCount(), "should have 1 valid circular reference")
+ assert.Equal(t, 1, idx.GetInvalidCircularRefCount(), "should have 1 invalid circular reference")
+ assert.Len(t, idx.GetCircularReferenceErrors(), 1, "should have 1 circular reference error")
+}
+
+func TestBuildIndex_CircularReferenceCounts_ArrayWithMinItems_Success(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ yaml := `
+openapi: "3.1.0"
+info:
+ title: Test API
+ version: 1.0.0
+paths: {}
+components:
+ schemas:
+ TreeNode:
+ type: object
+ properties:
+ children:
+ type: array
+ items:
+ $ref: '#/components/schemas/TreeNode'
+`
+ doc := unmarshalOpenAPI(t, ctx, yaml)
+ idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+ RootDocument: doc,
+ TargetDocument: doc,
+ TargetLocation: "test.yaml",
+ })
+
+ require.NotNil(t, idx, "index should not be nil")
+ assert.Equal(t, 1, idx.GetValidCircularRefCount(), "should have 1 valid circular reference (empty array terminates)")
+ assert.Equal(t, 0, idx.GetInvalidCircularRefCount(), "should have 0 invalid circular references")
+ assert.Empty(t, idx.GetCircularReferenceErrors(), "should have no circular reference errors")
+}
+
+func TestBuildIndex_CircularReferenceCounts_NullableSchema_Success(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ yaml := `
+openapi: "3.1.0"
+info:
+ title: Test API
+ version: 1.0.0
+paths: {}
+components:
+ schemas:
+ NullableNode:
+ type: object
+ nullable: true
+ required:
+ - next
+ properties:
+ next:
+ $ref: '#/components/schemas/NullableNode'
+`
+ doc := unmarshalOpenAPI(t, ctx, yaml)
+ idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+ RootDocument: doc,
+ TargetDocument: doc,
+ TargetLocation: "test.yaml",
+ })
+
+ require.NotNil(t, idx, "index should not be nil")
+ assert.Equal(t, 1, idx.GetValidCircularRefCount(), "should have 1 valid circular reference (nullable terminates)")
+ assert.Equal(t, 0, idx.GetInvalidCircularRefCount(), "should have 0 invalid circular references")
+ assert.Empty(t, idx.GetCircularReferenceErrors(), "should have no circular reference errors")
+}
+
+func TestBuildIndex_CircularReferenceCounts_OneOfValid_Success(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ tests := []struct {
+ name string
+ yaml string
+ expectedValidCircular int
+ }{
+ {
+ name: "oneOf with referenced schema",
+ yaml: `
+openapi: "3.1.0"
+info:
+ title: Test API
+ version: 1.0.0
+paths: {}
+components:
+ schemas:
+ PolyNode:
+ oneOf:
+ - type: string
+ - $ref: '#/components/schemas/PolyNodeObject'
+ PolyNodeObject:
+ type: object
+ properties:
+ next:
+ $ref: '#/components/schemas/PolyNode'
+`,
+ // 2 circular refs detected: one starting from PolyNode, one from PolyNodeObject
+ // Both are part of the same cycle but detected at different entry points
+ expectedValidCircular: 2,
+ },
+ {
+ name: "oneOf with inline schema",
+ yaml: `
+openapi: "3.1.0"
+info:
+ title: Test API
+ version: 1.0.0
+paths: {}
+components:
+ schemas:
+ PolyNode:
+ oneOf:
+ - type: string
+ - type: object
+ properties:
+ next:
+ $ref: '#/components/schemas/PolyNode'
+`,
+ // 1 circular ref: PolyNode referencing itself
+ expectedValidCircular: 1,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ t.Parallel()
+
+ doc := unmarshalOpenAPI(t, ctx, tt.yaml)
+ idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+ RootDocument: doc,
+ TargetDocument: doc,
+ TargetLocation: "test.yaml",
+ })
+
+ require.NotNil(t, idx, "index should not be nil")
+ assert.Equal(t, tt.expectedValidCircular, idx.GetValidCircularRefCount(), "should have expected valid circular references")
+ assert.Equal(t, 0, idx.GetInvalidCircularRefCount(), "should have 0 invalid circular references")
+ assert.Empty(t, idx.GetCircularReferenceErrors(), "should have no circular reference errors")
+ })
+ }
+}
+
+func TestBuildIndex_CircularReferenceCounts_GettersWithNilIndex_Success(t *testing.T) {
+ t.Parallel()
+
+ var idx *openapi.Index = nil
+
+ assert.Equal(t, 0, idx.GetValidCircularRefCount(), "should return 0 for nil index")
+ assert.Equal(t, 0, idx.GetInvalidCircularRefCount(), "should return 0 for nil index")
+}
+
+func TestIndex_GetAllReferences_Success(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ yaml := `
+openapi: "3.1.0"
+info:
+ title: Test API
+ version: 1.0.0
+paths:
+ /users:
+ get:
+ operationId: getUsers
+ parameters:
+ - $ref: '#/components/parameters/UserIdParam'
+ responses:
+ '200':
+ $ref: '#/components/responses/UserResponse'
+ callbacks:
+ statusUpdate:
+ $ref: '#/components/callbacks/StatusCallback'
+components:
+ parameters:
+ UserIdParam:
+ name: userId
+ in: query
+ schema:
+ type: string
+ responses:
+ UserResponse:
+ description: User response
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/User'
+ examples:
+ user1:
+ $ref: '#/components/examples/UserExample'
+ headers:
+ X-Custom:
+ $ref: '#/components/headers/CustomHeader'
+ links:
+ self:
+ $ref: '#/components/links/SelfLink'
+ schemas:
+ User:
+ type: object
+ properties:
+ id:
+ type: string
+ profile:
+ $ref: '#/components/schemas/Profile'
+ Profile:
+ type: object
+ properties:
+ name:
+ type: string
+ examples:
+ UserExample:
+ value:
+ id: "123"
+ headers:
+ CustomHeader:
+ schema:
+ type: string
+ links:
+ SelfLink:
+ operationId: getUsers
+ requestBodies:
+ UserBody:
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/User'
+ callbacks:
+ StatusCallback:
+ '{$request.body#/callbackUrl}':
+ post:
+ requestBody:
+ $ref: '#/components/requestBodies/UserBody'
+ responses:
+ '200':
+ description: Callback response
+ securitySchemes:
+ ApiKey:
+ type: apiKey
+ in: header
+ name: X-API-Key
+`
+ doc := unmarshalOpenAPI(t, ctx, yaml)
+ idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+ RootDocument: doc,
+ TargetDocument: doc,
+ TargetLocation: "test.yaml",
+ })
+
+ require.NotNil(t, idx, "index should not be nil")
+ assert.False(t, idx.HasErrors(), "should have no errors")
+
+ // Get all references
+ allRefs := idx.GetAllReferences()
+ require.NotNil(t, allRefs, "GetAllReferences should not return nil")
+
+ expectedRefCount := 10
+ assert.Len(t, allRefs, expectedRefCount, "should have expected number of references")
+
+ // Verify all returned nodes implement ReferenceNode interface
+ for i, ref := range allRefs {
+ assert.NotNil(t, ref, "reference at index %d should not be nil", i)
+ assert.NotNil(t, ref.Node, "reference node at index %d should not be nil", i)
+
+ // Verify it's actually a reference
+ assert.True(t, ref.Node.IsReference(), "node at index %d should be a reference", i)
+
+ // Verify it has a reference value
+ refVal := ref.Node.GetReference()
+ assert.NotEmpty(t, refVal, "node at index %d should have a reference value", i)
+ }
+
+ // Verify specific reference counts
+ assert.Len(t, idx.SchemaReferences, 3, "should have 3 schema references")
+ assert.Len(t, idx.ParameterReferences, 1, "should have 1 parameter reference")
+ assert.Len(t, idx.ResponseReferences, 1, "should have 1 response reference")
+ assert.Len(t, idx.ExampleReferences, 1, "should have 1 example reference")
+ assert.Len(t, idx.HeaderReferences, 1, "should have 1 header reference")
+ assert.Len(t, idx.LinkReferences, 1, "should have 1 link reference")
+ assert.Len(t, idx.RequestBodyReferences, 1, "should have 1 request body reference")
+ assert.Len(t, idx.CallbackReferences, 1, "should have 1 callback reference")
+}
+
+func TestIndex_GetAllReferences_EmptyDoc_Success(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ yaml := `
+openapi: "3.1.0"
+info:
+ title: Empty API
+ version: 1.0.0
+paths: {}
+`
+ doc := unmarshalOpenAPI(t, ctx, yaml)
+ idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+ RootDocument: doc,
+ TargetDocument: doc,
+ TargetLocation: "test.yaml",
+ })
+
+ require.NotNil(t, idx, "index should not be nil")
+
+ allRefs := idx.GetAllReferences()
+ assert.Empty(t, allRefs, "should have no references in empty doc")
+}
+
+func TestIndex_GetAllReferences_NilIndex_Success(t *testing.T) {
+ t.Parallel()
+
+ var idx *openapi.Index = nil
+ allRefs := idx.GetAllReferences()
+ assert.Nil(t, allRefs, "should return nil for nil index")
+}
+
+func TestBuildIndex_CircularRef_OneOfSelfRefWithBaseCases_Valid(t *testing.T) {
+ t.Parallel()
+ ctx := t.Context()
+
+ // A recursive JSON-value-like type: oneOf with self-referencing branches (object/array)
+ // AND non-recursive base-case branches (string/number/boolean).
+ // Referenced from within an inline oneOf in a path response.
+ // This should be VALID because the oneOf has non-recursive branches.
+ yaml := `
+openapi: "3.0.3"
+info:
+ title: Test API
+ version: 1.0.0
+paths:
+ /test:
+ get:
+ operationId: getTest
+ responses:
+ "200":
+ description: OK
+ content:
+ application/json:
+ schema:
+ oneOf:
+ - type: object
+ properties:
+ data:
+ $ref: '#/components/schemas/JsonValue'
+ - type: object
+ properties:
+ items:
+ $ref: '#/components/schemas/JsonValue'
+components:
+ schemas:
+ JsonValue:
+ nullable: true
+ oneOf:
+ - type: string
+ - type: number
+ - type: object
+ additionalProperties:
+ $ref: '#/components/schemas/JsonValue'
+ - type: array
+ items:
+ $ref: '#/components/schemas/JsonValue'
+ - type: boolean
+`
+ doc := unmarshalOpenAPI(t, ctx, yaml)
+ idx := openapi.BuildIndex(ctx, doc, references.ResolveOptions{
+ RootDocument: doc,
+ TargetDocument: doc,
+ TargetLocation: "test.yaml",
+ })
+
+ require.NotNil(t, idx, "index should not be nil")
+
+ circularErrs := idx.GetCircularReferenceErrors()
+ assert.Empty(t, circularErrs, "oneOf with non-recursive base-case branches should be valid")
+}
diff --git a/openapi/info.go b/openapi/info.go
index 7ac8a587..31e4c794 100644
--- a/openapi/info.go
+++ b/openapi/info.go
@@ -2,6 +2,8 @@ package openapi
import (
"context"
+ "errors"
+ "fmt"
"net/mail"
"net/url"
@@ -106,16 +108,16 @@ func (i *Info) Validate(ctx context.Context, opts ...validation.Option) []error
errs := []error{}
if core.Title.Present && i.Title == "" {
- errs = append(errs, validation.NewValueError(validation.NewMissingValueError("info.title is required"), core, core.Title))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("`info.title` is required"), core, core.Title))
}
if core.Version.Present && i.Version == "" {
- errs = append(errs, validation.NewValueError(validation.NewMissingValueError("info.version is required"), core, core.Version))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("`info.version` is required"), core, core.Version))
}
if core.TermsOfService.Present {
if _, err := url.Parse(*i.TermsOfService); err != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("info.termsOfService is not a valid uri: %s", err), core, core.TermsOfService))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("`info.termsOfService` is not a valid uri: %w", err), core, core.TermsOfService))
}
}
@@ -186,13 +188,13 @@ func (c *Contact) Validate(ctx context.Context, opts ...validation.Option) []err
if core.URL.Present {
if _, err := url.Parse(*c.URL); err != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("contact.url is not a valid uri: %s", err), core, core.URL))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("contact.url is not a valid uri: %w", err), core, core.URL))
}
}
if core.Email.Present {
if _, err := mail.ParseAddress(*c.Email); err != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("contact.email is not a valid email address: %s", err), core, core.Email))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("contact.email is not a valid email address: %w", err), core, core.Email))
}
}
@@ -255,12 +257,12 @@ func (l *License) Validate(ctx context.Context, opts ...validation.Option) []err
errs := []error{}
if core.Name.Present && l.Name == "" {
- errs = append(errs, validation.NewValueError(validation.NewMissingValueError("license.name is required"), core, core.Name))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationRequiredField, errors.New("`license.name` is required"), core, core.Name))
}
if core.URL.Present {
if _, err := url.Parse(*l.URL); err != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("license.url is not a valid uri: %s", err), core, core.URL))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("license.url is not a valid uri: %w", err), core, core.URL))
}
}
diff --git a/openapi/info_validate_test.go b/openapi/info_validate_test.go
index b469342f..6b0eb66d 100644
--- a/openapi/info_validate_test.go
+++ b/openapi/info_validate_test.go
@@ -108,7 +108,7 @@ func TestInfo_Validate_Error(t *testing.T) {
yml: `
version: 1.0.0
`,
- wantErrs: []string{"[2:1] info.title is missing"},
+ wantErrs: []string{"[2:1] error validation-required-field `info.title` is required"},
},
{
name: "empty title",
@@ -116,14 +116,14 @@ version: 1.0.0
title: ""
version: 1.0.0
`,
- wantErrs: []string{"[2:8] info.title is required"},
+ wantErrs: []string{"[2:8] error validation-required-field `info.title` is required"},
},
{
name: "missing version",
yml: `
title: Test API
`,
- wantErrs: []string{"[2:1] info.version is missing"},
+ wantErrs: []string{"[2:1] error validation-required-field `info.version` is required"},
},
{
name: "empty version",
@@ -131,7 +131,7 @@ title: Test API
title: Test API
version: ""
`,
- wantErrs: []string{"[3:10] info.version is required"},
+ wantErrs: []string{"[3:10] error validation-required-field `info.version` is required"},
},
{
name: "invalid termsOfService URI",
@@ -140,7 +140,7 @@ title: Test API
version: 1.0.0
termsOfService: ":invalid"
`,
- wantErrs: []string{"[4:17] info.termsOfService is not a valid uri: parse \":invalid\": missing protocol scheme"},
+ wantErrs: []string{"[4:17] error validation-invalid-format `info.termsOfService` is not a valid uri: parse \":invalid\": missing protocol scheme"},
},
{
name: "invalid contact URL",
@@ -151,7 +151,7 @@ contact:
name: Support
url: ":invalid"
`,
- wantErrs: []string{"[6:8] contact.url is not a valid uri: parse \":invalid\": missing protocol scheme"},
+ wantErrs: []string{"[6:8] error validation-invalid-format contact.url is not a valid uri: parse \":invalid\": missing protocol scheme"},
},
{
name: "invalid contact email",
@@ -162,7 +162,7 @@ contact:
name: Support
email: "not-an-email"
`,
- wantErrs: []string{"[6:10] contact.email is not a valid email address: mail: missing '@' or angle-addr"},
+ wantErrs: []string{"[6:10] error validation-invalid-format contact.email is not a valid email address: mail: missing '@' or angle-addr"},
},
{
name: "invalid license URL",
@@ -173,7 +173,7 @@ license:
name: MIT
url: ":invalid"
`,
- wantErrs: []string{"[6:8] license.url is not a valid uri: parse \":invalid\": missing protocol scheme"},
+ wantErrs: []string{"[6:8] error validation-invalid-format license.url is not a valid uri: parse \":invalid\": missing protocol scheme"},
},
{
name: "missing license name",
@@ -183,7 +183,7 @@ version: 1.0.0
license:
url: https://opensource.org/licenses/MIT
`,
- wantErrs: []string{"[5:3] license.name is missing"},
+ wantErrs: []string{"[5:3] error validation-required-field `license.name` is required"},
},
{
name: "multiple validation errors",
@@ -196,10 +196,10 @@ license:
name: ""
`,
wantErrs: []string{
- "[2:8] info.title is required",
- "[3:10] info.version is required",
- "[5:10] contact.email is not a valid email address: mail: missing '@' or angle-addr",
- "[7:9] license.name is required",
+ "[2:8] error validation-required-field `info.title` is required",
+ "[3:10] error validation-required-field `info.version` is required",
+ "[5:10] error validation-invalid-format contact.email is not a valid email address: mail: missing '@' or angle-addr",
+ "[7:9] error validation-required-field `license.name` is required",
},
},
}
@@ -326,7 +326,7 @@ func TestContact_Validate_Error(t *testing.T) {
name: Support
url: ":invalid"
`,
- wantErrs: []string{"[3:6] contact.url is not a valid uri: parse \":invalid\": missing protocol scheme"},
+ wantErrs: []string{"[3:6] error validation-invalid-format contact.url is not a valid uri: parse \":invalid\": missing protocol scheme"},
},
{
name: "invalid email",
@@ -334,7 +334,7 @@ url: ":invalid"
name: Support
email: "not-an-email"
`,
- wantErrs: []string{"[3:8] contact.email is not a valid email address: mail: missing '@' or angle-addr"},
+ wantErrs: []string{"[3:8] error validation-invalid-format contact.email is not a valid email address: mail: missing '@' or angle-addr"},
},
{
name: "invalid URL with spaces",
@@ -342,7 +342,7 @@ email: "not-an-email"
name: Support
url: ":invalid url"
`,
- wantErrs: []string{"[3:6] contact.url is not a valid uri: parse \":invalid url\": missing protocol scheme"},
+ wantErrs: []string{"[3:6] error validation-invalid-format contact.url is not a valid uri: parse \":invalid url\": missing protocol scheme"},
},
{
name: "invalid email missing @",
@@ -350,7 +350,7 @@ url: ":invalid url"
name: Support
email: "supportexample.com"
`,
- wantErrs: []string{"[3:8] contact.email is not a valid email address: mail: missing '@' or angle-addr"},
+ wantErrs: []string{"[3:8] error validation-invalid-format contact.email is not a valid email address: mail: missing '@' or angle-addr"},
},
{
name: "multiple validation errors",
@@ -360,8 +360,8 @@ url: ":invalid"
email: "invalid-email"
`,
wantErrs: []string{
- "[3:6] contact.url is not a valid uri: parse \":invalid\": missing protocol scheme",
- "[4:8] contact.email is not a valid email address: mail: missing '@' or angle-addr",
+ "[3:6] error validation-invalid-format contact.url is not a valid uri: parse \":invalid\": missing protocol scheme",
+ "[4:8] error validation-invalid-format contact.email is not a valid email address: mail: missing '@' or angle-addr",
},
},
}
@@ -476,7 +476,7 @@ func TestLicense_Validate_Error(t *testing.T) {
yml: `
url: https://opensource.org/licenses/MIT
`,
- wantErrs: []string{"[2:1] license.name is missing"},
+ wantErrs: []string{"[2:1] error validation-required-field `license.name` is required"},
},
{
name: "empty name",
@@ -484,7 +484,7 @@ url: https://opensource.org/licenses/MIT
name: ""
url: https://opensource.org/licenses/MIT
`,
- wantErrs: []string{"[2:7] license.name is required"},
+ wantErrs: []string{"[2:7] error validation-required-field `license.name` is required"},
},
{
name: "invalid URL",
@@ -492,7 +492,7 @@ url: https://opensource.org/licenses/MIT
name: MIT
url: ":invalid"
`,
- wantErrs: []string{"[3:6] license.url is not a valid uri: parse \":invalid\": missing protocol scheme"},
+ wantErrs: []string{"[3:6] error validation-invalid-format license.url is not a valid uri: parse \":invalid\": missing protocol scheme"},
},
{
name: "invalid URL with spaces",
@@ -500,7 +500,7 @@ url: ":invalid"
name: MIT
url: ":invalid url"
`,
- wantErrs: []string{"[3:6] license.url is not a valid uri: parse \":invalid url\": missing protocol scheme"},
+ wantErrs: []string{"[3:6] error validation-invalid-format license.url is not a valid uri: parse \":invalid url\": missing protocol scheme"},
},
{
name: "multiple validation errors",
@@ -509,8 +509,8 @@ name: ""
url: ":invalid"
`,
wantErrs: []string{
- "[2:7] license.name is required",
- "[3:6] license.url is not a valid uri: parse \":invalid\": missing protocol scheme",
+ "[2:7] error validation-required-field `license.name` is required",
+ "[3:6] error validation-invalid-format license.url is not a valid uri: parse \":invalid\": missing protocol scheme",
},
},
}
diff --git a/openapi/inline.go b/openapi/inline.go
index a16650d2..096569d3 100644
--- a/openapi/inline.go
+++ b/openapi/inline.go
@@ -349,7 +349,7 @@ func inlineReference[T any, V interfaces.Validator[T], C marshaller.CoreModeler]
recursiveOpts := ResolveOptions{
RootDocument: opts.RootDocument,
TargetDocument: targetDocInfo.ResolvedDocument,
- TargetLocation: targetDocInfo.AbsoluteReference,
+ TargetLocation: targetDocInfo.AbsoluteDocumentPath,
}
if err := inlineObject(ctx, ref, doc, recursiveOpts, collectedDefs, defHashes); err != nil {
return fmt.Errorf("failed to inline nested references in %s: %w", ref.GetReference(), err)
diff --git a/openapi/links.go b/openapi/links.go
index f9c13d7b..5328b86e 100644
--- a/openapi/links.go
+++ b/openapi/links.go
@@ -2,6 +2,8 @@ package openapi
import (
"context"
+ "errors"
+ "fmt"
"net/url"
"github.com/speakeasy-api/openapi/expression"
@@ -11,7 +13,6 @@ import (
"github.com/speakeasy-api/openapi/openapi/core"
"github.com/speakeasy-api/openapi/sequencedmap"
"github.com/speakeasy-api/openapi/validation"
- walkpkg "github.com/speakeasy-api/openapi/walk"
)
type Link struct {
@@ -101,70 +102,40 @@ func (l *Link) Validate(ctx context.Context, opts ...validation.Option) []error
core := l.GetCore()
errs := []error{}
- op := validation.NewOptions(opts...)
- o := validation.GetContextObject[OpenAPI](op)
-
if core.OperationID.Present && core.OperationRef.Present {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("operationID and operationRef are mutually exclusive"), core, core.OperationID))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationMutuallyExclusiveFields, errors.New("operationID and operationRef are mutually exclusive"), core, core.OperationID))
}
- if l.OperationID != nil {
- if o == nil {
- panic("OpenAPI object is required to validate operationId")
- }
-
- foundOp := false
-
- for item := range Walk(ctx, o) {
- err := item.Match(Matcher{
- Operation: func(o *Operation) error {
- if o.GetOperationID() == "" {
- return nil
- }
-
- if o.GetOperationID() == l.GetOperationID() {
- foundOp = true
- return walkpkg.ErrTerminate
- }
- return nil
- },
- })
- if err != nil {
- break
- }
- }
-
- if !foundOp {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("link.operationId value %s does not exist in document", *l.OperationID), core, core.OperationID))
- }
- }
+ // Note: operationId validation has been moved to the linter rule "semantic-link-operation"
+ // This allows validation to occur after the index is built, enabling checks against
+ // operations in external documents that may be referenced later.
// TODO should we validate the reference resolves here? Or as part of the resolution operation? Or make it optional?
if l.OperationRef != nil {
if _, err := url.Parse(*l.OperationRef); err != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("link.operationRef is not a valid uri: %s", err), core, core.OperationRef))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidFormat, fmt.Errorf("link.operationRef is not a valid uri: %w", err), core, core.OperationRef))
}
}
for key, exp := range l.GetParameters().All() {
_, expression, err := expression.GetValueOrExpressionValue(exp)
if err != nil {
- errs = append(errs, validation.NewMapValueError(validation.NewValueValidationError("link.parameters expression is invalid: %s", err.Error()), core, core.Parameters, key))
+ errs = append(errs, validation.NewMapValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("link.parameters expression is invalid: %w", err), core, core.Parameters, key))
}
if expression != nil {
if err := expression.Validate(); err != nil {
- errs = append(errs, validation.NewMapValueError(validation.NewValueValidationError("link.parameters expression is invalid: %s", err.Error()), core, core.Parameters, key))
+ errs = append(errs, validation.NewMapValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("link.parameters expression is invalid: %w", err), core, core.Parameters, key))
}
}
}
_, rbe, err := expression.GetValueOrExpressionValue(l.RequestBody)
if err != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("link.requestBody expression is invalid: %s", err.Error()), core, core.RequestBody))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("link.requestBody expression is invalid: %w", err), core, core.RequestBody))
}
if rbe != nil {
if err := rbe.Validate(); err != nil {
- errs = append(errs, validation.NewValueError(validation.NewValueValidationError("link.requestBody expression is invalid: %s", err.Error()), core, core.RequestBody))
+ errs = append(errs, validation.NewValueError(validation.SeverityError, validation.RuleValidationInvalidSyntax, fmt.Errorf("link.requestBody expression is invalid: %w", err), core, core.RequestBody))
}
}
diff --git a/openapi/links_validate_test.go b/openapi/links_validate_test.go
index ce2e2fea..ae032790 100644
--- a/openapi/links_validate_test.go
+++ b/openapi/links_validate_test.go
@@ -177,7 +177,7 @@ server:
description: Invalid server without URL
description: Link with invalid server
`,
- wantErrs: []string{"[4:3] server.url is missing"},
+ wantErrs: []string{"[4:3] error validation-required-field `server.url` is required"},
},
{
name: "invalid_operation_ref_uri",
@@ -240,30 +240,10 @@ description: Invalid request body expression syntax - empty query name
}
}
-func TestLink_Validate_OperationID_NotFound(t *testing.T) {
- t.Parallel()
-
- // Create a minimal OpenAPI document with operations
- openAPIDoc := &openapi.OpenAPI{
- Paths: openapi.NewPaths(),
- }
-
- // Add a path with an operation
- pathItem := openapi.NewPathItem()
- operation := &openapi.Operation{
- OperationID: pointer.From("existingOperation"),
- }
- pathItem.Set("get", operation)
- openAPIDoc.Paths.Set("/users/{id}", &openapi.ReferencedPathItem{Object: pathItem})
-
- link := &openapi.Link{
- OperationID: pointer.From("nonExistentOperation"),
- }
-
- errs := link.Validate(t.Context(), validation.WithContextObject(openAPIDoc))
- require.NotEmpty(t, errs, "Expected validation error for non-existent operationId")
- require.Contains(t, errs[0].Error(), "link.operationId value nonExistentOperation does not exist in document")
-}
+// Note: TestLink_Validate_OperationID_NotFound has been removed because operationId validation
+// has been moved to the linter rule "semantic-link-operation" (see link_operation.go in linter/rules).
+// This allows validation to occur after the index is built, enabling checks against operations
+// in external documents that may be referenced later.
func TestLink_Validate_OperationID_Found(t *testing.T) {
t.Parallel()
@@ -289,17 +269,9 @@ func TestLink_Validate_OperationID_Found(t *testing.T) {
require.Empty(t, errs, "Expected no validation errors for existing operationId")
}
-func TestLink_Validate_OperationID_WithoutOpenAPIContext_Panics(t *testing.T) {
- t.Parallel()
-
- link := &openapi.Link{
- OperationID: pointer.From("getUserById"),
- }
-
- require.Panics(t, func() {
- link.Validate(t.Context())
- }, "Expected panic when validating operationId without OpenAPI context")
-}
+// Note: TestLink_Validate_OperationID_WithoutOpenAPIContext_Panics has been removed because
+// operationId validation has been moved to the linter rule "semantic-link-operation".
+// Link.Validate() no longer requires OpenAPI context for operationId validation.
func TestLink_Validate_ComplexExpressions(t *testing.T) {
t.Parallel()
diff --git a/openapi/linter/README.md b/openapi/linter/README.md
new file mode 100644
index 00000000..79b6c057
--- /dev/null
+++ b/openapi/linter/README.md
@@ -0,0 +1,420 @@
+# OpenAPI Linter
+
+The OpenAPI linter validates OpenAPI specifications for style, consistency,
+and best practices beyond basic spec validation.
+
+## Quick Start
+
+### CLI
+
+```bash
+# Lint an OpenAPI specification
+openapi spec lint api.yaml
+
+# Output as JSON
+openapi spec lint --format json api.yaml
+
+# Disable specific rules
+openapi spec lint --disable semantic-path-params api.yaml
+```
+
+### Go API
+
+```go
+import (
+ "context"
+ "fmt"
+ "os"
+
+ "github.com/speakeasy-api/openapi/linter"
+ "github.com/speakeasy-api/openapi/openapi"
+ openapiLinter "github.com/speakeasy-api/openapi/openapi/linter"
+)
+
+func main() {
+ ctx := context.Background()
+
+ // Load your OpenAPI document
+ f, _ := os.Open("api.yaml")
+ doc, validationErrors, _ := openapi.Unmarshal(ctx, f)
+
+ // Create linter with default configuration
+ config := linter.NewConfig()
+ lint := openapiLinter.NewLinter(config)
+
+ // Run linting
+ output, _ := lint.Lint(ctx, linter.NewDocumentInfo(doc, "api.yaml"), validationErrors, nil)
+
+ // Print results
+ fmt.Println(output.FormatText())
+}
+```
+
+## Available Rules
+
+
+
+| Rule | Severity | Description |
+| ----------------------------------------------------------------------------------------------- | -------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ |
+| `oas-schema-check` | error | Schemas must use type-appropriate constraints and have valid constraint values. For example, string types should use minLength/maxLength/pattern, numbers should use minimum/maximum/multipleOf, and constraint values must be logically valid (e.g., maxLength >= minLength). |
+| `oas3-example-missing` | hint | Schemas, parameters, headers, and media types should include example values to illustrate expected data formats. Examples improve documentation quality, help developers understand how to use the API correctly, and enable better testing and validation. |
+| `oas3-no-nullable` | warning | The nullable keyword is not supported in OpenAPI 3.1+ and should be replaced with a type array that includes null (e.g., type: [string, null]). This change aligns OpenAPI 3.1 with JSON Schema Draft 2020-12, which uses type arrays to express nullable values. |
+| `owasp-additional-properties-constrained` | hint | Schemas with additionalProperties set to true or a schema should define maxProperties to limit object size. Without size limits, APIs are vulnerable to resource exhaustion attacks where clients send excessively large objects. |
+| `owasp-array-limit` | error | Array schemas must specify maxItems to prevent resource exhaustion attacks. Without array size limits, malicious clients could send extremely large arrays that consume excessive memory or processing time. |
+| `owasp-auth-insecure-schemes` | error | Authentication schemes using outdated or insecure methods must be avoided or upgraded. Insecure authentication schemes like API keys in query parameters or HTTP Basic over HTTP expose credentials and create security vulnerabilities. |
+| `owasp-define-error-responses-401` | warning | Operations should define a 401 Unauthorized response with a proper schema to handle authentication failures. Documenting authentication error responses helps clients implement proper error handling and understand when credentials are invalid or missing. |
+| `owasp-define-error-responses-429` | warning | Operations should define a 429 Too Many Requests response with a proper schema to indicate rate limiting. Rate limit responses help clients understand when they've exceeded usage thresholds and need to slow down requests. |
+| `owasp-define-error-responses-500` | warning | Operations should define a 500 Internal Server Error response with a proper schema to handle unexpected failures. Documenting server error responses helps clients distinguish between client-side and server-side problems. |
+| `owasp-define-error-validation` | warning | Operations should define validation error responses (400, 422, or 4XX) to indicate request data problems. Validation error responses help clients understand when and why their request data is invalid or malformed. |
+| `owasp-integer-format` | error | Integer schemas must specify a format of int32 or int64 to define the expected size and range. Explicit integer formats prevent overflow vulnerabilities and ensure clients and servers agree on numeric boundaries. |
+| `owasp-integer-limit` | error | Integer schemas must specify minimum and maximum values (or exclusive variants) to prevent unbounded inputs. Without numeric limits, APIs are vulnerable to overflow attacks and unexpected behavior from extreme values. |
+| `owasp-jwt-best-practices` | error | Security schemes using OAuth2 or JWT must explicitly declare support for RFC8725 (JWT Best Current Practices) in the description. RFC8725 compliance ensures JWTs are validated properly and protected against common attacks like algorithm confusion. |
+| `owasp-no-additional-properties` | error | Object schemas must not allow arbitrary additional properties (set additionalProperties to false or omit it). Allowing unexpected properties can lead to mass assignment vulnerabilities where attackers inject unintended fields. |
+| `owasp-no-api-keys-in-url` | error | API keys must not be passed via URL parameters (query or path) as they are logged and cached. URL-based API keys appear in browser history, server logs, and proxy caches, creating security exposure. |
+| `owasp-no-credentials-in-url` | error | URL parameters must not contain credentials like API keys, passwords, or secrets. Credentials in URLs are logged by servers, proxies, and browsers, creating significant security risks. |
+| `owasp-no-http-basic` | error | Security schemes must not use HTTP Basic authentication without additional security layers. HTTP Basic sends credentials in easily-decoded base64 encoding, making it vulnerable to interception without HTTPS. |
+| `owasp-no-numeric-ids` | error | Resource identifiers must use random values like UUIDs instead of sequential numeric IDs. Sequential IDs enable enumeration attacks where attackers can guess valid IDs and access unauthorized resources. |
+| `owasp-protection-global-safe` | hint | Safe operations (GET, HEAD) should be protected by security schemes or explicitly marked as public. Unprotected read operations may expose sensitive data to unauthorized users. |
+| `owasp-protection-global-unsafe` | error | Unsafe operations (POST, PUT, PATCH, DELETE) must be protected by security schemes to prevent unauthorized modifications. Write operations without authentication create serious security vulnerabilities allowing data tampering. |
+| `owasp-protection-global-unsafe-strict` | hint | Unsafe operations (POST, PUT, PATCH, DELETE) must be protected by non-empty security schemes without explicit opt-outs. Strict authentication requirements ensure write operations cannot bypass security even with empty security arrays. |
+| `owasp-rate-limit` | error | 2XX and 4XX responses must define rate limiting headers (X-RateLimit-Limit, X-RateLimit-Remaining) to prevent API overload. Rate limit headers help clients manage their usage and avoid hitting limits. |
+| `owasp-rate-limit-retry-after` | error | 429 Too Many Requests responses must include a Retry-After header indicating when clients can retry. Retry-After headers prevent thundering herd problems by telling clients exactly when to resume requests. |
+| `owasp-security-hosts-https-oas3` | error | Server URLs must begin with https:// as the only permitted protocol. Using HTTPS is essential for protecting API traffic from interception, tampering, and eavesdropping attacks. |
+| `owasp-string-limit` | error | String schemas must specify maxLength, const, or enum to prevent unbounded data. Without string length limits, APIs are vulnerable to resource exhaustion from extremely long inputs. |
+| `owasp-string-restricted` | error | String schemas must specify format, const, enum, or pattern to restrict content. String restrictions prevent injection attacks and ensure data conforms to expected formats. |
+| `semantic-duplicated-enum` | warning | Enum arrays should not contain duplicate values. Duplicate enum values are redundant and can cause confusion or unexpected behavior in client code generation and validation. |
+| `semantic-link-operation` | error | Link operationId must reference an existing operation in the API specification. This ensures that links point to valid operations, including those defined in external documents that are referenced in the specification. |
+| `semantic-no-ambiguous-paths` | error | Path definitions must be unambiguous and distinguishable from each other to ensure correct request routing. Ambiguous paths like `/users/{id}` and `/users/{name}` can cause runtime routing conflicts since both match the same URL pattern. |
+| `semantic-no-eval-in-markdown` | error | Markdown descriptions must not contain eval() statements, which pose serious security risks. Including eval() in documentation could enable code injection attacks if the documentation is rendered in contexts that execute JavaScript. |
+| `semantic-no-script-tags-in-markdown` | error | Markdown descriptions must not contain `<script>` tags, which pose serious security risks. Including script tags in documentation could enable code injection attacks if the documentation is rendered in contexts that execute JavaScript. |
+paths:
+ /users:
+ get:
+ description: "safe"
+ responses:
+ '200':
+ description: ok
+ /admin:
+ get:
+ description: ""
+ responses:
+ '200':
+ description: ok
+`
+
+ expectedErrors := []string{
+ "[6:16] error semantic-no-script-tags-in-markdown description contains content with `