diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml deleted file mode 100644 index eb90654..0000000 --- a/.github/workflows/ci.yml +++ /dev/null @@ -1,47 +0,0 @@ -name: Test and Lint -on: - push: - branches-ignore: [wip/**] -jobs: - build: - strategy: - matrix: - os: [[🐧, Ubuntu], [🍎, macOS], [πŸͺŸ, Windows]] - go: ["1.24", "1.23"] - name: ${{ matrix.os[0] }} Test Go ${{ matrix.go }} on ${{ matrix.os[1] }} - runs-on: ${{ matrix.os[1] }}-latest - steps: - - name: Checkout Repo - uses: actions/checkout@v4 - with: { submodules: true } - - name: Setup Go - uses: actions/setup-go@v5 - with: { go-version: "${{ matrix.go }}", check-latest: true } - - name: Run Tests - run: make test - - name: Setup TinyGo - uses: acifani/setup-tinygo@v2 - with: { tinygo-version: 0.37.0 } - - name: Test WASM - run: make wasm - lint: - name: πŸ“Š Lint and Cover - runs-on: ubuntu-latest - steps: - - name: Checkout Repo - uses: actions/checkout@v4 - with: { submodules: true } - - name: Setup Go - uses: actions/setup-go@v5 - with: { go-version-file: go.mod, check-latest: true } - - name: Install Dependencies - run: make debian-lint-depends - - name: Run pre-commit - uses: pre-commit/action@v3.0.1 - - name: Run Test Coverage - run: go test -race -coverprofile coverage.txt -covermode atomic ./... - - name: Upload Coverage - uses: codecov/codecov-action@v4 - with: - token: ${{ secrets.CODECOV_TOKEN }} - files: cover.out diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml new file mode 100644 index 0000000..a0e5bc6 --- /dev/null +++ b/.github/workflows/deploy.yml @@ -0,0 +1,39 @@ +name: πŸ› Deploy Playground +on: + push: + +jobs: + build: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Setup Go + uses: actions/setup-go@v5 + with: { go-version-file: go.mod, check-latest: true } + - name: Setup TinyGo + uses: acifani/setup-tinygo@v2 + with: { tinygo-version: 0.36.0 } + - name: Generate App + run: make playground + - name: Upload Artifact + if: github.ref == 'refs/heads/playground' + uses: actions/upload-pages-artifact@v3 + with: { path: ./pub } + + deploy: + needs: build + if: github.ref == 'refs/heads/playground' + permissions: + pages: write + id-token: write + concurrency: + group: "pages" + cancel-in-progress: false + environment: + name: github-pages + url: ${{ steps.deployment.outputs.page_url }} + runs-on: ubuntu-latest + steps: + - id: deployment + uses: actions/deploy-pages@v4 diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml deleted file mode 100644 index 29fdc8b..0000000 --- a/.github/workflows/release.yml +++ /dev/null @@ -1,22 +0,0 @@ -name: πŸš€ Release -on: - push: - # Release on semantic version tag. 
- tags: ["v[0-9]+.[0-9]+.[0-9]+"] -jobs: - release: - name: πŸš€ Release on GitHub - runs-on: ubuntu-latest - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - steps: - - name: Check out the repo - uses: actions/checkout@v4 - - name: Generate Release Notes - id: notes - uses: theory/changelog-version-notes-action@v0 - - name: Create GitHub Release - uses: softprops/action-gh-release@v2 - with: - name: Release ${{ github.ref_name }} - body_path: "${{ steps.notes.outputs.file }}" diff --git a/.gitignore b/.gitignore index 689c20e..f0d691c 100644 --- a/.gitignore +++ b/.gitignore @@ -21,11 +21,12 @@ vendor/ go.work # Editor files -.vscode/ +# .vscode/ # Build artifacts -_build/ pub/ +_build/ +jsonpath-compliance-test-suite/ # OS Stuff .DS_Store diff --git a/.gitmodules b/.gitmodules deleted file mode 100644 index 22d92d7..0000000 --- a/.gitmodules +++ /dev/null @@ -1,3 +0,0 @@ -[submodule "jsonpath-compliance-test-suite"] - path = jsonpath-compliance-test-suite - url = https://github.com/jsonpath-standard/jsonpath-compliance-test-suite diff --git a/.golangci.yaml b/.golangci.yaml index a3e942d..623f72d 100644 --- a/.golangci.yaml +++ b/.golangci.yaml @@ -5,61 +5,18 @@ linters: # Too strict. - cyclop # redundant, covered by gocyclo - depguard - - exhaustruct - - funcorder # new, too much to move around + - mnd - nestif # redundant, covered by gocyclo - nlreturn - - testpackage - - varnamelen - wsl - settings: - dupword: - ignore: - - R. - errcheck: - disable-default-exclusions: false - check-type-assertions: false - check-blank: false - exclude-functions: - - fmt.Fprintf - - fmt.Fprintln - - fmt.Fprint - exhaustive: - default-signifies-exhaustive: true - funlen: - ignore-comments: true - ireturn: - allow: - - anon - - error - - empty - - stdlib - - generic - - spec\.PathValue - - spec\.FuncExprArg - - spec\.Selector - - spec\.BasicExpr - - spec\.CompVal exclusions: generated: lax - rules: - # Exclude some linters from running on tests files. 
- - linters: - - err113 - - funlen - - maintidx - - wrapcheck - path: _test\.go - - linters: - - err113 - path: registry/funcs\.go paths: - third_party$ - builtin$ - examples$ formatters: enable: - - gci - gofmt - gofumpt - goimports diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index e9fe98c..a5a39ca 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -19,14 +19,12 @@ repos: name: Go mod tidy language: system entry: go mod tidy - types: [go] pass_filenames: false - id: golangci-lint name: Go linting language: system - entry: golangci-lint run --fix --timeout=5m + entry: make golangci-lint pass_filenames: false - types: [go] - repo: https://github.com/pre-commit/mirrors-prettier rev: v3.1.0 @@ -34,3 +32,4 @@ repos: - id: prettier name: JSON and YAML formatting types_or: [json, yaml] + exclude: ^.vscode/ diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 0000000..d9f098a --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,22 @@ +{ + "gopls": { + "build.env": { + "GOOS": "js", + "GOARCH": "wasm" + } + }, + "cSpell.words": [ + "Boxicons", + "Goldmark", + "jsontree", + "Moby", + "Paren", + "Rees", + "segs", + "serde", + "syscall" + ], + "yaml.schemas": { + "https://json.schemastore.org/github-workflow.json": "file:///Users/david/dev/go/jsontree/.github/workflows/deploy.yml" + } +} diff --git a/CHANGELOG.md b/CHANGELOG.md deleted file mode 100644 index 1b17bef..0000000 --- a/CHANGELOG.md +++ /dev/null @@ -1,201 +0,0 @@ -# Changelog - -All notable changes to this project will be documented in this file. It uses the -[Keep a Changelog] format, and this project adheres to [Semantic Versioning]. - - [Keep a Changelog]: https://keepachangelog.com/en/1.1.0/ - [Semantic Versioning]: https://semver.org/spec/v2.0.0.html - "Semantic Versioning 2.0.0" - -## [v0.9.0] β€” Unreleased - -### ⚑ Improvements - -* Significantly refactored the `spec` package toward greater stability, - usability, and increased comprehensibility. The names of things are more - consistent, the APIs more legible and user-friendly. Quite a few types - were renamed or merged. -* Added support for [json.Number] values to complement the existing support - for Go core numeric types. This should allow for transparent handling of - values marshaled with [json.Decoder.UseNumber] enabled. -* Moved the function extension types from the `registry` to the `spec` - package, simplifying `registry` and the handling of function extensions, - without changing the interface for using a registry or adding extensions - to it. - -### πŸ“š Documentation - -* Vastly expanded the `spec` package documentation to make it much more - thorough and accurate, with copious links to relevant parts of [RFC 9535] - and complete lists of interface implementations and examples for each - significant type. -* Removed the "Package Stability" statement from the README, as all packages - are considered stable or else potentially unstable types in the `spec` - package have been labeled as such. -* Fixed links and typos in the main package documentation, and moved the - registry example under `WithRegistry`. -* Replaced the `spec.Wildcard` variable with a the function - `spec.Wildcard()`. - -### πŸ“” Notes - -* Upgraded the [compliance test suite] and integrated its located path test - expectations. All tests passed, no changes required. 
- - [v0.9.0]: https://github.com/theory/jsonpath/compare/v0.4.1...v0.9.0 - [json.Number]: https://pkg.go.dev/encoding/json#Number - [json.Decoder.UseNumber]: https://pkg.go.dev/encoding/json#Decoder.UseNumber - [compliance test suite]: https://github.com/jsonpath-standard/jsonpath-compliance-test-suite - [RFC 9535]: https://www.rfc-editor.org/rfc/rfc9535.html - "RFC 9535 JSONPath: Query Expressions for JSON" - -## [v0.4.1] β€” 2025-05-02 - -### πŸͺ² Bug Fixes - -* Fixed the panic messages from `spec.Slice` to properly report coming from - `Slice` and not `NewSlice`. - -### πŸ“” Notes - -* Upgraded to and fixed issues reported by `golangci-lint` v2.1.5. -* Added TinyGo testing on Go v1.24. - - [v0.4.1]: https://github.com/theory/jsonpath/compare/v0.4.0...v0.4.1 - -## [v0.4.0] β€” 2025-01-15 - -### ⚑ Improvements - -* Added the `Pointer` method to `NormalizedPath`. It returns an [RFC 9535 - JSON Pointer] string representation of the normalized path. - - [v0.4.0]: https://github.com/theory/jsonpath/compare/v0.3.0...v0.4.0 - [RFC 9535 JSON Pointer]: https://www.rfc-editor.org/rfc/rfc9535#name-normalized-paths - -## [v0.3.0] β€” 2024-12-28 - -### ⚑ Improvements - -* Added `SelectLocated`. It works just like `Select`, but returns - `LocatedNode`s that pair the selected nodes with [RFC 9535-defined] - `NormalizedPath`s that uniquely identify their locations within the JSON - query argument. -* Added `LocatedNodeList`, the return value from `SelectLocated`. It - contains methods for working with the selected nodes, including iterators - for its nodes & `NormalizedPath`s, deduplication, sorting, and cloning. -* Added `Compare` to `NormalizedPath`, which enables the sorting of - `LocatedNodeList`s. - -### πŸ“” Notes - -* Requires Go 1.23 to take advantage of its iterator support. -* Changed the return value of `Select` from `[]any` to `NodeList`, which is - an alias for `[]any`. Done to pair with `LocatedNodeList`, the return - value of `SelectLocated`. Features an `All` method, which returns an - iterator over all the nodes in the list. It may gain additional methods in - the future. - -### πŸ“š Documentation - -* Added `Select`, `SelectLocated`, `NodeList`, and `LocatedNodeList` - examples to the Go docs. - - [v0.3.0]: https://github.com/theory/jsonpath/compare/v0.2.1...v0.3.0 - [RFC 9535-defined]: https://www.rfc-editor.org/rfc/rfc9535#section-2.7 - -## [v0.2.1] β€” 2024-12-12 - -### πŸͺ² Bug Fixes - -* Fixed the formatting of slice strings to omit min and max integers when - not specified and using a negative step. - - [v0.2.1]: https://github.com/theory/jsonpath/compare/v0.2.0...v0.2.1 - -## [v0.2.0] β€” 2024-11-13 - -### ⚑ Improvements - -* Added `spec.Filter.Eval` to allow public evaluation of a single JSON node. - Used internally by `spec.FilterSelector.Select`. -* Added `spec.Segment.IsDescendant` to tell wether a segments selects just - from the current child node or also recursively selects from all of its - descendants. - -### πŸͺ² Bug Fixes - -* Added missing "?" to the stringification of `spec.FilterSelector`. - -### πŸ“” Notes - -* Made `spec.SliceSelector.Bounds` public. -* Made the underlying struct defining `spec.Wildcard` public, named it - `spec.WildcardSelector`. - - [v0.2.0]: https://github.com/theory/jsonpath/compare/v0.1.2...v0.2.0 - -## [v0.1.2] β€” 2024-10-28 - -### πŸͺ² Bug Fixes - -* Eliminated a lexer variable that prevented [TinyGo] compilation. - -### πŸ—οΈ Build Setup - -* Added simple tests to ensure the package compiles properly as Go and - TinyGo WASM. 
-* Added the WASM compile test to the [Test and Lint] GitHub action. - - [v0.1.2]: https://github.com/theory/jsonpath/compare/v0.1.1...v0.1.2 - [TinyGo]: https://tinygo.org "TinyGo β€” A Go Compiler For Small Places" - [Test and Lint]: https://github.com/theory/jsonpath/actions/workflows/ci.yml - -### πŸ“š Documentation - -* Fixed version header links here in CHANGELOG.md. - -## [v0.1.1] β€” 2024-09-19 - -### πŸ“š Documentation - -* Neatened the formatting of the README table for improved display on - pkg.go.dev. - - [v0.1.1]: https://github.com/theory/jsonpath/compare/v0.1.0...v0.1.1 - -## [v0.1.0] β€” 2024-09-19 - -The theme of this release is *Standards Matter.* - -### ⚑ Improvements - -* First release, everything is new! -* Full [RFC 9535] JSONPath implementation -* All [JSONPath Compliance Test Suite] tests pass -* Includes parser, AST, and executor - -### πŸ—οΈ Build Setup - -* Built with Go -* Use `go get` to add to a project - -### πŸ“š Documentation - -* Docs on [pkg.go.dev] -* Syntax summary in `README` - -### πŸ“” Notes - -* The `jsonpath` package is stable and unlikely to change -* The `spec` package is not yet stable -* The `registry` package is stable, although `spec` objects it references - may change -* More detailed documentation to come - - [v0.1.0]: https://github.com/theory/jsonpath/compare/a7279e6...v0.1.0 - [RFC 9535]: https://www.rfc-editor.org/rfc/rfc9535.html - "RFC 9535 JSONPath: Query Expressions for JSON" - [JSONPath Compliance Test Suite]: https://github.com/jsonpath-standard/jsonpath-compliance-test-suite - "A Compliance Test Suite for the RFC 9535 JSONPath Standard" - [pkg.go.dev]: https://pkg.go.dev/github.com/theory/jsonpath diff --git a/LICENSE b/LICENSE index 4a96515..b4d2f21 100644 --- a/LICENSE +++ b/LICENSE @@ -1,6 +1,6 @@ MIT License -Copyright 2024-2025 David E. Wheeler +Copyright 2024 David E. Wheeler Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the β€œSoftware”), to deal diff --git a/Makefile b/Makefile index 218ba4a..b324ea5 100644 --- a/Makefile +++ b/Makefile @@ -1,60 +1,55 @@ GO ?= go -.PHONY: test # Run the unit tests -test: - GOTOOLCHAIN=local $(GO) test ./... -count=1 +SRC_DIR := src +DST_DIR := pub +WASM_EXEC := $(shell tinygo env TINYGOROOT)/targets/wasm_exec.js +# WASM_EXEC := $(shell go env GOROOT)/misc/wasm/wasm_exec.js -.PHONY: cover # Run test coverage -cover: $(shell find . -name \*.go) - GOTOOLCHAIN=local $(GO) test -v -coverprofile=cover.out -covermode=count ./... - @$(GO) tool cover -html=cover.out +playground: $(DST_DIR)/play.wasm $(DST_DIR)/index.html $(DST_DIR)/wasm_exec.js $(DST_DIR)/play.css -.PHONY: lint # Lint the project -lint: .golangci.yaml - @pre-commit run --show-diff-on-failure --color=always --all-files +ROOT_DIR := $(dir $(realpath $(lastword $(MAKEFILE_LIST)))) +$(DST_DIR)/play.wasm: $(SRC_DIR)/main.go + @mkdir -p $(@D) + GOOS=js GOARCH=wasm tinygo build -no-debug -size short -o $@ $< +# cd $(SRC_DIR); GOOS=js GOARCH=wasm go build -o $(ROOT_DIR)/$@ $$(basename "$<") -.PHONY: clean # Remove generated files -clean: - $(GO) clean - @rm -rf cover.out _build +$(DST_DIR)/play.css: $(SRC_DIR)/play.css + mkdir -p $(@D) + cp $< $@ -# WASM -.PHONY: wasm # Build a simple app with Go and TinyGo WASM compilation. 
-wasm: _build/go.wasm _build/tinygo.wasm +$(DST_DIR)/index.html: $(SRC_DIR)/index.html + mkdir -p $(@D) + version=$$(grep jsonpath go.mod | awk '{print $$3}'); cat $< | sed -e "s!{{version}}!$${version}!g" > $@ -_build/go.wasm: internal/wasm/wasm.go - @mkdir -p $(@D) - GOOS=js GOARCH=wasm $(GO) build -o $@ $< +$(DST_DIR)/wasm_exec.js: $(WASM_EXEC) + mkdir -p $(@D) + cp $< $@ -_build/tinygo.wasm: internal/wasm/wasm.go - @mkdir -p $(@D) - GOOS=js GOARCH=wasm tinygo build -no-debug -size short -o $@ $< +.PHONY: run +run: playground + python3 -m http.server --directory $(DST_DIR) -############################################################################ -# Utilities. .PHONY: brew-lint-depends # Install linting tools from Homebrew brew-lint-depends: brew install golangci-lint .PHONY: debian-lint-depends # Install linting tools on Debian debian-lint-depends: - curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sudo sh -s -- -b /usr/bin v2.1.5 + curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sudo sh -s -- -b /usr/bin v2.1.6 -.PHONY: install-generators # Install Go code generators -install-generators: - @$(GO) install golang.org/x/tools/cmd/stringer@v0.29.0 +.PHONY: lint # Lint the project +lint: .pre-commit-config.yaml + @GOOS=js GOARCH=wasm pre-commit run --show-diff-on-failure --color=always --all-files -.PHONY: generate # Generate Go code -generate: - @$(GO) generate ./... +.PHONY: golangci-lint # Run golangci-lint +golangci-lint: .golangci.yaml + golangci-lint run --fix --timeout=5m ## .git/hooks/pre-commit: Install the pre-commit hook .git/hooks/pre-commit: @printf "#!/bin/sh\nmake lint\n" > $@ @chmod +x $@ -submodules: - git submodule update --init - -update-submodules: - git submodule update --recursive --remote --init +.PHONY: clean +clean: + rm -rf $(DST_DIR) diff --git a/README.md b/README.md index 4889072..992eb1e 100644 --- a/README.md +++ b/README.md @@ -1,57 +1,79 @@ -RFC 9535 JSONPath in Go -======================= - -[![βš–οΈ MIT]][mit] [![πŸ“š Docs]][docs] [![πŸ—ƒοΈ Report Card]][card] [![πŸ› οΈ Build Status]][ci] [![πŸ“Š Coverage]][cov] - -The jsonpath package provides [RFC 9535 JSONPath] functionality in Go. - -## Learn More - -* πŸ“¦ See the [package documentation][docs] for usage and examples. -* πŸ“š See the [RFC 9535 JSONPath] standard for details on the JSONPath query - syntax and examples of its usage. -* πŸ› Try it out in the [Playground]. 
- -## JSONPath Expressions - -A brief overview of [RFC 9535 JSONPath] syntax: - -| Syntax Element | Description | -| ------------------ | ----------------------------------------------------------------------- | -| `$` | root node identifier | -| `@` | current node identifier (valid only within filter selectors) | -| `[]` | child segment: selects zero or more children of a node | -| `.name` | shorthand for `['name']` | -| `.*` | shorthand for `[*]` | -| `..[]` | descendant segment: selects zero or more descendants of a node | -| `..name` | shorthand for `..['name']` | -| `..*` | shorthand for `..[*]` | -| `'name'` | name selector: selects a named child of an object | -| `*` | wildcard selector: selects all children of a node | -| `3` | index selector: selects an indexed child of an array (from 0) | -| `0:100:5` | array slice selector: `start:end:step` for arrays | -| `?` | filter selector: selects particular children using a logical expression | -| `length(@.foo)` | function extension: invokes a function in a filter expression | - -## Copyright - -Copyright Β© 2024-2025 David E. Wheeler - - [βš–οΈ MIT]: https://img.shields.io/badge/License-MIT-blue.svg "βš–οΈ MIT License" - [mit]: https://opensource.org/license/MIT "βš–οΈ MIT License" - [πŸ“š Docs]: https://godoc.org/github.com/theory/jsonpath?status.svg "πŸ“š Documentation" - [docs]: https://pkg.go.dev/github.com/theory/jsonpath "πŸ“„ Documentation" - [πŸ—ƒοΈ Report Card]: https://goreportcard.com/badge/github.com/theory/jsonpath - "πŸ—ƒοΈ Report Card" - [card]: https://goreportcard.com/report/github.com/theory/jsonpath - "πŸ—ƒοΈ Report Card" - [πŸ› οΈ Build Status]: https://github.com/theory/jsonpath/actions/workflows/ci.yml/badge.svg - "πŸ› οΈ Build Status" - [ci]: https://github.com/theory/jsonpath/actions/workflows/ci.yml - "πŸ› οΈ Build Status" - [πŸ“Š Coverage]: https://codecov.io/gh/theory/jsonpath/graph/badge.svg?token=UB1UJ95NIK - "πŸ“Š Code Coverage" - [cov]: https://codecov.io/gh/theory/jsonpath "πŸ“Š Code Coverage" - [RFC 9535 JSONPath]: https://www.rfc-editor.org/rfc/rfc9535.html - "RFC 9535 JSONPath: Query Expressions for JSON" - [Playground]: https://theory.github.io/jsonpath/ "Go JSONPath Playground" +Go RFC 9535 JSONPath Playground +=============================== + +The source for the [Go JSONPath Playground], a stateless single-page web site +for experimenting with the [Go RFC 9535 JSONPath] package. Compiled via +[TinyGo] into a ca. 723 K (265 K compressed) [Wasm] file and loaded directly +into the page. All functionality implemented in JavaScript and Go, heavily +borrowed from the [Goldmark Playground] and [serde_json_path Sandbox]. + +Usage +----- + +On load, the form will be filled with sample JSON and a randomly-selected +example query. Hit the "Run Query" button to see the values the path query +selects from the JSON appear in the "Query Output" field. + +To try your own, paste the JSON to query into the "JSON" field and input the +JSONpath query into the "Path" field, then hit the "Run Query" button. + +That's it. + +Read on for details and additional features. + +### Docs + +The two buttons in the top-right corner provide documentation and links. + +* Hit the button with the circled question mark in the top right corner to + reveal a table summarizing the JSONPath syntax. + +* Hit the button with the circled i for information about the JSONPath + playground. 
+ +### Options + +Select options for execution and the display of results: + +* **Located**: Show the normalized path location along with each value. + +### Permalink + +Hit the "Permalink" button instead of "Run Query" to reload the page with a +URL that contains the contents the JSONPath, JSON, and options and executes +the results. Copy the URL to use it for sharing. + +Note that the Playground is stateless; no data is stored except in the +Permalink URL itself (and whatever data collection GitHub injects; see its +[privacy statement] for details). + +### Path + +Input the JSONPath query to execute into this field. On load, the app will +pre-load an example query. See [RFC 9535] for details on the jsonpath +language. + +### JSON Input + +Input the JSON against which to execute the JSONPath query. May be any kind +of JSON value, including objects, arrays, and scalar values. On load, the +field will contain the JSON object used in examples from [RFC 9535]. + +## Copyright and License + +Copyright (c) 2024 David E. Wheeler. Distributed under the [MIT License]. + +Based on [Goldmark Playground] the [serde_json_path Sandbox], with icons from +[Boxicons], all distributed under the [MIT License]. + + [Go JSONPath Playground]: https://theory.github.io/jsonpath/playground + [Go RFC 9535 JSONPath]: https://pkg.go.dev/github.com/theory/jsonpath + "pkg.go.dev: github.com/theory/jsonpath" + [Wasm]: https://webassembly.org "WebAssembly" + [TinyGo]: https://tinygo.org + [Goldmark Playground]: https://yuin.github.io/goldmark/playground + [serde_json_path Sandbox]: https://serdejsonpath.live + [privacy statement]: https://docs.github.com/en/site-policy/privacy-policies/github-general-privacy-statement + [RFC 9535]: https://www.rfc-editor.org/rfc/rfc9535.html + [MIT License]: https://opensource.org/license/mit + [Boxicons]: https://boxicons.com diff --git a/go.mod b/go.mod index f6a5912..60a2040 100644 --- a/go.mod +++ b/go.mod @@ -1,14 +1,7 @@ -module github.com/theory/jsonpath +module main -go 1.23 +go 1.24 -require github.com/stretchr/testify v1.10.0 +toolchain go1.24.2 -require ( - github.com/davecgh/go-spew v1.1.1 // indirect - github.com/kr/pretty v0.3.0 // indirect - github.com/pmezard/go-difflib v1.0.0 // indirect - github.com/rogpeppe/go-internal v1.12.0 // indirect - gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c // indirect - gopkg.in/yaml.v3 v3.0.1 // indirect -) +require github.com/theory/jsonpath v0.9.0 diff --git a/go.sum b/go.sum index e13ef19..d275b1f 100644 --- a/go.sum +++ b/go.sum @@ -1,25 +1,10 @@ -github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= -github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= -github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0= -github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk= -github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= -github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= -github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= -github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= 
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= -github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU5NdKM8= -github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4= github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= -gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= -gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= -gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= +github.com/theory/jsonpath v0.9.0 h1:7of3UBzdNB9peRb8OyW0Pdo9NATPHTTa2D+Br7rMxEU= +github.com/theory/jsonpath v0.9.0/go.mod h1:yv+crL58A+g3yxLr1sbOyn8H+L/6kS4AMXlXeVGOuNU= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/internal/wasm/wasm.go b/internal/wasm/wasm.go deleted file mode 100644 index dd668d4..0000000 --- a/internal/wasm/wasm.go +++ /dev/null @@ -1,24 +0,0 @@ -// Package main performs a basic JSONPath query in order to test WASM compilation. -package main - -import ( - "encoding/json" - "fmt" - - "github.com/theory/jsonpath" -) - -func main() { - // Parse a jsonpath query. - p, _ := jsonpath.Parse(`$.foo`) - - // Select values from unmarshaled JSON input. - result := p.Select([]byte(`{"foo": "bar"}`)) - - // Show the result. - //nolint:errchkjson - items, _ := json.Marshal(result) - - //nolint:forbidigo - fmt.Printf("%s\n", items) -} diff --git a/internal/wasm/wasm_test.go b/internal/wasm/wasm_test.go deleted file mode 100644 index dba06b0..0000000 --- a/internal/wasm/wasm_test.go +++ /dev/null @@ -1,10 +0,0 @@ -package main - -import ( - "testing" -) - -func TestMain(t *testing.T) { - t.Parallel() - main() -} diff --git a/jsonpath-compliance-test-suite b/jsonpath-compliance-test-suite deleted file mode 160000 index 05f6cac..0000000 --- a/jsonpath-compliance-test-suite +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 05f6cac786bf0cce95437e6f1adedc3186d54a71 diff --git a/parser/lex.go b/parser/lex.go deleted file mode 100644 index 928a130..0000000 --- a/parser/lex.go +++ /dev/null @@ -1,552 +0,0 @@ -package parser - -import ( - "fmt" - "strconv" - "strings" - "unicode" - "unicode/utf16" - "unicode/utf8" -) - -// token represents a single token in the input stream. -// Name: mnemonic name (numeric). -// Val: string value of the token from the original stream. -// Pos: position - offset from beginning of stream. -type token struct { - // tok identifies the token, either a rune or a negative number - // representing a special token. - tok rune - - // val contains the string representation of the token. When tok is - // goString, val will be a fully-parsed Go string. When tok is invalid, - // val will be the error message. - val string - - // pos represents the position of the start of the token. - pos int -} - -const ( - // Non-rune special tokens are negative. 
- invalid = -(iota + 1) - eof - identifier - integer - number - goString - blankSpace - boolTrue - boolFalse - jsonNull -) - -// Special values. -const ( - // Numeric bases. - decimal = 10 - hex = 16 - - // Token literals. - nullByte = 0x00 - backslash = '\\' - - // blanks selects blank space characters. - blanks = uint64(1<<'\t' | 1<<'\n' | 1<<'\r' | 1<<' ') -) - -// name returns the token name, a string for special tokens and a quoted rune -// or escaped rune. -func (tok token) name() string { - switch tok.tok { - case invalid: - return "invalid" - case eof: - return "eof" - case identifier: - return "identifier" - case integer: - return "integer" - case number: - return "number" - case goString: - return "string" - case blankSpace: - return "blank space" - default: - return strconv.QuoteRune(tok.tok) - } -} - -// String returns a string representation of the token. -func (tok token) String() string { - return fmt.Sprintf("Token{%v, %q, %v}", tok.name(), tok.val, tok.pos) -} - -// err returns an error for invalid tokens and nil for all other tokens. -func (tok token) err() error { - if tok.tok != invalid { - return nil - } - return fmt.Errorf("%w: %v at %v", ErrPathParse, tok.val, tok.pos) -} - -// errToken creates and returns an error token. -func (lex *lexer) errToken(pos int, msg string) token { - // Set r to invalid so scan() ceases to scan. - lex.r = invalid - return token{invalid, msg, pos} -} - -// lexer for [RFC 9535] JSONPath queries. -// -// Create a new lexer with NewLexer and then call NextToken repeatedly to get -// tokens from the stream. The lexer will return a token with the name EOF when -// done. -// -// Based on the public domain [TableGen lexer] by [Eli Bendersky]. -// -// [RFC 9535]: https://www.rfc-editor.org/rfc/rfc9535.html -// [TableGen lexer]: https://github.com/eliben/code-for-blog/blob/main/2014/tablegen-lexer-go/lexer-string/lexer.go -// [Eli Bendersky]: https://eli.thegreenplace.net -type lexer struct { - buf string - - // Current rune. - r rune - - // Position of the current rune in buf. - rPos int - - // Position of the next rune in buf. - nextPos int - - // Last scanned token. - prev token -} - -// newLexer creates a new lexer for the given input. -func newLexer(buf string) *lexer { - lex := lexer{buf, -1, 0, 0, token{}} - - // Prime the lexer by calling .next - lex.next() - return &lex -} - -// scan returns the next token. -func (lex *lexer) scan() token { - switch { - case lex.r < 0: - lex.prev = token{eof, "", lex.rPos} - case lex.r == '$': - if isIdentRune(lex.peek(), 0) { - lex.prev = lex.scanIdentifier() - } else { - lex.prev = token{lex.r, "", lex.rPos} - lex.next() - } - case isIdentRune(lex.r, 0): - lex.prev = lex.scanIdentifier() - case isDigit(lex.r) || lex.r == '-': - lex.prev = lex.scanNumber() - case lex.r == '"' || lex.r == '\'': - lex.prev = lex.scanString() - case isBlankSpace(lex.r): - lex.prev = lex.scanBlankSpace() - default: - lex.prev = token{lex.r, "", lex.rPos} - lex.next() - } - - return lex.prev -} - -// next advances the lexer's internal state to point to the next rune in the -// input. -func (lex *lexer) next() rune { - if lex.nextPos < len(lex.buf) { - lex.rPos = lex.nextPos - r, w := rune(lex.buf[lex.nextPos]), 1 - - if r >= utf8.RuneSelf { - r, w = utf8.DecodeRuneInString(lex.buf[lex.nextPos:]) - } - - lex.nextPos += w - lex.r = r - } else { - lex.rPos = len(lex.buf) - lex.r = eof - } - - return lex.r -} - -// peek returns the next byte in the stream (the one after lex.r). 
-// Note: a single byte is peeked at - if there's a rune longer than a byte -// there, only its first byte is returned. Returns eof if there is no next -// byte. -func (lex *lexer) peek() rune { - if lex.nextPos < len(lex.buf) { - return rune(lex.buf[lex.nextPos]) - } - return rune(eof) -} - -// scanBlankSpace scan and returns a token of blank spaces. -func (lex *lexer) scanBlankSpace() token { - startPos := lex.rPos - for isBlankSpace(lex.r) { - lex.next() - } - return token{blankSpace, lex.buf[startPos:lex.rPos], startPos} -} - -// skipBlankSpace skips blank spaces from the current position until the next -// non-blank space token. -func (lex *lexer) skipBlankSpace() rune { - if isBlankSpace(lex.r) { - lex.scan() - } - return lex.r -} - -// isBlankSpace returns true if r is blank space. -func isBlankSpace(r rune) bool { - return blanks&(1<= 'a' && r <= 'z') || - ('A' <= r && r <= 'Z') || - r == '_' || - ('0' <= r && r <= '9') || - (0x80 <= r && r <= 0xd7ff) || - (0xE000 <= r && r <= 0x10FFFF) -} - -// scanNumber scans an integer or decimal number. -func (lex *lexer) scanNumber() token { - startPos := lex.rPos - - // Start with integer. - switch lex.r { - case '0': - next := lex.next() - if next != '.' && next != 'e' { - if isDigit(next) { - // No leading zeros for integers. - return lex.errToken(startPos, "invalid number literal") - } - // Standalone zero. - return token{integer, "0", startPos} - } - case '-': - // ["-"] DIGIT1 *DIGIT / (int / "-0") [ frac ] [ exp ] - next := lex.next() - switch { - case next == '0': - if isDigit(lex.peek()) { - // No leading zeros. - return lex.errToken(startPos, "invalid number literal") - } - case !isDigit(next): - // Need digits after decimal. - return lex.errToken(startPos, "invalid number literal") - } - fallthrough - default: - for isDigit(lex.r) { - lex.next() - } - } - - // We have the integer part. Is there a decimal and/or exponent? - switch lex.r { - case '.': - // Parse fraction: "." 1*DIGIT - if !isDigit(lex.next()) { - // No digits after decimal. - return lex.errToken(startPos, "invalid number literal") - } - // Collect remaining digits. - for isDigit(lex.r) { - lex.next() - } - switch lex.r { - case 'e', 'E': - // Exponent. - return lex.scanExponent(startPos) - } - return token{number, lex.buf[startPos:lex.rPos], startPos} - case 'e', 'E': - // Exponent. - return lex.scanExponent(startPos) - default: - // Just an integer. - return token{integer, lex.buf[startPos:lex.rPos], startPos} - } -} - -// scanExponent scans the exponent part of a decimal number. lex.r should be -// set to 'e' and expect to be followed by an optional '-' or '+' and then one -// or more digits. -func (lex *lexer) scanExponent(startPos int) token { - // Parse exponent: "e" [ "-" / "+" ] 1*DIGIT - switch lex.next() { - case '-', '+': - if !isDigit(lex.next()) { - // No digit after + or - - return lex.errToken(startPos, "invalid number literal") - } - default: - if !isDigit(lex.r) { - // No digit after e - return lex.errToken(startPos, "invalid number literal") - } - } - - // Consume remaining digits. - for isDigit(lex.r) { - lex.next() - } - return token{number, lex.buf[startPos:lex.rPos], startPos} -} - -// scanString scans and parses a single- or double-quoted JavaScript string. -// Token.Val contains the parsed value. Returns an error token on error. -func (lex *lexer) scanString() token { - startPos := lex.rPos - q := lex.r - lex.next() - buf := new(strings.Builder) - -NEXT: - for lex.r > 0 { - switch { - case isUnescaped(lex.r, q): - // Regular character. 
- buf.WriteRune(lex.r) - lex.next() - case lex.r == '\\': - if !lex.writeEscape(q, buf) { - pos := lex.rPos - lex.next() - return lex.errToken(pos, "invalid escape after backslash") - } - default: - // End of string or buffer. - break NEXT - } - } - - if lex.r != q { - return lex.errToken(lex.rPos, "unterminated string literal") - } - - lex.next() - return token{goString, buf.String(), startPos} -} - -// writeEscape handles string escapes in the context of a string. Set q to ' -// or " to indicate a single or double-quoted string context, respectively, -// and -1 for an identifier. lex.r should be set to the rune following the -// backslash that initiated the escape. -func (lex *lexer) writeEscape(q rune, buf *strings.Builder) bool { - // Starting an escape sequence. - next := lex.next() - if r := unescape(next, q); r > 0 { - // A single-character escape. - buf.WriteRune(r) - lex.next() - return true - } - - if next == '\u0075' { // uXXXX U+XXXX - // \uXXXX unicode escape. - if r := lex.parseUnicode(); r >= 0 { - buf.WriteRune(r) - lex.next() - return true - } - } - return false -} - -// Returns true if r is a regular, non-escaped character. Pass q as " or ' to -// indicate the scanning of a double or single quotation string. -func isUnescaped(r rune, q rune) bool { - switch { - case r == q: - return false - case r >= '\u0020' && r <= '\u005b': // omit 0x5C \ - return true - case r >= '\u005d' && r <= '\ud7ff': // skip surrogate code points - return true - case r >= '\ue000' && r <= '\U0010FFFF': - return true - default: - return false - } -} - -// Returns the value corresponding to escape code r. Pass q as " or ' to -// indicate the scanning of a double or single quotation string. -func unescape(r rune, q rune) rune { - switch r { - case q: // ' or " - return q - case '\u0062': // b BS backspace U+0008 - return '\u0008' - case '\u0066': // f FF form feed U+000C - return '\u000c' - case '\u006e': // n LF line feed U+000A - return '\u000a' - case '\u0072': // r CR carriage return U+000D - return '\u000d' - case '\u0074': // t HT horizontal tab U+0009 - return '\u0009' - case '/': // / slash (solidus) U+002F - return '/' - case '\\': // \ backslash (reverse solidus) U+005C - return '\\' - default: - return 0 - } -} - -// Parses a \u unicode escape sequence. Returns invalid (-1) on error. -func (lex *lexer) parseUnicode() rune { - if !isHexDigit(lex.next()) { - return rune(invalid) - } - - if lex.r != 'd' && lex.r != 'D' { - // non-surrogate DIGIT / "A"/"B"/"C" / "E"/"F" - return lex.scanUnicode() - } - - switch lex.peek() { - case '0', '1', '2', '3', '4', '5', '6', '7': - // non-surrogate "D" %x30-37 2HEXDIG - return lex.scanUnicode() - } - - // potential high-surrogate "D" ("8"/"9"/"A"/"B") 2HEXDIG - high := lex.scanUnicode() - - // Must be followed by \u - if high < nullByte || lex.next() != '\\' || lex.next() != 'u' { - return rune(invalid) - } - - // potential low-surrogate "D" ("C"/"D"/"E"/"F") 2HEXDIG - lex.next() - low := lex.scanUnicode() - if low < nullByte { - return rune(invalid) - } - - // Merge and return the surrogate pair, if valid. - if dec := utf16.DecodeRune(high, low); dec != unicode.ReplacementChar { - return dec - } - // Adjust position to start of second \u escape for error reporting. - lex.rPos -= 3 - - return rune(invalid) -} - -// scanUnicode scans the current rune plus the next four and merges them into -// the resulting unicode rune. Returns invalid (-1) on error. 
-func (lex *lexer) scanUnicode() rune { - rr := hexChar(lex.r) - for i := range 3 { - c := hexChar(lex.next()) - if c < nullByte { - // Reset to before first rune for error reporting. - lex.rPos -= i + 1 - return c - } - - rr = rr*hex + c - } - return rr -} - -// isHexDigit returns true if r represents a hex digit matching [0-9a-fA-F]. -func isHexDigit(r rune) bool { - return isDigit(r) || (r >= 'A' && r <= 'F') || (r >= 'a' && r <= 'f') -} - -// hexChar returns the byte value corresponding to hex character c. -func hexChar(c rune) rune { - switch { - case '0' <= c && c <= '9': - return c - '0' - case 'a' <= c && c <= 'f': - return c - 'a' + decimal - case 'A' <= c && c <= 'F': - return c - 'A' + decimal - default: - return rune(invalid) - } -} - -// isDigit returns true if r is a digit ([0-9]). -func isDigit(r rune) bool { - return '0' <= r && r <= '9' -} diff --git a/parser/lex_test.go b/parser/lex_test.go deleted file mode 100644 index fb96d15..0000000 --- a/parser/lex_test.go +++ /dev/null @@ -1,919 +0,0 @@ -package parser - -import ( - "math" - "strconv" - "strings" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestScanString(t *testing.T) { - t.Parallel() - a := assert.New(t) - - for _, tc := range []struct { - name string - in string - tok token - }{ - { - name: "empty_dq", - in: ``, - tok: token{goString, "", 0}, - }, - { - name: "one_char_dq", - in: `x`, - tok: token{goString, "x", 0}, - }, - { - name: "multi_char_dq", - in: `hello there`, - tok: token{goString, "hello there", 0}, - }, - { - name: "utf8_dq", - in: `hello ΓΈ`, - tok: token{goString, "hello ΓΈ", 0}, - }, - { - name: "emoji_dq", - in: `hello πŸ‘‹πŸ»`, - tok: token{goString, "hello πŸ‘‹πŸ»", 0}, - }, - { - name: "emoji_escape_q_dq", - in: `hello \"πŸ‘‹πŸ»\" there`, - tok: token{goString, `hello "πŸ‘‹πŸ»" there`, 0}, - }, - { - name: "escapes_dq", - in: `\b\f\n\r\t\/\\`, - tok: token{goString, "\b\f\n\r\t/\\", 0}, - }, - { - name: "unicode_dq", - in: `fo\u00f8`, - tok: token{goString, "foΓΈ", 0}, - }, - { - name: "non_surrogate_start_dq", - in: `\u00f8y vey`, - tok: token{goString, "ΓΈy vey", 0}, - }, - { - name: "non_surrogate_end_dq", - in: `fo\u00f8`, - tok: token{goString, "foΓΈ", 0}, - }, - { - name: "non_surrogate_mid_dq", - in: `fo\u00f8 bar`, - tok: token{goString, "foΓΈ bar", 0}, - }, - { - name: "non_surrogate_start_d_dq", - in: `\ud3c0 yep`, - tok: token{goString, "폀 yep", 0}, - }, - { - name: "non_surrogate_end_d_dq", - in: `got \ud3c0`, - tok: token{goString, "got 폀", 0}, - }, - { - name: "non_surrogate_mid_d_dq", - in: `got \ud3c0 yep`, - tok: token{goString, "got 폀 yep", 0}, - }, - { - name: "surrogate_pair_dq", - in: `\uD834\uDD1E`, - tok: token{goString, "\U0001D11E", 0}, - }, - { - name: "surrogate_pair_start_dq", - in: `\uD834\uDD1E yep`, - tok: token{goString, "\U0001D11E yep", 0}, - }, - { - name: "surrogate_pair_end_dq", - in: `go \uD834\uDD1E`, - tok: token{goString, "go \U0001D11E", 0}, - }, - { - name: "invalid_unicode_dq", - in: `fo\u0f8`, - tok: token{invalid, "invalid escape after backslash", 5}, - }, - { - name: "invalid_non_surrogate_start_d_dq", - in: `\ud30 yep`, - tok: token{invalid, "invalid escape after backslash", 3}, - }, - { - name: "invalid_surrogate_high_dq", - in: `\uD8x4\uDD1E`, - tok: token{invalid, "invalid escape after backslash", 3}, - }, - { - name: "invalid_surrogate_low_dq", - in: `\uD834\uDDxE`, - tok: token{invalid, "invalid escape after backslash", 9}, - }, - { - name: "surrogate_low_not_d_dq", - in: 
`\uD834\uED1E`, - tok: token{invalid, "invalid escape after backslash", 9}, - }, - { - name: "surrogate_low_not_a_f_dq", - in: `\uD834\ud11E`, - tok: token{invalid, "invalid escape after backslash", 9}, - }, - { - name: "no_surrogate_low_dq", - in: `\uD834 oops`, - tok: token{invalid, "invalid escape after backslash", 7}, - }, - { - name: "bad_escape_dq", - in: `left \7 right`, - tok: token{invalid, "invalid escape after backslash", 7}, - }, - { - name: "unicode_not_hex_dq", - in: `hi \ux234 oops`, - tok: token{invalid, "invalid escape after backslash", 6}, - }, - { - name: "dollar_start", - in: `$xyz`, - tok: token{goString, "$xyz", 0}, - }, - { - name: "dollar_end", - in: `xyz$`, - tok: token{goString, "xyz$", 0}, - }, - { - name: "dollar_mid", - in: `xy$z`, - tok: token{goString, "xy$z", 0}, - }, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - - // Test double-quoted. - lex := newLexer(`"` + tc.in + `"`) - a.Equal('"', lex.r) - a.Equal(tc.tok, lex.scanString()) - - // Test single-quoted. - lex = newLexer(`'` + strings.ReplaceAll(tc.in, `"`, `'`) + `'`) - a.Equal('\'', lex.r) - if tc.tok.tok != invalid { - tc.tok.val = strings.ReplaceAll(tc.tok.val, `"`, `'`) - } - a.Equal(tc.tok, lex.scanString()) - - // Test identifier. - if tc.in != "" && !strings.ContainsAny(tc.in, "\\$") { - lex := newLexer(strings.ReplaceAll(tc.in, ` `, `_`)) - a.Equal(rune(tc.in[0]), lex.r) - if tc.tok.tok == invalid { - tc.tok.pos-- - } else { - tc.tok.val = strings.ReplaceAll(tc.tok.val, ` `, `_`) - } - if tc.tok.tok == goString { - tc.tok.tok = identifier - } - a.Equal(tc.tok, lex.scanIdentifier()) - } - }) - } - - // Test unclosed strings. - t.Run("unclosed", func(t *testing.T) { - t.Parallel() - a.Equal(rune(invalid), newLexer(`"food`).scanString().tok) - a.Equal(rune(invalid), newLexer(`'food`).scanString().tok) - }) -} - -func TestScanIdentifier(t *testing.T) { - t.Parallel() - a := assert.New(t) - - for _, tc := range []struct { - name string - in string - tok token - }{ - { - name: "with_emoji", - in: "say_πŸ˜€", - tok: token{identifier, "say_πŸ˜€", 0}, - }, - { - name: "with_surrogate_pair", - in: "say_\U0001D11E", - tok: token{identifier, "say_π„ž", 0}, - }, - { - name: "newline", - in: "say\n", - tok: token{identifier, "say", 0}, - }, - { - name: "linefeed", - in: "xxx\f", - tok: token{identifier, "xxx", 0}, - }, - { - name: "return", - in: "abx_xyx\ryup", - tok: token{identifier, "abx_xyx", 0}, - }, - { - name: "whitespace", - in: "go on", - tok: token{identifier, "go", 0}, - }, - { - name: "true", - in: "true", - tok: token{boolTrue, "true", 0}, - }, - { - name: "false", - in: "false", - tok: token{boolFalse, "false", 0}, - }, - { - name: "null", - in: "null", - tok: token{jsonNull, "null", 0}, - }, - { - name: "true_stop_at_escaped", - in: `tru\u0065`, - tok: token{identifier, "tru", 0}, - }, - { - name: "false_stop_at_escaped", - in: `fals\u0065`, - tok: token{identifier, "fals", 0}, - }, - { - name: "null_stop_at_escaped", - in: `n\u0075ll`, - tok: token{identifier, "n", 0}, - }, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - lex := newLexer(tc.in) - a.Equal(tc.tok, lex.scanIdentifier()) - }) - } -} - -func TestScanNumber(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - for _, tc := range []struct { - name string - in string - tok token - num any - }{ - { - name: "zero", - in: "0", - tok: token{integer, "0", 0}, - num: int64(0), - }, - { - name: "zero_and_more", - in: "0 say", - tok: token{integer, "0", 0}, - num: int64(0), - }, - { - name: 
"one", - in: "1", - tok: token{integer, "1", 0}, - num: int64(1), - }, - { - name: "nine", - in: "9", - tok: token{integer, "9", 0}, - num: int64(9), - }, - { - name: "twelve", - in: "12", - tok: token{integer, "12", 0}, - num: int64(12), - }, - { - name: "hundred", - in: "100", - tok: token{integer, "100", 0}, - num: int64(100), - }, - { - name: "max_int", - in: strconv.FormatInt(math.MaxInt64, 10), - tok: token{integer, strconv.FormatInt(math.MaxInt64, 10), 0}, - num: int64(math.MaxInt64), - }, - { - name: "neg_one", - in: "-1", - tok: token{integer, "-1", 0}, - num: int64(-1), - }, - { - name: "neg_42", - in: "-42", - tok: token{integer, "-42", 0}, - num: int64(-42), - }, - { - name: "neg_zero", - in: "-0 oops", - tok: token{integer, "-0", 0}, - num: int64(0), - }, - { - name: "leading_zero", - in: "032", - tok: token{invalid, "invalid number literal", 0}, - }, - { - name: "zero_frac", - in: "0.1", - tok: token{number, "0.1", 0}, - num: float64(0.1), - }, - { - name: "zero_frac_more", - in: "0.09323200/", - tok: token{number, "0.09323200", 0}, - num: float64(0.093232), - }, - { - name: "more_frac", - in: "42.234853+", - tok: token{number, "42.234853", 0}, - num: float64(42.234853), - }, - { - name: "neg_frac", - in: "-42.734/", - tok: token{number, "-42.734", 0}, - num: float64(-42.734), - }, - { - name: "neg_zero_frac", - in: "-0.23", - tok: token{number, "-0.23", 0}, - num: float64(-0.23), - }, - { - name: "double_zero_frac", - in: "01.23", - tok: token{invalid, "invalid number literal", 0}, - }, - { - name: "neg_double_zero_frac", - in: "-01.23", - tok: token{invalid, "invalid number literal", 0}, - }, - { - name: "missing_frac", - in: "42.x", - tok: token{invalid, "invalid number literal", 0}, - }, - { - name: "missing_neg_frac", - in: "-42.x", - tok: token{invalid, "invalid number literal", 0}, - }, - { - name: "zero_exp", - in: "0e12", - tok: token{number, "0e12", 0}, - num: float64(0e12), - }, - { - name: "numb_exp", - in: "42E124", - tok: token{number, "42E124", 0}, - num: float64(42e124), - }, - { - name: "neg_zero_exp", - in: "-0e123", - tok: token{number, "-0e123", 0}, - num: float64(-0e123), - }, - { - name: "neg_exp", - in: "-42E123", - tok: token{number, "-42E123", 0}, - num: float64(-42e123), - }, - { - name: "lead_zero_exp", - in: "00e12", - tok: token{invalid, "invalid number literal", 0}, - }, - { - name: "exp_plus", - in: "99e+123", - tok: token{number, "99e+123", 0}, - num: float64(99e+123), - }, - { - name: "exp_minus", - in: "99e-01234", - tok: token{number, "99e-01234", 0}, - num: float64(99e-01234), - }, - { - name: "exp_decimal", - in: "12.32E3", - tok: token{number, "12.32E3", 0}, - num: float64(12.32e3), - }, - { - name: "exp_plus_no_digits", - in: "99e++", - tok: token{invalid, "invalid number literal", 0}, - }, - { - name: "exp_minus_no_digits", - in: "99e-x lol", - tok: token{invalid, "invalid number literal", 0}, - }, - { - name: "neg_no_digits", - in: "-lol", - tok: token{invalid, "invalid number literal", 0}, - }, - { - name: "exp_no_digits", - in: "42eek", - tok: token{invalid, "invalid number literal", 0}, - }, - // https://go.dev/ref/spec#Integer_literals - { - name: "integer", - in: "42", - tok: token{integer, "42", 0}, - num: int64(42), - }, - { - name: "long_int", - in: "170141183460469231731687303715884105727", - tok: token{integer, "170141183460469231731687303715884105727", 0}, - num: false, // integer too large, will not parse - }, - // https://go.dev/ref/spec#Floating-point_literals - { - name: "float", - in: "72.40", - tok: token{number, 
"72.40", 0}, - num: float64(72.4), - }, - { - name: "float_2", - in: "2.71828", - tok: token{number, "2.71828", 0}, - num: float64(2.71828), - }, - { - name: "float_3", - in: "6.67428e-11", - tok: token{number, "6.67428e-11", 0}, - num: float64(6.67428e-11), - }, - { - name: "float_4", - in: "1e6", - tok: token{number, "1e6", 0}, - num: float64(1e6), - }, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - lex := newLexer(tc.in) - tok := lex.scanNumber() - a.Equal(tc.tok, tok) - - // Test that we can parse the values. - var ( - num any - err error - ) - - switch tc.tok.tok { - case integer: - num, err = strconv.ParseInt(tok.val, 10, 64) - case number: - num, err = strconv.ParseFloat(tok.val, 64) - default: - return - } - - if _, ok := tc.num.(bool); ok { - // Not a valid value. - r.Error(err) - } else { - r.NoError(err) - a.Equal(tc.num, num) - } - }) - } -} - -func TestScanBlankSpace(t *testing.T) { - t.Parallel() - a := assert.New(t) - - for _, tc := range []struct { - name string - in string - tok token - next rune - }{ - { - name: "empty", - in: "", - tok: token{blankSpace, "", 0}, - next: eof, - }, - { - name: "no_spaces", - in: "xxx", - tok: token{blankSpace, "", 0}, - next: 'x', - }, - { - name: "space", - in: " ", - tok: token{blankSpace, " ", 0}, - next: eof, - }, - { - name: "spaces", - in: " ", - tok: token{blankSpace, " ", 0}, - next: eof, - }, - { - name: "spacey", - in: " y", - tok: token{blankSpace, " ", 0}, - next: 'y', - }, - { - name: "newline", - in: "\n", - tok: token{blankSpace, "\n", 0}, - next: eof, - }, - { - name: "newlines", - in: "\n\n\n\n", - tok: token{blankSpace, "\n\n\n\n", 0}, - next: eof, - }, - { - name: "newline_plus", - in: "\n\n\n\ngo on", - tok: token{blankSpace, "\n\n\n\n", 0}, - next: 'g', - }, - { - name: "linefeed", - in: "\r", - tok: token{blankSpace, "\r", 0}, - next: eof, - }, - { - name: "multiple_linefeed", - in: "\r\r\r\r", - tok: token{blankSpace, "\r\r\r\r", 0}, - next: eof, - }, - { - name: "linefeed_plus", - in: "\r\r\r\rgo on", - tok: token{blankSpace, "\r\r\r\r", 0}, - next: 'g', - }, - { - name: "tab", - in: "\t", - tok: token{blankSpace, "\t", 0}, - next: eof, - }, - { - name: "multiple_tab", - in: "\t\t\t\t", - tok: token{blankSpace, "\t\t\t\t", 0}, - next: eof, - }, - { - name: "tab_plus", - in: "\t\t\t\tgo on", - tok: token{blankSpace, "\t\t\t\t", 0}, - next: 'g', - }, - { - name: "mix_blanks", - in: "\t \r\n\t \r\n\t lol", - tok: token{blankSpace, "\t \r\n\t \r\n\t ", 0}, - next: 'l', - }, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - lex := newLexer(tc.in) - a.Equal(tc.tok, lex.scanBlankSpace()) - lex = newLexer(tc.in) - lex.skipBlankSpace() - a.Equal(tc.next, lex.r) - }) - } -} - -func TestScanTokens(t *testing.T) { - t.Parallel() - a := assert.New(t) - - for _, tc := range []struct { - name string - in string - tokens []token - }{ - { - name: "empty", - in: "", - tokens: []token{}, - }, - { - name: "dollar", - in: "$", - tokens: []token{{'$', "", 0}}, - }, - { - name: "dollar_dot_string", - in: "$.foo", - tokens: []token{ - {'$', "", 0}, - {'.', "", 1}, - {identifier, "foo", 2}, - }, - }, - { - name: "bracket_space_int_bracket", - in: "[ 42]", - tokens: []token{ - {'[', "", 0}, - {blankSpace, " ", 1}, - {integer, "42", 3}, - {']', "", 5}, - }, - }, - { - name: "string_bracket_int_bracket", - in: "'hello'[42]", - tokens: []token{ - {goString, "hello", 0}, - {'[', "", 7}, - {integer, "42", 8}, - {']', "", 10}, - }, - }, - { - name: "number_space_unclosed_string", - in: `98.6 "foo`, - tokens: []token{ - 
{number, "98.6", 0}, - {blankSpace, " ", 4}, - {invalid, "unterminated string literal", 9}, - }, - }, - { - name: "number_space_string_invalid_escape", - in: `98.6 'foo\x'`, - tokens: []token{ - {number, "98.6", 0}, - {blankSpace, " ", 4}, - {invalid, "invalid escape after backslash", 10}, - }, - }, - { - name: "number_space_string_invalid_unicode", - in: `98.6 'foo\uf3xx'`, - tokens: []token{ - {number, "98.6", 0}, - {blankSpace, " ", 4}, - {invalid, "invalid escape after backslash", 11}, - }, - }, - { - name: "number_space_string_invalid_low_surrogate", - in: `98.6 "foo\uD834\uED1E"`, - tokens: []token{ - {number, "98.6", 0}, - {blankSpace, " ", 4}, - {invalid, "invalid escape after backslash", 17}, - }, - }, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - lex := newLexer(tc.in) - a.Equal(token{}, lex.prev) - tokens := make([]token, 0, len(tc.tokens)) - for t := lex.scan(); t.tok != eof; t = lex.scan() { - tokens = append(tokens, t) - a.Equal(t, lex.prev) - } - a.Equal(tc.tokens, tokens) - }) - } -} - -func TestToken(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - for _, tc := range []struct { - name string - tok token - id string - str string - err string - }{ - { - name: "invalid", - id: "invalid", - tok: token{invalid, "oops", 12}, - str: `Token{invalid, "oops", 12}`, - err: "jsonpath: oops at 12", - }, - { - name: "eof", - id: "eof", - tok: token{eof, "", 12}, - str: `Token{eof, "", 12}`, - }, - { - name: "identifier", - id: "identifier", - tok: token{identifier, "foo", 12}, - str: `Token{identifier, "foo", 12}`, - }, - { - name: "integer", - id: "integer", - tok: token{integer, "42", 12}, - str: `Token{integer, "42", 12}`, - }, - { - name: "number", - id: "number", - tok: token{number, "98.6", 12}, - str: `Token{number, "98.6", 12}`, - }, - { - name: "string", - id: "string", - tok: token{goString, "πŸ‘‹πŸ» there", 12}, - str: `Token{string, "πŸ‘‹πŸ» there", 12}`, - }, - { - name: "blankSpace", - id: "blank space", - tok: token{blankSpace, " \t", 3}, - str: `Token{blank space, " \t", 3}`, - }, - { - name: "bracket", - id: "'['", - tok: token{'[', "[", 3}, - str: `Token{'[', "[", 3}`, - }, - { - name: "dollar", - id: "'$'", - tok: token{'$', "$", 3}, - str: `Token{'$', "$", 3}`, - }, - { - name: "dot", - id: "'.'", - tok: token{'.', ".", 3}, - str: `Token{'.', ".", 3}`, - }, - { - name: "multibyte", - id: "'ΓΌ'", - tok: token{'ΓΌ', "ΓΌ", 3}, - str: `Token{'ΓΌ', "ΓΌ", 3}`, - }, - { - name: "emoji", - id: "'🐢'", - tok: token{'🐢', "🐢", 3}, - str: `Token{'🐢', "🐢", 3}`, - }, - { - name: "surrogate_pair", - id: "'\U0001D11E'", - tok: token{'π„ž', "π„ž", 3}, - str: `Token{'π„ž', "π„ž", 3}`, - }, - { - name: "newline", - id: `'\n'`, - tok: token{'\n', "\n", 3}, - str: `Token{'\n', "\n", 3}`, - }, - { - name: "tab", - id: `'\t'`, - tok: token{'\t', "\t", 3}, - str: `Token{'\t', "\t", 3}`, - }, - { - name: "null_byte", - id: `'\x00'`, - tok: token{'\u0000', "\x00", 3}, - str: `Token{'\x00', "\x00", 3}`, - }, - { - name: "cancel", - id: `'\x18'`, - tok: token{'\u0018', "\x18", 3}, - str: `Token{'\x18', "\x18", 3}`, - }, - { - name: "bel", - id: `'\a'`, - tok: token{'\007', "\007", 3}, - str: `Token{'\a', "\a", 3}`, - }, - { - name: "unicode_space", - id: `'\u2028'`, - tok: token{'\u2028', "\u2028", 3}, - str: `Token{'\u2028', "\u2028", 3}`, - }, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - a.Equal(tc.id, tc.tok.name()) - a.Equal(tc.str, tc.tok.String()) - err := tc.tok.err() - if tc.err == "" { - r.NoError(err) - } else { - 
r.EqualError(err, tc.err) - r.ErrorIs(err, ErrPathParse) - } - }) - } -} - -func TestPeek(t *testing.T) { - t.Parallel() - a := assert.New(t) - - input := "this is it" - lex := newLexer(input) - for _, r := range input[1:] { - a.Equal(r, lex.peek()) - lex.next() - } - - for range 3 { - a.Equal(rune(eof), lex.peek()) - } -} diff --git a/parser/parse.go b/parser/parse.go deleted file mode 100644 index e99eafb..0000000 --- a/parser/parse.go +++ /dev/null @@ -1,736 +0,0 @@ -// Package parser parses RFC 9535 JSONPath queries into parse trees. Most -// JSONPath users will use package [github.com/theory/jsonpath] instead of -// this package. -package parser - -import ( - "errors" - "fmt" - "strconv" - - "github.com/theory/jsonpath/registry" - "github.com/theory/jsonpath/spec" -) - -// ErrPathParse errors are returned for path parse errors. -var ErrPathParse = errors.New("jsonpath") - -func makeError(tok token, msg string) error { - return fmt.Errorf("%w: %v at position %v", ErrPathParse, msg, tok.pos+1) -} - -// unexpected creates and returns an error for an unexpected token. For -// invalid tokens, the error will be as returned by the lexer. Otherwise, the -// error will "unexpected: $name". -func unexpected(tok token) error { - if tok.tok == invalid { - // Lex error message in the token value. - return makeError(tok, tok.val) - } - return makeError(tok, "unexpected "+tok.name()) -} - -type parser struct { - lex *lexer - reg *registry.Registry -} - -// Parse parses path, a JSONPath query string, into a [spec.PathQuery]. -// Returns a [ErrPathParse] on parse failure. -func Parse(reg *registry.Registry, path string) (*spec.PathQuery, error) { - lex := newLexer(path) - tok := lex.scan() - p := parser{lex, reg} - - switch tok.tok { - case '$': - // All path queries must start with $. - q, err := p.parseQuery(true) - if err != nil { - return nil, err - } - // Should have scanned to the end of input. - if lex.r != eof { - return nil, unexpected(lex.scan()) - } - return q, nil - case eof: - // The token contained nothing. - return nil, fmt.Errorf("%w: unexpected end of input", ErrPathParse) - default: - return nil, unexpected(tok) - } -} - -// parseQuery parses a query expression. lex.r should be set to $ (or, -// eventually, @) before calling. Returns the parsed Query. -func (p *parser) parseQuery(root bool) (*spec.PathQuery, error) { - segs := []*spec.Segment{} - lex := p.lex - for { - switch { - case lex.r == '[': - // Start of segment; scan selectors - lex.scan() - selectors, err := p.parseSelectors() - if err != nil { - return nil, err - } - segs = append(segs, spec.Child(selectors...)) - case lex.r == '.': - // Start of a name selector, wildcard, or descendant segment. - lex.scan() - if lex.r == '.' { - // Consume `.` and parse descendant. - lex.scan() - seg, err := p.parseDescendant() - if err != nil { - return nil, err - } - segs = append(segs, seg) - continue - } - // Child segment with a name or wildcard selector. - sel, err := parseNameOrWildcard(lex) - if err != nil { - return nil, err - } - segs = append(segs, spec.Child(sel)) - case isBlankSpace(lex.r): - switch lex.peekPastBlankSpace() { - case '.', '[': - lex.scanBlankSpace() - continue - } - fallthrough - default: - // Done parsing. - return spec.Query(root, segs...), nil - } - } -} - -// parseNameOrWildcard parses a name or '*' wildcard selector. Returns the -// parsed Selector. 
-func parseNameOrWildcard(lex *lexer) (spec.Selector, error) { - switch tok := lex.scan(); tok.tok { - case identifier: - return spec.Name(tok.val), nil - case '*': - return spec.Wildcard(), nil - default: - return nil, unexpected(tok) - } -} - -// parseDescendant parses a ".." descendant segment, which may be a bracketed -// segment or a wildcard or name selector segment. Returns the parsed Segment. -func (p *parser) parseDescendant() (*spec.Segment, error) { - switch tok := p.lex.scan(); tok.tok { - case '[': - // Start of segment; scan selectors - selectors, err := p.parseSelectors() - if err != nil { - return nil, err - } - return spec.Descendant(selectors...), nil - case identifier: - return spec.Descendant(spec.Name(tok.val)), nil - case '*': - return spec.Descendant(spec.Wildcard()), nil - default: - return nil, unexpected(tok) - } -} - -// makeNumErr converts strconv.NumErrors to jsonpath errors. -func makeNumErr(tok token, err error) error { - var numError *strconv.NumError - if errors.As(err, &numError) { - return makeError(tok, fmt.Sprintf( - "cannot parse %q, %v", - numError.Num, numError.Err.Error(), - )) - } - return makeError(tok, err.Error()) -} - -// parseSelectors parses Selectors from a bracket segment. lex.r should be '[' -// before calling. Returns the Selectors parsed. -func (p *parser) parseSelectors() ([]spec.Selector, error) { - selectors := []spec.Selector{} - lex := p.lex - for { - switch tok := lex.scan(); tok.tok { - case '?': - filter, err := p.parseFilter() - if err != nil { - return nil, err - } - selectors = append(selectors, filter) - case '*': - selectors = append(selectors, spec.Wildcard()) - case goString: - selectors = append(selectors, spec.Name(tok.val)) - case integer: - // Index or slice? - if lex.skipBlankSpace() == ':' { - // Slice. - slice, err := parseSlice(lex, tok) - if err != nil { - return nil, err - } - selectors = append(selectors, slice) - } else { - // Index. - idx, err := parsePathInt(tok) - if err != nil { - return nil, err - } - selectors = append(selectors, spec.Index(idx)) - } - case ':': - // Slice. - slice, err := parseSlice(lex, tok) - if err != nil { - return nil, err - } - selectors = append(selectors, slice) - case blankSpace: - // Skip. - continue - default: - return nil, unexpected(tok) - } - - // Successfully parsed a selector. What's next? - switch lex.skipBlankSpace() { - case ',': - // Consume the comma. - lex.scan() - case ']': - // Consume and return. - lex.scan() - return selectors, nil - default: - // Anything else is an error. - return nil, unexpected(lex.scan()) - } - } -} - -// parsePathInt parses an integer as used in index values and steps, which must be -// within the interval [-(253)+1, (253)-1]. -func parsePathInt(tok token) (int64, error) { - if tok.val == "-0" { - return 0, makeError(tok, fmt.Sprintf( - "invalid integer path value %q", tok.val, - )) - } - idx, err := strconv.ParseInt(tok.val, 10, 64) - if err != nil { - return 0, makeNumErr(tok, err) - } - const ( - minVal = -1<<53 + 1 - maxVal = 1<<53 - 1 - ) - if idx > maxVal || idx < minVal { - return 0, makeError(tok, fmt.Sprintf( - "cannot parse %q, value out of range", - tok.val, - )) - } - return idx, nil -} - -// parseSlice parses a slice selector, ::. Returns the -// parsed SliceSelector. -func parseSlice(lex *lexer, tok token) (spec.SliceSelector, error) { - var args [3]any - - // Parse the three parts: start, end, and step. - i := 0 - for i < 3 { - switch tok.tok { - case ':': - // Skip to the next index. 
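- // Each ':' advances i to the next slice component; e.g., "2:6:2"
- // fills args with [2, 6, 2].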
- i++ - case integer: - // Parse the integer. - num, err := parsePathInt(tok) - if err != nil { - return spec.SliceSelector{}, err - } - args[i] = int(num) - default: - // Nothing else allowed. - return spec.SliceSelector{}, unexpected(tok) - } - - // What's next? - next := lex.skipBlankSpace() - if next == ']' || next == ',' { - // We've reached the end. - return spec.Slice(args[0], args[1], args[2]), nil - } - tok = lex.scan() - } - - // Never found the end of the slice. - return spec.SliceSelector{}, unexpected(tok) -} - -// parseFilter parses a [Filter] from Lex. A [Filter] consists of a single -// [LogicalOrExpr] (logical-or-expr). -func (p *parser) parseFilter() (*spec.FilterSelector, error) { - lor, err := p.parseLogicalOrExpr() - if err != nil { - return nil, err - } - return spec.Filter(lor...), nil -} - -// parseLogicalOrExpr parses a [LogicalOrExpr] from lex. A [LogicalOrExpr] is -// made up of one or more [LogicalAndExpr] (logical-and-expr) separated by -// "||". -func (p *parser) parseLogicalOrExpr() (spec.LogicalOr, error) { - lex := p.lex - ands := []spec.LogicalAnd{} - land, err := p.parseLogicalAndExpr() - if err != nil { - return nil, err - } - - ands = append(ands, land) - lex.scanBlankSpace() - for lex.r == '|' { - lex.scan() - next := lex.scan() - if next.tok != '|' { - return nil, makeError(next, fmt.Sprintf("expected '|' but found %v", next.name())) - } - land, err := p.parseLogicalAndExpr() - if err != nil { - return nil, err - } - ands = append(ands, land) - } - - return spec.LogicalOr(ands), nil -} - -// parseLogicalAndExpr parses a [LogicalAndExpr] from lex. A [LogicalAndExpr] -// is made up of one or more [BasicExpr]s (basic-expr) separated by "&&". -func (p *parser) parseLogicalAndExpr() (spec.LogicalAnd, error) { - expr, err := p.parseBasicExpr() - if err != nil { - return nil, err - } - - ors := []spec.BasicExpr{expr} - lex := p.lex - lex.scanBlankSpace() - for lex.r == '&' { - lex.scan() - next := lex.scan() - if next.tok != '&' { - return nil, makeError(next, fmt.Sprintf("expected '&' but found %v", next.name())) - } - expr, err := p.parseBasicExpr() - if err != nil { - return nil, err - } - ors = append(ors, expr) - } - - return spec.LogicalAnd(ors), nil -} - -// parseBasicExpr parses a [BasicExpr] from lex. A [BasicExpr] may be a -// parenthesized expression (paren-expr), comparison expression -// (comparison-expr), or test expression (test-expr). -func (p *parser) parseBasicExpr() (spec.BasicExpr, error) { - // Consume blank space. 
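- // For example, "(@.x)" is a paren-expr, "@.price < 10" a comparison-expr,
- // and "@.isbn" a test-expr.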
- lex := p.lex - lex.skipBlankSpace() - - tok := lex.scan() - switch tok.tok { - case '!': - if lex.skipBlankSpace() == '(' { - // paren-expr - lex.scan() - return p.parseNotParenExpr() - } - - next := lex.scan() - if next.tok == identifier { - // test-expr or comparison-expr - f, err := p.parseFunction(next) - if err != nil { - return nil, err - } - return spec.NotFunction(f), nil - } - - // test-expr or comparison-expr - return p.parseNonExistExpr(next) - case '(': - return p.parseParenExpr() - case goString, integer, number, boolFalse, boolTrue, jsonNull: - // comparison-expr - left, err := parseLiteral(tok) - if err != nil { - return nil, err - } - return p.parseComparableExpr(left) - case identifier: - if lex.r == '(' { - return p.parseFunctionFilterExpr(tok) - } - case '@', '$': - q, err := p.parseFilterQuery(tok) - if err != nil { - return nil, err - } - - if sing := q.Singular(); sing != nil { - switch lex.skipBlankSpace() { - // comparison-expr - case '=', '!', '<', '>': - return p.parseComparableExpr(sing) - } - } - return spec.Existence(q), nil - } - - return nil, unexpected(tok) -} - -// parseFunctionFilterExpr parses a [BasicExpr] (basic-expr) that starts with -// ident, which must be an identifier token that's expected to be the name of -// a function. The return value will be either a [spec.FuncExpr] -// (function-expr), if the function return value is a logical (boolean) value. -// Otherwise it will be a [ComparisonExpr] (comparison-expr), as long as the -// function call is compared to another expression. Any other configuration -// returns an error. -func (p *parser) parseFunctionFilterExpr(ident token) (spec.BasicExpr, error) { - f, err := p.parseFunction(ident) - if err != nil { - return nil, err - } - - if f.ResultType() == spec.FuncLogical { - return f, nil - } - - switch p.lex.skipBlankSpace() { - case '=', '!', '<', '>': - // comparison-expr - return p.parseComparableExpr(f) - } - - return nil, makeError(p.lex.scan(), "missing comparison to function result") -} - -// parseNonExistExpr parses a [spec.NonExistExpr] (non-existence) from lex. -func (p *parser) parseNonExistExpr(tok token) (*spec.NonExistExpr, error) { - q, err := p.parseFilterQuery(tok) - if err != nil { - return nil, err - } - return spec.Nonexistence(q), nil -} - -// parseFilterQuery parses a [*spec.Query] (rel-query / jsonpath-query) from -// lex. -func (p *parser) parseFilterQuery(tok token) (*spec.PathQuery, error) { - q, err := p.parseQuery(tok.tok == '$') - if err != nil { - return nil, err - } - return q, nil -} - -// parseLogicalOrExpr parses a [spec.LogicalOrExpr] from lex, which should -// return the next token after '(' from scan(). Returns an error if the -// expression does not end with a closing ')'. -func (p *parser) parseInnerParenExpr() (spec.LogicalOr, error) { - expr, err := p.parseLogicalOrExpr() - if err != nil { - return nil, err - } - - // Make sure we ended on a parenthesis. - next := p.lex.scan() - if next.tok != ')' { - return nil, makeError( - next, fmt.Sprintf("expected ')' but found %v", next.name()), - ) - } - - return expr, nil -} - -// parseParenExpr parses a [ParenExpr] (paren-expr) expression from lex, which -// should return the next token after '(' from scan(). Returns an error if the -// expression does not end with a closing ')'. 
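- // For example, "(@.x || @.y)" parses into a spec.Paren wrapping two
- // logical-and branches.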
-func (p *parser) parseParenExpr() (*spec.ParenExpr, error) { - expr, err := p.parseInnerParenExpr() - if err != nil { - return nil, err - } - return spec.Paren(expr...), nil -} - -// parseParenExpr parses a [*spec.NotParenExpr] expression (logical-not-op -// paren-expression) from lex, which should return the next token after '(' -// from scan(). Returns an error if the expression does not end with a closing -// ')'. -func (p *parser) parseNotParenExpr() (*spec.NotParenExpr, error) { - expr, err := p.parseInnerParenExpr() - if err != nil { - return nil, err - } - return spec.NotParen(expr...), nil -} - -// parseFunction parses a function named tok.val from lex. tok should be the -// token just before the next call to lex.scan, and must be an identifier -// token naming the function. Returns an error if the function is not found in -// the registry or if arguments are invalid for the function. -func (p *parser) parseFunction(tok token) (*spec.FuncExpr, error) { - function := p.reg.Get(tok.val) - if function == nil { - return nil, makeError(tok, fmt.Sprintf("unknown function %v()", tok.val)) - } - - paren := p.lex.scan() // Drop ( - args, err := p.parseFunctionArgs() - if err != nil { - return nil, err - } - - if err := function.Validate(args); err != nil { - return nil, makeError(paren, fmt.Sprintf("function %v() %v", tok.val, err.Error())) - } - - return spec.Function(function, args...), nil -} - -// parseFunctionArgs parses the comma-delimited arguments to a function from -// lex. Arguments may be one of literal, filter-query (including -// singular-query), logical-expr, or function-expr. -func (p *parser) parseFunctionArgs() ([]spec.FuncExprArg, error) { - res := []spec.FuncExprArg{} - lex := p.lex - for { - switch tok := p.lex.scan(); tok.tok { - case goString, integer, number, boolFalse, boolTrue, jsonNull: - // literal - val, err := parseLiteral(tok) - if err != nil { - return nil, err - } - res = append(res, val) - case '@', '$': - // filter-query - q, err := p.parseFilterQuery(tok) - if err != nil { - return nil, err - } - - res = append(res, q.Expression()) - case identifier: - // function-expr - if p.lex.skipBlankSpace() != '(' { - return nil, unexpected(tok) - } - f, err := p.parseFunction(tok) - if err != nil { - return nil, err - } - res = append(res, f) - case blankSpace: - // Skip. - continue - case ')': - // All done. - return res, nil - case '!', '(': - ors, err := p.parseLogicalOrExpr() - if err != nil { - return nil, err - } - res = append(res, ors) - } - - // Successfully parsed an argument. What's next? - switch lex.skipBlankSpace() { - case ',': - // Consume the comma. - lex.scan() - case ')': - // Consume and return. - lex.scan() - return res, nil - default: - // Anything else is an error. - return nil, unexpected(lex.scan()) - } - } -} - -// parseLiteral parses the literal value from tok into native Go values and -// returns them as spec.LiteralArg. tok.tok must be one of goString, integer, -// number, boolFalse, boolTrue, or jsonNull. 
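- // For example, "42" becomes spec.Literal(int64(42)), "98.6" becomes
- // spec.Literal(float64(98.6)), and null becomes spec.Literal(nil).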
-func parseLiteral(tok token) (*spec.LiteralArg, error) { - switch tok.tok { - case goString: - return spec.Literal(tok.val), nil - case integer: - integer, err := strconv.ParseInt(tok.val, 10, 64) - if err != nil { - return nil, makeNumErr(tok, err) - } - return spec.Literal(integer), nil - case number: - num, err := strconv.ParseFloat(tok.val, 64) - if err != nil { - return nil, makeNumErr(tok, err) - } - return spec.Literal(num), nil - case boolTrue: - return spec.Literal(true), nil - case boolFalse: - return spec.Literal(false), nil - case jsonNull: - return spec.Literal(nil), nil - default: - return nil, unexpected(tok) - } -} - -// parseComparableExpr parses a [ComparisonExpr] (comparison-expr) from lex. -func (p *parser) parseComparableExpr(left spec.CompVal) (*spec.CompExpr, error) { - // Skip blank space. - lex := p.lex - lex.skipBlankSpace() - - op, err := parseCompOp(lex) - if err != nil { - return nil, err - } - - // Skip blank space. - lex.skipBlankSpace() - - right, err := p.parseComparableVal(lex.scan()) - if err != nil { - return nil, err - } - - return spec.Comparison(left, op, right), nil -} - -// parseComparableVal parses a [CompVal] (comparable) from lex. -func (p *parser) parseComparableVal(tok token) (spec.CompVal, error) { - switch tok.tok { - case goString, integer, number, boolFalse, boolTrue, jsonNull: - // literal - return parseLiteral(tok) - case '@', '$': - // singular-query - return parseSingularQuery(tok, p.lex) - case identifier: - // function-expr - if p.lex.r != '(' { - return nil, unexpected(tok) - } - f, err := p.parseFunction(tok) - if err != nil { - return nil, err - } - if f.ResultType() == spec.FuncLogical { - return nil, makeError(tok, "cannot compare result of logical function") - } - return f, nil - default: - return nil, unexpected(tok) - } -} - -// parseCompOp pares a [CompOp] (comparison-op) from lex. -func parseCompOp(lex *lexer) (spec.CompOp, error) { - tok := lex.scan() - switch tok.tok { - case '=': - if lex.r == '=' { - lex.scan() - return spec.EqualTo, nil - } - case '!': - if lex.r == '=' { - lex.scan() - return spec.NotEqualTo, nil - } - case '<': - if lex.r == '=' { - lex.scan() - return spec.LessThanEqualTo, nil - } - return spec.LessThan, nil - case '>': - if lex.r == '=' { - lex.scan() - return spec.GreaterThanEqualTo, nil - } - return spec.GreaterThan, nil - } - - return 0, makeError(tok, "invalid comparison operator") -} - -// parseSingularQuery parses a [spec.SingularQueryExpr] (singular-query) from -// lex. A singular query consists only of single-selector nodes. -func parseSingularQuery(startToken token, lex *lexer) (*spec.SingularQueryExpr, error) { - selectors := []spec.Selector{} - for { - switch lex.r { - case '[': - // Index or name selector. - lex.skipBlankSpace() - lex.scan() - switch tok := lex.scan(); tok.tok { - case goString: - selectors = append(selectors, spec.Name(tok.val)) - case integer: - idx, err := parsePathInt(tok) - if err != nil { - return nil, err - } - selectors = append(selectors, spec.Index(idx)) - default: - return nil, unexpected(tok) - } - // Look for closing bracket. - lex.skipBlankSpace() - tok := lex.scan() - if tok.tok != ']' { - return nil, unexpected(tok) - } - case '.': - // Start of a name selector. - lex.scan() - tok := lex.scan() - if tok.tok != identifier { - return nil, unexpected(tok) - } - selectors = append(selectors, spec.Name(tok.val)) - default: - // Done parsing. 
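- // For example, "$.a.b" yields spec.SingularQuery(true, spec.Name("a"), spec.Name("b")),
- // and `@[0]["a"]` yields spec.SingularQuery(false, spec.Index(0), spec.Name("a")).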
- return spec.SingularQuery(startToken.tok == '$', selectors...), nil - } - } -} diff --git a/parser/parse_test.go b/parser/parse_test.go deleted file mode 100644 index f4fb7e7..0000000 --- a/parser/parse_test.go +++ /dev/null @@ -1,1395 +0,0 @@ -package parser - -import ( - "errors" - "strconv" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - "github.com/theory/jsonpath/registry" - "github.com/theory/jsonpath/spec" -) - -func TestParseRoot(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - q, err := Parse(registry.New(), "$") - r.NoError(err) - a.Equal("$", q.String()) - a.Empty(q.Segments()) -} - -func TestParseSimple(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - reg := registry.New() - - for _, tc := range []struct { - name string - path string - exp *spec.PathQuery - err string - }{ - { - name: "root", - path: "$", - exp: spec.Query(true, []*spec.Segment{}...), - }, - { - name: "name", - path: "$.x", - exp: spec.Query(true, spec.Child(spec.Name("x"))), - }, - { - name: "trim_leading_space", - path: " $.x", - err: `jsonpath: unexpected blank space at position 1`, - }, - { - name: "trim_trailing_space", - path: "$.x ", - err: `jsonpath: unexpected blank space at position 4`, - }, - { - name: "no_interim_space", - path: "$.x .y", - exp: spec.Query( - true, - spec.Child(spec.Name("x")), - spec.Child(spec.Name("y")), - ), - }, - { - name: "unexpected_integer", - path: "$.62", - err: `jsonpath: unexpected integer at position 3`, - }, - { - name: "unexpected_token", - path: "$.==12", - err: `jsonpath: unexpected '=' at position 3`, - }, - { - name: "name_name", - path: "$.x.y", - exp: spec.Query( - true, - spec.Child(spec.Name("x")), - spec.Child(spec.Name("y")), - ), - }, - { - name: "wildcard", - path: "$.*", - exp: spec.Query(true, spec.Child(spec.Wildcard())), - }, - { - name: "wildcard_wildcard", - path: "$.*.*", - exp: spec.Query( - true, - spec.Child(spec.Wildcard()), - spec.Child(spec.Wildcard()), - ), - }, - { - name: "name_wildcard", - path: "$.x.*", - exp: spec.Query( - true, - spec.Child(spec.Name("x")), - spec.Child(spec.Wildcard()), - ), - }, - { - name: "desc_name", - path: "$..x", - exp: spec.Query(true, spec.Descendant(spec.Name("x"))), - }, - { - name: "desc_name_2x", - path: "$..x..y", - exp: spec.Query( - true, - spec.Descendant(spec.Name("x")), - spec.Descendant(spec.Name("y")), - ), - }, - { - name: "desc_wildcard", - path: "$..*", - exp: spec.Query(true, spec.Descendant(spec.Wildcard())), - }, - { - name: "desc_wildcard_2x", - path: "$..*..*", - exp: spec.Query( - true, - spec.Descendant(spec.Wildcard()), - spec.Descendant(spec.Wildcard()), - ), - }, - { - name: "desc_wildcard_name", - path: "$..*.xyz", - exp: spec.Query( - true, - spec.Descendant(spec.Wildcard()), - spec.Child(spec.Name("xyz")), - ), - }, - { - name: "wildcard_desc_name", - path: "$.*..xyz", - exp: spec.Query( - true, - spec.Child(spec.Wildcard()), - spec.Descendant(spec.Name("xyz")), - ), - }, - { - name: "empty_string", - path: "", - err: "jsonpath: unexpected end of input", - }, - { - name: "bad_start", - path: ".x", - err: `jsonpath: unexpected '.' 
at position 1`, - }, - { - name: "not_a_segment", - path: "$foo", - err: "jsonpath: unexpected identifier at position 1", - }, - { - name: "not_a_dot_segment", - path: "$.{x}", - err: `jsonpath: unexpected '{' at position 3`, - }, - { - name: "not_a_descendant", - path: "$..{x}", - err: `jsonpath: unexpected '{' at position 4`, - }, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - q, err := Parse(reg, tc.path) - if tc.err == "" { - r.NoError(err) - a.Equal(tc.exp, q) - } else { - a.Nil(q) - r.EqualError(err, tc.err) - r.ErrorIs(err, ErrPathParse) - } - }) - } -} - -func TestParseFilter(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - reg := registry.New() - _ = reg.Register( - "__true", - spec.FuncLogical, - func([]spec.FuncExprArg) error { return nil }, - func([]spec.PathValue) spec.PathValue { - return spec.LogicalTrue - }, - ) - trueFunc := reg.Get("__true") - - for _, tc := range []struct { - name string - query string - filter *spec.FilterSelector - err string - }{ - // ExistExpr - { - name: "current_exists", - query: "@", - filter: spec.Filter(spec.And( - spec.Existence(spec.Query(false, []*spec.Segment{}...)), - )), - }, - { - name: "root_exists", - query: "$", - filter: spec.Filter(spec.And( - spec.Existence(spec.Query(true, []*spec.Segment{}...)), - )), - }, - { - name: "current_name_exists", - query: "@.x", - filter: spec.Filter(spec.And( - spec.Existence(spec.Query(false, spec.Child(spec.Name("x")))), - )), - }, - { - name: "root_name_exists", - query: "$.x", - filter: spec.Filter(spec.And( - spec.Existence(spec.Query(true, spec.Child(spec.Name("x")))), - )), - }, - { - name: "current_two_segment_exists", - query: "@.x[1]", - filter: spec.Filter(spec.And( - spec.Existence(spec.Query( - false, - spec.Child(spec.Name("x")), - spec.Child(spec.Index(1)), - )), - )), - }, - { - name: "root_two_selector_exists", - query: `$["x", 1]`, - filter: spec.Filter(spec.And( - spec.Existence(spec.Query( - true, - spec.Child(spec.Name("x"), spec.Index(1)), - )), - )), - }, - // NonExistExpr - { - name: "current_not_exists", - query: "!@", - filter: spec.Filter(spec.And( - spec.Nonexistence(spec.Query(false, []*spec.Segment{}...)), - )), - }, - { - name: "root_not_exists", - query: "!$", - filter: spec.Filter(spec.And(spec.Nonexistence( - spec.Query(true, []*spec.Segment{}...), - ))), - }, - { - name: "current_name_not_exists", - query: "!@.x", - filter: spec.Filter(spec.And( - spec.Nonexistence(spec.Query(false, spec.Child(spec.Name("x")))), - )), - }, - { - name: "root_name_not_exists", - query: "!$.x", - filter: spec.Filter(spec.And( - spec.Nonexistence(spec.Query(true, spec.Child(spec.Name("x")))), - )), - }, - { - name: "current_two_segment_not_exists", - query: "!@.x[1]", - filter: spec.Filter(spec.And( - spec.Nonexistence(spec.Query( - false, - spec.Child(spec.Name("x")), - spec.Child(spec.Index(1)), - )), - )), - }, - { - name: "root_two_selector_not_exists", - query: `!$["x", 1]`, - filter: spec.Filter(spec.And( - spec.Nonexistence(spec.Query( - true, - spec.Child(spec.Name("x"), spec.Index(1)), - )), - )), - }, - // ParenExistExpr - { - name: "paren_current_exists", - query: "(@)", - filter: spec.Filter(spec.And( - spec.Paren(spec.And( - spec.Existence(spec.Query(false, []*spec.Segment{}...)), - )), - )), - }, - { - name: "paren_root_exists", - query: "($)", - filter: spec.Filter(spec.And( - spec.Paren(spec.And( - spec.Existence(spec.Query(true, []*spec.Segment{}...)), - )), - )), - }, - { - name: "paren_current_exists_name_index", - query: 
`(@["x", 1])`, - filter: spec.Filter(spec.And( - spec.Paren(spec.And( - spec.Existence(spec.Query( - false, - spec.Child(spec.Name("x"), spec.Index(1)), - )), - )), - )), - }, - { - name: "paren_logical_and", - query: `( @["x", 1] && $["y"] )`, - filter: spec.Filter(spec.And( - spec.Paren(spec.And( - spec.Existence(spec.Query( - false, - spec.Child(spec.Name("x"), spec.Index(1)), - )), - spec.Existence(spec.Query( - true, - spec.Child(spec.Name("y")), - )), - )), - )), - }, - { - name: "paren_logical_or", - query: `(@["x", 1] || $["y"])`, - filter: spec.Filter(spec.And( - spec.Paren( - spec.And(spec.Existence(spec.Query( - false, - spec.Child(spec.Name("x"), spec.Index(1)), - ))), - spec.And(spec.Existence(spec.Query( - true, - spec.Child(spec.Name("y")), - ))), - )), - ), - }, - // NotParenExistExpr - { - name: "not_paren_current_exists", - query: "!(@)", - filter: spec.Filter(spec.And( - spec.NotParen(spec.And( - spec.Existence(spec.Query(false, []*spec.Segment{}...)), - )), - )), - }, - { - name: "not_paren_root_exists", - query: "!($)", - filter: spec.Filter(spec.And( - spec.NotParen(spec.And( - spec.Existence(spec.Query(true, []*spec.Segment{}...)), - )), - )), - }, - { - name: "not_paren_current_exists_name_index", - query: `!(@["x", 1])`, - filter: spec.Filter(spec.And( - spec.NotParen(spec.And( - spec.Existence(spec.Query( - false, - spec.Child(spec.Name("x"), spec.Index(1)), - )), - )), - )), - }, - { - name: "not_paren_logical_and", - query: `!( @["x", 1] && $["y"] )`, - filter: spec.Filter(spec.And( - spec.NotParen(spec.And( - spec.Existence(spec.Query( - false, - spec.Child(spec.Name("x"), spec.Index(1)), - )), - spec.Existence(spec.Query( - true, - spec.Child(spec.Name("y")), - )), - )), - )), - }, - { - name: "not_paren_logical_or", - query: `!(@["x", 1] || $["y"])`, - filter: spec.Filter(spec.And( - spec.NotParen( - spec.And(spec.Existence(spec.Query( - false, - spec.Child(spec.Name("x"), spec.Index(1)), - ))), - spec.And(spec.Existence(spec.Query( - true, - spec.Child(spec.Name("y")), - ))), - ), - )), - }, - // FunExpr - { - name: "function_current", - query: "__true(@)", - filter: spec.Filter(spec.And( - spec.Function( - trueFunc, - spec.SingularQuery(false, []spec.Selector{}...), - ), - )), - }, - { - name: "function_match_current_integer", - query: "match( @, 42 )", - filter: spec.Filter(spec.And( - spec.Function( - reg.Get("match"), - spec.SingularQuery(false, []spec.Selector{}...), - spec.Literal(int64(42)), - ), - )), - }, - { - name: "function_search_two_queries", - query: "search( $.x, @[0] )", - filter: spec.Filter(spec.And( - spec.Function( - reg.Get("search"), - spec.SingularQuery(true, spec.Name("x")), - spec.SingularQuery(false, spec.Index(0)), - ), - )), - }, - { - name: "function_length_string", - query: `length("hi") == 2`, - filter: spec.Filter(spec.And( - spec.Comparison( - spec.Function(reg.Get("length"), spec.Literal("hi")), - spec.EqualTo, - spec.Literal(int64(2)), - ), - )), - }, - { - name: "function_length_true", - query: `length(true) == 1`, - filter: spec.Filter(spec.And( - spec.Comparison( - spec.Function(reg.Get("length"), spec.Literal(true)), - spec.EqualTo, - spec.Literal(int64(1)), - ), - )), - }, - { - name: "function_length_false", - query: `length(false)==1`, - filter: spec.Filter(spec.And( - spec.Comparison( - spec.Function(reg.Get("length"), spec.Literal(false)), - spec.EqualTo, - spec.Literal(int64(1)), - ), - )), - }, - { - name: "function_value_null", - query: `__true(null)`, // defined in function_test.go - filter: 
spec.Filter(spec.And( - spec.Function(trueFunc, spec.Literal(nil)), - )), - }, - { - name: "nested_function", - query: `__true(count(@))`, // defined in function_test.go - filter: spec.Filter(spec.And( - spec.Function( - trueFunc, - spec.Function( - reg.Get("count"), - spec.SingularQuery(false, []spec.Selector{}...), - ), - ), - )), - }, - { - name: "function_paren_logical_expr", - query: `__true((@.x))`, // defined in function_test.go - filter: spec.Filter(spec.And( - spec.Function(trueFunc, spec.Or(spec.And( - spec.Existence(spec.Query(false, spec.Child(spec.Name("x")))), - ))), - )), - }, - { - name: "function_paren_logical_not_expr", - query: `__true((!@.x))`, // defined in function_test.go - filter: spec.Filter(spec.And( - spec.Function(trueFunc, spec.Or(spec.And( - spec.Nonexistence(spec.Query(false, spec.Child(spec.Name("x")))), - ))), - )), - }, - { - name: "function_lots_of_literals", - query: `__true("hi", 42, true, false, null, 98.6)`, - filter: spec.Filter(spec.And( - spec.Function( - trueFunc, - spec.Literal("hi"), - spec.Literal(int64(42)), - spec.Literal(true), - spec.Literal(false), - spec.Literal(nil), - spec.Literal(float64(98.6)), - ), - )), - }, - { - name: "function_no_args", - query: `__true()`, - filter: spec.Filter(spec.And( - spec.Function(trueFunc, []spec.FuncExprArg{}...), - )), - }, - // ComparisonExpr - { - name: "literal_comparison", - query: `42 == 42`, - filter: spec.Filter(spec.And( - spec.Comparison(spec.Literal(int64(42)), spec.EqualTo, spec.Literal(int64(42))), - )), - }, - { - name: "literal_singular_comparison", - query: `42 != $.a.b`, - filter: spec.Filter(spec.And( - spec.Comparison( - spec.Literal(int64(42)), - spec.NotEqualTo, - spec.SingularQuery(true, spec.Name("a"), spec.Name("b")), - ), - )), - }, - { - name: "literal_comparison_function", - query: `42 > length("hi")`, - filter: spec.Filter(spec.And( - spec.Comparison( - spec.Literal(int64(42)), - spec.GreaterThan, - spec.Function(reg.Get("length"), spec.Literal("hi")), - ), - )), - }, - { - name: "function_cmp_singular", - query: `length("hi") <= @[0]["a"]`, - filter: spec.Filter(spec.And( - spec.Comparison( - spec.Function(reg.Get("length"), spec.Literal("hi")), - spec.LessThanEqualTo, - spec.SingularQuery(false, spec.Index(0), spec.Name("a")), - ), - )), - }, - { - name: "singular_cmp_literal", - query: `$.a.b >= 98.6`, - filter: spec.Filter(spec.And( - spec.Comparison( - spec.SingularQuery(true, spec.Name("a"), spec.Name("b")), - spec.GreaterThanEqualTo, - spec.Literal(float64(98.6)), - ), - )), - }, - { - name: "function_cmp_literal", - query: `length("hi") < 42 `, - filter: spec.Filter(spec.And( - spec.Comparison( - spec.Function(reg.Get("length"), spec.Literal("hi")), - spec.LessThan, - spec.Literal(int64(42)), - ), - )), - }, - { - name: "not_function", - query: `!__true()`, - filter: spec.Filter(spec.And( - spec.NotFunction(spec.Function(trueFunc, []spec.FuncExprArg{}...)), - )), - }, - { - name: "singular_cmp_literal_no_space", - query: `@.x<42`, - filter: spec.Filter(spec.And( - spec.Comparison( - spec.SingularQuery(false, spec.Name("x")), - spec.LessThan, - spec.Literal(int64(42)), - ), - )), - }, - { - name: "invalid_logical_or", - query: `(@["x", 1] || hi)`, - err: `jsonpath: unexpected identifier at position 15`, - }, - { - name: "invalid_logical_or", - query: `(@["x", 1] || hi)`, - err: `jsonpath: unexpected identifier at position 15`, - }, - { - name: "incomplete_logical_or", - query: `(@["x", 1] | $["y"])`, - err: `jsonpath: expected '|' but found blank space at position 13`, 
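- // After the lone '|' the lexer scans a blank space token, hence "found blank space".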
- }, - { - name: "incomplete_logical_and", - query: `(@["x", 1] &? $["y"])`, - err: `jsonpath: expected '&' but found '?' at position 13`, - }, - { - name: "invalid_and_expression", - query: `(@["x", 1] && nope(@))`, - err: `jsonpath: unknown function nope() at position 15`, - }, - { - name: "nonexistent_function", - query: `nonesuch(@)`, - err: `jsonpath: unknown function nonesuch() at position 1`, - }, - { - name: "not_nonexistent_function", - query: `!nonesuch(@)`, - err: `jsonpath: unknown function nonesuch() at position 2`, - }, - { - name: "invalid_literal", - query: `99e+1234`, - err: `jsonpath: cannot parse "99e+1234", value out of range at position 1`, - }, - { - name: "invalid_query", - query: `@["x", hi]`, - err: `jsonpath: unexpected identifier at position 8`, - }, - { - name: "invalid_function_comparison", - query: `length(@.x) == hi`, - err: `jsonpath: unexpected identifier at position 16`, - }, - { - name: "function_without_comparison", - query: `length(@.x)`, - err: `jsonpath: missing comparison to function result at position 12`, - }, - { - name: "invalid_not_exists_query", - query: `!@.0`, - err: `jsonpath: unexpected integer at position 4`, - }, - { - name: "unclosed_paren_expr", - query: `(@["x", 1]`, - err: `jsonpath: expected ')' but found eof at position 11`, - }, - { - name: "unclosed_not_paren_expr", - query: `!(@["x", 1]`, - err: `jsonpath: expected ')' but found eof at position 12`, - }, - { - name: "bad_function_arg", - query: `length(xyz)`, - err: `jsonpath: unexpected identifier at position 8`, - }, - { - name: "invalid_function_arg", - query: `length(@[1, 2])`, - err: `jsonpath: function length() cannot convert argument to Value at position 7`, - }, - { - name: "too_many_function_args", - query: `length(@, $)`, - err: `jsonpath: function length() expected 1 argument but found 2 at position 7`, - }, - { - name: "function_literal_parse_error", - query: `length(99e+1234)`, - err: `jsonpath: cannot parse "99e+1234", value out of range at position 8`, - }, - { - name: "function_query_parse_error", - query: `length(@[foo])`, - err: `jsonpath: unexpected identifier at position 10`, - }, - { - name: "unknown_function_in_function_arg", - query: `length(nonesuch())`, - err: `jsonpath: unknown function nonesuch() at position 8`, - }, - { - name: "invalid_not_function_arg", - query: `length(!@[foo])`, - err: `jsonpath: unexpected identifier at position 11`, - }, - { - name: "invalid_second_arg", - query: `length("foo" == "bar")`, - err: `jsonpath: unexpected '=' at position 14`, - }, - { - name: "invalid_comparable_expression", - query: `"foo" => "bar"`, - err: `jsonpath: invalid comparison operator at position 7`, - }, - { - name: "invalid_comparable_function", - query: `42 == nonesuch()`, - err: `jsonpath: unknown function nonesuch() at position 7`, - }, - { - name: "cannot_compare_logical_func", - query: `42 == __true()`, - err: `jsonpath: cannot compare result of logical function at position 7`, - }, - { - name: "function_wrong_arg_count", - query: `match("foo")`, - err: `jsonpath: function match() expected 2 arguments but found 1 at position 6`, - }, - { - name: "function_second_arg_parse_error", - query: `search("foo", @[foo])`, - err: `jsonpath: unexpected identifier at position 17`, - }, - { - name: "cmp_query_invalid_index", - query: `42 == $[-0]`, - err: `jsonpath: invalid integer path value "-0" at position 9`, - }, - { - name: "cmp_val_not_valid", - query: `42 == {}`, - err: `jsonpath: unexpected '{' at position 7`, - }, - { - name: 
"cmp_query_unclosed_bracket", - query: `42 == $[0`, - err: `jsonpath: unexpected eof at position 10`, - }, - { - name: "cmp_query_invalid_selector", - query: `42 == $[foo]`, - err: `jsonpath: unexpected identifier at position 9`, - }, - { - name: "cmp_query_invalid_ident", - query: `42 == $.42`, - err: `jsonpath: unexpected integer at position 9`, - }, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - parser := &parser{lex: newLexer(tc.query), reg: reg} - filter, err := parser.parseFilter() - if tc.err == "" { - r.NoError(err, tc.name) - a.Equal(tc.filter, filter, tc.name) - } else { - a.Nil(filter, tc.name) - r.EqualError(err, tc.err, tc.name) - r.ErrorIs(err, ErrPathParse, tc.name) - } - }) - } -} - -func TestParseSelectors(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - reg := registry.New() - - for _, tc := range []struct { - name string - path string - exp *spec.PathQuery - err string - }{ - { - name: "index", - path: "$[0]", - exp: spec.Query(true, spec.Child(spec.Index(0))), - }, - { - name: "two_indexes", - path: "$[0, 1]", - exp: spec.Query(true, spec.Child(spec.Index(0), spec.Index(1))), - }, - { - name: "name", - path: `$["foo"]`, - exp: spec.Query(true, spec.Child(spec.Name("foo"))), - }, - { - name: "sq_name", - path: `$['foo']`, - exp: spec.Query(true, spec.Child(spec.Name("foo"))), - }, - { - name: "two_names", - path: `$["foo", "πŸ¦β€πŸ”₯"]`, - exp: spec.Query(true, spec.Child(spec.Name("foo"), spec.Name("πŸ¦β€πŸ”₯"))), - }, - { - name: "json_escapes", - path: `$["abx_xyx\ryup", "\b\f\n\r\t\/\\"]`, - exp: spec.Query(true, spec.Child( - spec.Name("abx_xyx\ryup"), - spec.Name("\b\f\n\r\t/\\"), - )), - }, - { - name: "unicode_escapes", - path: `$["fo\u00f8", "tune \uD834\uDD1E"]`, - exp: spec.Query(true, spec.Child(spec.Name("foΓΈ"), spec.Name("tune π„ž"))), - }, - { - name: "slice_start", - path: `$[1:]`, - exp: spec.Query(true, spec.Child(spec.Slice(1))), - }, - { - name: "slice_start_2", - path: `$[2:]`, - exp: spec.Query(true, spec.Child(spec.Slice(2))), - }, - { - name: "slice_start_end", - path: `$[2:6]`, - exp: spec.Query(true, spec.Child(spec.Slice(2, 6))), - }, - { - name: "slice_end", - path: `$[:6]`, - exp: spec.Query(true, spec.Child(spec.Slice(nil, 6))), - }, - { - name: "slice_start_end_step", - path: `$[2:6:2]`, - exp: spec.Query(true, spec.Child(spec.Slice(2, 6, 2))), - }, - { - name: "slice_start_step", - path: `$[2::2]`, - exp: spec.Query(true, spec.Child(spec.Slice(2, nil, 2))), - }, - { - name: "slice_step", - path: `$[::2]`, - exp: spec.Query(true, spec.Child(spec.Slice(nil, nil, 2))), - }, - { - name: "slice_defaults", - path: `$[:]`, - exp: spec.Query(true, spec.Child(spec.Slice())), - }, - { - name: "slice_spacing", - path: `$[ 1: 2 : 2 ]`, - exp: spec.Query(true, spec.Child(spec.Slice(1, 2, 2))), - }, - { - name: "slice_slice", - path: `$[:,:]`, - exp: spec.Query(true, spec.Child(spec.Slice(), spec.Slice())), - }, - { - name: "slice_slice_slice", - path: `$[2:,:4,7:9]`, - exp: spec.Query(true, spec.Child( - spec.Slice(2), - spec.Slice(nil, 4), - spec.Slice(7, 9), - )), - }, - { - name: "slice_name", - path: `$[:,"hi"]`, - exp: spec.Query( - true, - spec.Child(spec.Slice(), spec.Name("hi")), - ), - }, - { - name: "name_slice", - path: `$["hi",2:]`, - exp: spec.Query( - true, - spec.Child(spec.Name("hi"), spec.Slice(2)), - ), - }, - { - name: "slice_index", - path: `$[:,42]`, - exp: spec.Query( - true, - spec.Child(spec.Slice(), spec.Index(42)), - ), - }, - { - name: "index_slice", - path: `$[42,:3]`, - exp: 
spec.Query( - true, - spec.Child(spec.Index(42), spec.Slice(nil, 3)), - ), - }, - { - name: "slice_wildcard", - path: `$[:, *]`, - exp: spec.Query(true, spec.Child(spec.Slice(), spec.Wildcard())), - }, - { - name: "wildcard_slice", - path: `$[ *, : ]`, - exp: spec.Query(true, spec.Child(spec.Wildcard(), spec.Slice())), - }, - { - name: "slice_neg_start", - path: `$[-3:]`, - exp: spec.Query(true, spec.Child(spec.Slice(-3))), - }, - { - name: "slice_neg_end", - path: `$[:-3:]`, - exp: spec.Query(true, spec.Child(spec.Slice(nil, -3))), - }, - { - name: "slice_neg_step", - path: `$[::-2]`, - exp: spec.Query(true, spec.Child(spec.Slice(nil, nil, -2))), - }, - { - name: "index_name_slice_wildcard", - path: `$[3, "πŸ¦€", :3,*]`, - exp: spec.Query(true, spec.Child( - spec.Index(3), - spec.Name("πŸ¦€"), - spec.Slice(nil, 3), - spec.Wildcard(), - )), - }, - { - name: "filter_eq", - path: `$[?@.x == 'y']`, - exp: spec.Query(true, spec.Child( - spec.Filter(spec.And( - spec.Comparison( - spec.SingularQuery(false, spec.Name("x")), - spec.EqualTo, - spec.Literal("y"), - ), - ))), - ), - }, - { - name: "filter_exists", - path: `$[?@.x]`, - exp: spec.Query(true, spec.Child( - spec.Filter(spec.And( - spec.Existence( - spec.Query(false, spec.Child(spec.Name("x"))), - ), - )), - )), - }, - { - name: "filter_not_exists", - path: `$[?!@[0]]`, - exp: spec.Query(true, spec.Child( - spec.Filter(spec.And( - spec.Nonexistence( - spec.Query(false, spec.Child(spec.Index(0))), - ), - )), - )), - }, - { - name: "filter_current_and_root", - path: `$[? @ && $[0]]`, - exp: spec.Query(true, spec.Child( - spec.Filter(spec.And( - spec.Existence(spec.Query(false, []*spec.Segment{}...)), - spec.Existence(spec.Query(true, spec.Child(spec.Index(0)))), - )), - )), - }, - { - name: "filter_err", - path: `$[?`, - err: `jsonpath: unexpected eof at position 4`, - }, - { - name: "slice_bad_start", - path: `$[:d]`, - err: `jsonpath: unexpected identifier at position 4`, - }, - { - name: "slice_four_parts", - path: `$[0:0:0:0]`, - err: `jsonpath: unexpected integer at position 9`, - }, - { - name: "invalid_selector", - path: `$[{}]`, - err: `jsonpath: unexpected '{' at position 3`, - }, - { - name: "invalid_second_selector", - path: `$[1, hi]`, - err: `jsonpath: unexpected identifier at position 6`, - }, - { - name: "missing_segment_comma", - path: `$[1 "hi"]`, - err: `jsonpath: unexpected string at position 5`, - }, - { - name: "space_index", - path: "$[ 0]", - exp: spec.Query(true, spec.Child(spec.Index(0))), - }, - { - name: "index_space_comma_index", - path: "$[0 , 12]", - exp: spec.Query( - true, - spec.Child(spec.Index(0), spec.Index(12)), - ), - }, - { - name: "index_comma_space_name", - path: `$[0, "xyz"]`, - exp: spec.Query( - true, - spec.Child(spec.Index(0), spec.Name("xyz")), - ), - }, - { - name: "tab_index", - path: "$[\t0]", - exp: spec.Query(true, spec.Child(spec.Index(0))), - }, - { - name: "newline_index", - path: "$[\n0]", - exp: spec.Query(true, spec.Child(spec.Index(0))), - }, - { - name: "return_index", - path: "$[\r0]", - exp: spec.Query(true, spec.Child(spec.Index(0))), - }, - { - name: "name_space", - path: `$["hi" ]`, - exp: spec.Query(true, spec.Child(spec.Name("hi"))), - }, - { - name: "wildcard_tab", - path: "$[*\t]", - exp: spec.Query(true, spec.Child(spec.Wildcard())), - }, - { - name: "slice_newline", - path: "$[2:\t]", - exp: spec.Query(true, spec.Child(spec.Slice(2))), - }, - { - name: "index_return", - path: "$[0\r]", - exp: spec.Query(true, spec.Child(spec.Index(0))), - }, - { - name: 
"descendant_index", - path: "$..[0]", - exp: spec.Query(true, spec.Descendant(spec.Index(0))), - }, - { - name: "descendant_name", - path: `$..["hi"]`, - exp: spec.Query(true, spec.Descendant(spec.Name("hi"))), - }, - { - name: "descendant_multi", - path: `$..[ "hi", 2, *, 4:5 ]`, - exp: spec.Query(true, spec.Descendant( - spec.Name("hi"), - spec.Index(2), - spec.Wildcard(), - spec.Slice(4, 5), - )), - }, - { - name: "invalid_descendant", - path: "$..[oops]", - err: `jsonpath: unexpected identifier at position 5`, - }, - { - name: "invalid_unicode_escape", - path: `$["fo\uu0f8"]`, - err: `jsonpath: invalid escape after backslash at position 8`, - }, - { - name: "invalid_integer", - path: `$[170141183460469231731687303715884105727]`, // too large - err: `jsonpath: cannot parse "170141183460469231731687303715884105727", value out of range at position 3`, - }, - { - name: "invalid_slice_float", - path: `$[:170141183460469231731687303715884105727]`, // too large - err: `jsonpath: cannot parse "170141183460469231731687303715884105727", value out of range at position 4`, - }, - { - name: `name_sq_name_desc_wild`, - path: `$.names['first_name']..*`, - exp: spec.Query( - true, - spec.Child(spec.Name("names")), - spec.Child(spec.Name("first_name")), - spec.Descendant(spec.Wildcard()), - ), - }, - { - name: "no_tail", - path: `$.a['b']tail`, - err: `jsonpath: unexpected identifier at position 9`, - }, - { - name: "dq_name", - path: `$["name"]`, - exp: spec.Query(true, spec.Child(spec.Name("name"))), - }, - { - name: "sq_name", - path: `$['name']`, - exp: spec.Query(true, spec.Child(spec.Name("name"))), - }, - { - name: "two_name_segment", - path: `$["name","test"]`, - exp: spec.Query( - true, - spec.Child(spec.Name("name"), spec.Name("test")), - ), - }, - { - name: "name_index_slice_segment", - path: `$['name',10,0:3]`, - exp: spec.Query( - true, - spec.Child(spec.Name("name"), spec.Index(10), spec.Slice(0, 3)), - ), - }, - { - name: "default_slice_wildcard_segment", - path: `$[::,*]`, - exp: spec.Query(true, spec.Child(spec.Slice(), spec.Wildcard())), - }, - { - name: "leading_zero_index", - path: `$[010]`, - err: `jsonpath: invalid number literal at position 3`, - }, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - q, err := Parse(reg, tc.path) - if tc.err == "" { - r.NoError(err) - a.Equal(tc.exp, q) - } else { - a.Nil(q) - r.EqualError(err, tc.err) - r.ErrorIs(err, ErrPathParse) - } - }) - } -} - -func TestMakeNumErr(t *testing.T) { - t.Parallel() - r := require.New(t) - - t.Run("parse_int", func(t *testing.T) { - t.Parallel() - _, numErr := strconv.ParseInt("170141183460469231731687303715884105727", 10, 64) - r.Error(numErr) - tok := token{invalid, "", 6} - err := makeNumErr(tok, numErr) - r.EqualError( - err, - `jsonpath: cannot parse "170141183460469231731687303715884105727", value out of range at position 7`, - ) - r.ErrorIs(err, ErrPathParse) - }) - - t.Run("parse_float", func(t *testing.T) { - t.Parallel() - _, numErr := strconv.ParseFloat("99e+1234", 64) - r.Error(numErr) - tok := token{invalid, "", 12} - err := makeNumErr(tok, numErr) - r.EqualError( - err, - `jsonpath: cannot parse "99e+1234", value out of range at position 13`, - ) - r.ErrorIs(err, ErrPathParse) - }) - - t.Run("other error", func(t *testing.T) { - t.Parallel() - myErr := errors.New("oops") - tok := token{invalid, "", 19} - err := makeNumErr(tok, myErr) - r.EqualError(err, `jsonpath: oops at position 20`) - r.ErrorIs(err, ErrPathParse) - }) -} - -func TestParsePathInt(t *testing.T) { - t.Parallel() - a := 
assert.New(t) - r := require.New(t) - - for _, tc := range []struct { - name string - input string - exp int64 - err string - }{ - {"zero", "0", 0, ""}, - {"1000", "1000", 1000, ""}, - {"neg_1000", "-1000", -1000, ""}, - { - name: "neg_zero", - input: "-0", - err: `jsonpath: invalid integer path value "-0" at position 4`, - }, - { - name: "too_big", - input: "9007199254740992", - err: `jsonpath: cannot parse "9007199254740992", value out of range at position 4`, - }, - { - name: "too_small", - input: "-9007199254740992", - err: `jsonpath: cannot parse "-9007199254740992", value out of range at position 4`, - }, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - num, err := parsePathInt(token{integer, tc.input, 3}) - if tc.err == "" { - r.NoError(err) - a.Equal(tc.exp, num) - } else { - a.Zero(num) - r.EqualError(err, tc.err) - r.ErrorIs(err, ErrPathParse) - } - }) - } -} - -func TestParseLiteral(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - for _, tc := range []struct { - name string - tok token - exp any - err string - }{ - { - name: "string", - tok: token{goString, "hello", 0}, - exp: "hello", - }, - { - name: "integer", - tok: token{integer, "42", 0}, - exp: int64(42), - }, - { - name: "float", - tok: token{number, "98.6", 0}, - exp: float64(98.6), - }, - { - name: "true", - tok: token{boolTrue, "", 0}, - exp: true, - }, - { - name: "false", - tok: token{boolFalse, "", 0}, - exp: false, - }, - { - name: "null", - tok: token{jsonNull, "", 0}, - exp: nil, - }, - { - name: "invalid_int", - tok: token{integer, "170141183460469231731687303715884105727", 5}, - err: `jsonpath: cannot parse "170141183460469231731687303715884105727", value out of range at position 6`, - }, - { - name: "invalid_float", - tok: token{number, "99e+1234", 3}, - err: `jsonpath: cannot parse "99e+1234", value out of range at position 4`, - }, - { - name: "non_literal_token", - tok: token{eof, "", 3}, - err: `jsonpath: unexpected eof at position 4`, - }, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - lit, err := parseLiteral(tc.tok) - if tc.err == "" { - r.NoError(err) - a.Equal(spec.Literal(tc.exp), lit) - } else { - a.Nil(lit) - r.EqualError(err, tc.err) - r.ErrorIs(err, ErrPathParse) - } - }) - } -} diff --git a/path.go b/path.go deleted file mode 100644 index 81d6b6c..0000000 --- a/path.go +++ /dev/null @@ -1,205 +0,0 @@ -// Package jsonpath implements RFC 9535 JSONPath query expressions. -package jsonpath - -import ( - "iter" - "slices" - - "github.com/theory/jsonpath/parser" - "github.com/theory/jsonpath/registry" - "github.com/theory/jsonpath/spec" -) - -// ErrPathParse errors are returned for path parse errors. -var ErrPathParse = parser.ErrPathParse - -// Path represents a [RFC 9535] JSONPath query. -// -// [RFC 9535]: https://www.rfc-editor.org/rfc/rfc9535.html -type Path struct { - q *spec.PathQuery -} - -// New creates and returns a new [Path] consisting of q. -func New(q *spec.PathQuery) *Path { - return &Path{q: q} -} - -// Parse parses path, a JSONPath query string, into a [Path]. Returns an -// [ErrPathParse] on parse failure. -func Parse(path string) (*Path, error) { - return NewParser().Parse(path) -} - -// MustParse parses path into a [Path]. Panics with an [ErrPathParse] on parse -// failure. -func MustParse(path string) *Path { - return NewParser().MustParse(path) -} - -// String returns a string representation of p. -func (p *Path) String() string { - return p.q.String() -} - -// Query returns p's root [spec.PathQuery]. 
-func (p *Path) Query() *spec.PathQuery { - return p.q -} - -// Select returns the nodes that JSONPath query p selects from input. -func (p *Path) Select(input any) NodeList { - return p.q.Select(nil, input) -} - -// SelectLocated returns the nodes that JSONPath query p selects from input as -// [spec.LocatedNode] values that pair the nodes with the [normalized paths] -// that identify them. Unless you have a specific need for the unique -// [spec.NormalizedPath] for each value, you probably want to use -// [Path.Select]. -// -// [normalized paths]: https://www.rfc-editor.org/rfc/rfc9535#section-2.7 -func (p *Path) SelectLocated(input any) LocatedNodeList { - return p.q.SelectLocated(nil, input, spec.Normalized()) -} - -// Parser parses JSONPath strings into [Path] values. -type Parser struct { - reg *registry.Registry -} - -// Option defines a parser option. -type Option func(*Parser) - -// WithRegistry configures a [Parser] with a [registry.Registry], which may -// contain function extensions. -func WithRegistry(reg *registry.Registry) Option { - return func(p *Parser) { p.reg = reg } -} - -// NewParser creates a new [Parser] configured by opt. -func NewParser(opt ...Option) *Parser { - p := &Parser{} - for _, o := range opt { - o(p) - } - - if p.reg == nil { - p.reg = registry.New() - } - - return p -} - -// Parse parses path, a JSONPath query string, into a [Path]. Returns an -// [ErrPathParse] on parse failure. -func (c *Parser) Parse(path string) (*Path, error) { - q, err := parser.Parse(c.reg, path) - if err != nil { - //nolint:wrapcheck - return nil, err - } - return New(q), nil -} - -// MustParse parses path, a JSONPath query string, into a [Path]. Panics with -// an [ErrPathParse] on parse failure. -func (c *Parser) MustParse(path string) *Path { - q, err := parser.Parse(c.reg, path) - if err != nil { - panic(err) - } - return New(q) -} - -// NodeList is a list of nodes selected by a JSONPath query. Each node -// represents a single JSON value selected from the JSON query argument. -// Returned by [Path.Select]. -type NodeList []any - -// All returns an iterator over all the nodes in list. -// -// Range over list itself to get indexes as well as values. -func (list NodeList) All() iter.Seq[any] { - return func(yield func(any) bool) { - for _, v := range list { - if !yield(v) { - return - } - } - } -} - -// LocatedNodeList is a list of nodes selected by a JSONPath query, along with -// their [NormalizedPath] locations. Returned by [Path.SelectLocated]. -type LocatedNodeList []*spec.LocatedNode - -// All returns an iterator over all the nodes in list. -// -// Range over list itself to get indexes and node values. -func (list LocatedNodeList) All() iter.Seq[*spec.LocatedNode] { - return func(yield func(*spec.LocatedNode) bool) { - for _, v := range list { - if !yield(v) { - return - } - } - } -} - -// Nodes returns an iterator over all the nodes in list. This is the same data -// as returned by [Path.Select]. -func (list LocatedNodeList) Nodes() iter.Seq[any] { - return func(yield func(any) bool) { - for _, v := range list { - if !yield(v.Node) { - return - } - } - } -} - -// Paths returns an iterator over all the [NormalizedPath] values in list. -func (list LocatedNodeList) Paths() iter.Seq[spec.NormalizedPath] { - return func(yield func(spec.NormalizedPath) bool) { - for _, v := range list { - if !yield(v.Path) { - return - } - } - } -} - -// Deduplicate deduplicates the nodes in list based on their [NormalizedPath] -// values, modifying the contents of list. 
It returns the modified list, which -// may have a shorter length, and zeroes the elements between the new length -// and the original length. -func (list LocatedNodeList) Deduplicate() LocatedNodeList { - if len(list) <= 1 { - return list - } - - seen := map[string]struct{}{} - uniq := list[:0] - for _, n := range list { - p := n.Path.String() - if _, x := seen[p]; !x { - seen[p] = struct{}{} - uniq = append(uniq, n) - } - } - clear(list[len(uniq):]) // zero/nil out the obsolete elements, for GC - return slices.Clip(uniq) -} - -// Sort sorts list by the [NormalizedPath] of each node. -func (list LocatedNodeList) Sort() { - slices.SortFunc(list, func(a, b *spec.LocatedNode) int { - return a.Path.Compare(b.Path) - }) -} - -// Clone returns a shallow copy of list. -func (list LocatedNodeList) Clone() LocatedNodeList { - return append(make(LocatedNodeList, 0, len(list)), list...) -} diff --git a/path_example_test.go b/path_example_test.go deleted file mode 100644 index 72399d2..0000000 --- a/path_example_test.go +++ /dev/null @@ -1,341 +0,0 @@ -package jsonpath_test - -import ( - "encoding/json" - "errors" - "fmt" - "log" - - "github.com/theory/jsonpath" - "github.com/theory/jsonpath/registry" - "github.com/theory/jsonpath/spec" -) - -// Select all the authors of the books in a bookstore object. -func Example() { - // Parse a jsonpath query. - p, err := jsonpath.Parse(`$.store.book[*].author`) - if err != nil { - log.Fatal(err) - } - - // Select values from unmarshaled JSON input. - store := bookstore() - nodes := p.Select(store) - - // Show the selected values. - items, err := json.Marshal(nodes) - if err != nil { - log.Fatal(err) - } - fmt.Printf("%s\n", items) - - // Output: ["Nigel Rees","Evelyn Waugh","Herman Melville","J. R. R. Tolkien"] -} - -func ExamplePath_Select() { - // Load some JSON. - menu := map[string]any{ - "apps": map[string]any{ - "guacamole": 19.99, - "salsa": 5.99, - }, - } - - // Parse a JSONPath and select from the input. - p := jsonpath.MustParse("$.apps.*") - nodes := p.Select(menu) - - // Show the selected values. - for node := range nodes.All() { - fmt.Printf("%v\n", node) - } - // Unordered output: - // 19.99 - // 5.99 -} - -func ExamplePath_SelectLocated() { - // Load some JSON. - menu := map[string]any{ - "apps": map[string]any{ - "guacamole": 19.99, - "salsa": 5.99, - }, - } - - // Parse a JSONPath and select from the input. - p := jsonpath.MustParse("$.apps.*") - nodes := p.SelectLocated(menu) - - // Show the selected nodes. - for node := range nodes.All() { - fmt.Printf("%v: %v\n", node.Path, node.Node) - } - - // Unordered output: - // $['apps']['guacamole']: 19.99 - // $['apps']['salsa']: 5.99 -} - -func ExampleLocatedNodeList() { - // Load some JSON. - menu := map[string]any{ - "apps": map[string]any{ - "guacamole": 19.99, - "salsa": 5.99, - }, - } - - // Parse a JSONPath and select from the input. - p := jsonpath.MustParse(`$.apps["salsa", "guacamole"]`) - nodes := p.SelectLocated(menu) - - // Show the nodes. - fmt.Println("Nodes:") - for n := range nodes.Nodes() { - fmt.Printf(" %v\n", n) - } - - // Show the paths. - fmt.Println("\nPaths:") - for p := range nodes.Paths() { - fmt.Printf(" %v\n", p) - } - - // Output: - // Nodes: - // 5.99 - // 19.99 - // - // Paths: - // $['apps']['salsa'] - // $['apps']['guacamole'] -} - -func ExampleLocatedNodeList_Deduplicate() { - // Load some JSON. - pallet := map[string]any{"colors": []any{"red", "blue"}} - - // Parse a JSONPath and select from the input. 
- p := jsonpath.MustParse("$.colors[0, 1, 1, 0]") - nodes := p.SelectLocated(pallet) - fmt.Printf("Items: %v\n", len(nodes)) - - // Deduplicate - nodes = nodes.Deduplicate() - fmt.Printf("Items: %v\n", len(nodes)) - - // Output: - // Items: 4 - // Items: 2 -} - -func ExampleLocatedNodeList_Sort() { - // Load some JSON. - pallet := map[string]any{"colors": []any{"red", "blue", "green"}} - - // Parse a JSONPath and select from the input. - p := jsonpath.MustParse("$.colors[2, 0, 1]") - nodes := p.SelectLocated(pallet) - - // Show selected. - fmt.Println("Selected:") - for _, node := range nodes { - fmt.Printf(" %v: %v\n", node.Path, node.Node) - } - - // Sort by normalized paths and show selected again. - nodes.Sort() - fmt.Println("\nSorted:") - for _, node := range nodes { - fmt.Printf(" %v: %v\n", node.Path, node.Node) - } - - // Output: - // Selected: - // $['colors'][2]: green - // $['colors'][0]: red - // $['colors'][1]: blue - // - // Sorted: - // $['colors'][0]: red - // $['colors'][1]: blue - // $['colors'][2]: green -} - -func ExampleLocatedNodeList_Clone() { - // Load some JSON. - items := []any{1, 2, 3, 4, 5} - - // Parse a JSONPath and select from the input. - p := jsonpath.MustParse("$[2, 0, 1, 0, 1]") - nodes := p.SelectLocated(items) - - // Clone the selected nodes then deduplicate. - orig := nodes.Clone() - nodes = nodes.Deduplicate() - - // Cloned nodes have the original count. - fmt.Printf("Unique Count: %v\nOriginal Count: %v\n", len(nodes), len(orig)) - - // Output: - // Unique Count: 3 - // Original Count: 5 -} - -// Use the Parser to parse a collection of paths. -func ExampleParser() { - // Create a new parser using the default function registry. - parser := jsonpath.NewParser() - - // Parse a list of paths. - paths := []*jsonpath.Path{} - for _, path := range []string{ - "$.store.book[*].author", - "$..author", - "$.store..color", - "$..book[2].author", - "$..book[2].publisher", - "$..book[?@.isbn].title", - "$..book[?@.price<10].title", - } { - p, err := parser.Parse(path) - if err != nil { - log.Fatal(err) - } - paths = append(paths, p) - } - - // Later, use the paths to select from JSON inputs. - store := bookstore() - for _, p := range paths { - items := p.Select(store) - array, err := json.Marshal(items) - if err != nil { - log.Fatal(err) - } - - fmt.Printf("%s\n", array) - } - // Output: - // ["Nigel Rees","Evelyn Waugh","Herman Melville","J. R. R. Tolkien"] - // ["Nigel Rees","Evelyn Waugh","Herman Melville","J. R. R. Tolkien"] - // ["red"] - // ["Herman Melville"] - // [] - // ["Moby Dick","The Lord of the Rings"] - // ["Sayings of the Century","Moby Dick"] -} - -// Use WithRegistry to create a [Parser] that uses a [registry.Registry] -// containing function extensions, as [defined by the standard]. This example -// creates a function named "first" that returns the first item in a list of -// nodes. -// -// [defined by the standard]: https://www.rfc-editor.org/rfc/rfc9535.html#name-function-extensions -func ExampleWithRegistry() { - // Register the first function. - reg := registry.New() - err := reg.Register( - "first", // name - spec.FuncValue, // returns a single value - validateFirstArgs, // parse-time validation defined below - firstFunc, // function defined below - ) - if err != nil { - log.Fatalf("Error %v", err) - } - - // Create a parser with the registry that contains the extension. - parser := jsonpath.NewParser(jsonpath.WithRegistry(reg)) - - // Use the function to select lists that start with 6. - path, err := parser.Parse("$[? 
first(@.*) == 6]") - if err != nil { - log.Fatalf("Error %v", err) - } - - // Do any of these arrays start with 6? - input := []any{ - []any{1, 2, 3, 4, 5}, - []any{6, 7, 8, 9}, - []any{4, 8, 12}, - } - nodes := path.Select(input) - fmt.Printf("%v\n", nodes) - // Output: [[6 7 8 9]] -} - -// validateFirstArgs validates that a single argument is passed to the first() -// function, and that it can be converted to [spec.NodesType], so that first() -// can return the first node. It's called by the parser. -func validateFirstArgs(args []spec.FuncExprArg) error { - if len(args) != 1 { - return fmt.Errorf("expected 1 argument but found %v", len(args)) - } - - if !args[0].ConvertsTo(spec.FuncNodes) { - return errors.New("cannot convert argument to nodes") - } - - return nil -} - -// firstFunc defines the custom first() JSONPath function. It converts its -// single argument to a [spec.NodesType] value and returns a [spec.ValueType] -// that contains the first node. If there are no nodes it returns nil. -func firstFunc(jv []spec.PathValue) spec.PathValue { - nodes := spec.NodesFrom(jv[0]) - if len(nodes) == 0 { - return nil - } - return spec.Value(nodes[0]) -} - -// bookstore returns an unmarshaled JSON object. -func bookstore() any { - src := []byte(`{ - "store": { - "book": [ - { - "category": "reference", - "author": "Nigel Rees", - "title": "Sayings of the Century", - "price": 8.95 - }, - { - "category": "fiction", - "author": "Evelyn Waugh", - "title": "Sword of Honour", - "price": 12.99 - }, - { - "category": "fiction", - "author": "Herman Melville", - "title": "Moby Dick", - "isbn": "0-553-21311-3", - "price": 8.99 - }, - { - "category": "fiction", - "author": "J. R. R. Tolkien", - "title": "The Lord of the Rings", - "isbn": "0-395-19395-8", - "price": 22.99 - } - ], - "bicycle": { - "color": "red", - "price": 399 - } - } - }`) - - // Parse the JSON. - var value any - if err := json.Unmarshal(src, &value); err != nil { - log.Fatal(err) - } - return value -} diff --git a/path_test.go b/path_test.go deleted file mode 100644 index 066c300..0000000 --- a/path_test.go +++ /dev/null @@ -1,618 +0,0 @@ -package jsonpath - -import ( - "encoding/json" - "fmt" - "os" - "path/filepath" - "slices" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - "github.com/theory/jsonpath/registry" - "github.com/theory/jsonpath/spec" -) - -func book(idx int) spec.NormalizedPath { - return spec.Normalized(spec.Name("store"), spec.Name("book"), spec.Index(idx)) -} - -func TestParseSpecExamples(t *testing.T) { - t.Parallel() - a := assert.New(t) - val := specExampleJSON(t) - store, _ := val["store"].(map[string]any) - books, _ := store["book"].([]any) - - for _, tc := range []struct { - name string - path string - exp NodeList - loc LocatedNodeList - size int - rand bool - }{ - { - name: "example_1", - path: `$.store.book[*].author`, - exp: NodeList{"Nigel Rees", "Evelyn Waugh", "Herman Melville", "J. R. R. Tolkien"}, - loc: LocatedNodeList{ - {Path: append(book(0), spec.Name("author")), Node: "Nigel Rees"}, - {Path: append(book(1), spec.Name("author")), Node: "Evelyn Waugh"}, - {Path: append(book(2), spec.Name("author")), Node: "Herman Melville"}, - {Path: append(book(3), spec.Name("author")), Node: "J. R. R. Tolkien"}, - }, - }, - { - name: "example_2", - path: `$..author`, - exp: NodeList{"Nigel Rees", "Evelyn Waugh", "Herman Melville", "J. R. R. 
Tolkien"}, - loc: LocatedNodeList{ - {Path: append(book(0), spec.Name("author")), Node: "Nigel Rees"}, - {Path: append(book(1), spec.Name("author")), Node: "Evelyn Waugh"}, - {Path: append(book(2), spec.Name("author")), Node: "Herman Melville"}, - {Path: append(book(3), spec.Name("author")), Node: "J. R. R. Tolkien"}, - }, - }, - { - name: "example_3", - path: `$.store.*`, - exp: NodeList{store["book"], store["bicycle"]}, - loc: LocatedNodeList{ - {Path: norm("store", "book"), Node: store["book"]}, - {Path: norm("store", "bicycle"), Node: store["bicycle"]}, - }, - rand: true, - }, - { - name: "example_4", - path: `$.store..price`, - exp: NodeList{399., 8.95, 12.99, 8.99, 22.99}, - loc: LocatedNodeList{ - {Path: norm("store", "bicycle", "price"), Node: 399.}, - {Path: append(book(0), spec.Name("price")), Node: 8.95}, - {Path: append(book(1), spec.Name("price")), Node: 12.99}, - {Path: append(book(2), spec.Name("price")), Node: 8.99}, - {Path: append(book(3), spec.Name("price")), Node: 22.99}, - }, - rand: true, - }, - { - name: "example_5", - path: `$..book[2]`, - exp: NodeList{books[2]}, - loc: []*spec.LocatedNode{{Path: book(2), Node: books[2]}}, - }, - { - name: "example_6", - path: `$..book[-1]`, - exp: NodeList{books[3]}, - loc: []*spec.LocatedNode{{Path: book(3), Node: books[3]}}, - }, - { - name: "example_7", - path: `$..book[0,1]`, - exp: NodeList{books[0], books[1]}, - loc: LocatedNodeList{ - {Path: book(0), Node: books[0]}, - {Path: book(1), Node: books[1]}, - }, - }, - { - name: "example_8", - path: `$..book[?(@.isbn)]`, - exp: NodeList{books[2], books[3]}, - loc: LocatedNodeList{ - {Path: book(2), Node: books[2]}, - {Path: book(3), Node: books[3]}, - }, - }, - { - name: "example_9", - path: `$..book[?(@.price<10)]`, - exp: NodeList{books[0], books[2]}, - loc: LocatedNodeList{ - {Path: book(0), Node: books[0]}, - {Path: book(2), Node: books[2]}, - }, - }, - { - name: "example_10", - path: `$..*`, - size: 27, - rand: true, - }, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - p := MustParse(tc.path) - a.Equal(p.q, p.Query()) - a.Equal(p.q.String(), p.String()) - res := p.Select(val) - loc := p.SelectLocated(val) - - if tc.exp != nil { - if tc.rand { - a.ElementsMatch(tc.exp, res) - a.ElementsMatch(tc.loc, loc) - } else { - a.Equal(tc.exp, res) - a.Equal(tc.loc, loc) - } - } else { - a.Len(res, tc.size) - a.Len(loc, tc.size) - } - }) - } -} - -func specExampleJSON(t *testing.T) map[string]any { - t.Helper() - src := []byte(`{ - "store": { - "book": [ - { - "category": "reference", - "author": "Nigel Rees", - "title": "Sayings of the Century", - "price": 8.95 - }, - { - "category": "fiction", - "author": "Evelyn Waugh", - "title": "Sword of Honour", - "price": 12.99 - }, - { - "category": "fiction", - "author": "Herman Melville", - "title": "Moby Dick", - "isbn": "0-553-21311-3", - "price": 8.99 - }, - { - "category": "fiction", - "author": "J. R. R. 
Tolkien", - "title": "The Lord of the Rings", - "isbn": "0-395-19395-8", - "price": 22.99 - } - ], - "bicycle": { - "color": "red", - "price": 399 - } - } - }`) - - var value map[string]any - if err := json.Unmarshal(src, &value); err != nil { - t.Fatal(err) - } - - return value -} - -func TestParseCompliance(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - p := NewParser() - - //nolint:tagliatelle - type testCase struct { - Name string `json:"name"` - Selector string `json:"selector"` - Document any `json:"document"` - Result NodeList `json:"result"` - Results []NodeList `json:"results"` - ResultPaths []string `json:"result_paths"` - ResultsPaths [][]string `json:"results_paths"` - InvalidSelector bool `json:"invalid_selector"` - } - - rawJSON, err := os.ReadFile( - filepath.Join("jsonpath-compliance-test-suite", "cts.json"), - ) - r.NoError(err) - var ts struct { - Tests []testCase `json:"tests"` - } - if err := json.Unmarshal(rawJSON, &ts); err != nil { - t.Fatal(err) - } - - for i, tc := range ts.Tests { - t.Run(fmt.Sprintf("test_%03d", i), func(t *testing.T) { - t.Parallel() - description := fmt.Sprintf("%v: `%v`", tc.Name, tc.Selector) - p, err := p.Parse(tc.Selector) - if tc.InvalidSelector { - r.Error(err, description) - r.ErrorIs(err, ErrPathParse) - a.Nil(p, description) - return - } - - r.NoError(err, description) - a.NotNil(p, description) - - res := p.Select(tc.Document) - switch { - case tc.Result != nil: - a.Equal(tc.Result, res, description) - case tc.Results != nil: - a.Contains(tc.Results, res, description) - } - - // Assemble and test located nodes. - nodes := NodeList{} - paths := []string{} - for l := range p.SelectLocated(tc.Document).All() { - nodes = append(nodes, l.Node) - paths = append(paths, l.Path.String()) - } - - switch { - case tc.ResultPaths != nil: - a.Equal(tc.ResultPaths, paths, description) - a.Equal(tc.Result, nodes, description) - case tc.ResultsPaths != nil: - a.Contains(tc.Results, nodes, description) - a.Contains(tc.ResultsPaths, paths, description) - } - }) - } -} - -func TestParser(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - reg := registry.New() - - for _, tc := range []struct { - name string - path string - reg *registry.Registry - exp *Path - err string - }{ - { - name: "root", - path: "$", - exp: MustParse("$"), - }, - { - name: "root_reg", - path: "$", - reg: reg, - exp: MustParse("$"), - }, - { - name: "parse_error", - path: "lol", - err: "jsonpath: unexpected identifier at position 1", - }, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - - // Construct a parser. - var parser *Parser - if tc.reg == nil { - parser = NewParser() - } else { - parser = NewParser(WithRegistry(tc.reg)) - a.Equal(tc.reg, parser.reg) - } - - // Test Parse and MustParse methods. - p, err := parser.Parse(tc.path) - if tc.err == "" { - r.NoError(err) - a.Equal(tc.exp, p) - a.Equal(tc.exp, parser.MustParse(tc.path)) - } else { - r.EqualError(err, tc.err) - r.ErrorIs(err, ErrPathParse) - a.PanicsWithError(tc.err, func() { parser.MustParse(tc.path) }) - } - - if tc.reg == nil { - // Test Parse and MustParse functions. 
- if tc.err == "" { - r.NoError(err) - a.Equal(tc.exp, p) - a.Equal(tc.exp, parser.MustParse(tc.path)) - } else { - r.EqualError(err, tc.err) - r.ErrorIs(err, ErrPathParse) - a.PanicsWithError(tc.err, func() { parser.MustParse(tc.path) }) - } - } - }) - } -} - -func norm(sel ...any) spec.NormalizedPath { - path := make(spec.NormalizedPath, len(sel)) - for i, s := range sel { - switch s := s.(type) { - case string: - path[i] = spec.Name(s) - case int: - path[i] = spec.Index(s) - default: - panic(fmt.Sprintf("Invalid normalized path selector %T", s)) - } - } - return path -} - -func TestNodeList(t *testing.T) { - t.Parallel() - a := assert.New(t) - - for _, tc := range []struct { - name string - list NodeList - }{ - { - name: "empty", - list: NodeList{}, - }, - { - name: "one_node", - list: NodeList{true}, - }, - { - name: "two_nodes", - list: NodeList{true, "hi"}, - }, - { - name: "dupe_nodes", - list: NodeList{"hi", "hi"}, - }, - { - name: "many_nodes", - list: NodeList{"hi", true, nil, "hi", 42, 98.6, []any{99}, map[string]any{"hi": "go"}}, - }, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - // Test iterators. - if len(tc.list) == 0 { - a.Nil(slices.Collect(tc.list.All())) - } else { - a.Equal([]any(tc.list), slices.Collect(tc.list.All())) - } - }) - } -} - -func TestLocatedNodeList(t *testing.T) { - t.Parallel() - a := assert.New(t) - - for _, tc := range []struct { - name string - list LocatedNodeList - nodes []any - paths []spec.NormalizedPath - uniq LocatedNodeList - sort LocatedNodeList - }{ - { - name: "empty", - list: LocatedNodeList{}, - uniq: LocatedNodeList{}, - sort: LocatedNodeList{}, - }, - { - name: "one_node", - list: LocatedNodeList{{Path: norm("foo"), Node: 42}}, - nodes: []any{42}, - paths: []spec.NormalizedPath{norm("foo")}, - uniq: LocatedNodeList{{Path: norm("foo"), Node: 42}}, - sort: LocatedNodeList{{Path: norm("foo"), Node: 42}}, - }, - { - name: "two_names", - list: LocatedNodeList{ - {Path: norm("foo", "bar"), Node: true}, - }, - nodes: []any{true}, - paths: []spec.NormalizedPath{norm("foo", "bar")}, - uniq: LocatedNodeList{ - {Path: norm("foo", "bar"), Node: true}, - }, - sort: LocatedNodeList{ - {Path: norm("foo", "bar"), Node: true}, - }, - }, - { - name: "two_nodes", - list: LocatedNodeList{ - {Path: norm("foo"), Node: 42}, - {Path: norm("bar"), Node: true}, - }, - nodes: []any{42, true}, - paths: []spec.NormalizedPath{norm("foo"), norm("bar")}, - uniq: LocatedNodeList{ - {Path: norm("foo"), Node: 42}, - {Path: norm("bar"), Node: true}, - }, - // Sort strings. - sort: LocatedNodeList{ - {Path: norm("bar"), Node: true}, - {Path: norm("foo"), Node: 42}, - }, - }, - { - name: "three_nodes", - list: LocatedNodeList{ - {Path: norm("foo"), Node: 42}, - {Path: norm("bar"), Node: true}, - {Path: norm(42), Node: "hi"}, - }, - nodes: []any{42, true, "hi"}, - paths: []spec.NormalizedPath{norm("foo"), norm("bar"), norm(42)}, - uniq: LocatedNodeList{ - {Path: norm("foo"), Node: 42}, - {Path: norm("bar"), Node: true}, - {Path: norm(42), Node: "hi"}, - }, - // Indexes before strings. 
- sort: LocatedNodeList{ - {Path: norm(42), Node: "hi"}, - {Path: norm("bar"), Node: true}, - {Path: norm("foo"), Node: 42}, - }, - }, - { - name: "two_nodes_diff_lengths", - list: LocatedNodeList{ - {Path: norm("foo"), Node: 42}, - {Path: norm("bar", "baz"), Node: true}, - }, - nodes: []any{42, true}, - paths: []spec.NormalizedPath{norm("foo"), norm("bar", "baz")}, - uniq: LocatedNodeList{ - {Path: norm("foo"), Node: 42}, - {Path: norm("bar", "baz"), Node: true}, - }, - // Sort strings. - sort: LocatedNodeList{ - {Path: norm("bar", "baz"), Node: true}, - {Path: norm("foo"), Node: 42}, - }, - }, - { - name: "two_nodes_diff_lengths_reverse", - list: LocatedNodeList{ - {Path: norm("foo", "baz"), Node: 42}, - {Path: norm("bar"), Node: true}, - }, - nodes: []any{42, true}, - paths: []spec.NormalizedPath{norm("foo", "baz"), norm("bar")}, - uniq: LocatedNodeList{ - {Path: norm("foo", "baz"), Node: 42}, - {Path: norm("bar"), Node: true}, - }, - // Sort strings. - sort: LocatedNodeList{ - {Path: norm("bar"), Node: true}, - {Path: norm("foo", "baz"), Node: 42}, - }, - }, - { - name: "dupe_nodes", - list: LocatedNodeList{ - {Path: norm("foo"), Node: 42}, - {Path: norm("bar"), Node: true}, - {Path: norm("foo"), Node: 42}, - }, - nodes: []any{42, true, 42}, - paths: []spec.NormalizedPath{norm("foo"), norm("bar"), norm("foo")}, - uniq: LocatedNodeList{ - {Path: norm("foo"), Node: 42}, - {Path: norm("bar"), Node: true}, - }, - sort: LocatedNodeList{ - {Path: norm("bar"), Node: true}, - {Path: norm("foo"), Node: 42}, - {Path: norm("foo"), Node: 42}, - }, - }, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - - // Test iterators. - if len(tc.list) == 0 { - a.Nil(slices.Collect(tc.list.All())) - } else { - a.Equal([]*spec.LocatedNode(tc.list), slices.Collect(tc.list.All())) - } - a.Equal(tc.nodes, slices.Collect(tc.list.Nodes())) - a.Equal(tc.paths, slices.Collect(tc.list.Paths())) - - // Test Clone. - list := tc.list.Clone() - a.Equal(tc.list, list) - - // Test Deduplicate - uniq := list.Deduplicate() - a.Equal(tc.uniq, uniq) - // Additional capacity in list should be zero - for i := len(uniq); i < len(list); i++ { - a.Zero(list[i]) - } - - // Test Sort - list = tc.list.Clone() - list.Sort() - a.Equal(tc.sort, list) - }) - } -} - -func TestNodeListIterators(t *testing.T) { - t.Parallel() - a := assert.New(t) - - list := NodeList{true, 42, "hi"} - - // Fetch a single node. - for node := range list.All() { - a.Equal(true, node) - break - } - - // Should be able to fetch them all after break. - a.Equal([]any{true, 42, "hi"}, slices.Collect(list.All())) -} - -func TestLocatedNodeListIterators(t *testing.T) { - t.Parallel() - a := assert.New(t) - - list := LocatedNodeList{ - {Path: norm("bar"), Node: true}, - {Path: norm("foo", "baz"), Node: 42}, - {Path: norm(1, 2), Node: "hi"}, - } - - // Fetch a single node. - for node := range list.All() { - a.Equal(true, node.Node) - break - } - - // Should be able to fetch them all after break. - a.Equal([]*spec.LocatedNode(list), slices.Collect(list.All())) - - // Fetch a single node. - for node := range list.Nodes() { - a.Equal(true, node) - break - } - - // Should be able to fetch them all after break. - a.Equal([]any{true, 42, "hi"}, slices.Collect(list.Nodes())) - - // Fetch a single path. - for path := range list.Paths() { - a.Equal(path, norm("bar")) - break - } - - // Should be able to fetch them all after break. 
- a.Equal( - []spec.NormalizedPath{norm("bar"), norm("foo", "baz"), norm(1, 2)}, - slices.Collect(list.Paths()), - ) -} diff --git a/registry/funcs.go b/registry/funcs.go deleted file mode 100644 index 8f9a695..0000000 --- a/registry/funcs.go +++ /dev/null @@ -1,207 +0,0 @@ -package registry - -import ( - "errors" - "fmt" - "regexp" - "regexp/syntax" - "unicode/utf8" - - "github.com/theory/jsonpath/spec" -) - -// checkLengthArgs checks the argument expressions to length() and returns an -// error if there is not exactly one expression that results in a compatible -// [spec.FuncValue] value. -func checkLengthArgs(args []spec.FuncExprArg) error { - if len(args) != 1 { - return fmt.Errorf("expected 1 argument but found %v", len(args)) - } - - if !args[0].ConvertsTo(spec.FuncValue) { - return errors.New("cannot convert argument to Value") - } - - return nil -} - -// lengthFunc extracts the single argument passed in jv and returns its -// length. Panics if jv[0] doesn't exist or is not convertible to -// [spec.ValueType]. -// -// - if jv[0] is nil, the result is nil -// - If jv[0] is a string, the result is the number of Unicode scalar values -// in the string. -// - If jv[0] is a []any, the result is the number of elements in the slice. -// - If jv[0] is an map[string]any, the result is the number of members in -// the map. -// - For any other value, the result is nil. -func lengthFunc(jv []spec.PathValue) spec.PathValue { - v := spec.ValueFrom(jv[0]) - if v == nil { - return nil - } - switch v := v.Value().(type) { - case string: - // Unicode scalar values - return spec.Value(utf8.RuneCountInString(v)) - case []any: - return spec.Value(len(v)) - case map[string]any: - return spec.Value(len(v)) - default: - return nil - } -} - -// checkCountArgs checks the argument expressions to count() and returns an -// error if there is not exactly one expression that results in a -// [spec.FuncNodes]-compatible value. -func checkCountArgs(args []spec.FuncExprArg) error { - if len(args) != 1 { - return fmt.Errorf("expected 1 argument but found %v", len(args)) - } - - if !args[0].ConvertsTo(spec.FuncNodes) { - return errors.New("cannot convert argument to Nodes") - } - - return nil -} - -// countFunc implements the [RFC 9535]-standard count function. The result is -// a ValueType containing an unsigned integer for the number of nodes in -// jv[0]. Panics if jv[0] doesn't exist or is not convertible to -// [spec.NodesType]. -func countFunc(jv []spec.PathValue) spec.PathValue { - return spec.Value(len(spec.NodesFrom(jv[0]))) -} - -// checkValueArgs checks the argument expressions to value() and returns an -// error if there is not exactly one expression that results in a -// [spec.FuncNodes]-compatible value. -func checkValueArgs(args []spec.FuncExprArg) error { - if len(args) != 1 { - return fmt.Errorf("expected 1 argument but found %v", len(args)) - } - - if !args[0].ConvertsTo(spec.FuncNodes) { - return errors.New("cannot convert argument to Nodes") - } - - return nil -} - -// valueFunc implements the [RFC 9535]-standard value function. Panics if -// jv[0] doesn't exist or is not convertible to [spec.NodesType]. Otherwise: -// -// - If jv[0] contains a single node, the result is the value of the node. -// - If jv[0] is empty or contains multiple nodes, the result is nil. 
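An illustrative aside before the implementation: the standard value() extension is easiest to understand from the caller's side. This sketch uses the package's own Parse/Select API as shown in the example files earlier in this diff; the input slice and query are invented for illustration, and the assumed behavior is RFC 9535's (one matching node yields its value; zero or several yield nothing).

package main

import (
	"fmt"
	"log"

	"github.com/theory/jsonpath"
)

func main() {
	// value(@..color) produces the single selected node, or nothing when
	// the embedded query selects zero or more than one node.
	p, err := jsonpath.Parse(`$[?value(@..color) == "red"]`)
	if err != nil {
		log.Fatal(err)
	}

	input := []any{
		map[string]any{"color": "red"}, // one node: value() returns "red"
		map[string]any{ // two nodes: value() returns nothing, no match
			"a": map[string]any{"color": "red"},
			"b": map[string]any{"color": "blue"},
		},
	}
	fmt.Println(p.Select(input)) // [map[color:red]]
}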
-func valueFunc(jv []spec.PathValue) spec.PathValue { - nodes := spec.NodesFrom(jv[0]) - if len(nodes) == 1 { - return spec.Value(nodes[0]) - } - return nil -} - -// checkMatchArgs checks the argument expressions to match() and returns an -// error if there are not exactly two expressions that result in compatible -// [spec.FuncValue] values. -func checkMatchArgs(args []spec.FuncExprArg) error { - const matchArgLen = 2 - if len(args) != matchArgLen { - return fmt.Errorf("expected 2 arguments but found %v", len(args)) - } - - for i, arg := range args { - if !arg.ConvertsTo(spec.FuncValue) { - return fmt.Errorf("cannot convert argument %v to Value", i+1) - } - } - - return nil -} - -// matchFunc implements the [RFC 9535]-standard match function. If jv[0] and -// jv[1] evaluate to strings, the second is compiled into a regular expression with -// implied \A and \z anchors and used to match the first, returning LogicalTrue for -// a match and LogicalFalse for no match. Returns LogicalFalse if either jv value -// is not a string or if jv[1] fails to compile. -func matchFunc(jv []spec.PathValue) spec.PathValue { - if v, ok := spec.ValueFrom(jv[0]).Value().(string); ok { - if r, ok := spec.ValueFrom(jv[1]).Value().(string); ok { - if rc := compileRegex(`\A` + r + `\z`); rc != nil { - return spec.Logical(rc.MatchString(v)) - } - } - } - return spec.LogicalFalse -} - -// checkSearchArgs checks the argument expressions to search() and returns an -// error if there are not exactly two expressions that result in compatible -// [spec.FuncValue] values. -func checkSearchArgs(args []spec.FuncExprArg) error { - const searchArgLen = 2 - if len(args) != searchArgLen { - return fmt.Errorf("expected 2 arguments but found %v", len(args)) - } - - for i, arg := range args { - if !arg.ConvertsTo(spec.FuncValue) { - return fmt.Errorf("cannot convert argument %v to Value", i+1) - } - } - - return nil -} - -// searchFunc implements the [RFC 9535]-standard search function. If both jv[0] -// and jv[1] contain strings, the latter is compiled into a regular expression and used -// to match the former, returning LogicalTrue for a match and LogicalFalse for no -// match. Returns LogicalFalse if either value is not a string, or if jv[1] -// fails to compile. -func searchFunc(jv []spec.PathValue) spec.PathValue { - if val, ok := spec.ValueFrom(jv[0]).Value().(string); ok { - if r, ok := spec.ValueFrom(jv[1]).Value().(string); ok { - if rc := compileRegex(r); rc != nil { - return spec.Logical(rc.MatchString(val)) - } - } - } - return spec.LogicalFalse -} - -// compileRegex compiles str into a regular expression or returns an error. To -// comply with RFC 9485 regular expression semantics, all instances of "." are -// replaced with "[^\n\r]". This sadly requires compiling the regex twice: -// once to produce an AST to replace "." nodes, and a second time for the -// final regex. -func compileRegex(str string) *regexp.Regexp { - // First compile AST and replace "." with [^\n\r]. - // https://www.rfc-editor.org/rfc/rfc9485.html#name-pcre-re2-and-ruby-regexps - r, err := syntax.Parse(str, syntax.Perl|syntax.DotNL) - if err != nil { - // Could use some way to log these errors rather than failing silently. - return nil - } - - replaceDot(r) - re, _ := regexp.Compile(r.String()) - return re -} - -//nolint:gochecknoglobals -var clrf, _ = syntax.Parse("[^\n\r]", syntax.Perl) - -// replaceDot recurses re to replace all "." nodes with "[^\n\r]" nodes. 
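A small, self-contained sketch (standard library only, not from the deleted file) of why the dot replacement above matters: Go's (?s) flag lets "." match newlines, while RFC 9485 requires "." to match any character except \n and \r, which is exactly what the [^\n\r] class produces.

package main

import (
	"fmt"
	"regexp"
)

func main() {
	input := "xx\nyz"

	// "(?s)." matches newlines; the rewritten "[^\n\r]" does not.
	goDot := regexp.MustCompile(`(?s)\A.*\z`)
	iRegexpDot := regexp.MustCompile(`\A[^\n\r]*\z`)

	fmt.Println(goDot.MatchString(input))      // true
	fmt.Println(iRegexpDot.MatchString(input)) // false
}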
-func replaceDot(re *syntax.Regexp) { - if re.Op == syntax.OpAnyChar { - *re = *clrf - } else { - for _, re := range re.Sub { - replaceDot(re) - } - } -} diff --git a/registry/funcs_test.go b/registry/funcs_test.go deleted file mode 100644 index 2ca2ada..0000000 --- a/registry/funcs_test.go +++ /dev/null @@ -1,486 +0,0 @@ -package registry - -import ( - "strings" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - "github.com/theory/jsonpath/spec" -) - -func TestLengthFunc(t *testing.T) { - t.Parallel() - a := assert.New(t) - - for _, tc := range []struct { - name string - vals []spec.PathValue - exp int - err string - }{ - { - name: "empty_string", - vals: []spec.PathValue{spec.Value("")}, - exp: 0, - }, - { - name: "ascii_string", - vals: []spec.PathValue{spec.Value("abc def")}, - exp: 7, - }, - { - name: "unicode_string", - vals: []spec.PathValue{spec.Value("foΓΆ")}, - exp: 3, - }, - { - name: "emoji_string", - vals: []spec.PathValue{spec.Value("Hi πŸ‘‹πŸ»")}, - exp: 5, - }, - { - name: "empty_array", - vals: []spec.PathValue{spec.Value([]any{})}, - exp: 0, - }, - { - name: "array", - vals: []spec.PathValue{spec.Value([]any{1, 2, 3, 4, 5})}, - exp: 5, - }, - { - name: "nested_array", - vals: []spec.PathValue{spec.Value([]any{1, 2, 3, "x", []any{456, 67}, true})}, - exp: 6, - }, - { - name: "empty_object", - vals: []spec.PathValue{spec.Value(map[string]any{})}, - exp: 0, - }, - { - name: "object", - vals: []spec.PathValue{spec.Value(map[string]any{"x": 1, "y": 0, "z": 2})}, - exp: 3, - }, - { - name: "nested_object", - vals: []spec.PathValue{spec.Value(map[string]any{ - "x": 1, - "y": 0, - "z": []any{1, 2}, - "a": map[string]any{"b": 9}, - })}, - exp: 4, - }, - { - name: "integer", - vals: []spec.PathValue{spec.Value(42)}, - exp: -1, - }, - { - name: "bool", - vals: []spec.PathValue{spec.Value(true)}, - exp: -1, - }, - { - name: "null", - vals: []spec.PathValue{spec.Value(nil)}, - exp: -1, - }, - { - name: "nil", - vals: []spec.PathValue{nil}, - exp: -1, - }, - { - name: "not_value", - vals: []spec.PathValue{spec.LogicalFalse}, - err: "cannot convert LogicalType to ValueType", - }, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - if tc.err != "" { - a.PanicsWithValue(tc.err, func() { - lengthFunc(tc.vals) - }) - return - } - res := lengthFunc(tc.vals) - if tc.exp < 0 { - a.Nil(res) - } else { - a.Equal(spec.Value(tc.exp), res) - } - }) - } -} - -func TestCheckSingularFuncArgs(t *testing.T) { - t.Parallel() - r := require.New(t) - reg := New() - - for _, tc := range []struct { - name string - expr []spec.FuncExprArg - err string - lengthErr string - countErr string - valueErr string - }{ - { - name: "no_args", - expr: []spec.FuncExprArg{}, - err: "expected 1 argument but found 0", - }, - { - name: "two_args", - expr: []spec.FuncExprArg{spec.Literal(nil), spec.Literal(nil)}, - err: "expected 1 argument but found 2", - }, - { - name: "literal_string", - expr: []spec.FuncExprArg{spec.Literal(nil)}, - countErr: "cannot convert argument to Nodes", - valueErr: "cannot convert argument to Nodes", - }, - { - name: "singular_query", - expr: []spec.FuncExprArg{spec.SingularQuery(false, nil)}, - }, - { - name: "nodes_query", - expr: []spec.FuncExprArg{ - spec.Query(true, spec.Child(spec.Name("x"))), - }, - }, - { - name: "logical_func_expr", - expr: []spec.FuncExprArg{spec.Function(reg.Get("match"), - spec.Query(true, spec.Child(spec.Name("x"))), - spec.Literal("hi"), - )}, - lengthErr: "cannot convert argument to Value", - countErr: "cannot 
convert argument to Nodes", - valueErr: "cannot convert argument to Nodes", - }, - { - name: "logical_or", - expr: []spec.FuncExprArg{spec.LogicalOr{}}, - lengthErr: "cannot convert argument to Value", - countErr: "cannot convert argument to Nodes", - valueErr: "cannot convert argument to Nodes", - }, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - // Test length args - err := checkLengthArgs(tc.expr) - switch { - case tc.err != "": - r.EqualError(err, tc.err) - case tc.lengthErr != "": - r.EqualError(err, tc.lengthErr) - default: - r.NoError(err) - } - - // Test count args - err = checkCountArgs(tc.expr) - switch { - case tc.err != "": - r.EqualError(err, tc.err) - case tc.countErr != "": - r.EqualError(err, tc.countErr) - default: - r.NoError(err) - } - - // Test value args - err = checkValueArgs(tc.expr) - switch { - case tc.err != "": - r.EqualError(err, tc.err) - case tc.valueErr != "": - r.EqualError(err, tc.valueErr) - default: - r.NoError(err) - } - }) - } -} - -func TestCheckRegexFuncArgs(t *testing.T) { - t.Parallel() - r := require.New(t) - reg := New() - - for _, tc := range []struct { - name string - expr []spec.FuncExprArg - err string - }{ - { - name: "no_args", - expr: []spec.FuncExprArg{}, - err: "expected 2 arguments but found 0", - }, - { - name: "one_arg", - expr: []spec.FuncExprArg{spec.Literal("hi")}, - err: "expected 2 arguments but found 1", - }, - { - name: "three_args", - expr: []spec.FuncExprArg{spec.Literal("hi"), spec.Literal("hi"), spec.Literal("hi")}, - err: "expected 2 arguments but found 3", - }, - { - name: "logical_or_1", - expr: []spec.FuncExprArg{&spec.LogicalOr{}, spec.Literal("hi")}, - err: "cannot convert argument 1 to Value", - }, - { - name: "logical_or_2", - expr: []spec.FuncExprArg{spec.Literal("hi"), spec.LogicalOr{}}, - err: "cannot convert argument 2 to Value", - }, - { - name: "singular_query_literal", - expr: []spec.FuncExprArg{&spec.SingularQueryExpr{}, spec.Literal("hi")}, - }, - { - name: "literal_singular_query", - expr: []spec.FuncExprArg{spec.Literal("hi"), &spec.SingularQueryExpr{}}, - }, - { - name: "nodes_query_1", - expr: []spec.FuncExprArg{ - spec.Query(true, spec.Child(spec.Name("x"))), - spec.Literal("hi"), - }, - }, - { - name: "nodes_query_2", - expr: []spec.FuncExprArg{ - spec.Literal("hi"), - spec.Query(true, spec.Child(spec.Name("x"))), - }, - }, - { - name: "func_expr_1", - expr: []spec.FuncExprArg{ - spec.Function( - reg.Get("match"), - spec.Query(true, spec.Child(spec.Name("x"))), - spec.Literal("hi"), - ), - spec.Literal("hi"), - }, - err: "cannot convert argument 1 to Value", - }, - { - name: "func_expr_2", - expr: []spec.FuncExprArg{ - spec.Literal("hi"), - spec.Function( - reg.Get("match"), - spec.Query(true, spec.Child(spec.Name("x"))), - spec.Literal("hi"), - ), - }, - err: "cannot convert argument 2 to Value", - }, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - // Test match args - err := checkMatchArgs(tc.expr) - if tc.err == "" { - r.NoError(err) - } else { - r.EqualError(err, strings.Replace(tc.err, "%v", "match", 1)) - } - - // Test search args - err = checkSearchArgs(tc.expr) - if tc.err == "" { - r.NoError(err) - } else { - r.EqualError(err, strings.Replace(tc.err, "%v", "search", 1)) - } - }) - } -} - -func TestCountFunc(t *testing.T) { - t.Parallel() - a := assert.New(t) - - for _, tc := range []struct { - name string - vals []spec.PathValue - exp int - err string - }{ - {"empty", []spec.PathValue{spec.Nodes()}, 0, ""}, - {"one", []spec.PathValue{spec.Nodes(1)}, 1, ""}, - 
{"three", []spec.PathValue{spec.Nodes(1, true, nil)}, 3, ""}, - {"not_nodes", []spec.PathValue{spec.LogicalTrue}, 0, "cannot convert LogicalType to NodesType"}, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - if tc.err != "" { - a.PanicsWithValue(tc.err, func() { countFunc(tc.vals) }) - return - } - res := countFunc(tc.vals) - if tc.exp < 0 { - a.Nil(res) - } else { - a.Equal(spec.Value(tc.exp), res) - } - }) - } -} - -func TestValueFunc(t *testing.T) { - t.Parallel() - a := assert.New(t) - - for _, tc := range []struct { - name string - vals []spec.PathValue - exp spec.PathValue - err string - }{ - {"empty", []spec.PathValue{spec.Nodes()}, nil, ""}, - {"one_int", []spec.PathValue{spec.Nodes(1)}, spec.Value(1), ""}, - {"one_null", []spec.PathValue{spec.Nodes(nil)}, spec.Value(nil), ""}, - {"one_string", []spec.PathValue{spec.Nodes("x")}, spec.Value("x"), ""}, - {"three", []spec.PathValue{spec.Nodes(1, true, nil)}, nil, ""}, - {"not_nodes", []spec.PathValue{spec.LogicalFalse}, nil, "cannot convert LogicalType to NodesType"}, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - if tc.err != "" { - a.PanicsWithValue(tc.err, func() { valueFunc(tc.vals) }) - return - } - a.Equal(tc.exp, valueFunc(tc.vals)) - }) - } -} - -func TestRegexFuncs(t *testing.T) { - t.Parallel() - a := assert.New(t) - - for _, tc := range []struct { - name string - input *spec.ValueType - regex *spec.ValueType - match bool - search bool - }{ - { - name: "dot", - input: spec.Value("x"), - regex: spec.Value("."), - match: true, - search: true, - }, - { - name: "two_chars", - input: spec.Value("xx"), - regex: spec.Value("."), - match: false, - search: true, - }, - { - name: "multi_line_newline", - input: spec.Value("xx\nyz"), - regex: spec.Value(".*"), - match: false, - search: true, - }, - { - name: "multi_line_crlf", - input: spec.Value("xx\r\nyz"), - regex: spec.Value(".*"), - match: false, - search: true, - }, - { - name: "not_string_input", - input: spec.Value(1), - regex: spec.Value("."), - match: false, - search: false, - }, - { - name: "not_string_regex", - input: spec.Value("x"), - regex: spec.Value(1), - match: false, - search: false, - }, - { - name: "invalid_regex", - input: spec.Value("x"), - regex: spec.Value(".["), - match: false, - search: false, - }, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - a.Equal(spec.Logical(tc.match), matchFunc([]spec.PathValue{tc.input, tc.regex})) - a.Equal(spec.Logical(tc.search), searchFunc([]spec.PathValue{tc.input, tc.regex})) - }) - } -} - -func TestExecRegexFuncs(t *testing.T) { - t.Parallel() - a := assert.New(t) - - for _, tc := range []struct { - name string - vals []spec.PathValue - match bool - search bool - err string - }{ - { - name: "dot", - vals: []spec.PathValue{spec.Value("x"), spec.Value("x")}, - match: true, - search: true, - }, - { - name: "first_not_value", - vals: []spec.PathValue{spec.Nodes(), spec.Value("x")}, - err: "cannot convert NodesType to ValueType", - }, - { - name: "second_not_value", - vals: []spec.PathValue{spec.Value("x"), spec.LogicalFalse}, - err: "cannot convert LogicalType to ValueType", - }, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - if tc.err == "" { - a.Equal(matchFunc(tc.vals), spec.Logical(tc.match)) - a.Equal(searchFunc(tc.vals), spec.Logical(tc.search)) - } else { - a.PanicsWithValue(tc.err, func() { matchFunc(tc.vals) }) - a.PanicsWithValue(tc.err, func() { searchFunc(tc.vals) }) - } - }) - } -} diff --git a/registry/registry.go b/registry/registry.go deleted file mode 100644 
index c38fe38..0000000 --- a/registry/registry.go +++ /dev/null @@ -1,97 +0,0 @@ -// Package registry provides a RFC 9535 JSONPath function extension registry. -package registry - -import ( - "errors" - "fmt" - "sync" - - "github.com/theory/jsonpath/spec" -) - -// Registry maintains a registry of JSONPath function extensions, including -// both [RFC 9535]-required functions and custom functions. -// -// [RFC 9535]: https://www.rfc-editor.org/rfc/rfc9535.html -type Registry struct { - mu sync.RWMutex - funcs map[string]*spec.FuncExtension -} - -// New returns a new [Registry] loaded with the [RFC 9535]-mandated function -// extensions: -// -// - [length] -// - [count] -// - [value] -// - [match] -// - [search] -// -// [RFC 9535]: https://www.rfc-editor.org/rfc/rfc9535.html -// [length]: https://www.rfc-editor.org/rfc/rfc9535.html#name-length-function-extension -// [count]: https://www.rfc-editor.org/rfc/rfc9535.html#name-count-function-extension -// [value]: https://www.rfc-editor.org/rfc/rfc9535.html#name-value-function-extension -// [match]: https://www.rfc-editor.org/rfc/rfc9535.html#name-match-function-extension -// [search]: https://www.rfc-editor.org/rfc/rfc9535.html#name-search-function-extension -func New() *Registry { - return &Registry{ - mu: sync.RWMutex{}, - funcs: map[string]*spec.FuncExtension{ - "length": spec.Extension("length", spec.FuncValue, checkLengthArgs, lengthFunc), - "count": spec.Extension("count", spec.FuncValue, checkCountArgs, countFunc), - "value": spec.Extension("value", spec.FuncValue, checkValueArgs, valueFunc), - "match": spec.Extension("match", spec.FuncLogical, checkMatchArgs, matchFunc), - "search": spec.Extension("search", spec.FuncLogical, checkSearchArgs, searchFunc), - }, - } -} - -// ErrRegister errors are returned by [Register]. -var ErrRegister = errors.New("register") - -// Register registers a function extension. The parameters are: -// -// - name: the name of the function extension as used in JSONPath queries. -// - returnType: The data type of the function return value. -// - validator: A validation function that will be called at parse time -// to validate that all the function args are compatible with the function. -// - evaluator: The implementation of the function itself that executes -// against args and returns the result of the type defined by resultType. -// -// Returns [ErrRegister] if validator or evaluator is nil or if r already -// contains name. -func (r *Registry) Register( - name string, - resultType spec.FuncType, - validator spec.Validator, - evaluator spec.Evaluator, -) error { - if validator == nil { - return fmt.Errorf("%w: validator is nil", ErrRegister) - } - if evaluator == nil { - return fmt.Errorf("%w: evaluator is nil", ErrRegister) - } - - r.mu.Lock() - defer r.mu.Unlock() - if _, dup := r.funcs[name]; dup { - return fmt.Errorf( - "%w: Register called twice for function %v", - ErrRegister, name, - ) - } - - r.funcs[name] = spec.Extension(name, resultType, validator, evaluator) - return nil -} - -// Get returns a reference to the registered function extension named name. -// Returns nil if no function with that name has been registered. Used by the -// parser to match a function name to its implementation. 
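For reference, a minimal sketch of Register's failure modes using only the API declared in this file: a duplicate name, like a nil validator or evaluator, yields an error that wraps ErrRegister. The function name "dup" and the no-op validator and evaluator are invented for illustration.

package main

import (
	"errors"
	"fmt"
	"log"

	"github.com/theory/jsonpath/registry"
	"github.com/theory/jsonpath/spec"
)

func main() {
	reg := registry.New()
	validate := func([]spec.FuncExprArg) error { return nil }
	evaluate := func([]spec.PathValue) spec.PathValue { return spec.Value(42) }

	// First registration succeeds.
	if err := reg.Register("dup", spec.FuncValue, validate, evaluate); err != nil {
		log.Fatal(err)
	}

	// A second registration under the same name is rejected.
	err := reg.Register("dup", spec.FuncValue, validate, evaluate)
	fmt.Println(errors.Is(err, registry.ErrRegister)) // true
	fmt.Println(reg.Get("dup") != nil)                // true
	fmt.Println(reg.Get("nonesuch") == nil)           // true
}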
-func (r *Registry) Get(name string) *spec.FuncExtension { - r.mu.RLock() - defer r.mu.RUnlock() - function := r.funcs[name] - return function -} diff --git a/registry/registry_example_test.go b/registry/registry_example_test.go deleted file mode 100644 index 4b9a869..0000000 --- a/registry/registry_example_test.go +++ /dev/null @@ -1,54 +0,0 @@ -package registry_test - -import ( - "errors" - "fmt" - "log" - - "github.com/theory/jsonpath/registry" - "github.com/theory/jsonpath/spec" -) - -// Create and register a JSONPath extension function, first(), that returns -// the first node in a list of nodes passed to it. See -// [github.com/theory/jsonpath.WithRegistry] for a more complete example. -func Example() { - reg := registry.New() - err := reg.Register( - "first", // function name - spec.FuncValue, // returns a single value - validateFirstArgs, // parse-time validation defined below - firstFunc, // function defined below - ) - if err != nil { - log.Fatalf("Error %v", err) - } - fmt.Printf("%v\n", reg.Get("first").ReturnType()) - // Output: Value -} - -// validateFirstArgs validates that a single argument is passed to the first() -// extension function, and that it can be converted to [spec.NodesType], so -// that first() can return the first node. It's called by the parser. -func validateFirstArgs(args []spec.FuncExprArg) error { - if len(args) != 1 { - return fmt.Errorf("expected 1 argument but found %v", len(args)) - } - - if !args[0].ConvertsTo(spec.FuncNodes) { - return errors.New("cannot convert argument to Nodes") - } - - return nil -} - -// firstFunc defines the first() JSONPath extension function. It converts its -// single argument to a [spec.NodesType] value and returns a [spec.ValueType] -// that contains the first node. If there are no nodes it returns nil. -func firstFunc(jv []spec.PathValue) spec.PathValue { - nodes := spec.NodesFrom(jv[0]) - if len(nodes) == 0 { - return nil - } - return spec.Value(nodes[0]) -} diff --git a/registry/registry_test.go b/registry/registry_test.go deleted file mode 100644 index a053a30..0000000 --- a/registry/registry_test.go +++ /dev/null @@ -1,110 +0,0 @@ -package registry - -import ( - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - "github.com/theory/jsonpath/spec" -) - -func TestRegistry(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - for _, tc := range []struct { - name string - rType spec.FuncType - expr []spec.FuncExprArg - args []spec.PathValue - exp any - }{ - // RFC 9535-defined functions. 
- { - name: "length", - rType: spec.FuncValue, - expr: []spec.FuncExprArg{spec.Literal("foo")}, - args: []spec.PathValue{spec.Value("foo")}, - exp: spec.Value(3), - }, - { - name: "count", - rType: spec.FuncValue, - expr: []spec.FuncExprArg{&spec.SingularQueryExpr{}}, - args: []spec.PathValue{spec.Nodes(1, 2)}, - exp: spec.Value(2), - }, - { - name: "value", - rType: spec.FuncValue, - expr: []spec.FuncExprArg{&spec.SingularQueryExpr{}}, - args: []spec.PathValue{spec.Nodes(42)}, - exp: spec.Value(42), - }, - { - name: "match", - rType: spec.FuncLogical, - expr: []spec.FuncExprArg{spec.Literal("foo"), spec.Literal(".*")}, - args: []spec.PathValue{spec.Value("foo"), spec.Value(".*")}, - exp: spec.LogicalTrue, - }, - { - name: "search", - rType: spec.FuncLogical, - expr: []spec.FuncExprArg{spec.Literal("foo"), spec.Literal(".")}, - args: []spec.PathValue{spec.Value("foo"), spec.Value(".")}, - exp: spec.LogicalTrue, - }, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - reg := New() - a.Len(reg.funcs, 5) - - ft := reg.Get(tc.name) - a.NotNil(ft) - a.Equal(tc.rType, ft.ReturnType()) - r.NoError(ft.Validate(tc.expr)) - a.Equal(tc.exp, ft.Evaluate(tc.args)) - }) - } -} - -func TestRegisterErr(t *testing.T) { - t.Parallel() - r := require.New(t) - reg := New() - - for _, tc := range []struct { - name string - fnName string - valid spec.Validator - eval spec.Evaluator - err string - }{ - { - name: "existing_func", - fnName: "length", - valid: func([]spec.FuncExprArg) error { return nil }, - eval: func([]spec.PathValue) spec.PathValue { return spec.Value(42) }, - err: "register: Register called twice for function length", - }, - { - name: "nil_validator", - err: "register: validator is nil", - }, - { - name: "nil_evaluator", - valid: func([]spec.FuncExprArg) error { return nil }, - err: "register: evaluator is nil", - }, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - err := reg.Register(tc.fnName, spec.FuncValue, tc.valid, tc.eval) - r.ErrorIs(err, ErrRegister, tc.name) - r.EqualError(err, tc.err, tc.name) - }) - } -} diff --git a/spec/filter.go b/spec/filter.go deleted file mode 100644 index 334e6b3..0000000 --- a/spec/filter.go +++ /dev/null @@ -1,250 +0,0 @@ -package spec - -import ( - "strings" -) - -// BasicExpr defines the basic interface for filter expressions. -// Implementations: -// -// - [CompExpr] -// - [ExistExpr] -// - [FuncExpr] -// - [LogicalAnd] -// - [LogicalOr] -// - [NonExistExpr] -// - [NotFuncExpr] -// - [NotParenExpr] -// - [ParenExpr] -// - [ValueType] -type BasicExpr interface { - stringWriter - // testFilter executes the filter expression on current and root and - // returns true or false depending on the truthiness of its result. - testFilter(current, root any) bool -} - -// LogicalAnd represents a list of one or more expressions ANDed together by -// the && operator. Evaluates to true if all of its expressions evaluate to -// true. Short-circuits and returns false for the first expression that -// returns false. Interfaces implemented: -// -// - [BasicExpr] -// - [fmt.Stringer] -type LogicalAnd []BasicExpr - -// And creates a LogicalAnd of all expr. -func And(expr ...BasicExpr) LogicalAnd { - return LogicalAnd(expr) -} - -// String returns the string representation of la. -func (la LogicalAnd) String() string { - var buf strings.Builder - la.writeTo(&buf) - return buf.String() -} - -// testFilter returns true if all of la's expressions return true. -// Short-circuits and returns false for the first expression that returns -// false. 
Defined by [BasicExpr]. -func (la LogicalAnd) testFilter(current, root any) bool { - for _, e := range la { - if !e.testFilter(current, root) { - return false - } - } - return true -} - -// writeTo writes the string representation of la to buf. Defined by -// [stringWriter]. -func (la LogicalAnd) writeTo(buf *strings.Builder) { - for i, e := range la { - e.writeTo(buf) - if i < len(la)-1 { - buf.WriteString(" && ") - } - } -} - -// LogicalOr represents a list of one or more expressions ORed together by the -// || operator. Evaluates to true if any of its expressions evaluates to true. -// Short-circuits and returns true for the first expression that returns true. -// -// Interfaces implemented: -// - [BasicExpr] -// - [FuncExprArg] -// - [fmt.Stringer] -type LogicalOr []LogicalAnd - -// Or returns a LogicalOr of all expr. -func Or(expr ...LogicalAnd) LogicalOr { - return LogicalOr(expr) -} - -// String returns the string representation of lo. -func (lo LogicalOr) String() string { - var buf strings.Builder - lo.writeTo(&buf) - return buf.String() -} - -// testFilter returns true if one of lo's expressions return true. -// Short-circuits and returns true for the first expression that returns true. -// Defined by [BasicExpr]. -func (lo LogicalOr) testFilter(current, root any) bool { - for _, e := range lo { - if e.testFilter(current, root) { - return true - } - } - return false -} - -// writeTo writes the string representation of lo to buf. Defined by -// [stringWriter]. -func (lo LogicalOr) writeTo(buf *strings.Builder) { - for i, e := range lo { - e.writeTo(buf) - if i < len(lo)-1 { - buf.WriteString(" || ") - } - } -} - -// evaluate evaluates lo and returns LogicalTrue when it returns true and -// LogicalFalse when it returns false. Defined by the [FuncExprArg] -// interface. -func (lo LogicalOr) evaluate(current, root any) PathValue { - return Logical(lo.testFilter(current, root)) -} - -// ResultType returns [FuncLogical]. Defined by the [FuncExprArg] interface. -func (lo LogicalOr) ResultType() FuncType { - return FuncLogical -} - -// ConvertsTo returns true if the result of the [LogicalOr] can be converted -// to ft. -func (LogicalOr) ConvertsTo(ft FuncType) bool { return ft == FuncLogical } - -// ParenExpr represents a parenthesized expression that groups the elements of -// a [LogicalOr]. Interfaces implemented (via the underlying [LogicalOr]): -// - [BasicExpr] -// - [FuncExprArg] -// - [fmt.Stringer] -type ParenExpr struct { - LogicalOr -} - -// Paren returns a new ParenExpr that ORs the results of each expr. -func Paren(expr ...LogicalAnd) *ParenExpr { - return &ParenExpr{LogicalOr: LogicalOr(expr)} -} - -// writeTo writes a string representation of p to buf. Defined by -// [stringWriter]. -func (p *ParenExpr) writeTo(buf *strings.Builder) { - buf.WriteRune('(') - p.LogicalOr.writeTo(buf) - buf.WriteRune(')') -} - -// String returns the string representation of p. -func (p *ParenExpr) String() string { - var buf strings.Builder - p.writeTo(&buf) - return buf.String() -} - -// NotParenExpr represents a negated parenthesized expression that groups the -// elements of a [LogicalOr]. Interfaces implemented (via the underlying -// [LogicalOr]): -// - [BasicExpr] -// - [FuncExprArg] -// - [fmt.Stringer] -type NotParenExpr struct { - LogicalOr -} - -// NotParen returns a new NotParenExpr that ORs each expr. -func NotParen(expr ...LogicalAnd) *NotParenExpr { - return &NotParenExpr{LogicalOr: LogicalOr(expr)} -} - -// writeTo writes a string representation of p to buf. 
Defined by -// [stringWriter]. -func (np *NotParenExpr) writeTo(buf *strings.Builder) { - buf.WriteString("!(") - np.LogicalOr.writeTo(buf) - buf.WriteRune(')') -} - -// String returns the string representation of np. -func (np *NotParenExpr) String() string { - var buf strings.Builder - np.writeTo(&buf) - return buf.String() -} - -// testFilter returns false if the np.LogicalOrExpression returns true and -// true if it returns false. Defined by [BasicExpr]. -func (np *NotParenExpr) testFilter(current, root any) bool { - return !np.LogicalOr.testFilter(current, root) -} - -// ExistExpr represents a [PathQuery] used as a filter expression, in which -// context it returns true if the [PathQuery] selects at least one node. -// Interfaces implemented: -// - [BasicExpr] -// - [Selector] (via the underlying [PathQuery]) -// - [fmt.Stringer] (via the underlying [PathQuery]) -type ExistExpr struct { - *PathQuery -} - -// Existence creates a new [ExistExpr] for q. -func Existence(q *PathQuery) *ExistExpr { - return &ExistExpr{PathQuery: q} -} - -// testFilter returns true if e.Query selects any results from current or -// root. Defined by [BasicExpr]. -func (e *ExistExpr) testFilter(current, root any) bool { - return len(e.Select(current, root)) > 0 -} - -// writeTo writes a string representation of e to buf. Defined by -// [stringWriter]. -func (e *ExistExpr) writeTo(buf *strings.Builder) { - buf.WriteString(e.String()) -} - -// NonExistExpr represents a negated [PathQuery] used as a filter expression, -// in which context it returns true if the [PathQuery] selects no nodes. -// Interfaces implemented: -// - [BasicExpr] -// - [Selector] (via the underlying [PathQuery]) -// - [fmt.Stringer] (via the underlying [PathQuery]) -type NonExistExpr struct { - *PathQuery -} - -// Nonexistence creates a new [NonExistExpr] for q. -func Nonexistence(q *PathQuery) *NonExistExpr { - return &NonExistExpr{PathQuery: q} -} - -// writeTo writes a string representation of ne to buf. Defined by -// [stringWriter]. -func (ne NonExistExpr) writeTo(buf *strings.Builder) { - buf.WriteRune('!') - buf.WriteString(ne.String()) -} - -// testFilter returns true if ne.Query selects no results from current or -// root. Defined by [BasicExpr]. 
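An illustrative aside: ExistExpr and NonExistExpr are what back the `?@.x` and `?!@.x` forms of a filter. Since testFilter is unexported, this sketch exercises the behavior through the public Parse API instead; the book data is invented.

package main

import (
	"fmt"

	"github.com/theory/jsonpath"
)

func main() {
	books := []any{
		map[string]any{"title": "Moby Dick", "isbn": "0-553-21311-3"},
		map[string]any{"title": "Sayings of the Century"},
	}

	// Existence: members that have an isbn. Non-existence: members that do not.
	has := jsonpath.MustParse(`$[?@.isbn].title`)
	hasNot := jsonpath.MustParse(`$[?!@.isbn].title`)

	fmt.Println(has.Select(books))    // [Moby Dick]
	fmt.Println(hasNot.Select(books)) // [Sayings of the Century]
}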
-func (ne NonExistExpr) testFilter(current, root any) bool { - return len(ne.Select(current, root)) == 0 -} diff --git a/spec/filter_test.go b/spec/filter_test.go deleted file mode 100644 index 965b7e7..0000000 --- a/spec/filter_test.go +++ /dev/null @@ -1,255 +0,0 @@ -package spec - -import ( - "strings" - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestExpressionInterface(t *testing.T) { - t.Parallel() - a := assert.New(t) - - for _, tc := range []struct { - name string - tok any - }{ - {"paren", Paren(nil)}, - {"not_paren", NotParen(nil)}, - {"comparison", Comparison(nil, EqualTo, nil)}, - {"exist", Existence(nil)}, - {"not_exist", Nonexistence(nil)}, - {"func_expr", &FuncExpr{}}, - {"not_func_expr", &NotFuncExpr{}}, - {"logical_and", LogicalOr{}}, - {"logical_or", LogicalAnd{}}, - {"value", Value(nil)}, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - a.Implements((*BasicExpr)(nil), tc.tok) - }) - } -} - -func TestLogicalAndExpr(t *testing.T) { - t.Parallel() - a := assert.New(t) - - for _, tc := range []struct { - name string - expr []BasicExpr - root any - current any - exp bool - str string - }{ - { - name: "no_expr", - expr: []BasicExpr{}, - current: map[string]any{"x": 0}, - exp: true, - str: "", - }, - { - name: "one_true_expr", - expr: []BasicExpr{ - Existence(Query(false, Child(Name("x")))), - }, - current: map[string]any{"x": 0}, - exp: true, - str: `@["x"]`, - }, - { - name: "one_false_expr", - expr: []BasicExpr{ - Existence(Query(true, Child(Name("y")))), - }, - root: map[string]any{"x": 0}, - exp: false, - str: `$["y"]`, - }, - { - name: "two_true_expr", - expr: []BasicExpr{ - Existence(Query(false, Child(Name("x")))), - Existence(Query(false, Child(Name("y")))), - }, - current: map[string]any{"x": 0, "y": 1}, - exp: true, - str: `@["x"] && @["y"]`, - }, - { - name: "one_true_one_false", - expr: []BasicExpr{ - Existence(Query(false, Child(Name("x")))), - Existence(Query(false, Child(Name("y")))), - }, - current: map[string]any{"x": 0, "z": 1}, - exp: false, - str: `@["x"] && @["y"]`, - }, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - andExpr := LogicalAnd(tc.expr) - a.Equal(tc.exp, andExpr.testFilter(tc.current, tc.root)) - a.Equal(tc.str, bufString(andExpr)) - }) - } -} - -func TestLogicalOrExpr(t *testing.T) { - t.Parallel() - a := assert.New(t) - - for _, tc := range []struct { - name string - expr []LogicalAnd - root any - current any - exp bool - str string - }{ - { - name: "no_expr", - expr: []LogicalAnd{{}}, - current: map[string]any{"x": 0}, - exp: true, - str: "", - }, - { - name: "one_expr", - expr: []LogicalAnd{{Existence( - Query(true, Child(Name("x"))), - )}}, - root: map[string]any{"x": 0}, - exp: true, - str: `$["x"]`, - }, - { - name: "one_false_expr", - expr: []LogicalAnd{{Existence( - Query(false, Child(Name("x"))), - )}}, - current: map[string]any{"y": 0}, - exp: false, - str: `@["x"]`, - }, - { - name: "two_true_expr", - expr: []LogicalAnd{ - {Existence(Query(false, Child(Name("x"))))}, - {Existence(Query(false, Child(Name("y"))))}, - }, - current: map[string]any{"x": 0, "y": "hi"}, - exp: true, - str: `@["x"] || @["y"]`, - }, - { - name: "one_true_one_false", - expr: []LogicalAnd{ - {Existence(Query(false, Child(Name("x"))))}, - {Existence(Query(false, Child(Name("y"))))}, - }, - current: map[string]any{"x": 0, "z": "hi"}, - exp: true, - str: `@["x"] || @["y"]`, - }, - { - name: "nested_ands", - expr: []LogicalAnd{ - { - Existence(Query(false, Child(Name("x")))), - Existence(Query(false, Child(Name("y")))), - 
}, - { - Existence(Query(false, Child(Name("y")))), - Existence(Query(false, Child(Name("x")))), - }, - }, - current: map[string]any{"x": 0, "y": "hi"}, - exp: true, - str: `@["x"] && @["y"] || @["y"] && @["x"]`, - }, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - orExpr := LogicalOr(tc.expr) - a.Equal(FuncLogical, orExpr.ResultType()) - a.Equal(tc.exp, orExpr.testFilter(tc.current, tc.root)) - a.Equal(Logical(tc.exp), orExpr.evaluate(tc.current, tc.root)) - a.Equal(tc.str, bufString(orExpr)) - a.True(orExpr.ConvertsTo(FuncLogical)) - a.False(orExpr.ConvertsTo(FuncValue)) - a.False(orExpr.ConvertsTo(FuncNodes)) - - // Test ParenExpr. - pExpr := Paren(orExpr...) - a.Equal(tc.exp, pExpr.testFilter(tc.current, tc.root)) - a.Equal("("+tc.str+")", bufString(pExpr)) - - // Test NotParenExpr. - npExpr := NotParen(orExpr...) - a.Equal(!tc.exp, npExpr.testFilter(tc.current, tc.root)) - a.Equal("!("+tc.str+")", bufString(npExpr)) - }) - } -} - -func TestExistExpr(t *testing.T) { - t.Parallel() - a := assert.New(t) - - for _, tc := range []struct { - name string - query *PathQuery - root any - current any - exp bool - }{ - { - name: "current_name", - query: Query(false, Child(Name("x"))), - current: map[string]any{"x": 0}, - exp: true, - }, - { - name: "root_name", - query: Query(true, Child(Name("x"))), - root: map[string]any{"x": 0}, - exp: true, - }, - { - name: "current_false", - query: Query(false, Child(Name("x"))), - current: map[string]any{"y": 0}, - exp: false, - }, - { - name: "root_false", - query: Query(true, Child(Name("x"))), - root: map[string]any{"y": 0}, - exp: false, - }, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - - // Test existExpr. - exist := ExistExpr{tc.query} - a.Equal(tc.exp, exist.testFilter(tc.current, tc.root)) - buf := new(strings.Builder) - exist.writeTo(buf) - a.Equal(tc.query.String(), buf.String()) - - // Test NonExistExpr. - ne := NonExistExpr{tc.query} - a.Equal(!tc.exp, ne.testFilter(tc.current, tc.root)) - buf.Reset() - ne.writeTo(buf) - a.Equal("!"+tc.query.String(), buf.String()) - }) - } -} diff --git a/spec/function.go b/spec/function.go deleted file mode 100644 index 025bf96..0000000 --- a/spec/function.go +++ /dev/null @@ -1,615 +0,0 @@ -package spec - -//go:generate stringer -output function_string.go -linecomment -trimprefix Func -type FuncType,LogicalType - -import ( - "encoding/json" - "fmt" - "strings" -) - -// FuncType describes the types of function parameters and results as defined -// by [RFC 9535 Section 2.4.1]. -// -// Implements [fmt.Stringer]. -// -// [RFC 9535 Section 2.4.1]: https://www.rfc-editor.org/rfc/rfc9535.html#section-2.4.1 -type FuncType uint8 - -const ( - // FuncValue represents a JSON value as provided by [ValueType]. - FuncValue FuncType = iota + 1 - - // FuncNodes represents a list of nodes as provided by [NodesType]. - FuncNodes - - // FuncLogical represents a logical value as provided by [LogicalType]. - FuncLogical -) - -// PathValue defines the interface for JSONPath values used as comparison -// operands, filter expression results, and function parameters & return -// values. -// -// Implemented by the function types defined by [RFC 9535 Section 2.4.1]: -// - [ValueType] -// - [LogicalType] -// - [NodesType] -// -// [RFC 9535 Section 2.4.1]: https://www.rfc-editor.org/rfc/rfc9535.html#section-2.4.1 -type PathValue interface { - stringWriter - // FuncType returns the PathValue's [FuncType]. 
- FuncType() FuncType -} - -// NodesType defines a node list (a list of JSON values) for a function -// expression parameters or results, as defined by [RFC 9535 Section 2.4.1]. -// It can also be used in filter expressions. The underlying types should be -// string, integer, float, [json.Number], nil, true, false, []any, or -// map[string]any. Interfaces implemented: -// -// - [PathValue] -// - [fmt.Stringer] -// -// [RFC 9535 Section 2.4.1]: https://www.rfc-editor.org/rfc/rfc9535.html#section-2.4.1 -type NodesType []any - -// Nodes creates a NodesType that contains val, all of which should be the Go -// equivalent of the JSON data types: string, integer, float, [json.Number], -// nil, true, false, []any, or map[string]any. -func Nodes(val ...any) NodesType { - return NodesType(val) -} - -// FuncType returns [FuncNodes]. Defined by the [PathValue] interface. -func (NodesType) FuncType() FuncType { return FuncNodes } - -// NodesFrom converts value to a [NodesType] and panics if it cannot. Use in -// [github.com/theory/jsonpath/registry.Registry.Register] [Evaluator]. Avoid -// the panic by returning an error from the accompanying [Validator] function -// when [FuncExprArg.ConvertsToNodes] returns false for the [FuncExprArg] that -// returns value. -// -// Converts each implementation of [PathValue] as follows: -// - [NodesType]: returns value -// - [ValueType]: Returns a [NodesType] containing that single value -// - [LogicalType]: Panics -// - nil: Returns an empty [NodesType] -func NodesFrom(value PathValue) NodesType { - switch v := value.(type) { - case NodesType: - return v - case *ValueType: - return NodesType([]any{v.any}) - case nil: - return NodesType([]any{}) - case LogicalType: - panic("cannot convert LogicalType to NodesType") - default: - panic(fmt.Sprintf("unexpected argument of type %T", v)) - } -} - -// writeTo writes the string representation of nt to buf. Defined by -// [stringWriter]. -func (nt NodesType) writeTo(buf *strings.Builder) { - buf.WriteString(nt.String()) -} - -// String returns the string representation of nt. -func (nt NodesType) String() string { - return fmt.Sprintf("%v", []any(nt)) -} - -// LogicalType encapsulates a true or false value for a function expression -// parameters or results, as defined by [RFC 9535 Section 2.4.1]. Interfaces -// implemented: -// -// - [PathValue] -// - [fmt.Stringer] -// -// [RFC 9535 Section 2.4.1]: https://www.rfc-editor.org/rfc/rfc9535.html#section-2.4.1 -type LogicalType uint8 - -const ( - // LogicalFalse represents a true [LogicalType]. - LogicalFalse LogicalType = iota // false - - // LogicalTrue represents a true [LogicalType]. - LogicalTrue // true -) - -// Logical returns the LogicalType equivalent to boolean. -func Logical(boolean bool) LogicalType { - if boolean { - return LogicalTrue - } - return LogicalFalse -} - -// Bool returns the boolean equivalent to lt. -func (lt LogicalType) Bool() bool { return lt == LogicalTrue } - -// FuncType returns [FuncLogical]. Defined by the [PathValue] interface. -func (LogicalType) FuncType() FuncType { return FuncLogical } - -// LogicalFrom converts value to a [LogicalType] and panics if it cannot. Use -// in [github.com/theory/jsonpath/registry.Registry.Register] [Evaluator] -// functions. Avoid the panic by returning an error from the accompanying -// [Validator] function when [FuncExprArg.ConvertsToLogical] returns false for -// the [FuncExprArg] that returns value. 
-// -// Converts each implementation of [PathValue] as follows: -// - [LogicalType]: returns value -// - [NodesType]: Returns [LogicalFalse] if value is empty and [LogicalTrue] -// if it is not -// - [ValueType]: Panics -// - nil: Returns [LogicalFalse] -func LogicalFrom(value any) LogicalType { - switch v := value.(type) { - case LogicalType: - return v - case NodesType: - return Logical(len(v) > 0) - case nil: - return LogicalFalse - case *ValueType: - panic("cannot convert ValueType to LogicalType") - default: - panic(fmt.Sprintf("unexpected argument of type %T", v)) - } -} - -// writeTo writes a string representation of lt to buf. Defined by -// [stringWriter]. -func (lt LogicalType) writeTo(buf *strings.Builder) { - buf.WriteString(lt.String()) -} - -// ValueType encapsulates a JSON value for a function expression parameter or -// result, as defined by [RFC 9535 Section 2.4.1]. It can also be used as in -// filter expression. The underlying value should be a string, integer, -// [json.Number], float, nil, true, false, []any, or map[string]any. A nil -// ValueType pointer indicates no value. Interfaces implemented: -// -// - [PathValue] -// - [BasicExpr] -// - [fmt.Stringer] -// -// [RFC 9535 Section 2.4.1]: https://www.rfc-editor.org/rfc/rfc9535.html#section-2.4.1 -type ValueType struct { - any -} - -// Value returns a new [ValueType] for val, which must be the Go equivalent of -// a JSON data type: string, integer, float, [json.Number], nil, true, false, -// []any, or map[string]any. -func Value(val any) *ValueType { - return &ValueType{val} -} - -// Value returns the underlying value of vt. -func (vt *ValueType) Value() any { return vt.any } - -// String returns the string representation of vt. -func (vt *ValueType) String() string { return fmt.Sprintf("%v", vt.any) } - -// FuncType returns [FuncValue]. Defined by the [PathValue] interface. -func (*ValueType) FuncType() FuncType { return FuncValue } - -// ValueFrom converts value to a [ValueType] and panics if it cannot. Use in -// [github.com/theory/jsonpath/registry.Registry.Register] [Evaluator] -// functions. Avoid the panic by returning an error from the accompanying -// [Validator] function when [FuncExprArg.ConvertsToValue] returns false for -// the [FuncExprArg] that returns value. -// -// Converts each implementation of [PathValue] as follows: -// - [ValueType]: returns value -// - [NodesType]: Panics -// - [ValueType]: Panics -// - nil: Returns nil -func ValueFrom(value PathValue) *ValueType { - switch v := value.(type) { - case *ValueType: - return v - case nil: - return nil - case LogicalType: - panic("cannot convert LogicalType to ValueType") - case NodesType: - panic("cannot convert NodesType to ValueType") - } - panic(fmt.Sprintf("unexpected argument of type %T", value)) -} - -// Returns true if vt.any is truthy. Defined by the BasicExpr interface. -// Defined by [BasicExpr]. 
-func (vt *ValueType) testFilter(_, _ any) bool { - switch v := vt.any.(type) { - case nil: - return false - case bool: - return v - case int: - return v != 0 - case int8: - return v != int8(0) - case int16: - return v != int16(0) - case int32: - return v != int32(0) - case int64: - return v != int64(0) - case uint: - return v != 0 - case uint8: - return v != uint8(0) - case uint16: - return v != uint16(0) - case uint32: - return v != uint32(0) - case uint64: - return v != uint64(0) - case float32: - return v != float32(0) - case float64: - return v != float64(0) - case json.Number: - if f, err := v.Float64(); err == nil { - return f != float64(0) - } - return true - default: - return true - } -} - -// writeTo writes a string representation of vt to buf. Defined by -// [stringWriter]. -func (vt *ValueType) writeTo(buf *strings.Builder) { - buf.WriteString(vt.String()) -} - -// FuncExprArg defines the interface for function argument expressions. -// Implementations: -// -// - [LogicalOr] -// - [LiteralArg] -// - [SingularQueryExpr] -// - [PathQuery] -// - [FuncExpr] -type FuncExprArg interface { - stringWriter - // evaluate evaluates the function expression against current and root and - // returns the resulting PathValue. - evaluate(current, root any) PathValue - // ResultType returns the [FuncType] that defines the type of the return - // value of the [FuncExprArg]. - ResultType() FuncType - - // ConvertsTo returns true if the function expression's result can be - // converted to ft. - ConvertsTo(ft FuncType) bool -} - -// LiteralArg represents a literal JSON value, excluding objects and arrays. -// Its underlying value there must be one of string, integer, float, -// [json.Number], nil, true, or false. -// -// Interfaces implemented: -// - [FuncExprArg] -// - [CompVal] -// - [fmt.Stringer] -type LiteralArg struct { - // Number, string, bool, or null - literal any -} - -// Literal creates and returns a new [LiteralArg] consisting of lit, which -// must ge one of string, integer, float, [json.Number], nil, true, or false. -func Literal(lit any) *LiteralArg { - return &LiteralArg{lit} -} - -// Value returns the underlying value of la. -func (la *LiteralArg) Value() any { return la.literal } - -// String returns the JSON string representation of la. -func (la *LiteralArg) String() string { - if la.literal == nil { - return "null" - } - return fmt.Sprintf("%#v", la.literal) -} - -// evaluate returns a [ValueType] containing the literal value. Defined by the -// [FuncExprArg] interface. -func (la *LiteralArg) evaluate(_, _ any) PathValue { - return &ValueType{la.literal} -} - -// ResultType returns [FuncValue]. Defined by the [FuncExprArg] interface. -func (la *LiteralArg) ResultType() FuncType { - return FuncValue -} - -// ConvertsTo returns true if the result of the [LiteralArg] can be converted -// to ft. -func (*LiteralArg) ConvertsTo(ft FuncType) bool { return ft == FuncValue } - -// writeTo writes a JSON string representation of la to buf. Defined by -// [stringWriter]. -func (la *LiteralArg) writeTo(buf *strings.Builder) { - if la.literal == nil { - buf.WriteString("null") - } else { - fmt.Fprintf(buf, "%#v", la.literal) - } -} - -// asValue returns la.literal as a [ValueType]. Defined by the [CompVal] -// interface. -func (la *LiteralArg) asValue(_, _ any) PathValue { - return &ValueType{la.literal} -} - -// SingularQueryExpr represents a query that produces a single [ValueType] -// (JSON value) or nothing. 
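As a brief illustration of the LiteralArg rendering described above, a sketch under the same assumed import path:

package main

import (
	"fmt"

	"github.com/theory/jsonpath/spec"
)

func main() {
	fmt.Println(spec.Literal("hi").String()) // "hi"
	fmt.Println(spec.Literal(42).String())   // 42
	fmt.Println(spec.Literal(nil).String())  // null
	fmt.Println(spec.Literal(true).Value())  // true
}
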
Used in contexts that require a singular value, -// such as comparison operations and function arguments. Interfaces -// implemented: -// -// - [CompVal] -// - [FuncExprArg] -// - [fmt.Stringer] -type SingularQueryExpr struct { - // The kind of singular query, relative (from the current node) or - // absolute (from the root node). - relative bool - // The query Name and/or Index selectors. - selectors []Selector -} - -// SingularQuery creates and returns a [SingularQueryExpr] that selects a -// single value at the path defined by selectors. -func SingularQuery(root bool, selectors ...Selector) *SingularQueryExpr { - return &SingularQueryExpr{relative: !root, selectors: selectors} -} - -// evaluate returns a [ValueType] containing the return value of executing sq. -// Defined by the [FuncExprArg] interface. -func (sq *SingularQueryExpr) evaluate(current, root any) PathValue { - target := root - if sq.relative { - target = current - } - - for _, seg := range sq.selectors { - res := seg.Select(target, nil) - if len(res) == 0 { - return nil - } - target = res[0] - } - - return &ValueType{target} -} - -// ResultType returns [FuncValue]. Defined by the [FuncExprArg] interface. -func (sq *SingularQueryExpr) ResultType() FuncType { - return FuncValue -} - -// ConvertsTo returns true if the result of the [SingularQueryExpr] can be -// converted to ft. -func (*SingularQueryExpr) ConvertsTo(ft FuncType) bool { - return ft == FuncValue || ft == FuncNodes -} - -// asValue returns the result of executing sq.execute against current and -// root. Defined by the [CompVal] interface. -func (sq *SingularQueryExpr) asValue(current, root any) PathValue { - return sq.evaluate(current, root) -} - -// writeTo writes a string representation of sq to buf. Defined by -// [stringWriter]. -func (sq *SingularQueryExpr) writeTo(buf *strings.Builder) { - if sq.relative { - buf.WriteRune('@') - } else { - buf.WriteRune('$') - } - - for _, seg := range sq.selectors { - buf.WriteRune('[') - seg.writeTo(buf) - buf.WriteRune(']') - } -} - -// String returns the string representation of sq. -func (sq *SingularQueryExpr) String() string { - var buf strings.Builder - sq.writeTo(&buf) - return buf.String() -} - -// Validator functions validate that the args expressions to a [FuncExtension] -// can be processed by the function. -type Validator func(args []FuncExprArg) error - -// Evaluator functions execute a [FuncExtension] against the values returned -// by args and returns a result. -type Evaluator func(args []PathValue) PathValue - -// FuncExtension defines a JSONPath function extension as defined in [RFC 9535 -// Section 2.4]. Use [github.com/theory/jsonpath/registry.Registry.Register] -// to construct and register a new [FuncExtension]. -// -// [RFC 9535 Section 2.4]: https://www.rfc-editor.org/rfc/rfc9535#name-function-extensions -type FuncExtension struct { - // name is the name of the function. Must be unique among all functions in - // a registry. - name string - - // returnType defines the type of the function return value. - returnType FuncType - - // validator executes at parse time to validate that all the args to - // the function are compatible with the function. - validator Validator - - // evaluator executes the function and returns the result of type - // resultType. - evaluator Evaluator -} - -// Extension creates a new JSONPath function extension. Created by -// [github.com/theory/jsonpath/registry.Registry.Register]. 
The parameters -// are: -// -// - name: the name of the function extension as used in JSONPath queries. -// - returnType: The data type of the function return value. -// - validator: A validation function that will be called at parse time -// to validate that all the function args are compatible with the function. -// - evaluator: The implementation of the function itself that executes -// against args and returns the result of the type defined by resultType. -func Extension(name string, returnType FuncType, validator Validator, evaluator Evaluator) *FuncExtension { - return &FuncExtension{name, returnType, validator, evaluator} -} - -// Name returns the name of the [FuncExtension]. -func (f *FuncExtension) Name() string { return f.name } - -// ReturnType returns the data type of the [FuncExtension] return value. -func (f *FuncExtension) ReturnType() FuncType { return f.returnType } - -// Evaluate executes the [FuncExtension] against args and returns a result of -// type [ResultType]. -func (f *FuncExtension) Evaluate(args []PathValue) PathValue { - return f.evaluator(args) -} - -// Validate executes at parse time to validate that all the args to the are -// compatible with the [FuncExtension]. -func (f *FuncExtension) Validate(args []FuncExprArg) error { - return f.validator(args) -} - -// FuncExpr represents a function expression, consisting of a named -// [FuncExtension] and its arguments. See -// [github.com/theory/jsonpath/registry] for an example defining a custom -// function. Interfaces Implemented: -// - [FuncExprArg] -// - [BasicExpr] -// - [fmt.Stringer] -// - [CompVal] -type FuncExpr struct { - args []FuncExprArg - fn *FuncExtension -} - -// Function creates an returns a new [FuncExpr] that will execute fn against -// the return values of args. -func Function(fn *FuncExtension, args ...FuncExprArg) *FuncExpr { - return &FuncExpr{args: args, fn: fn} -} - -// writeTo writes the string representation of fe to buf. Defined by -// [stringWriter]. -func (fe *FuncExpr) writeTo(buf *strings.Builder) { - buf.WriteString(fe.fn.Name() + "(") - for i, arg := range fe.args { - arg.writeTo(buf) - if i < len(fe.args)-1 { - buf.WriteString(", ") - } - } - buf.WriteRune(')') -} - -// String returns a string representation of fe. -func (fe *FuncExpr) String() string { - var buf strings.Builder - fe.writeTo(&buf) - return buf.String() -} - -// evaluate returns a [PathValue] containing the result of executing each -// [FuncExprArg] in fe (as passed to [Function]) and passing them to fe's -// [FuncExtension]. -func (fe *FuncExpr) evaluate(current, root any) PathValue { - res := []PathValue{} - for _, a := range fe.args { - res = append(res, a.evaluate(current, root)) - } - - return fe.fn.Evaluate(res) -} - -// ResultType returns the result type of fe's [FuncExtension]. Defined by the -// [FuncExprArg] interface. -func (fe *FuncExpr) ResultType() FuncType { - return fe.fn.ReturnType() -} - -// ConvertsTo returns true if fe's result can be converted to ft. -func (fe *FuncExpr) ConvertsTo(ft FuncType) bool { - return ft == fe.fn.ReturnType() -} - -// asValue returns the result of executing fe.evaluate against current and -// root. Defined by the [CompVal] interface. -func (fe *FuncExpr) asValue(current, root any) PathValue { - return fe.evaluate(current, root) -} - -// testFilter executes fe and returns true if the function returns a truthy -// value: -// -// - If the result is [NodesType], returns true if it is not empty. 
-// - If the result is [*ValueType], returns true if its underlying value -// is truthy. -// - If the result is [LogicalType], returns the underlying boolean. -// -// Returns false in all other cases. Defined by [BasicExpr]. -func (fe *FuncExpr) testFilter(current, root any) bool { - switch res := fe.evaluate(current, root).(type) { - case NodesType: - return len(res) > 0 - case *ValueType: - return res.testFilter(current, root) - case LogicalType: - return res.Bool() - default: - return false - } -} - -// NotFuncExpr represents a negated function expression. It reverses the -// result of the return value of the underlying [FuncExpr]. Interfaces -// implemented: -// - [BasicExpr] -// - [fmt.Stringer] -type NotFuncExpr struct { - *FuncExpr -} - -// NotFunction creates and returns a new [NotFuncExpr] that will execute fn -// against the return values of args and return the inverse of its return -// value. -func NotFunction(fn *FuncExpr) NotFuncExpr { - return NotFuncExpr{fn} -} - -// String returns the string representation of nf. -func (nf NotFuncExpr) String() string { - return "!" + nf.FuncExpr.String() -} - -// testFilter returns the inverse of [FuncExpr.testFilter]. Defined by -// [BasicExpr]. -func (nf NotFuncExpr) testFilter(current, root any) bool { - return !nf.FuncExpr.testFilter(current, root) -} diff --git a/spec/function_string.go b/spec/function_string.go deleted file mode 100644 index a330f7e..0000000 --- a/spec/function_string.go +++ /dev/null @@ -1,44 +0,0 @@ -// Code generated by "stringer -output function_string.go -linecomment -trimprefix Func -type FuncType,LogicalType"; DO NOT EDIT. - -package spec - -import "strconv" - -func _() { - // An "invalid array index" compiler error signifies that the constant values have changed. - // Re-run the stringer command to generate them again. - var x [1]struct{} - _ = x[FuncValue-1] - _ = x[FuncNodes-2] - _ = x[FuncLogical-3] -} - -const _FuncType_name = "ValueNodesLogical" - -var _FuncType_index = [...]uint8{0, 5, 10, 17} - -func (i FuncType) String() string { - i -= 1 - if i >= FuncType(len(_FuncType_index)-1) { - return "FuncType(" + strconv.FormatInt(int64(i+1), 10) + ")" - } - return _FuncType_name[_FuncType_index[i]:_FuncType_index[i+1]] -} -func _() { - // An "invalid array index" compiler error signifies that the constant values have changed. - // Re-run the stringer command to generate them again. 
- var x [1]struct{} - _ = x[LogicalFalse-0] - _ = x[LogicalTrue-1] -} - -const _LogicalType_name = "falsetrue" - -var _LogicalType_index = [...]uint8{0, 5, 9} - -func (i LogicalType) String() string { - if i >= LogicalType(len(_LogicalType_index)-1) { - return "LogicalType(" + strconv.FormatInt(int64(i), 10) + ")" - } - return _LogicalType_name[_LogicalType_index[i]:_LogicalType_index[i+1]] -} diff --git a/spec/function_test.go b/spec/function_test.go deleted file mode 100644 index 0c918a4..0000000 --- a/spec/function_test.go +++ /dev/null @@ -1,614 +0,0 @@ -package spec - -import ( - "encoding/json" - "errors" - "fmt" - "strings" - "testing" - - "github.com/stretchr/testify/assert" -) - -func bufString(sw stringWriter) string { - buf := new(strings.Builder) - sw.writeTo(buf) - return buf.String() -} - -func TestFuncType(t *testing.T) { - t.Parallel() - a := assert.New(t) - - for _, tc := range []struct { - name string - fType FuncType - }{ - { - name: "Value", - fType: FuncValue, - }, - { - name: "Nodes", - fType: FuncNodes, - }, - { - name: "Logical", - fType: FuncLogical, - }, - { - name: "FuncType(16)", - fType: FuncType(16), - }, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - a.Equal(tc.name, tc.fType.String()) - }) - } -} - -func TestNodesType(t *testing.T) { - t.Parallel() - a := assert.New(t) - - for _, tc := range []struct { - name string - from PathValue - exp NodesType - str string - err string - }{ - {"nodes", NodesType([]any{1, 2}), Nodes(1, 2), "[1 2]", ""}, - {"value", Value(1), Nodes(1), "[1]", ""}, - {"nil", nil, Nodes([]any{}...), "[]", ""}, - {"logical", LogicalTrue, nil, "", "cannot convert LogicalType to NodesType"}, - {"unknown", newValueType{}, nil, "", "unexpected argument of type spec.newValueType"}, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - if tc.err != "" { - a.PanicsWithValue(tc.err, func() { NodesFrom(tc.from) }) - return - } - nt := NodesFrom(tc.from) - a.Equal(tc.exp, nt) - a.Equal(FuncNodes, nt.FuncType()) - a.Equal(tc.str, bufString(nt)) - }) - } -} - -func TestLogicalType(t *testing.T) { - t.Parallel() - a := assert.New(t) - - for _, tc := range []struct { - name string - from PathValue - exp LogicalType - boolean bool - err string - str string - }{ - {"true", LogicalTrue, LogicalTrue, true, "", "true"}, - {"false", LogicalFalse, LogicalFalse, false, "", "false"}, - {"unknown", LogicalType(16), LogicalType(16), false, "", "LogicalType(16)"}, - {"empty_nodes", Nodes(), LogicalFalse, false, "", "false"}, - {"nodes", Nodes(1), LogicalTrue, true, "", "true"}, - {"null", nil, LogicalFalse, false, "", "false"}, - {"value", Value(1), LogicalFalse, false, "cannot convert ValueType to LogicalType", ""}, - {"unknown", newValueType{}, LogicalFalse, false, "unexpected argument of type spec.newValueType", ""}, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - if tc.err != "" { - a.PanicsWithValue(tc.err, func() { LogicalFrom(tc.from) }) - return - } - lt := LogicalFrom(tc.from) - a.Equal(tc.exp, lt) - a.Equal(FuncLogical, lt.FuncType()) - a.Equal(tc.str, lt.String()) - a.Equal(tc.str, bufString(lt)) - a.Equal(tc.boolean, lt.Bool()) - if tc.boolean { - a.Equal(LogicalTrue, Logical(tc.boolean)) - } else { - a.Equal(LogicalFalse, Logical(tc.boolean)) - } - }) - } -} - -func TestValueType(t *testing.T) { - t.Parallel() - a := assert.New(t) - - for _, tc := range []struct { - name string - val any - exp bool - }{ - {"nil", nil, false}, - {"true", true, true}, - {"false", false, false}, - {"int", 42, true}, - {"int_zero", 0, 
false}, - {"int8", int8(1), true}, - {"int8_zero", int8(0), false}, - {"int16", int16(1), true}, - {"int16_zero", int16(0), false}, - {"int32", int32(1), true}, - {"int32_zero", int32(0), false}, - {"int64", int64(1), true}, - {"int64_zero", int64(0), false}, - {"uint", uint(42), true}, - {"uint_zero", 0, false}, - {"uint8", uint8(1), true}, - {"uint8_zero", uint8(0), false}, - {"uint16", uint16(1), true}, - {"uint16_zero", uint16(0), false}, - {"uint32", uint32(1), true}, - {"uint32_zero", uint32(0), false}, - {"json_number_int", json.Number("42"), true}, - {"json_number_zero", json.Number("0"), false}, - {"json_number_float", json.Number("98.6"), true}, - {"json_number_float_zero", json.Number("0.0"), false}, - {"json_number_invalid", json.Number("not a number"), true}, - {"uint64", uint64(1), true}, - {"uint64_zero", uint64(0), false}, - {"float32", float32(1), true}, - {"float32_zero", float32(0), false}, - {"float64", float64(1), true}, - {"float64_zero", float64(0), false}, - {"object", map[string]any{}, true}, - {"array", []any{}, true}, - {"struct", struct{}{}, true}, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - val := Value(tc.val) - a.Equal(FuncValue, val.FuncType()) - a.Equal(tc.val, val.Value()) - a.Equal(fmt.Sprintf("%v", tc.val), bufString(val)) - a.Equal(fmt.Sprintf("%v", tc.val), val.String()) - a.Equal(tc.exp, val.testFilter(nil, nil)) - }) - } -} - -func TestValueFrom(t *testing.T) { - t.Parallel() - a := assert.New(t) - - for _, tc := range []struct { - name string - val PathValue - exp *ValueType - err string - }{ - {"valueType", Value(42), Value(42), ""}, - {"nil", nil, nil, ""}, - {"logical", LogicalFalse, nil, "cannot convert LogicalType to ValueType"}, - {"nodes", Nodes(1), nil, "cannot convert NodesType to ValueType"}, - {"unknown", newValueType{}, nil, "unexpected argument of type spec.newValueType"}, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - if tc.err != "" { - a.PanicsWithValue(tc.err, func() { ValueFrom(tc.val) }) - return - } - val := ValueFrom(tc.val) - a.Equal(tc.exp, val) - }) - } -} - -func TestPathValueInterface(t *testing.T) { - t.Parallel() - a := assert.New(t) - - for _, tc := range []struct { - name string - pathVal any - funcType FuncType - str string - }{ - {"nodes", Nodes(), FuncNodes, "[]"}, - {"logical", LogicalType(1), FuncLogical, "true"}, - {"value", &ValueType{}, FuncValue, ""}, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - a.Implements((*PathValue)(nil), tc.pathVal) - pv, _ := tc.pathVal.(PathValue) - a.Equal(tc.funcType, pv.FuncType()) - a.Equal(tc.str, bufString(pv)) - a.Equal(tc.str, pv.String()) - }) - } -} - -func TestJsonFuncExprArgInterface(t *testing.T) { - t.Parallel() - a := assert.New(t) - - for _, tc := range []struct { - name string - expr any - }{ - {"literal", &LiteralArg{}}, - {"path_query", &PathQuery{}}, - {"singular_query", &SingularQueryExpr{}}, - {"logical_or", &LogicalOr{}}, - {"func_expr", &FuncExpr{}}, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - a.Implements((*FuncExprArg)(nil), tc.expr) - }) - } -} - -func TestJSONComparableValInterface(t *testing.T) { - t.Parallel() - a := assert.New(t) - - for _, tc := range []struct { - name string - expr any - }{ - {"literal", &LiteralArg{}}, - {"singular_query", &SingularQueryExpr{}}, - {"func_expr", &FuncExpr{}}, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - a.Implements((*CompVal)(nil), tc.expr) - }) - } -} - -func TestLiteralArg(t *testing.T) { - t.Parallel() - a := assert.New(t) - - for _, tc 
:= range []struct { - name string - literal any - str string - }{ - {"string", "hi", `"hi"`}, - {"number", 42, "42"}, - {"true", true, "true"}, - {"false", false, "false"}, - {"null", nil, "null"}, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - lit := Literal(tc.literal) - a.Equal(Value(tc.literal), lit.evaluate(nil, nil)) - a.Equal(Value(tc.literal), lit.asValue(nil, nil)) - a.Equal(tc.literal, lit.Value()) - a.Equal(FuncValue, lit.ResultType()) - a.Equal(tc.str, bufString(lit)) - a.Equal(tc.str, lit.String()) - a.True(lit.ConvertsTo(FuncValue)) - a.False(lit.ConvertsTo(FuncNodes)) - a.False(lit.ConvertsTo(FuncLogical)) - }) - } -} - -func TestSingularQuery(t *testing.T) { - t.Parallel() - a := assert.New(t) - - for _, tc := range []struct { - name string - selectors []Selector - input any - exp PathValue - str string - }{ - { - name: "one_name", - selectors: []Selector{Name("x")}, - input: map[string]any{"x": 42}, - exp: Value(42), - str: `["x"]`, - }, - { - name: "two_names", - selectors: []Selector{Name("x"), Name("y")}, - input: map[string]any{"x": map[string]any{"y": 98.6}}, - exp: Value(98.6), - str: `["x"]["y"]`, - }, - { - name: "one_index", - selectors: []Selector{Index(1)}, - input: []any{"x", 42}, - exp: Value(42), - str: `[1]`, - }, - { - name: "two_indexes", - selectors: []Selector{Index(1), Index(0)}, - input: []any{"x", []any{true}}, - exp: Value(true), - str: `[1][0]`, - }, - { - name: "one_of_each", - selectors: []Selector{Index(1), Name("x")}, - input: []any{"x", map[string]any{"x": 12}}, - exp: Value(12), - str: `[1]["x"]`, - }, - { - name: "nonexistent", - selectors: []Selector{Name("x")}, - input: map[string]any{"y": 42}, - str: `["x"]`, - }, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - sq := &SingularQueryExpr{selectors: tc.selectors, relative: false} - a.Equal(FuncValue, sq.ResultType()) - a.True(sq.ConvertsTo(FuncValue)) - a.True(sq.ConvertsTo(FuncNodes)) - a.False(sq.ConvertsTo(FuncLogical)) - - // Start with absolute query. - a.False(sq.relative) - a.Equal(tc.exp, sq.evaluate(nil, tc.input)) - a.Equal(tc.exp, sq.asValue(nil, tc.input)) - a.Equal("$"+tc.str, bufString(sq)) - - // Try a relative query. 
- sq.relative = true - a.Equal(tc.exp, sq.evaluate(tc.input, nil)) - a.Equal(tc.exp, sq.asValue(tc.input, nil)) - a.Equal("@"+tc.str, bufString(sq)) - }) - } -} - -func TestFilterQuery(t *testing.T) { - t.Parallel() - a := assert.New(t) - - for _, tc := range []struct { - name string - query *PathQuery - current any - root any - exp []any - typeKind FuncType - }{ - { - name: "root_name", - query: Query(true, Child(Name("x"))), - root: map[string]any{"x": 42}, - exp: []any{42}, - typeKind: FuncValue, - }, - { - name: "current_name", - query: Query(false, Child(Name("x"))), - current: map[string]any{"x": 42}, - exp: []any{42}, - typeKind: FuncValue, - }, - { - name: "root_name_index", - query: Query(true, Child(Name("x")), Child(Index(1))), - root: map[string]any{"x": []any{19, 234}}, - exp: []any{234}, - typeKind: FuncValue, - }, - { - name: "root_slice", - query: Query(true, Child(Slice(0, 2))), - root: []any{13, 2, 5}, - exp: []any{13, 2}, - typeKind: FuncNodes, - }, - { - name: "current_wildcard", - query: Query(false, Child(Wildcard())), - current: []any{13, 2, []any{4}}, - exp: []any{13, 2, []any{4}}, - typeKind: FuncNodes, - }, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - fq := tc.query - a.Equal(tc.typeKind, fq.ResultType()) - a.Equal(NodesType(tc.exp), fq.evaluate(tc.current, tc.root)) - a.Equal(tc.query.String(), bufString(fq)) - a.Equal(tc.typeKind == FuncValue, fq.ConvertsTo(FuncValue)) - a.True(fq.ConvertsTo(FuncNodes)) - a.False(fq.ConvertsTo(FuncLogical)) - }) - } -} - -func newTrueFunc() *FuncExtension { - return Extension( - "__true", - FuncLogical, - func([]FuncExprArg) error { return nil }, - func([]PathValue) PathValue { return LogicalTrue }, - ) -} - -func newValueFunc(val any) *FuncExtension { - return Extension( - "__val", - FuncValue, - func([]FuncExprArg) error { return nil }, - func([]PathValue) PathValue { return Value(val) }, - ) -} - -func newNodesFunc() *FuncExtension { - return Extension( - "__mk_nodes", - FuncNodes, - func(args []FuncExprArg) error { - for _, arg := range args { - if !arg.ConvertsTo(FuncValue) { - return fmt.Errorf("unexpected argument of type %v", arg.ResultType()) - } - } - return nil - }, - func(args []PathValue) PathValue { - ret := NodesType{} - for _, x := range args { - v, ok := x.(*ValueType) - if !ok { - panic(fmt.Sprintf("unexpected argument of type %T", x)) - } - ret = append(ret, v.any) - } - return ret - }, - ) -} - -// Mock up a valid PathValue that returns a new type. 
-type newValueType struct{} - -func (newValueType) FuncType() FuncType { return FuncType(16) } -func (newValueType) writeTo(b *strings.Builder) { b.WriteString("FuncType(16)") } -func (newValueType) String() string { return "FuncType(16)" } - -func newTypeFunc() *FuncExtension { - return Extension( - "__new_type", - FuncType(16), - func([]FuncExprArg) error { return nil }, - func([]PathValue) PathValue { return newValueType{} }, - ) -} - -func TestFunc(t *testing.T) { - t.Parallel() - a := assert.New(t) - - for _, tc := range []struct { - name string - fn *FuncExtension - args []PathValue - err error - exp PathValue - }{ - { - name: "valid_err_value", - fn: Extension( - "xyz", FuncValue, - func([]FuncExprArg) error { return errors.New("oops") }, - func([]PathValue) PathValue { return Value(42) }, - ), - args: []PathValue{}, - exp: Value(42), - err: errors.New("oops"), - }, - { - name: "no_valid_err_nodes", - fn: Extension( - "abc", FuncNodes, - func([]FuncExprArg) error { return nil }, - func([]PathValue) PathValue { return Nodes("hi") }, - ), - args: []PathValue{}, - exp: Nodes("hi"), - }, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - a.Equal(tc.fn.name, tc.fn.Name()) - a.Equal(tc.err, tc.fn.Validate(nil)) - a.Equal(tc.exp, tc.fn.Evaluate(tc.args)) - }) - } -} - -func TestFuncExpr(t *testing.T) { - t.Parallel() - a := assert.New(t) - - for _, tc := range []struct { - name string - fn *FuncExtension - args []FuncExprArg - current any - root any - exp PathValue - logical bool - str string - }{ - { - name: "val", - fn: newValueFunc(42), - args: []FuncExprArg{SingularQuery(true, Name("x"))}, - root: map[string]any{"x": "xyz"}, - exp: Value(42), - logical: true, - str: `__val($["x"])`, - }, - { - name: "__mk_nodes", - fn: newNodesFunc(), - args: []FuncExprArg{Literal(42), Literal(99)}, - exp: Nodes(42, 99), - logical: true, - str: `__mk_nodes(42, 99)`, - }, - { - name: "__mk_nodes_empty", - fn: newNodesFunc(), - args: []FuncExprArg{}, - exp: Nodes([]any{}...), - logical: false, - str: `__mk_nodes()`, - }, - { - name: "__true", - fn: newTrueFunc(), - args: []FuncExprArg{}, - exp: LogicalTrue, - logical: true, - str: `__true()`, - }, - { - name: "__new_type", - fn: newTypeFunc(), - args: []FuncExprArg{}, - exp: newValueType{}, - logical: false, - str: `__new_type()`, - }, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - fe := Function(tc.fn, tc.args...) - a.Equal(tc.fn.ReturnType(), fe.ResultType()) - a.Equal(tc.exp, fe.evaluate(tc.current, tc.root)) - a.Equal(tc.exp, fe.asValue(tc.current, tc.root)) - a.Equal(tc.logical, fe.testFilter(tc.current, tc.root)) - a.Equal(!tc.logical, NotFunction(fe).testFilter(tc.current, tc.root)) - a.Equal(tc.str, fe.String()) - a.Equal(tc.fn.ReturnType() == FuncValue, fe.ConvertsTo(FuncValue)) - a.Equal(tc.fn.ReturnType() == FuncNodes, fe.ConvertsTo(FuncNodes)) - a.Equal(tc.fn.ReturnType() == FuncLogical, fe.ConvertsTo(FuncLogical)) - }) - } -} diff --git a/spec/normalized.go b/spec/normalized.go deleted file mode 100644 index 0c856f7..0000000 --- a/spec/normalized.go +++ /dev/null @@ -1,123 +0,0 @@ -package spec - -import ( - "cmp" - "strings" -) - -// NormalSelector represents a single selector in a normalized path. -// Implemented by [Name] and [Index]. -type NormalSelector interface { - // writeNormalizedTo writes the selector to buf formatted as a [normalized - // path] element. 
- // - // [normalized path]: https://www.rfc-editor.org/rfc/rfc9535#section-2.7 - writeNormalizedTo(buf *strings.Builder) - - // writePointerTo writes the selector to buf formatted as a [JSON Pointer] - // reference token. - // - // [JSON Pointer]: https://www.rfc-editor.org/rfc/rfc6901 - writePointerTo(buf *strings.Builder) -} - -// NormalizedPath represents a normalized path identifying a single value in a -// JSON query argument, as [defined by RFC 9535]. Defines a simplified string -// format that exclusively uses single quotation marks to quote names. Useful -// for converting to JSON Pointer (via [NormalizedPath.Pointer]) or other JSON -// pointer-type formats. -// -// Interfaces implemented: -// - [fmt.Stringer] -// - [encoding.TextMarshaler] -// -// [defined by RFC 9535]: https://www.rfc-editor.org/rfc/rfc9535#name-normalized-paths -type NormalizedPath []NormalSelector - -// Normalized creates and returns a [NormalizedPath] that contains sel. -func Normalized(sel ...NormalSelector) NormalizedPath { - return NormalizedPath(sel) -} - -// String returns the string representation of np. -func (np NormalizedPath) String() string { - buf := new(strings.Builder) - buf.WriteRune('$') - for _, e := range np { - e.writeNormalizedTo(buf) - } - return buf.String() -} - -// Pointer returns an [RFC 6901 JSON Pointer] string representation of np. -// -// [RFC 6901 JSON Pointer]: https://www.rfc-editor.org/rfc/rfc6901 -func (np NormalizedPath) Pointer() string { - buf := new(strings.Builder) - for _, e := range np { - buf.WriteRune('/') - e.writePointerTo(buf) - } - return buf.String() -} - -// Compare compares np to np2 and returns -1 if np is less than np2, 1 if it's -// greater than np2, and 0 if they're equal. Indexes are always considered -// less than names. -func (np NormalizedPath) Compare(np2 NormalizedPath) int { - for i := range np { - if i >= len(np2) { - return 1 - } - switch v1 := np[i].(type) { - case Name: - switch v2 := np2[i].(type) { - case Name: - if x := cmp.Compare(v1, v2); x != 0 { - return x - } - case Index: - return 1 - } - case Index: - switch v2 := np2[i].(type) { - case Index: - if x := cmp.Compare(v1, v2); x != 0 { - return x - } - case Name: - return -1 - } - } - } - - if len(np2) > len(np) { - return -1 - } - return 0 -} - -// MarshalText marshals np into text. Implements [encoding.TextMarshaler]. -func (np NormalizedPath) MarshalText() ([]byte, error) { - return []byte(np.String()), nil -} - -// LocatedNode pairs a value with the [NormalizedPath] for its location within -// the JSON query argument from which it was selected. Returned by -// implementations of [Selector]'s SelectLocated method. -type LocatedNode struct { - // Node is the value selected from a JSON query argument. - Node any `json:"node"` - - // Path is the normalized path that uniquely identifies the location of - // Node in a JSON query argument. - Path NormalizedPath `json:"path"` -} - -// newLocatedNode creates and returns a new [LocatedNode]. It copies path. 
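For illustration, a small sketch of normalized-path rendering and ordering, assuming the exported Normalized, Name, and Index constructors shown above:

package main

import (
	"fmt"

	"github.com/theory/jsonpath/spec"
)

func main() {
	np := spec.Normalized(spec.Name("a"), spec.Name("b"), spec.Index(1))
	fmt.Println(np.String())  // $['a']['b'][1]
	fmt.Println(np.Pointer()) // /a/b/1

	// Indexes always sort before names.
	fmt.Println(spec.Normalized(spec.Index(0)).Compare(spec.Normalized(spec.Name("a")))) // -1
}
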
-func newLocatedNode(path NormalizedPath, node any) *LocatedNode { - return &LocatedNode{ - Path: NormalizedPath(append(make([]NormalSelector, 0, len(path)), path...)), - Node: node, - } -} diff --git a/spec/normalized_test.go b/spec/normalized_test.go deleted file mode 100644 index 10b66a1..0000000 --- a/spec/normalized_test.go +++ /dev/null @@ -1,295 +0,0 @@ -package spec - -import ( - "encoding/json" - "strings" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestNormalSelector(t *testing.T) { - t.Parallel() - a := assert.New(t) - - for _, tc := range []struct { - name string - elem NormalSelector - str string - ptr string - }{ - { - name: "object_value", - elem: Name("a"), - str: `['a']`, - ptr: `a`, - }, - { - name: "array_index", - elem: Index(1), - str: `[1]`, - ptr: `1`, - }, - { - name: "escape_apostrophes", - elem: Name("'hi'"), - str: `['\'hi\'']`, - ptr: "'hi'", - }, - { - name: "escapes", - elem: Name("'\b\f\n\r\t\\'"), - str: `['\'\b\f\n\r\t\\\'']`, - ptr: "'\b\f\n\r\t\\'", - }, - { - name: "escape_vertical_unicode", - elem: Name("\u000B"), - str: `['\u000b']`, - ptr: "\u000B", - }, - { - name: "escape_unicode_null", - elem: Name("\u0000"), - str: `['\u0000']`, - ptr: "\u0000", - }, - { - name: "escape_unicode_runes", - elem: Name("\u0001\u0002\u0003\u0004\u0005\u0006\u0007\u000e\u000F"), - str: `['\u0001\u0002\u0003\u0004\u0005\u0006\u0007\u000e\u000f']`, - ptr: "\u0001\u0002\u0003\u0004\u0005\u0006\u0007\u000e\u000F", - }, - { - name: "escape_pointer", - elem: Name("this / ~that"), - str: `['this / ~that']`, - ptr: "this ~1 ~0that", - }, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - buf := new(strings.Builder) - tc.elem.writeNormalizedTo(buf) - a.Equal(tc.str, buf.String()) - buf.Reset() - tc.elem.writePointerTo(buf) - a.Equal(tc.ptr, buf.String()) - }) - } -} - -func TestNormalizedPath(t *testing.T) { - t.Parallel() - a := assert.New(t) - - for _, tc := range []struct { - name string - path NormalizedPath - str string - ptr string - }{ - { - name: "empty_path", - path: Normalized(), - str: "$", - ptr: "", - }, - { - name: "object_value", - path: Normalized(Name("a")), - str: "$['a']", - ptr: "/a", - }, - { - name: "array_index", - path: Normalized(Index(1)), - str: "$[1]", - ptr: "/1", - }, - { - name: "neg_for_len_5", - path: Normalized(Index(2)), - str: "$[2]", - ptr: "/2", - }, - { - name: "nested_structure", - path: Normalized(Name("a"), Name("b"), Index(1)), - str: "$['a']['b'][1]", - ptr: "/a/b/1", - }, - { - name: "unicode_escape", - path: Normalized(Name("\u000B")), - str: `$['\u000b']`, - ptr: "/\u000b", - }, - { - name: "unicode_character", - path: Normalized(Name("\u0061")), - str: "$['a']", - ptr: "/a", - }, - { - name: "nested_structure_pointer_stuff", - path: Normalized(Name("a~x"), Name("b/2"), Index(1)), - str: "$['a~x']['b/2'][1]", - ptr: "/a~0x/b~12/1", - }, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - a.Equal(tc.str, tc.path.String()) - a.Equal(tc.ptr, tc.path.Pointer()) - }) - } -} - -func TestNormalizedPathCompare(t *testing.T) { - t.Parallel() - a := assert.New(t) - - for _, tc := range []struct { - name string - p1 NormalizedPath - p2 NormalizedPath - exp int - }{ - { - name: "empty_paths", - exp: 0, - }, - { - name: "same_name", - p1: Normalized(Name("a")), - p2: Normalized(Name("a")), - exp: 0, - }, - { - name: "diff_names", - p1: Normalized(Name("a")), - p2: Normalized(Name("b")), - exp: -1, - }, - { - name: "diff_names_rev", - p1: Normalized(Name("b")), - p2: 
Normalized(Name("a")), - exp: 1, - }, - { - name: "same_name_diff_lengths", - p1: Normalized(Name("a"), Name("b")), - p2: Normalized(Name("a")), - exp: 1, - }, - { - name: "same_name_diff_lengths_rev", - p1: Normalized(Name("a")), - p2: Normalized(Name("a"), Name("b")), - exp: -1, - }, - { - name: "same_multi_names", - p1: Normalized(Name("a"), Name("b")), - p2: Normalized(Name("a"), Name("b")), - exp: 0, - }, - { - name: "diff_nested_names", - p1: Normalized(Name("a"), Name("a")), - p2: Normalized(Name("a"), Name("b")), - exp: -1, - }, - { - name: "diff_nested_names_rev", - p1: Normalized(Name("a"), Name("b")), - p2: Normalized(Name("a"), Name("a")), - exp: 1, - }, - { - name: "name_vs_index", - p1: Normalized(Name("a")), - p2: Normalized(Index(0)), - exp: 1, - }, - { - name: "name_vs_index_rev", - p1: Normalized(Index(0)), - p2: Normalized(Name("a")), - exp: -1, - }, - { - name: "diff_nested_types", - p1: Normalized(Name("a"), Index(1024)), - p2: Normalized(Name("a"), Name("b")), - exp: -1, - }, - { - name: "diff_nested_types_rev", - p1: Normalized(Name("a"), Name("b")), - p2: Normalized(Name("a"), Index(1024)), - exp: 1, - }, - { - name: "same_index", - p1: Normalized(Index(42)), - p2: Normalized(Index(42)), - exp: 0, - }, - { - name: "diff_indexes", - p1: Normalized(Index(42)), - p2: Normalized(Index(99)), - exp: -1, - }, - { - name: "diff_indexes_rev", - p1: Normalized(Index(99)), - p2: Normalized(Index(42)), - exp: 1, - }, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - a.Equal(tc.exp, tc.p1.Compare(tc.p2)) - }) - } -} - -func TestLocatedNode(t *testing.T) { - t.Parallel() - a := assert.New(t) - r := require.New(t) - - for _, tc := range []struct { - name string - node LocatedNode - exp string - }{ - { - name: "simple", - node: LocatedNode{Path: Normalized(Name("a")), Node: "foo"}, - exp: `{"path": "$['a']", "node": "foo"}`, - }, - { - name: "double_quoted_path", - node: LocatedNode{Path: Normalized(Name(`"a"`)), Node: 42}, - exp: `{"path": "$['\"a\"']", "node": 42}`, - }, - { - name: "single_quoted_path", - node: LocatedNode{Path: Normalized(Name(`'a'`)), Node: true}, - exp: `{"path": "$['\\'a\\'']", "node": true}`, - }, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - json, err := json.Marshal(tc.node) - r.NoError(err) - a.JSONEq(tc.exp, string(json)) - }) - } -} diff --git a/spec/op.go b/spec/op.go deleted file mode 100644 index ffcd74c..0000000 --- a/spec/op.go +++ /dev/null @@ -1,246 +0,0 @@ -package spec - -//go:generate stringer -linecomment -output op_string.go -type CompOp - -import ( - "encoding/json" - "fmt" - "reflect" - "strings" -) - -// CompOp defines the JSONPath [filter comparison operators]. -// -// [filter comparison operators]: https://www.rfc-editor.org/rfc/rfc9535.html#name-comparisons -type CompOp uint8 - -const ( - // EqualTo is the == operator. - EqualTo CompOp = iota + 1 // == - // NotEqualTo is the != operator. - NotEqualTo // != - // LessThan is the < operator. - LessThan // < - // GreaterThan is the > operator. - GreaterThan // > - // LessThanEqualTo is the <= operator. - LessThanEqualTo // <= - // GreaterThanEqualTo is the >= operator. - GreaterThanEqualTo // >= -) - -// CompVal defines the interface for comparable values in filter -// expressions. Implemented by: -// -// - [LiteralArg] -// - [SingularQueryExpr] -// - [FuncExpr] -type CompVal interface { - stringWriter - // asValue returns the value to be compared. 
- asValue(current, root any) PathValue -} - -// CompExpr is a filter expression that compares two values, which themselves -// may themselves be the output of expressions. Interfaces implemented: -// -// - [BasicExpr] -// - [fmt.Stringer] -type CompExpr struct { - left CompVal - op CompOp - right CompVal -} - -// Comparison creates and returns a new [CompExpr] that uses op to compare -// left and right. -func Comparison(left CompVal, op CompOp, right CompVal) *CompExpr { - return &CompExpr{left, op, right} -} - -// writeTo writes a string representation of ce to buf. Defined by -// [stringWriter]. -func (ce *CompExpr) writeTo(buf *strings.Builder) { - ce.left.writeTo(buf) - fmt.Fprintf(buf, " %v ", ce.op) - ce.right.writeTo(buf) -} - -// String returns the string representation of ce. -func (ce *CompExpr) String() string { - var buf strings.Builder - ce.writeTo(&buf) - return buf.String() -} - -// testFilter uses ce.Op to compare the values returned by ce.Left and -// ce.Right relative to current and root. Defined by [BasicExpr]. -func (ce *CompExpr) testFilter(current, root any) bool { - left := ce.left.asValue(current, root) - right := ce.right.asValue(current, root) - switch ce.op { - case EqualTo: - return equalTo(left, right) - case NotEqualTo: - return !equalTo(left, right) - case LessThan: - return sameType(left, right) && lessThan(left, right) - case GreaterThan: - return sameType(left, right) && !lessThan(left, right) && !equalTo(left, right) - case LessThanEqualTo: - return sameType(left, right) && (lessThan(left, right) || equalTo(left, right)) - case GreaterThanEqualTo: - return sameType(left, right) && !lessThan(left, right) - default: - panic(fmt.Sprintf("Unknown operator %v", ce.op)) - } -} - -// equalTo returns true if left and right are nils, or if both are [ValueType] -// values and [valueEqualTo] returns true for their underlying values. -// Otherwise it returns false. -func equalTo(left, right PathValue) bool { - switch left := left.(type) { - case *ValueType: - if right, ok := right.(*ValueType); ok { - return valueEqualTo(left.any, right.any) - } - case nil: - return right == nil - } - return false -} - -// toFloat converts val to a float64 if it is a numeric value, setting ok to -// true. Otherwise it returns false for ok. -func toFloat(val any) (float64, bool) { - switch val := val.(type) { - case int: - return float64(val), true - case int8: - return float64(val), true - case int16: - return float64(val), true - case int32: - return float64(val), true - case int64: - return float64(val), true - case uint: - return float64(val), true - case uint8: - return float64(val), true - case uint16: - return float64(val), true - case uint32: - return float64(val), true - case uint64: - return float64(val), true - case float32: - return float64(val), true - case float64: - return val, true - case json.Number: - f, err := val.Float64() - return f, err == nil - default: - return 0, false - } -} - -// valueEqualTo returns true if left and right are equal. -func valueEqualTo(left, right any) bool { - if left, ok := toFloat(left); ok { - if right, ok := toFloat(right); ok { - return left == right - } - return false - } - - return reflect.DeepEqual(left, right) -} - -// lessThan returns true if left and right are both ValueTypes and -// [valueLessThan] returns true for their underlying values. Otherwise it -// returns false. 
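For illustration, a minimal sketch of building comparison expressions with the constructors above, under the same assumed import path; only the exported String rendering is shown, since evaluation runs through the unexported testFilter:

package main

import (
	"fmt"

	"github.com/theory/jsonpath/spec"
)

func main() {
	cmp := spec.Comparison(spec.Literal(42), spec.LessThanEqualTo, spec.Literal(43))
	fmt.Println(cmp.String()) // 42 <= 43

	// Numeric operands compare across Go numeric types during evaluation,
	// so int64(10) and float64(10) are treated as equal by valueEqualTo.
	eq := spec.Comparison(spec.Literal(int64(10)), spec.EqualTo, spec.Literal(10.0))
	fmt.Println(eq.String()) // 10 == 10
}
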
-func lessThan(left, right PathValue) bool { - if left, ok := left.(*ValueType); ok { - if right, ok := right.(*ValueType); ok { - return valueLessThan(left.any, right.any) - } - } - return false -} - -// sameType returns true if left and right resolve to the same JSON data type. -func sameType(left, right PathValue) bool { - switch left := left.(type) { - case NodesType: - if len(left) == 1 { - switch right := right.(type) { - case NodesType: - return valCompType(left[0], right[0]) - case *ValueType: - return valCompType(left[0], right.any) - case LogicalType: - _, ok := left[0].(bool) - return ok - } - } - case *ValueType: - switch right := right.(type) { - case *ValueType: - return valCompType(left.any, right.any) - case NodesType: - return valCompType(left.any, right[0]) - case LogicalType: - _, ok := left.any.(bool) - return ok - } - case LogicalType: - switch right := right.(type) { - case LogicalType: - return true - case NodesType: - if len(right) == 1 { - _, ok := right[0].(bool) - return ok - } - case *ValueType: - _, ok := right.any.(bool) - return ok - } - } - return false -} - -// valCompType returns true if left and right are comparable types, which -// means either both are a numeric type or are otherwise the same type. -func valCompType(left, right any) bool { - switch left.(type) { - case int, int8, int16, int32, int64, uint, uint8, uint16, uint32, uint64, float32, float64, json.Number: - switch right.(type) { - case int, int8, int16, int32, int64, uint, uint8, uint16, uint32, uint64, float32, float64, json.Number: - return true - } - } - return reflect.TypeOf(left) == reflect.TypeOf(right) -} - -// valueLessThan returns true if left and right are both numeric values or -// string values and left is less than right. -func valueLessThan(left, right any) bool { - if left, ok := toFloat(left); ok { - if right, ok := toFloat(right); ok { - return left < right - } - return false - } - - if left, ok := left.(string); ok { - if right, ok := right.(string); ok { - return left < right - } - } - - return false -} diff --git a/spec/op_string.go b/spec/op_string.go deleted file mode 100644 index b1ee64c..0000000 --- a/spec/op_string.go +++ /dev/null @@ -1,29 +0,0 @@ -// Code generated by "stringer -linecomment -output op_string.go -type CompOp"; DO NOT EDIT. - -package spec - -import "strconv" - -func _() { - // An "invalid array index" compiler error signifies that the constant values have changed. - // Re-run the stringer command to generate them again. 
- var x [1]struct{} - _ = x[EqualTo-1] - _ = x[NotEqualTo-2] - _ = x[LessThan-3] - _ = x[GreaterThan-4] - _ = x[LessThanEqualTo-5] - _ = x[GreaterThanEqualTo-6] -} - -const _CompOp_name = "==!=<><=>=" - -var _CompOp_index = [...]uint8{0, 2, 4, 5, 6, 8, 10} - -func (i CompOp) String() string { - i -= 1 - if i >= CompOp(len(_CompOp_index)-1) { - return "CompOp(" + strconv.FormatInt(int64(i+1), 10) + ")" - } - return _CompOp_name[_CompOp_index[i]:_CompOp_index[i+1]] -} diff --git a/spec/op_test.go b/spec/op_test.go deleted file mode 100644 index 16638db..0000000 --- a/spec/op_test.go +++ /dev/null @@ -1,420 +0,0 @@ -package spec - -import ( - "encoding/json" - "fmt" - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestCompOp(t *testing.T) { - t.Parallel() - a := assert.New(t) - - for _, tc := range []struct { - op CompOp - str string - }{ - {EqualTo, "=="}, - {NotEqualTo, "!="}, - {LessThan, "<"}, - {LessThanEqualTo, "<="}, - {GreaterThan, ">"}, - {GreaterThanEqualTo, ">="}, - } { - a.Equal(tc.str, tc.op.String()) - } -} - -func TestEqualTo(t *testing.T) { - t.Parallel() - a := assert.New(t) - - for _, tc := range []struct { - name string - left any - right any - exp bool - }{ - {"int_zeros", 0, 0, true}, - {"int_ones", 1, 1, true}, - {"int_zero_one", 0, 1, false}, - {"int8_zeros", int8(0), int8(0), true}, - {"int8_ones", int8(1), int8(1), true}, - {"int8_zero_one", int8(0), int8(1), false}, - {"int16_zeros", int16(0), int16(0), true}, - {"int16_ones", int16(1), int16(1), true}, - {"int16_zero_one", int16(0), int16(1), false}, - {"int32_zeros", int32(0), int32(0), true}, - {"int32_ones", int32(1), int32(1), true}, - {"int32_zero_one", int32(0), int32(1), false}, - {"int64_zeros", int64(0), int64(0), true}, - {"int64_ones", int64(1), int64(1), true}, - {"int64_zero_one", int64(0), int64(1), false}, - {"uint_zeros", uint(0), uint(0), true}, - {"uint_ones", uint(1), uint(1), true}, - {"uint_zero_one", uint(0), uint(1), false}, - {"uint8_zeros", uint8(0), uint8(0), true}, - {"uint8_ones", uint8(1), uint8(1), true}, - {"uint8_zero_one", uint8(0), uint8(1), false}, - {"uint16_zeros", uint16(0), uint16(0), true}, - {"uint16_ones", uint16(1), uint16(1), true}, - {"uint16_zero_one", uint16(0), uint16(1), false}, - {"uint32_zeros", uint32(0), uint32(0), true}, - {"uint32_ones", uint32(1), uint32(1), true}, - {"uint32_zero_one", uint32(0), uint32(1), false}, - {"uint64_zeros", uint64(0), uint64(0), true}, - {"uint64_ones", uint64(1), uint64(1), true}, - {"uint64_zero_one", uint64(0), uint64(1), false}, - {"float32_zeros", float32(0), float32(0), true}, - {"float32_ones", float32(1), float32(1), true}, - {"float32_zero_one", float32(0), float32(1), false}, - {"float64_zeros", float64(0), float64(0), true}, - {"float64_ones", float64(1), float64(1), true}, - {"float64_zero_one", float64(0), float64(1), false}, - {"json_number_eq", json.Number("0"), json.Number("0.0"), true}, - {"json_number_ne", json.Number("1024"), json.Number("0.0"), false}, - {"json_number_invalid", json.Number("not a number"), json.Number("0.0"), false}, - {"int_float_true", int64(10), float64(10), true}, - {"int_float_false", int64(10), float64(11), false}, - {"empty_strings", "", "", true}, - {"strings", "xyz", "xyz", true}, - {"strings_false", "xyz", "abc", false}, - {"unicode_strings", "foΓΌ", "foΓΌ", true}, - {"emoji_strings", "hi πŸ˜€", "hi πŸ˜€", true}, - {"trues", true, true, true}, - {"true_false", true, false, false}, - {"arrays_equal", []any{1, 2, 3}, []any{1, 2, 3}, true}, - {"arrays_ne", []any{1, 2, 3}, 
[]any{1, 2, 3, 4}, false}, - {"nils", nil, nil, true}, - {"nil_not_nil", nil, 2, false}, - {"objects_eq", map[string]any{"x": 1, "y": 2}, map[string]any{"x": 1, "y": 2}, true}, - {"object_keys_ne", map[string]any{"x": 1, "y": 2}, map[string]any{"x": 1, "z": 2}, false}, - {"object_vals_ne", map[string]any{"x": 1, "y": 2}, map[string]any{"x": 1, "y": 3}, false}, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - a.Equal(tc.exp, valueEqualTo(tc.left, tc.right)) - a.Equal(tc.exp, equalTo(Value(tc.left), Value(tc.right))) - }) - } - - t.Run("not_comparable", func(t *testing.T) { - t.Parallel() - a.False(valueEqualTo(42, "x")) - a.False(equalTo(nil, Value(42))) - a.False(equalTo(Value(42), nil)) - a.False(equalTo(LogicalFalse, LogicalFalse)) - }) -} - -func TestLessThan(t *testing.T) { - t.Parallel() - a := assert.New(t) - - for _, tc := range []struct { - name string - left any - right any - exp bool - }{ - {"int_zeros", 0, 0, false}, - {"int_zero_one", 0, 1, true}, - {"int_one_zero", 1, 0, false}, - {"int8_zeros", 0, 0, false}, - {"int8_zero_one", 0, 1, true}, - {"int8_one_zero", 1, 0, false}, - {"int16_zeros", 0, 0, false}, - {"int16_zero_one", 0, 1, true}, - {"int16_one_zero", 1, 0, false}, - {"int32_zeros", 0, 0, false}, - {"int32_zero_one", 0, 1, true}, - {"int32_one_zero", 1, 0, false}, - {"int64_zeros", 0, 0, false}, - {"int64_zero_one", 0, 1, true}, - {"int64_one_zero", 1, 0, false}, - {"uint_zeros", 0, 0, false}, - {"uint_zero_one", 0, 1, true}, - {"uint_one_zero", 1, 0, false}, - {"uint8_zeros", 0, 0, false}, - {"uint8_zero_one", 0, 1, true}, - {"uint8_one_zero", 1, 0, false}, - {"uint16_zeros", 0, 0, false}, - {"uint16_zero_one", 0, 1, true}, - {"uint16_one_zero", 1, 0, false}, - {"uint32_zeros", 0, 0, false}, - {"uint32_zero_one", 0, 1, true}, - {"uint32_one_zero", 1, 0, false}, - {"uint64_zeros", 0, 0, false}, - {"uint64_zero_one", 0, 1, true}, - {"uint64_one_zero", 1, 0, false}, - {"float32_zeros", 0, 0, false}, - {"float32_zero_one", 0, 1, true}, - {"float32_one_zero", 1, 0, false}, - {"float64_zeros", 0, 0, false}, - {"float64_zero_one", 0, 1, true}, - {"float64_one_zero", 1, 0, false}, - {"int_float_true", 12, 98.6, true}, - {"int_float_false", 99, 98.6, false}, - {"float_int_false", 98.6, 98, false}, - {"float_int_true", 98.6, 99, true}, - {"empty_string_sting", "", "x", true}, - {"empty_strings", "", "", false}, - {"string_a_b", "a", "b", true}, - {"string_c_b", "c", "b", false}, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - a.Equal(tc.exp, valueLessThan(tc.left, tc.right)) - a.Equal(tc.exp, lessThan(Value(tc.left), Value(tc.right))) - }) - } - - t.Run("not_comparable", func(t *testing.T) { - t.Parallel() - a.False(lessThan(LogicalFalse, Value("."))) - a.False(lessThan(Value("x"), LogicalFalse)) - a.False(valueLessThan(42, "x")) - a.False(valueLessThan([]any{0}, []any{1})) - }) -} - -func TestSameType(t *testing.T) { - t.Parallel() - a := assert.New(t) - - for _, tc := range []struct { - name string - left PathValue - right PathValue - exp bool - }{ - {"int_nodes", Nodes(1), Nodes(0), true}, - {"float_nodes", Nodes(98.6), Nodes(22.4), true}, - {"bool_nodes", Nodes(true), Nodes(false), true}, - {"string_nodes", Nodes("hi"), Nodes("go"), true}, - {"object_nodes", Nodes(map[string]any{}), Nodes(map[string]any{}), true}, - {"array_nodes", Nodes([]any{}), Nodes([]any{}), true}, - {"nil_nodes", Nodes(nil), Nodes(nil), true}, - {"int_float_nodes", Nodes(1), Nodes(98.6), true}, - {"int64_uint32_nodes", Nodes(int64(1)), Nodes(uint32(8)), true}, - 
{"int_bool_nodes", Nodes(1), Nodes(false), false}, - {"string_obj_nodes", Nodes("hi"), Nodes(map[string]any{}), false}, - {"int64_array_nodes", Nodes(int64(9)), Nodes([]any{}), false}, - {"int_vals", Value(1), Value(0), true}, - {"float_vals", Value(98.6), Value(22.4), true}, - {"bool_vals", Value(true), Value(false), true}, - {"string_vals", Value("hi"), Value("go"), true}, - {"object_vals", Value(map[string]any{}), Value(map[string]any{}), true}, - {"array_vals", Value([]any{}), Value([]any{}), true}, - {"nil_vals", Value(nil), Value(nil), true}, - {"int_float_vals", Value(1), Value(98.6), true}, - {"int64_uint32_vals", Value(int64(1)), Value(uint32(8)), true}, - {"int_bool_vals", Value(1), Value(false), false}, - {"string_obj_vals", Value("hi"), Value(map[string]any{}), false}, - {"int64_array_vals", Value(int64(9)), Value([]any{}), false}, - {"nodes_multi", Nodes(1, 1), Nodes(1, 1), false}, - {"nodes_multi_sing", Nodes(1, 1), Nodes(1), false}, - - {"nodes_val_int", Nodes(0), Value(1), true}, - {"nodes_val_float", Nodes(1.1), Value(2.2), true}, - {"nodes_val_numbers", Nodes(1), Value(2.2), true}, - {"nodes_val_bool", Nodes(true), Value(false), true}, - {"nodes_val_string", Nodes("hi"), Value("go"), true}, - {"nodes_val_object", Nodes(map[string]any{}), Value(map[string]any{}), true}, - {"nodes_val_array", Nodes([]any{"x"}), Value([]any{1}), true}, - {"nodes_val_nil", Nodes(nil), Value(nil), true}, - {"nodes_val_int_bool", Nodes(21), Value(false), false}, - {"nodes_val_string_nil", Nodes("hi"), Value(nil), false}, - {"nodes_val_obj_array", Nodes(map[string]any{}), Value([]any{}), false}, - {"nodes_bool_logical", Nodes(true), LogicalFalse, true}, - {"nodes_string_logical", Nodes("x"), LogicalFalse, false}, - {"nodes_int_logical", Nodes(42), LogicalFalse, false}, - {"multi_nodes_val", Nodes(0, 0), Value(1), false}, - {"multi_nodes_logical", Nodes(true, true), LogicalTrue, false}, - {"nodes_json_number", Nodes(json.Number("1")), Value(1), true}, - {"nodes_json_numbers", Nodes(json.Number("1")), Value(json.Number("10")), true}, - - {"val_nodes_int", Value(0), Nodes(1), true}, - {"val_nodes_float", Value(1.1), Nodes(2.2), true}, - {"val_nodes_numbers", Value(1), Nodes(2.2), true}, - {"val_nodes_bool", Value(true), Nodes(false), true}, - {"val_nodes_string", Value("hi"), Nodes("go"), true}, - {"val_nodes_object", Value(map[string]any{}), Nodes(map[string]any{}), true}, - {"val_nodes_array", Value([]any{"x"}), Nodes([]any{1}), true}, - {"val_nodes_nil", Value(nil), Nodes(nil), true}, - {"val_nodes_int_bool", Value(21), Nodes(false), false}, - {"val_nodes_string_nil", Value("hi"), Nodes(nil), false}, - {"val_nodes_obj_array", Value(map[string]any{}), Nodes([]any{}), false}, - {"val_bool_logical", Value(true), LogicalFalse, true}, - {"val_string_logical", Value("x"), LogicalFalse, false}, - {"val_int_logical", Value(42), LogicalFalse, false}, - - {"logical_types", LogicalFalse, LogicalTrue, true}, - {"logical_val_bool", LogicalFalse, Value(false), true}, - {"logical_nodes_bool", LogicalFalse, Nodes(false), true}, - {"logical_val_string", LogicalFalse, Value("true"), false}, - {"logical_nodes_string", LogicalFalse, Nodes("true"), false}, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - a.Equal(tc.exp, sameType(tc.left, tc.right)) - }) - } -} - -func TestComparisonExpr(t *testing.T) { - t.Parallel() - a := assert.New(t) - - for _, tc := range []struct { - name string - left CompVal - right CompVal - root any - current any - expect []bool - str string - }{ - { - name: 
"literal_numbers_eq", - left: Literal(42), - right: Literal(42), - expect: []bool{true, false, false, false, true, true}, - str: "42 %v 42", - }, - { - name: "literal_numbers_lt", - left: Literal(42), - right: Literal(43), - expect: []bool{false, true, true, false, true, false}, - str: "42 %v 43", - }, - { - name: "literal_numbers_gt", - left: Literal(43), - right: Literal(42), - expect: []bool{false, true, false, true, false, true}, - str: "43 %v 42", - }, - { - name: "literal_strings_eq", - left: Literal("x"), - right: Literal("x"), - expect: []bool{true, false, false, false, true, true}, - str: `"x" %v "x"`, - }, - { - name: "literal_strings_lt", - left: Literal("x"), - right: Literal("y"), - expect: []bool{false, true, true, false, true, false}, - str: `"x" %v "y"`, - }, - { - name: "literal_strings_gt", - left: Literal("y"), - right: Literal("x"), - expect: []bool{false, true, false, true, false, true}, - str: `"y" %v "x"`, - }, - { - name: "query_numbers_eq", - left: SingularQuery(true, Name("x")), - right: SingularQuery(true, Name("y")), - root: map[string]any{"x": 42, "y": 42}, - expect: []bool{true, false, false, false, true, true}, - str: `$["x"] %v $["y"]`, - }, - { - name: "query_numbers_lt", - left: SingularQuery(false, Name("x")), - right: SingularQuery(false, Name("y")), - current: map[string]any{"x": 42, "y": 43}, - expect: []bool{false, true, true, false, true, false}, - str: `@["x"] %v @["y"]`, - }, - { - name: "query_string_gt", - left: SingularQuery(true, Name("y")), - right: SingularQuery(true, Name("x")), - root: map[string]any{"x": "x", "y": "y"}, - expect: []bool{false, true, false, true, false, true}, - str: `$["y"] %v $["x"]`, - }, - { - name: "func_numbers_eq", - left: &FuncExpr{ - args: []FuncExprArg{SingularQuery(true, Name("x"))}, - fn: newValueFunc(1), - }, - right: &FuncExpr{ - args: []FuncExprArg{SingularQuery(true, Name("y"))}, - fn: newValueFunc(1), - }, - root: map[string]any{"x": "xx", "y": "yy"}, - expect: []bool{true, false, false, false, true, true}, - str: `__val($["x"]) %v __val($["y"])`, - }, - { - name: "func_numbers_lt", - left: &FuncExpr{ - args: []FuncExprArg{SingularQuery(true, Name("x"))}, - fn: newValueFunc(1), - }, - right: &FuncExpr{ - args: []FuncExprArg{SingularQuery(true, Name("y"))}, - fn: newValueFunc(2), - }, - root: map[string]any{"x": "xx", "y": "yyy"}, - expect: []bool{false, true, true, false, true, false}, - str: `__val($["x"]) %v __val($["y"])`, - }, - { - name: "func_strings_gt", - left: &FuncExpr{ - args: []FuncExprArg{Query(false, Child(Name("y")))}, - fn: newValueFunc(42), - }, - right: &FuncExpr{ - args: []FuncExprArg{Query(false, Child(Name("x")))}, - fn: newValueFunc(41), - }, - current: map[string]any{"x": "x", "y": "y"}, - expect: []bool{false, true, false, true, false, true}, - str: `__val(@["y"]) %v __val(@["x"])`, - }, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - for i, op := range []struct { - name string - op CompOp - }{ - {"eq", EqualTo}, - {"ne", NotEqualTo}, - {"lt", LessThan}, - {"gt", GreaterThan}, - {"le", LessThanEqualTo}, - {"ge", GreaterThanEqualTo}, - } { - t.Run(op.name, func(t *testing.T) { - t.Parallel() - cmp := Comparison(tc.left, op.op, tc.right) - a.Equal(tc.expect[i], cmp.testFilter(tc.current, tc.root)) - a.Equal(fmt.Sprintf(tc.str, op.op), bufString(cmp)) - }) - } - }) - - t.Run("unknown_op", func(t *testing.T) { - t.Parallel() - cmp := Comparison(tc.left, CompOp(16), tc.right) - a.Equal(fmt.Sprintf(tc.str, cmp.op), bufString(cmp)) - a.PanicsWithValue("Unknown operator 
CompOp(16)", func() { - cmp.testFilter(tc.current, tc.root) - }) - }) - } -} diff --git a/spec/query.go b/spec/query.go deleted file mode 100644 index bde3fb1..0000000 --- a/spec/query.go +++ /dev/null @@ -1,152 +0,0 @@ -package spec - -import "strings" - -// PathQuery represents a JSONPath query. Interfaces implemented: -// - [Selector] -// - [FuncExprArg] -// - [fmt.Stringer] -type PathQuery struct { - segments []*Segment - root bool -} - -// Query returns a new [PathQuery] consisting of segments. When root is true -// it indicates a query from the root of a value. Set to false for filter -// subqueries. -func Query(root bool, segments ...*Segment) *PathQuery { - return &PathQuery{root: root, segments: segments} -} - -// Segments returns q's [Segment] values. -func (q *PathQuery) Segments() []*Segment { - return q.segments -} - -// String returns a string representation of q. -func (q *PathQuery) String() string { - var buf strings.Builder - q.writeTo(&buf) - return buf.String() -} - -// writeTo writes a string representation of fq to buf. Defined by -// [stringWriter]. -func (q *PathQuery) writeTo(buf *strings.Builder) { - if q.root { - buf.WriteRune('$') - } else { - buf.WriteRune('@') - } - for _, s := range q.segments { - buf.WriteString(s.String()) - } -} - -// Select selects the values from current or root and returns the results. -// Returns just current if q has no segments. Defined by the [Selector] -// interface. -func (q *PathQuery) Select(current, root any) []any { - res := []any{current} - if q.root { - res[0] = root - } - for _, seg := range q.segments { - segRes := []any{} - for _, v := range res { - segRes = append(segRes, seg.Select(v, root)...) - } - res = segRes - } - - return res -} - -// SelectLocated values from current or root into [LocatedNode] values and -// returns the results. Returns just current if q has no segments. Defined by -// the [Selector] interface. -func (q *PathQuery) SelectLocated(current, root any, parent NormalizedPath) []*LocatedNode { - res := []*LocatedNode{nil} - if q.root { - res[0] = newLocatedNode(nil, root) - } else { - res[0] = newLocatedNode(parent, current) - } - for _, seg := range q.segments { - segRes := []*LocatedNode{} - for _, v := range res { - segRes = append(segRes, seg.SelectLocated(v.Node, root, v.Path)...) - } - res = segRes - } - - return res -} - -// isSingular returns true if q always returns a singular value. Defined by -// the [Selector] interface. -func (q *PathQuery) isSingular() bool { - for _, s := range q.segments { - if s.descendant { - return false - } - if !s.isSingular() { - return false - } - } - return true -} - -// Singular returns the [SingularQueryExpr] variant of q if q is a singular -// query. Otherwise it returns nil. -func (q *PathQuery) Singular() *SingularQueryExpr { - if q.isSingular() { - return singular(q) - } - - return nil -} - -// Expression returns a [SingularQueryExpr] variant of q if q is a singular -// query, and otherwise returns q. -func (q *PathQuery) Expression() FuncExprArg { - if q.isSingular() { - return singular(q) - } - - return q -} - -// evaluate returns a [NodesType] containing the result of executing fq. -// Defined by the [FuncExprArg] interface. -func (q *PathQuery) evaluate(current, root any) PathValue { - return NodesType(q.Select(current, root)) -} - -// ResultType returns [FuncValue] if fq is a singular query, and [FuncNodes] -// if it is not. Defined by the [FuncExprArg] interface. 
-func (q *PathQuery) ResultType() FuncType { - if q.isSingular() { - return FuncValue - } - return FuncNodes -} - -// ConvertsTo returns true if fq's result can be converted to ft. A singular -// query can be converted to either [FuncValue] or [FuncNodes]. All other -// queries can only be converted to FuncNodes. -func (q *PathQuery) ConvertsTo(ft FuncType) bool { - if q.isSingular() { - return ft == FuncValue || ft == FuncNodes - } - return ft == FuncNodes -} - -// singular is a utility function that converts q to a singularQuery. -func singular(q *PathQuery) *SingularQueryExpr { - selectors := make([]Selector, len(q.segments)) - for i, s := range q.segments { - selectors[i] = s.selectors[0] - } - return &SingularQueryExpr{selectors: selectors, relative: !q.root} -} diff --git a/spec/query_test.go b/spec/query_test.go deleted file mode 100644 index c443ba8..0000000 --- a/spec/query_test.go +++ /dev/null @@ -1,1369 +0,0 @@ -package spec - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestQueryRoot(t *testing.T) { - t.Parallel() - a := assert.New(t) - - for _, tc := range []struct { - name string - val any - }{ - {"string", "Hi there πŸ˜€"}, - {"bool", true}, - {"float", 98.6}, - {"float64", float64(98.6)}, - {"float32", float32(98.6)}, - {"int", 42}, - {"int64", int64(42)}, - {"int32", int32(42)}, - {"int16", int16(42)}, - {"int8", int8(42)}, - {"uint64", uint64(42)}, - {"uint32", uint32(42)}, - {"uint16", uint16(42)}, - {"uint8", uint8(42)}, - {"struct", struct{ x int }{}}, - {"nil", nil}, - {"map", map[string]any{"x": true, "y": []any{1, 2}}}, - {"slice", []any{1, 2, map[string]any{"x": true}}}, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - q := Query(false) - a.Equal([]any{tc.val}, q.Select(tc.val, nil)) - }) - } -} - -func TestQueryString(t *testing.T) { - t.Parallel() - a := assert.New(t) - - for _, tc := range []struct { - name string - segs []*Segment - str string - }{ - { - name: "empty", - segs: []*Segment{}, - str: "", - }, - { - name: "one_key", - segs: []*Segment{Child(Name("x"))}, - str: `["x"]`, - }, - { - name: "two_keys", - segs: []*Segment{Child(Name("x"), Name("y"))}, - str: `["x","y"]`, - }, - { - name: "two_segs", - segs: []*Segment{Child(Name("x"), Name("y")), Child(Index(0))}, - str: `["x","y"][0]`, - }, - { - name: "segs_plus_descendant", - segs: []*Segment{Child(Name("x"), Name("y")), Child(Wildcard()), Descendant(Index(0))}, - str: `["x","y"][*]..[0]`, - }, - { - name: "segs_with_slice", - segs: []*Segment{Child(Name("x"), Slice(2)), Child(Wildcard()), Descendant(Index(0))}, - str: `["x",2:][*]..[0]`, - }, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - q := Query(false, tc.segs...) - a.Equal("@"+tc.str, q.String()) - a.Equal("@"+tc.str, bufString(q)) - q = Query(true, tc.segs...) - a.Equal("$"+tc.str, q.String()) - a.Equal("$"+tc.str, bufString(q)) - }) - } -} - -type queryTestCase struct { - name string - resType FuncType - segs []*Segment - input any - exp []any - loc []*LocatedNode - rand bool -} - -func (tc queryTestCase) run(a *assert.Assertions) { - // Set up Query. - q := Query(false, tc.segs...) - a.Equal(tc.segs, q.Segments()) - a.False(q.root) - - // Test Select and SelectLocated. - if tc.rand { - a.ElementsMatch(tc.exp, q.Select(tc.input, nil)) - a.ElementsMatch(tc.loc, q.SelectLocated(tc.input, nil, NormalizedPath{})) - } else { - a.Equal(tc.exp, q.Select(tc.input, nil)) - a.Equal(tc.loc, q.SelectLocated(tc.input, nil, NormalizedPath{})) - } - - // Test result type and conversion. 
- a.Equal(tc.resType, q.ResultType()) - a.Equal(tc.resType == FuncValue, q.ConvertsTo(FuncValue)) - a.True(q.ConvertsTo(FuncNodes)) - a.False(q.ConvertsTo(FuncLogical)) -} - -func TestQueryObject(t *testing.T) { - t.Parallel() - a := assert.New(t) - - for _, tc := range []queryTestCase{ - { - name: "root", - resType: FuncValue, - input: map[string]any{"x": true, "y": []any{1, 2}}, - exp: []any{map[string]any{"x": true, "y": []any{1, 2}}}, - loc: []*LocatedNode{ - {Path: NormalizedPath{}, Node: map[string]any{"x": true, "y": []any{1, 2}}}, - }, - }, - { - name: "one_key_scalar", - resType: FuncValue, - segs: []*Segment{Child(Name("x"))}, - input: map[string]any{"x": true, "y": []any{1, 2}}, - exp: []any{true}, - loc: []*LocatedNode{ - {Path: Normalized(Name("x")), Node: true}, - }, - }, - { - name: "one_key_array", - resType: FuncValue, - segs: []*Segment{Child(Name("y"))}, - input: map[string]any{"x": true, "y": []any{1, 2}}, - exp: []any{[]any{1, 2}}, - loc: []*LocatedNode{ - {Path: Normalized(Name("y")), Node: []any{1, 2}}, - }, - }, - { - name: "one_key_object", - resType: FuncValue, - segs: []*Segment{Child(Name("y"))}, - input: map[string]any{"x": true, "y": map[string]any{"a": 1}}, - exp: []any{map[string]any{"a": 1}}, - loc: []*LocatedNode{ - {Path: Normalized(Name("y")), Node: map[string]any{"a": 1}}, - }, - }, - { - name: "multiple_keys", - resType: FuncNodes, - segs: []*Segment{Child(Name("x"), Name("y"))}, - input: map[string]any{"x": true, "y": []any{1, 2}, "z": "hi"}, - exp: []any{true, []any{1, 2}}, - loc: []*LocatedNode{ - {Path: Normalized(Name("x")), Node: true}, - {Path: Normalized(Name("y")), Node: []any{1, 2}}, - }, - rand: true, - }, - { - name: "three_level_path", - resType: FuncValue, - segs: []*Segment{ - Child(Name("x")), - Child(Name("a")), - Child(Name("i")), - }, - input: map[string]any{ - "x": map[string]any{ - "a": map[string]any{ - "i": []any{1, 2}, - "j": 42, - }, - "b": "no", - }, - "y": 1, - }, - exp: []any{[]any{1, 2}}, - loc: []*LocatedNode{ - {Path: Normalized(Name("x"), Name("a"), Name("i")), Node: []any{1, 2}}, - }, - }, - { - name: "wildcard_keys", - resType: FuncNodes, - segs: []*Segment{ - Child(Wildcard()), - Child(Name("a"), Name("b")), - }, - input: map[string]any{ - "x": map[string]any{"a": "go", "b": 2, "c": 5}, - "y": map[string]any{"a": 2, "b": 3, "d": 3}, - }, - exp: []any{"go", 2, 2, 3}, - loc: []*LocatedNode{ - {Path: Normalized(Name("x"), Name("a")), Node: "go"}, - {Path: Normalized(Name("x"), Name("b")), Node: 2}, - {Path: Normalized(Name("y"), Name("a")), Node: 2}, - {Path: Normalized(Name("y"), Name("b")), Node: 3}, - }, - rand: true, - }, - { - name: "any_key_indexes", - resType: FuncNodes, - segs: []*Segment{ - Child(Wildcard()), - Child(Index(0), Index(1)), - }, - input: map[string]any{ - "x": []any{"a", "go", "b", 2, "c", 5}, - "y": []any{"a", 2, "b", 3, "d", 3}, - }, - exp: []any{"a", "go", "a", 2}, - loc: []*LocatedNode{ - {Path: Normalized(Name("x"), Index(0)), Node: "a"}, - {Path: Normalized(Name("x"), Index(1)), Node: "go"}, - {Path: Normalized(Name("y"), Index(0)), Node: "a"}, - {Path: Normalized(Name("y"), Index(1)), Node: 2}, - }, - rand: true, - }, - { - name: "any_key_nonexistent_index", - resType: FuncNodes, - segs: []*Segment{Child(Wildcard()), Child(Index(1))}, - input: map[string]any{ - "x": []any{"a", "go", "b", 2, "c", 5}, - "y": []any{"a"}, - }, - exp: []any{"go"}, - loc: []*LocatedNode{ - {Path: Normalized(Name("x"), Index(1)), Node: "go"}, - }, - }, - { - name: "nonexistent_key", - resType: FuncValue, - segs: 
[]*Segment{Child(Name("x"))}, - input: map[string]any{"y": []any{1, 2}}, - exp: []any{}, - loc: []*LocatedNode{}, - }, - { - name: "nonexistent_branch_key", - resType: FuncValue, - segs: []*Segment{Child(Name("x")), Child(Name("z"))}, - input: map[string]any{"y": []any{1, 2}}, - exp: []any{}, - loc: []*LocatedNode{}, - }, - { - name: "wildcard_then_nonexistent_key", - resType: FuncNodes, - segs: []*Segment{Child(Wildcard()), Child(Name("x"))}, - input: map[string]any{"y": map[string]any{"a": 1}}, - exp: []any{}, - loc: []*LocatedNode{}, - }, - { - name: "not_an_object", - resType: FuncValue, - segs: []*Segment{Child(Name("x")), Child(Name("y"))}, - input: map[string]any{"x": true}, - exp: []any{}, - loc: []*LocatedNode{}, - }, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - tc.run(a) - }) - } -} - -func TestQueryArray(t *testing.T) { - t.Parallel() - a := assert.New(t) - - for _, tc := range []queryTestCase{ - { - name: "root", - resType: FuncValue, - input: []any{"x", true, "y", []any{1, 2}}, - exp: []any{[]any{"x", true, "y", []any{1, 2}}}, - loc: []*LocatedNode{ - {Path: NormalizedPath{}, Node: []any{"x", true, "y", []any{1, 2}}}, - }, - }, - { - name: "index_zero", - resType: FuncValue, - segs: []*Segment{Child(Index(0))}, - input: []any{"x", true, "y", []any{1, 2}}, - exp: []any{"x"}, - loc: []*LocatedNode{{Path: Normalized(Index(0)), Node: "x"}}, - }, - { - name: "index_one", - resType: FuncValue, - segs: []*Segment{Child(Index(1))}, - input: []any{"x", true, "y", []any{1, 2}}, - exp: []any{true}, - loc: []*LocatedNode{{Path: Normalized(Index(1)), Node: true}}, - }, - { - name: "index_three", - resType: FuncValue, - segs: []*Segment{Child(Index(3))}, - input: []any{"x", true, "y", []any{1, 2}}, - exp: []any{[]any{1, 2}}, - loc: []*LocatedNode{{Path: Normalized(Index(3)), Node: []any{1, 2}}}, - }, - { - name: "multiple_indexes", - resType: FuncNodes, - segs: []*Segment{Child(Index(1), Index(3))}, - input: []any{"x", true, "y", []any{1, 2}}, - exp: []any{true, []any{1, 2}}, - loc: []*LocatedNode{ - {Path: Normalized(Index(1)), Node: true}, - {Path: Normalized(Index(3)), Node: []any{1, 2}}, - }, - }, - { - name: "nested_indices", - resType: FuncValue, - segs: []*Segment{Child(Index(0)), Child(Index(0))}, - input: []any{[]any{1, 2}, "x", true, "y"}, - exp: []any{1}, - loc: []*LocatedNode{ - {Path: Normalized(Index(0), Index(0)), Node: 1}, - }, - }, - { - name: "nested_multiple_indices", - resType: FuncNodes, - segs: []*Segment{Child(Index(0)), Child(Index(0), Index(1))}, - input: []any{[]any{1, 2, 3}, "x", true, "y"}, - exp: []any{1, 2}, - loc: []*LocatedNode{ - {Path: Normalized(Index(0), Index(0)), Node: 1}, - {Path: Normalized(Index(0), Index(1)), Node: 2}, - }, - }, - { - name: "nested_index_gaps", - resType: FuncValue, - segs: []*Segment{Child(Index(1)), Child(Index(1))}, - input: []any{"x", []any{1, 2}, true, "y"}, - exp: []any{2}, - loc: []*LocatedNode{ - {Path: Normalized(Index(1), Index(1)), Node: 2}, - }, - }, - { - name: "three_level_index_path", - resType: FuncValue, - segs: []*Segment{ - Child(Index(0)), - Child(Index(0)), - Child(Index(0)), - }, - input: []any{[]any{[]any{42, 12}, 2}, "x", true, "y"}, - exp: []any{42}, - loc: []*LocatedNode{ - {Path: Normalized(Index(0), Index(0), Index(0)), Node: 42}, - }, - }, - { - name: "mixed_nesting", - resType: FuncNodes, - segs: []*Segment{ - Child(Index(0), Index(1), Index(3)), - Child(Index(1), Name("y"), Name("z")), - }, - input: []any{ - []any{[]any{42, 12}, 2}, - "x", - true, - map[string]any{"y": "hi", "z": 1, 
"x": "no"}, - }, - exp: []any{2, "hi", 1}, - loc: []*LocatedNode{ - {Path: Normalized(Index(0), Index(1)), Node: 2}, - {Path: Normalized(Index(3), Name("y")), Node: "hi"}, - {Path: Normalized(Index(3), Name("z")), Node: 1}, - }, - rand: true, - }, - { - name: "wildcard_indexes_index", - resType: FuncNodes, - segs: []*Segment{Child(Wildcard()), Child(Index(0), Index(2))}, - input: []any{[]any{1, 2, 3}, []any{3, 2, 1}, []any{4, 5, 6}}, - exp: []any{1, 3, 3, 1, 4, 6}, - loc: []*LocatedNode{ - {Path: Normalized(Index(0), Index(0)), Node: 1}, - {Path: Normalized(Index(0), Index(2)), Node: 3}, - {Path: Normalized(Index(1), Index(0)), Node: 3}, - {Path: Normalized(Index(1), Index(2)), Node: 1}, - {Path: Normalized(Index(2), Index(0)), Node: 4}, - {Path: Normalized(Index(2), Index(2)), Node: 6}, - }, - }, - { - name: "nonexistent_index", - resType: FuncValue, - segs: []*Segment{Child(Index(3))}, - input: []any{"y", []any{1, 2}}, - exp: []any{}, - loc: []*LocatedNode{}, - }, - { - name: "nonexistent_child_index", - resType: FuncNodes, - segs: []*Segment{Child(Wildcard()), Child(Index(3))}, - input: []any{[]any{0, 1, 2, 3}, []any{0, 1, 2}}, - exp: []any{3}, - loc: []*LocatedNode{ - {Path: Normalized(Index(0), Index(3)), Node: 3}, - }, - }, - { - name: "not_an_array_index_1", - resType: FuncValue, - segs: []*Segment{Child(Index(1)), Child(Index(0))}, - input: []any{"x", true}, - exp: []any{}, - loc: []*LocatedNode{}, - }, - { - name: "not_an_array_index_0", - resType: FuncValue, - segs: []*Segment{Child(Index(0)), Child(Index(0))}, - input: []any{"x", true}, - exp: []any{}, - loc: []*LocatedNode{}, - }, - { - name: "wildcard_not_an_array_index_1", - resType: FuncNodes, - segs: []*Segment{Child(Wildcard()), Child(Index(0))}, - input: []any{"x", true}, - exp: []any{}, - loc: []*LocatedNode{}, - }, - { - name: "mix_wildcard_keys", - resType: FuncNodes, - segs: []*Segment{ - Child(Wildcard(), Index(1)), - Child(Name("x"), Index(1), Name("y")), - }, - input: []any{ - map[string]any{"x": "hi", "y": "go"}, - map[string]any{"x": "bo", "y": 42}, - map[string]any{"x": true, "y": 21}, - []any{34, 53, 23}, - }, - exp: []any{"hi", "go", "bo", 42, true, 21, 53, "bo", 42}, - loc: []*LocatedNode{ - {Path: Normalized(Index(0), Name("x")), Node: "hi"}, - {Path: Normalized(Index(0), Name("y")), Node: "go"}, - {Path: Normalized(Index(1), Name("x")), Node: "bo"}, - {Path: Normalized(Index(1), Name("y")), Node: 42}, - {Path: Normalized(Index(2), Name("x")), Node: true}, - {Path: Normalized(Index(2), Name("y")), Node: 21}, - {Path: Normalized(Index(3), Index(1)), Node: 53}, - {Path: Normalized(Index(1), Name("x")), Node: "bo"}, - {Path: Normalized(Index(1), Name("y")), Node: 42}, - }, - rand: true, - }, - { - name: "mix_wildcard_nonexistent_key", - resType: FuncNodes, - segs: []*Segment{ - Child(Wildcard(), Index(1)), - Child(Name("x"), Name("y")), - }, - input: []any{ - map[string]any{"x": "hi"}, - map[string]any{"x": "bo"}, - map[string]any{"x": true}, - }, - exp: []any{"hi", "bo", true, "bo"}, - loc: []*LocatedNode{ - {Path: Normalized(Index(0), Name("x")), Node: "hi"}, - {Path: Normalized(Index(1), Name("x")), Node: "bo"}, - {Path: Normalized(Index(2), Name("x")), Node: true}, - {Path: Normalized(Index(1), Name("x")), Node: "bo"}, - }, - }, - { - name: "mix_wildcard_index", - resType: FuncNodes, - segs: []*Segment{ - Child(Wildcard(), Index(1)), - Child(Index(0), Index(1)), - }, - input: []any{ - []any{"x", "hi", true}, - []any{"x", "bo", 42}, - []any{"x", true, 21}, - }, - exp: []any{"x", "hi", "x", "bo", "x", true, 
"x", "bo"}, - loc: []*LocatedNode{ - {Path: Normalized(Index(0), Index(0)), Node: "x"}, - {Path: Normalized(Index(0), Index(1)), Node: "hi"}, - {Path: Normalized(Index(1), Index(0)), Node: "x"}, - {Path: Normalized(Index(1), Index(1)), Node: "bo"}, - {Path: Normalized(Index(2), Index(0)), Node: "x"}, - {Path: Normalized(Index(2), Index(1)), Node: true}, - {Path: Normalized(Index(1), Index(0)), Node: "x"}, - {Path: Normalized(Index(1), Index(1)), Node: "bo"}, - }, - }, - { - name: "mix_wildcard_nonexistent_index", - resType: FuncNodes, - segs: []*Segment{ - Child(Wildcard(), Index(1)), - Child(Index(0), Index(3)), - }, - input: []any{ - []any{"x", "hi", true}, - []any{"x", "bo", 42}, - []any{"x", true, 21}, - }, - exp: []any{"x", "x", "x", "x"}, - loc: []*LocatedNode{ - {Path: Normalized(Index(0), Index(0)), Node: "x"}, - {Path: Normalized(Index(1), Index(0)), Node: "x"}, - {Path: Normalized(Index(2), Index(0)), Node: "x"}, - {Path: Normalized(Index(1), Index(0)), Node: "x"}, - }, - }, - { - name: "wildcard_nonexistent_key", - resType: FuncNodes, - segs: []*Segment{Child(Wildcard()), Child(Name("a"))}, - input: []any{ - map[string]any{"a": 1, "b": 2}, - map[string]any{"z": 3, "b": 4}, - }, - exp: []any{1}, - loc: []*LocatedNode{ - {Path: Normalized(Index(0), Name("a")), Node: 1}, - }, - }, - { - name: "wildcard_nonexistent_middle_key", - resType: FuncNodes, - segs: []*Segment{Child(Wildcard()), Child(Name("a"))}, - input: []any{ - map[string]any{"a": 1, "b": 2}, - map[string]any{"z": 3, "b": 4}, - map[string]any{"a": 5}, - map[string]any{"z": 3, "b": 4}, - }, - exp: []any{1, 5}, - loc: []*LocatedNode{ - {Path: Normalized(Index(0), Name("a")), Node: 1}, - {Path: Normalized(Index(2), Name("a")), Node: 5}, - }, - }, - { - name: "wildcard_nested_nonexistent_key", - resType: FuncNodes, - segs: []*Segment{ - Child(Wildcard()), - Child(Wildcard()), - Child(Name("a")), - }, - input: []any{ - map[string]any{ - "x": map[string]any{"a": 1}, - "y": map[string]any{"b": 1}, - }, - map[string]any{ - "y": map[string]any{"b": 1}, - }, - }, - exp: []any{1}, - loc: []*LocatedNode{ - {Path: Normalized(Index(0), Name("x"), Name("a")), Node: 1}, - }, - }, - { - name: "wildcard_nested_nonexistent_index", - resType: FuncNodes, - segs: []*Segment{ - Child(Wildcard()), - Child(Wildcard()), - Child(Index(1)), - }, - input: []any{ - map[string]any{ - "x": []any{1, 2}, - "y": []any{3}, - }, - map[string]any{ - "z": []any{1}, - }, - }, - exp: []any{2}, - loc: []*LocatedNode{ - {Path: Normalized(Index(0), Name("x"), Index(1)), Node: 2}, - }, - }, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - tc.run(a) - }) - } -} - -func TestQuerySlice(t *testing.T) { - t.Parallel() - a := assert.New(t) - - for _, tc := range []queryTestCase{ - { - name: "slice_0_2", - segs: []*Segment{Child(Slice(0, 2))}, - input: []any{"x", true, "y", []any{1, 2}}, - exp: []any{"x", true}, - loc: []*LocatedNode{ - {Path: Normalized(Index(0)), Node: "x"}, - {Path: Normalized(Index(1)), Node: true}, - }, - }, - { - name: "slice_0_1", - segs: []*Segment{Child(Slice(0, 1))}, - input: []any{"x", true, "y", []any{1, 2}}, - exp: []any{"x"}, - loc: []*LocatedNode{{Path: Normalized(Index(0)), Node: "x"}}, - }, - { - name: "slice_2_5", - segs: []*Segment{Child(Slice(2, 5))}, - input: []any{"x", true, "y", []any{1, 2}, 42, nil, 78}, - exp: []any{"y", []any{1, 2}, 42}, - loc: []*LocatedNode{ - {Path: Normalized(Index(2)), Node: "y"}, - {Path: Normalized(Index(3)), Node: []any{1, 2}}, - {Path: Normalized(Index(4)), Node: 42}, - }, - }, - { - name: 
"slice_2_5_over_len", - segs: []*Segment{Child(Slice(2, 5))}, - input: []any{"x", true, "y"}, - exp: []any{"y"}, - loc: []*LocatedNode{ - {Path: Normalized(Index(2)), Node: "y"}, - }, - }, - { - name: "slice_defaults", - segs: []*Segment{Child(Slice())}, - input: []any{"x", true, "y", []any{1, 2}, 42, nil, 78}, - exp: []any{"x", true, "y", []any{1, 2}, 42, nil, 78}, - loc: []*LocatedNode{ - {Path: Normalized(Index(0)), Node: "x"}, - {Path: Normalized(Index(1)), Node: true}, - {Path: Normalized(Index(2)), Node: "y"}, - {Path: Normalized(Index(3)), Node: []any{1, 2}}, - {Path: Normalized(Index(4)), Node: 42}, - {Path: Normalized(Index(5)), Node: nil}, - {Path: Normalized(Index(6)), Node: 78}, - }, - }, - { - name: "default_start", - segs: []*Segment{Child(Slice(nil, 2))}, - input: []any{"x", true, "y", []any{1, 2}, 42, nil, 78}, - exp: []any{"x", true}, - loc: []*LocatedNode{ - {Path: Normalized(Index(0)), Node: "x"}, - {Path: Normalized(Index(1)), Node: true}, - }, - }, - { - name: "default_end", - segs: []*Segment{Child(Slice(2))}, - input: []any{"x", true, "y", []any{1, 2}, 42, nil, 78}, - exp: []any{"y", []any{1, 2}, 42, nil, 78}, - loc: []*LocatedNode{ - {Path: Normalized(Index(2)), Node: "y"}, - {Path: Normalized(Index(3)), Node: []any{1, 2}}, - {Path: Normalized(Index(4)), Node: 42}, - {Path: Normalized(Index(5)), Node: nil}, - {Path: Normalized(Index(6)), Node: 78}, - }, - }, - { - name: "step_2", - segs: []*Segment{Child(Slice(nil, nil, 2))}, - input: []any{"x", true, "y", []any{1, 2}, 42, nil, 78}, - exp: []any{"x", "y", 42, 78}, - loc: []*LocatedNode{ - {Path: Normalized(Index(0)), Node: "x"}, - {Path: Normalized(Index(2)), Node: "y"}, - {Path: Normalized(Index(4)), Node: 42}, - {Path: Normalized(Index(6)), Node: 78}, - }, - }, - { - name: "step_3", - segs: []*Segment{Child(Slice(nil, nil, 3))}, - input: []any{"x", true, "y", []any{1, 2}, 42, nil, 78}, - exp: []any{"x", []any{1, 2}, 78}, - loc: []*LocatedNode{ - {Path: Normalized(Index(0)), Node: "x"}, - {Path: Normalized(Index(3)), Node: []any{1, 2}}, - {Path: Normalized(Index(6)), Node: 78}, - }, - }, - { - name: "multiple_slices", - segs: []*Segment{Child(Slice(0, 1), Slice(3, 4))}, - input: []any{"x", true, "y", []any{1, 2}, 42, nil, 78}, - exp: []any{"x", []any{1, 2}}, - loc: []*LocatedNode{ - {Path: Normalized(Index(0)), Node: "x"}, - {Path: Normalized(Index(3)), Node: []any{1, 2}}, - }, - }, - { - name: "overlapping_slices", - segs: []*Segment{Child(Slice(0, 3), Slice(2, 4))}, - input: []any{"x", true, "y", []any{1, 2}, 42, nil, 78}, - exp: []any{"x", true, "y", "y", []any{1, 2}}, - loc: []*LocatedNode{ - {Path: Normalized(Index(0)), Node: "x"}, - {Path: Normalized(Index(1)), Node: true}, - {Path: Normalized(Index(2)), Node: "y"}, - {Path: Normalized(Index(2)), Node: "y"}, - {Path: Normalized(Index(3)), Node: []any{1, 2}}, - }, - }, - { - name: "nested_slices", - segs: []*Segment{Child(Slice(0, 2)), Child(Slice(1, 2))}, - input: []any{ - []any{"hi", 42, true}, - []any{"go", "on"}, - []any{"yo", 98.6, false}, - "x", true, "y", - }, - exp: []any{42, "on"}, - loc: []*LocatedNode{ - {Path: Normalized(Index(0), Index(1)), Node: 42}, - {Path: Normalized(Index(1), Index(1)), Node: "on"}, - }, - }, - { - name: "nested_multiple_indices", - segs: []*Segment{ - Child(Slice(0, 2)), - Child(Slice(1, 2), Slice(3, 5)), - }, - input: []any{ - []any{"hi", 42, true, 64, []any{}, 7}, - []any{"go", "on", false, 88, []any{1}, 8}, - []any{"yo", 98.6, false, 2, []any{3, 4}, 9}, - "x", true, "y", - }, - exp: []any{42, 64, []any{}, "on", 88, 
[]any{1}}, - loc: []*LocatedNode{ - {Path: Normalized(Index(0), Index(1)), Node: 42}, - {Path: Normalized(Index(0), Index(3)), Node: 64}, - {Path: Normalized(Index(0), Index(4)), Node: []any{}}, - {Path: Normalized(Index(1), Index(1)), Node: "on"}, - {Path: Normalized(Index(1), Index(3)), Node: 88}, - {Path: Normalized(Index(1), Index(4)), Node: []any{1}}, - }, - }, - { - name: "three_level_slice_path", - segs: []*Segment{ - Child(Slice(0, 2)), - Child(Slice(0, 1)), - Child(Slice(0, 1)), - }, - input: []any{ - []any{[]any{42, 12}, 2}, - []any{[]any{16, true, "x"}, 7}, - "x", true, "y", - }, - exp: []any{42, 16}, - loc: []*LocatedNode{ - {Path: Normalized(Index(0), Index(0), Index(0)), Node: 42}, - {Path: Normalized(Index(1), Index(0), Index(0)), Node: 16}, - }, - }, - { - name: "varying_nesting_levels_mixed", - segs: []*Segment{ - Child(Slice(0, 2), Slice(2, 3)), - Child(Slice(0, 1), Slice(3, 4)), - Child(Slice(0, 1), Name("y"), Name("z")), - }, - input: []any{ - []any{[]any{42, 12}, 2}, - "x", - []any{map[string]any{"y": "hi", "z": 1, "x": "no"}}, - true, - "go", - }, - exp: []any{42, "hi", 1}, - loc: []*LocatedNode{ - {Path: Normalized(Index(0), Index(0), Index(0)), Node: 42}, - {Path: Normalized(Index(2), Index(0), Name("y")), Node: "hi"}, - {Path: Normalized(Index(2), Index(0), Name("z")), Node: 1}, - }, - }, - { - name: "wildcard_slices_index", - segs: []*Segment{ - Child(Wildcard()), - Child(Slice(0, 2), Slice(3, 4)), - }, - input: []any{ - []any{1, 2, 3, 4, 5}, - []any{3, 2, 1, 0, -1}, - []any{4, 5, 6, 7, 8}, - }, - exp: []any{1, 2, 4, 3, 2, 0, 4, 5, 7}, - loc: []*LocatedNode{ - {Path: Normalized(Index(0), Index(0)), Node: 1}, - {Path: Normalized(Index(0), Index(1)), Node: 2}, - {Path: Normalized(Index(0), Index(3)), Node: 4}, - {Path: Normalized(Index(1), Index(0)), Node: 3}, - {Path: Normalized(Index(1), Index(1)), Node: 2}, - {Path: Normalized(Index(1), Index(3)), Node: 0}, - {Path: Normalized(Index(2), Index(0)), Node: 4}, - {Path: Normalized(Index(2), Index(1)), Node: 5}, - {Path: Normalized(Index(2), Index(3)), Node: 7}, - }, - }, - { - name: "nonexistent_slice", - segs: []*Segment{Child(Slice(3, 5))}, - input: []any{"y", []any{1, 2}}, - exp: []any{}, - loc: []*LocatedNode{}, - }, - { - name: "nonexistent_branch_index", - segs: []*Segment{Child(Wildcard()), Child(Slice(3, 5))}, - input: []any{[]any{0, 1, 2, 3, 4}, []any{0, 1, 2}}, - exp: []any{3, 4}, - loc: []*LocatedNode{ - {Path: Normalized(Index(0), Index(3)), Node: 3}, - {Path: Normalized(Index(0), Index(4)), Node: 4}, - }, - }, - { - name: "not_an_array_index_1", - resType: FuncValue, - segs: []*Segment{Child(Index(1)), Child(Index(0))}, - input: []any{"x", true}, - exp: []any{}, - loc: []*LocatedNode{}, - }, - { - name: "not_an_array", - segs: []*Segment{Child(Slice(0, 5)), Child(Index(0))}, - input: []any{"x", true}, - exp: []any{}, - loc: []*LocatedNode{}, - }, - { - name: "wildcard_not_an_array_index_1", - segs: []*Segment{Child(Wildcard()), Child(Slice(0, 5))}, - input: []any{"x", true}, - exp: []any{}, - loc: []*LocatedNode{}, - }, - { - name: "mix_slice_keys", - segs: []*Segment{ - Child(Slice(0, 5), Index(1)), - Child(Name("x"), Name("y")), - }, - input: []any{ - map[string]any{"x": "hi", "y": "go"}, - map[string]any{"x": "bo", "y": 42}, - map[string]any{"x": true, "y": 21}, - }, - exp: []any{"hi", "go", "bo", 42, true, 21, "bo", 42}, - loc: []*LocatedNode{ - {Path: Normalized(Index(0), Name("x")), Node: "hi"}, - {Path: Normalized(Index(0), Name("y")), Node: "go"}, - {Path: Normalized(Index(1), Name("x")), Node: 
"bo"}, - {Path: Normalized(Index(1), Name("y")), Node: 42}, - {Path: Normalized(Index(2), Name("x")), Node: true}, - {Path: Normalized(Index(2), Name("y")), Node: 21}, - {Path: Normalized(Index(1), Name("x")), Node: "bo"}, - {Path: Normalized(Index(1), Name("y")), Node: 42}, - }, - rand: true, - }, - { - name: "mix_slice_nonexistent_key", - segs: []*Segment{ - Child(Slice(0, 5), Index(1)), - Child(Name("x"), Name("y")), - }, - input: []any{ - map[string]any{"x": "hi"}, - map[string]any{"x": "bo"}, - map[string]any{"x": true}, - }, - exp: []any{"hi", "bo", true, "bo"}, - loc: []*LocatedNode{ - {Path: Normalized(Index(0), Name("x")), Node: "hi"}, - {Path: Normalized(Index(1), Name("x")), Node: "bo"}, - {Path: Normalized(Index(2), Name("x")), Node: true}, - {Path: Normalized(Index(1), Name("x")), Node: "bo"}, - }, - }, - { - name: "mix_slice_index", - segs: []*Segment{ - Child(Slice(0, 5), Index(1)), - Child(Index(0), Index(1)), - }, - input: []any{ - []any{"x", "hi", true}, - []any{"y", "bo", 42}, - []any{"z", true, 21}, - }, - exp: []any{"x", "hi", "y", "bo", "z", true, "y", "bo"}, - loc: []*LocatedNode{ - {Path: Normalized(Index(0), Index(0)), Node: "x"}, - {Path: Normalized(Index(0), Index(1)), Node: "hi"}, - {Path: Normalized(Index(1), Index(0)), Node: "y"}, - {Path: Normalized(Index(1), Index(1)), Node: "bo"}, - {Path: Normalized(Index(2), Index(0)), Node: "z"}, - {Path: Normalized(Index(2), Index(1)), Node: true}, - {Path: Normalized(Index(1), Index(0)), Node: "y"}, - {Path: Normalized(Index(1), Index(1)), Node: "bo"}, - }, - }, - { - name: "mix_slice_nonexistent_index", - segs: []*Segment{ - Child(Slice(0, 5), Index(1)), - Child(Index(0), Index(3)), - }, - input: []any{ - []any{"x", "hi", true}, - []any{"y", "bo", 42}, - []any{"z", true, 21}, - }, - exp: []any{"x", "y", "z", "y"}, - loc: []*LocatedNode{ - {Path: Normalized(Index(0), Index(0)), Node: "x"}, - {Path: Normalized(Index(1), Index(0)), Node: "y"}, - {Path: Normalized(Index(2), Index(0)), Node: "z"}, - {Path: Normalized(Index(1), Index(0)), Node: "y"}, - }, - }, - { - name: "slice_nonexistent_key", - segs: []*Segment{Child(Slice(0, 5)), Child(Name("a"))}, - input: []any{ - map[string]any{"a": 1, "b": 2}, - map[string]any{"z": 3, "b": 4}, - }, - exp: []any{1}, - loc: []*LocatedNode{ - {Path: Normalized(Index(0), Name("a")), Node: 1}, - }, - }, - { - name: "slice_nonexistent_middle_key", - segs: []*Segment{Child(Slice(0, 5)), Child(Name("a"))}, - input: []any{ - map[string]any{"a": 1, "b": 2}, - map[string]any{"z": 3, "b": 4}, - map[string]any{"a": 5}, - map[string]any{"z": 3, "b": 4}, - }, - exp: []any{1, 5}, - loc: []*LocatedNode{ - {Path: Normalized(Index(0), Name("a")), Node: 1}, - {Path: Normalized(Index(2), Name("a")), Node: 5}, - }, - }, - { - name: "slice_nested_nonexistent_key", - segs: []*Segment{ - Child(Slice(0, 5)), - Child(Wildcard()), - Child(Name("a")), - }, - input: []any{ - map[string]any{ - "x": map[string]any{"a": 1}, - "y": map[string]any{"b": 1}, - }, - map[string]any{ - "y": map[string]any{"b": 1}, - }, - }, - exp: []any{1}, - loc: []*LocatedNode{ - {Path: Normalized(Index(0), Name("x"), Name("a")), Node: 1}, - }, - }, - { - name: "slice_nested_nonexistent_index", - segs: []*Segment{ - Child(Slice(0, 5)), - Child(Wildcard()), - Child(Index(1)), - }, - input: []any{ - map[string]any{ - "x": []any{1, 2}, - "y": []any{3}, - }, - map[string]any{ - "z": []any{1}, - }, - }, - exp: []any{2}, - loc: []*LocatedNode{ - {Path: Normalized(Index(0), Name("x"), Index(1)), Node: 2}, - }, - }, - { - name: "slice_neg", - 
segs: []*Segment{Child(Slice(nil, nil, -1))}, - input: []any{"x", true, "y", []any{1, 2}}, - exp: []any{[]any{1, 2}, "y", true, "x"}, - loc: []*LocatedNode{ - {Path: Normalized(Index(3)), Node: []any{1, 2}}, - {Path: Normalized(Index(2)), Node: "y"}, - {Path: Normalized(Index(1)), Node: true}, - {Path: Normalized(Index(0)), Node: "x"}, - }, - }, - { - name: "slice_5_0_neg2", - segs: []*Segment{Child(Slice(5, 0, -2))}, - input: []any{"x", true, "y", 8, 13, 25, 23, 78, 13}, - exp: []any{25, 8, true}, - loc: []*LocatedNode{ - {Path: Normalized(Index(5)), Node: 25}, - {Path: Normalized(Index(3)), Node: 8}, - {Path: Normalized(Index(1)), Node: true}, - }, - }, - { - name: "nested_neg_slices", - segs: []*Segment{ - Child(Slice(2, nil, -1)), - Child(Slice(2, 0, -1)), - }, - input: []any{ - []any{"hi", 42, true}, - []any{"go", "on"}, - []any{"yo", 98.6, false}, - "x", true, "y", - }, - exp: []any{false, 98.6, "on", true, 42}, - loc: []*LocatedNode{ - {Path: Normalized(Index(2), Index(2)), Node: false}, - {Path: Normalized(Index(2), Index(1)), Node: 98.6}, - {Path: Normalized(Index(1), Index(1)), Node: "on"}, - {Path: Normalized(Index(0), Index(2)), Node: true}, - {Path: Normalized(Index(0), Index(1)), Node: 42}, - }, - }, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - if tc.resType == 0 { - tc.resType = FuncNodes - } - tc.run(a) - }) - } -} - -func TestQueryDescendants(t *testing.T) { - t.Parallel() - a := assert.New(t) - json := map[string]any{ - "o": map[string]any{"j": 1, "k": 2}, - "a": []any{5, 3, []any{map[string]any{"j": 4}, map[string]any{"k": 6}}}, - } - - for _, tc := range []queryTestCase{ - { - name: "descendant_name", - segs: []*Segment{Descendant(Name("j"))}, - input: json, - exp: []any{1, 4}, - loc: []*LocatedNode{ - {Path: Normalized(Name("o"), Name("j")), Node: 1}, - {Path: Normalized(Name("a"), Index(2), Index(0), Name("j")), Node: 4}, - }, - rand: true, - }, - { - name: "un_descendant_name", - segs: []*Segment{Descendant(Name("o"))}, - input: json, - exp: []any{map[string]any{"j": 1, "k": 2}}, - loc: []*LocatedNode{ - {Path: Normalized(Name("o")), Node: map[string]any{"j": 1, "k": 2}}, - }, - }, - { - name: "nested_name", - segs: []*Segment{Child(Name("o")), Descendant(Name("k"))}, - input: json, - exp: []any{2}, - loc: []*LocatedNode{ - {Path: Normalized(Name("o"), Name("k")), Node: 2}, - }, - }, - { - name: "nested_wildcard", - segs: []*Segment{Child(Name("o")), Descendant(Wildcard())}, - input: json, - exp: []any{1, 2}, - loc: []*LocatedNode{ - {Path: Normalized(Name("o"), Name("j")), Node: 1}, - {Path: Normalized(Name("o"), Name("k")), Node: 2}, - }, - rand: true, - }, - { - name: "single_index", - segs: []*Segment{Descendant(Index(0))}, - input: json, - exp: []any{5, map[string]any{"j": 4}}, - loc: []*LocatedNode{ - {Path: Normalized(Name("a"), Index(0)), Node: 5}, - {Path: Normalized(Name("a"), Index(2), Index(0)), Node: map[string]any{"j": 4}}, - }, - }, - { - name: "nested_index", - segs: []*Segment{Child(Name("a")), Descendant(Index(0))}, - input: json, - exp: []any{5, map[string]any{"j": 4}}, - loc: []*LocatedNode{ - {Path: Normalized(Name("a"), Index(0)), Node: 5}, - {Path: Normalized(Name("a"), Index(2), Index(0)), Node: map[string]any{"j": 4}}, - }, - }, - { - name: "multiples", - segs: []*Segment{ - Child(Name("profile")), - Descendant(Name("last"), Name("primary"), Name("secondary")), - }, - input: map[string]any{ - "profile": map[string]any{ - "name": map[string]any{ - "first": "Barrack", - "last": "Obama", - }, - "contacts": map[string]any{ - 
"email": map[string]any{ - "primary": "foo@example.com", - "secondary": "2nd@example.net", - }, - "phones": map[string]any{ - "primary": "123456789", - "secondary": "987654321", - "fax": "1029384758", - }, - "addresses": map[string]any{ - "primary": []any{ - "123 Main Street", - "Whatever", "OR", "98754", - }, - "work": []any{ - "whatever", - "XYZ", "NY", "10093", - }, - }, - }, - }, - }, - exp: []any{ - "Obama", - "foo@example.com", - "2nd@example.net", - "123456789", - "987654321", - []any{ - "123 Main Street", - "Whatever", "OR", "98754", - }, - }, - loc: []*LocatedNode{ - {Path: Normalized(Name("profile"), Name("name"), Name("last")), Node: "Obama"}, - { - Path: Normalized(Name("profile"), Name("contacts"), Name("email"), Name("primary")), - Node: "foo@example.com", - }, - { - Path: Normalized(Name("profile"), Name("contacts"), Name("email"), Name("secondary")), - Node: "2nd@example.net", - }, - { - Path: Normalized(Name("profile"), Name("contacts"), Name("phones"), Name("primary")), - Node: "123456789", - }, - { - Path: Normalized(Name("profile"), Name("contacts"), Name("phones"), Name("secondary")), - Node: "987654321", - }, - { - Path: Normalized(Name("profile"), Name("contacts"), Name("addresses"), Name("primary")), - Node: []any{ - "123 Main Street", - "Whatever", "OR", "98754", - }, - }, - }, - rand: true, - }, - { - name: "do_not_include_parent_key", - segs: []*Segment{Descendant(Name("o")), Child(Name("k"))}, - input: map[string]any{"o": map[string]any{"o": "hi", "k": 2}}, - exp: []any{2}, - loc: []*LocatedNode{ - {Path: Normalized(Name("o"), Name("k")), Node: 2}, - }, - }, - { - name: "do_not_include_parent_index", - segs: []*Segment{Descendant(Index(0)), Child(Index(1))}, - input: []any{[]any{42, 98}}, - exp: []any{98}, - loc: []*LocatedNode{ - {Path: Normalized(Index(0), Index(1)), Node: 98}, - }, - }, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - tc.resType = FuncNodes - tc.run(a) - }) - } -} - -func TestQueryInputs(t *testing.T) { - t.Parallel() - a := assert.New(t) - x := map[string]any{"x": "x"} - y := map[string]any{"y": "y"} - - // Test current. - q := Query(false, Child(Name("x"))) - a.False(q.root) - a.Equal([]any{"x"}, q.Select(x, y)) - a.Equal([]any{}, q.Select(y, x)) - a.Equal( - []*LocatedNode{{Path: Normalized(Name("x")), Node: "x"}}, - q.SelectLocated(x, y, NormalizedPath{}), - ) - a.Equal([]*LocatedNode{}, q.SelectLocated(y, x, NormalizedPath{})) - - // Test root. 
- q.root = true - a.Equal([]any{}, q.Select(x, y)) - a.Equal([]any{"x"}, q.Select(y, x)) - a.Equal([]*LocatedNode{}, q.SelectLocated(x, y, NormalizedPath{})) - a.Equal( - []*LocatedNode{{Path: Normalized(Name("x")), Node: "x"}}, - q.SelectLocated(y, x, NormalizedPath{}), - ) -} - -func TestSingularExpr(t *testing.T) { - t.Parallel() - a := assert.New(t) - - for _, tc := range []struct { - name string - query *PathQuery - sing *SingularQueryExpr - }{ - { - name: "relative_singular", - query: Query(false, Child(Name("j"))), - sing: SingularQuery(false, Name("j")), - }, - { - name: "root_singular", - query: &PathQuery{segments: []*Segment{Child(Name("j")), Child(Index(0))}, root: true}, - sing: SingularQuery(true, Name("j"), Index(0)), - }, - { - name: "descendant", - query: Query(false, Descendant(Name("j"))), - }, - { - name: "multi_selector", - query: Query(false, Child(Name("j"), Name("x"))), - }, - { - name: "single_slice", - query: Query(false, Child(Slice())), - }, - { - name: "wildcard", - query: Query(false, Child(Wildcard())), - }, - { - name: "filter", - query: Query(false, Child(&FilterSelector{})), - }, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - if tc.sing == nil { - a.False(tc.query.isSingular()) - a.Nil(tc.query.Singular()) - a.Equal(tc.query, tc.query.Expression()) - } else { - a.True(tc.query.isSingular()) - a.Equal(tc.sing, tc.query.Singular()) - a.Equal(tc.sing, tc.query.Expression()) - } - }) - } -} diff --git a/spec/segment.go b/spec/segment.go deleted file mode 100644 index a13f70b..0000000 --- a/spec/segment.go +++ /dev/null @@ -1,129 +0,0 @@ -package spec - -import ( - "strings" -) - -// Segment represents a single segment as defined in [RFC 9535 Section 1.4.2], -// consisting of a list of [Selector] values. -// -// [RFC 9535 Section 1.4.2]: https://www.rfc-editor.org/rfc/rfc9535.html#name-segments -type Segment struct { - selectors []Selector - descendant bool -} - -// Child creates and returns a [Segment] that uses sel to select values from a -// JSON object or array. -func Child(sel ...Selector) *Segment { - return &Segment{selectors: sel} -} - -// Descendant creates and returns a [Segment] that uses sel to select values -// from a JSON object or array or any of its descendant objects and arrays. -func Descendant(sel ...Selector) *Segment { - return &Segment{selectors: sel, descendant: true} -} - -// Selectors returns s's [Selector] values. -func (s *Segment) Selectors() []Selector { - return s.selectors -} - -// String returns a string representation of seg. A [Child] [Segment] -// formats as: -// -// [] -// -// A [Descendant] [Segment] formats as: -// -// ..⁠[]) -func (s *Segment) String() string { - buf := new(strings.Builder) - if s.descendant { - buf.WriteString("..") - } - buf.WriteByte('[') - for i, sel := range s.selectors { - if i > 0 { - buf.WriteByte(',') - } - sel.writeTo(buf) - } - buf.WriteByte(']') - return buf.String() -} - -// Select selects and returns values from current or root, for each of s's -// selectors. Defined by the [Selector] interface. -func (s *Segment) Select(current, root any) []any { - ret := []any{} - for _, sel := range s.selectors { - ret = append(ret, sel.Select(current, root)...) - } - if s.descendant { - ret = append(ret, s.descend(current, root)...) - } - return ret -} - -// SelectLocated selects and returns values as [LocatedNode] values from -// current or root for each of seg's selectors. Defined by the [Selector] -// interface. 
-func (s *Segment) SelectLocated(current, root any, parent NormalizedPath) []*LocatedNode { - ret := []*LocatedNode{} - for _, sel := range s.selectors { - ret = append(ret, sel.SelectLocated(current, root, parent)...) - } - if s.descendant { - ret = append(ret, s.descendLocated(current, root, parent)...) - } - return ret -} - -// descend recursively executes [Segment.Select] for each value in current -// and/or root and its descendants and returns the results. -func (s *Segment) descend(current, root any) []any { - ret := []any{} - switch val := current.(type) { - case []any: - for _, v := range val { - ret = append(ret, s.Select(v, root)...) - } - case map[string]any: - for _, v := range val { - ret = append(ret, s.Select(v, root)...) - } - } - return ret -} - -// descend recursively executes [q] for each value in current and/or root and -// its descendants and returns the results. -func (s *Segment) descendLocated(current, root any, parent NormalizedPath) []*LocatedNode { - ret := []*LocatedNode{} - switch val := current.(type) { - case []any: - for i, v := range val { - ret = append(ret, s.SelectLocated(v, root, append(parent, Index(i)))...) - } - case map[string]any: - for k, v := range val { - ret = append(ret, s.SelectLocated(v, root, append(parent, Name(k)))...) - } - } - return ret -} - -// isSingular returns true if the segment selects at most one node. Defined by -// the [Selector] interface. -func (s *Segment) isSingular() bool { - if s.descendant || len(s.selectors) != 1 { - return false - } - return s.selectors[0].isSingular() -} - -// IsDescendant returns true if the segment is a [Descendant] selector that -// recursively select the children of a JSON value. -func (s *Segment) IsDescendant() bool { return s.descendant } diff --git a/spec/segment_test.go b/spec/segment_test.go deleted file mode 100644 index 8bf9ba2..0000000 --- a/spec/segment_test.go +++ /dev/null @@ -1,674 +0,0 @@ -package spec - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestSegmentString(t *testing.T) { - t.Parallel() - a := assert.New(t) - - for _, tc := range []struct { - name string - seg *Segment - str string - sing bool - }{ - { - name: "no_selectors", - seg: Child(), - str: "[]", - }, - { - name: "descendant_no_selectors", - seg: Descendant(), - str: "..[]", - }, - { - name: "name", - seg: Child(Name("hi")), - str: `["hi"]`, - sing: true, - }, - { - name: "index", - seg: Child(Index(2)), - str: `[2]`, - sing: true, - }, - { - name: "wildcard", - seg: Child(Wildcard()), - str: `[*]`, - }, - { - name: "slice", - seg: Child(Slice(2)), - str: `[2:]`, - }, - { - name: "multiples", - seg: Child(Slice(2), Name("hi"), Index(3)), - str: `[2:,"hi",3]`, - }, - { - name: "descendant_multiples", - seg: Descendant(Slice(2), Name("hi"), Index(3)), - str: `..[2:,"hi",3]`, - }, - { - name: "wildcard_override", - seg: Child(Slice(2), Name("hi"), Index(3), Wildcard()), - str: `[2:,"hi",3,*]`, - }, - { - name: "descendant_wildcard_override", - seg: Descendant(Slice(2), Name("hi"), Index(3), Wildcard()), - str: `..[2:,"hi",3,*]`, - }, - { - name: "dupes", - seg: Child(Slice(2), Name("hi"), Slice(2), Slice(2), Name("hi"), Index(3), Name("go"), Index(3)), - str: `[2:,"hi",2:,2:,"hi",3,"go",3]`, - }, - { - name: "descendant_dupes", - seg: Descendant(Slice(2), Name("hi"), Slice(2), Slice(2), Name("hi"), Index(3), Name("go"), Index(3)), - str: `..[2:,"hi",2:,2:,"hi",3,"go",3]`, - }, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - a.Equal(tc.str, tc.seg.String()) - a.Equal(tc.sing, 
tc.seg.isSingular()) - a.Equal(tc.seg.descendant, tc.seg.IsDescendant()) - }) - } -} - -func TestSegmentSelect(t *testing.T) { - t.Parallel() - a := assert.New(t) - - for _, tc := range []struct { - name string - seg *Segment - src any - exp []any - loc []*LocatedNode - rand bool - sing bool - }{ - { - name: "no_selectors", - seg: Child(), - src: []any{1, 3}, - exp: []any{}, - loc: []*LocatedNode{}, - }, - { - name: "name", - seg: Child(Name("hi")), - src: map[string]any{"hi": 42, "go": 98.6, "x": true}, - exp: []any{42}, - loc: []*LocatedNode{ - {Path: Normalized(Name("hi")), Node: 42}, - }, - sing: true, - }, - { - name: "two_names", - seg: Child(Name("hi"), Name("go")), - src: map[string]any{"hi": 42, "go": 98.6, "x": true}, - exp: []any{42, 98.6}, - loc: []*LocatedNode{ - {Path: Normalized(Name("hi")), Node: 42}, - {Path: Normalized(Name("go")), Node: 98.6}, - }, - rand: true, - }, - { - name: "dupe_name", - seg: Child(Name("hi"), Name("hi")), - src: map[string]any{"hi": 42, "go": 98.6, "x": true}, - exp: []any{42, 42}, - loc: []*LocatedNode{ - {Path: Normalized(Name("hi")), Node: 42}, - {Path: Normalized(Name("hi")), Node: 42}, - }, - rand: true, - }, - { - name: "three_names", - seg: Child(Name("hi"), Name("go"), Name("x")), - src: map[string]any{"hi": 42, "go": 98.6, "x": true}, - exp: []any{42, 98.6, true}, - loc: []*LocatedNode{ - {Path: Normalized(Name("hi")), Node: 42}, - {Path: Normalized(Name("go")), Node: 98.6}, - {Path: Normalized(Name("x")), Node: true}, - }, - rand: true, - }, - { - name: "name_and_others", - seg: Child(Name("hi"), Index(1), Slice()), - src: map[string]any{"hi": 42, "go": 98.6, "x": true}, - exp: []any{42}, - loc: []*LocatedNode{ - {Path: Normalized(Name("hi")), Node: 42}, - }, - }, - { - name: "index", - seg: Child(Index(1)), - src: []any{"hi", 42, "go", 98.6, "x", true}, - exp: []any{42}, - loc: []*LocatedNode{ - {Path: Normalized(Index(1)), Node: 42}, - }, - sing: true, - }, - { - name: "two_indexes", - seg: Child(Index(1), Index(4)), - src: []any{"hi", 42, "go", 98.6, "x", true}, - exp: []any{42, "x"}, - loc: []*LocatedNode{ - {Path: Normalized(Index(1)), Node: 42}, - {Path: Normalized(Index(4)), Node: "x"}, - }, - }, - { - name: "dupe_index", - seg: Child(Index(1), Index(1)), - src: []any{"hi", 42, "go", 98.6, "x", true}, - exp: []any{42, 42}, - loc: []*LocatedNode{ - {Path: Normalized(Index(1)), Node: 42}, - {Path: Normalized(Index(1)), Node: 42}, - }, - }, - { - name: "three_indexes", - seg: Child(Index(1), Index(4), Index(0)), - src: []any{"hi", 42, "go", 98.6, "x", true}, - exp: []any{42, "x", "hi"}, - loc: []*LocatedNode{ - {Path: Normalized(Index(1)), Node: 42}, - {Path: Normalized(Index(4)), Node: "x"}, - {Path: Normalized(Index(0)), Node: "hi"}, - }, - }, - { - name: "index_and_name", - seg: Child(Name("hi"), Index(2)), - src: []any{"hi", 42, "go", 98.6, "x", true}, - exp: []any{"go"}, - loc: []*LocatedNode{ - {Path: Normalized(Index(2)), Node: "go"}, - }, - }, - { - name: "slice", - seg: Child(Slice(1, 3)), - src: []any{"hi", 42, "go", 98.6, "x", true}, - exp: []any{42, "go"}, - loc: []*LocatedNode{ - {Path: Normalized(Index(1)), Node: 42}, - {Path: Normalized(Index(2)), Node: "go"}, - }, - }, - { - name: "two_slices", - seg: Child(Slice(1, 3), Slice(0, 1)), - src: []any{"hi", 42, "go", 98.6, "x", true}, - exp: []any{42, "go", "hi"}, - loc: []*LocatedNode{ - {Path: Normalized(Index(1)), Node: 42}, - {Path: Normalized(Index(2)), Node: "go"}, - {Path: Normalized(Index(0)), Node: "hi"}, - }, - }, - { - name: "overlapping_slices", - seg: 
Child(Slice(1, 3), Slice(0, 3)), - src: []any{"hi", 42, "go", 98.6, "x", true}, - exp: []any{42, "go", "hi", 42, "go"}, - loc: []*LocatedNode{ - {Path: Normalized(Index(1)), Node: 42}, - {Path: Normalized(Index(2)), Node: "go"}, - {Path: Normalized(Index(0)), Node: "hi"}, - {Path: Normalized(Index(1)), Node: 42}, - {Path: Normalized(Index(2)), Node: "go"}, - }, - }, - { - name: "slice_plus_index", - seg: Child(Slice(1, 3), Index(0)), - src: []any{"hi", 42, "go", 98.6, "x", true}, - exp: []any{42, "go", "hi"}, - loc: []*LocatedNode{ - {Path: Normalized(Index(1)), Node: 42}, - {Path: Normalized(Index(2)), Node: "go"}, - {Path: Normalized(Index(0)), Node: "hi"}, - }, - }, - { - name: "slice_plus_overlapping_indexes", - seg: Child(Slice(1, 3), Index(0), Index(1)), - src: []any{"hi", 42, "go", 98.6, "x", true}, - exp: []any{42, "go", "hi", 42}, - loc: []*LocatedNode{ - {Path: Normalized(Index(1)), Node: 42}, - {Path: Normalized(Index(2)), Node: "go"}, - {Path: Normalized(Index(0)), Node: "hi"}, - {Path: Normalized(Index(1)), Node: 42}, - }, - }, - { - name: "slice_and_others", - seg: Child(Name("hi"), Index(1), Slice()), - src: []any{"hi", 42, "go", 98.6, "x", true}, - exp: []any{42, "hi", 42, "go", 98.6, "x", true}, - loc: []*LocatedNode{ - {Path: Normalized(Index(1)), Node: 42}, - {Path: Normalized(Index(0)), Node: "hi"}, - {Path: Normalized(Index(1)), Node: 42}, - {Path: Normalized(Index(2)), Node: "go"}, - {Path: Normalized(Index(3)), Node: 98.6}, - {Path: Normalized(Index(4)), Node: "x"}, - {Path: Normalized(Index(5)), Node: true}, - }, - }, - { - name: "wildcard_array", - seg: Child(Wildcard()), - src: []any{"hi", 42, "go", 98.6, "x", true}, - exp: []any{"hi", 42, "go", 98.6, "x", true}, - loc: []*LocatedNode{ - {Path: Normalized(Index(0)), Node: "hi"}, - {Path: Normalized(Index(1)), Node: 42}, - {Path: Normalized(Index(2)), Node: "go"}, - {Path: Normalized(Index(3)), Node: 98.6}, - {Path: Normalized(Index(4)), Node: "x"}, - {Path: Normalized(Index(5)), Node: true}, - }, - }, - { - name: "wildcard_object", - seg: Child(Wildcard()), - src: map[string]any{"hi": 42, "go": 98.6, "x": true}, - exp: []any{42, 98.6, true}, - loc: []*LocatedNode{ - {Path: Normalized(Name("hi")), Node: 42}, - {Path: Normalized(Name("go")), Node: 98.6}, - {Path: Normalized(Name("x")), Node: true}, - }, - rand: true, - }, - { - name: "wildcard_others_array", - seg: Child(Wildcard(), Slice(1, 3), Index(0), Name("go")), - src: []any{"hi", 42, "go", 98.6, "x", true}, - exp: []any{"hi", 42, "go", 98.6, "x", true, 42, "go", "hi"}, - loc: []*LocatedNode{ - {Path: Normalized(Index(0)), Node: "hi"}, - {Path: Normalized(Index(1)), Node: 42}, - {Path: Normalized(Index(2)), Node: "go"}, - {Path: Normalized(Index(3)), Node: 98.6}, - {Path: Normalized(Index(4)), Node: "x"}, - {Path: Normalized(Index(5)), Node: true}, - {Path: Normalized(Index(1)), Node: 42}, - {Path: Normalized(Index(2)), Node: "go"}, - {Path: Normalized(Index(0)), Node: "hi"}, - }, - }, - { - name: "wildcard_others_object", - seg: Child(Wildcard(), Slice(1, 3), Index(0), Name("go")), - src: map[string]any{"hi": 42, "go": 98.6, "x": true}, - exp: []any{42, 98.6, true, 98.6}, - loc: []*LocatedNode{ - {Path: Normalized(Name("hi")), Node: 42}, - {Path: Normalized(Name("go")), Node: 98.6}, - {Path: Normalized(Name("x")), Node: true}, - {Path: Normalized(Name("go")), Node: 98.6}, - }, - rand: true, - }, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - a.Equal(tc.seg.selectors, tc.seg.Selectors()) - a.Equal(tc.sing, tc.seg.isSingular()) - 
a.Equal(tc.seg.descendant, tc.seg.IsDescendant()) - if tc.rand { - a.ElementsMatch(tc.exp, tc.seg.Select(tc.src, nil)) - a.ElementsMatch(tc.loc, tc.seg.SelectLocated(tc.src, nil, Normalized())) - } else { - a.Equal(tc.exp, tc.seg.Select(tc.src, nil)) - a.Equal(tc.loc, tc.seg.SelectLocated(tc.src, nil, Normalized())) - } - }) - } -} - -func TestDescendantSegmentSelect(t *testing.T) { - t.Parallel() - a := assert.New(t) - - for _, tc := range []struct { - name string - seg *Segment - src any - exp []any - loc []*LocatedNode - rand bool - }{ - { - name: "no_selectors", - seg: Descendant(), - src: []any{1, 3, []any{3, 5, []any{42, 98.6, true}}}, - exp: []any{}, - loc: []*LocatedNode{}, - }, - { - name: "root_name", - seg: Descendant(Name("hi")), - src: map[string]any{"hi": 42, "go": map[string]any{"x": 98.6}}, - exp: []any{42}, - loc: []*LocatedNode{ - {Path: Normalized(Name("hi")), Node: 42}, - }, - rand: true, - }, - { - name: "name", - seg: Descendant(Name("hi")), - src: map[string]any{ - "hi": 42, "go": map[string]any{ - "hi": 98.6, "x": map[string]any{"hi": true}, - }, - }, - exp: []any{42, 98.6, true}, - loc: []*LocatedNode{ - {Path: Normalized(Name("hi")), Node: 42}, - {Path: Normalized(Name("go"), Name("hi")), Node: 98.6}, - {Path: Normalized(Name("go"), Name("x"), Name("hi")), Node: true}, - }, - rand: true, - }, - { - name: "name_in_name", - seg: Descendant(Name("hi")), - src: map[string]any{ - "hi": 42, "go": map[string]any{ - "hi": map[string]any{"hi": true}, - }, - }, - exp: []any{42, map[string]any{"hi": true}, true}, - loc: []*LocatedNode{ - {Path: Normalized(Name("hi")), Node: 42}, - {Path: Normalized(Name("go"), Name("hi")), Node: map[string]any{"hi": true}}, - {Path: Normalized(Name("go"), Name("hi"), Name("hi")), Node: true}, - }, - rand: true, - }, - { - name: "name_under_array", - seg: Descendant(Name("hi")), - src: []any{ - map[string]any{ - "hi": 98.6, "x": map[string]any{"hi": true}, - }, - }, - exp: []any{98.6, true}, - loc: []*LocatedNode{ - {Path: Normalized(Index(0), Name("hi")), Node: 98.6}, - {Path: Normalized(Index(0), Name("x"), Name("hi")), Node: true}, - }, - rand: true, - }, - { - name: "two_names", - seg: Descendant(Name("hi"), Name("x")), - src: map[string]any{ - "hi": 42, "go": map[string]any{ - "hi": 98.6, "go": map[string]any{"hi": true, "x": 12}, - }, - "x": map[string]any{"x": 99}, - }, - exp: []any{42, 98.6, true, 12, map[string]any{"x": 99}, 99}, - loc: []*LocatedNode{ - {Path: Normalized(Name("hi")), Node: 42}, - {Path: Normalized(Name("go"), Name("hi")), Node: 98.6}, - {Path: Normalized(Name("go"), Name("go"), Name("hi")), Node: true}, - {Path: Normalized(Name("go"), Name("go"), Name("x")), Node: 12}, - {Path: Normalized(Name("x")), Node: map[string]any{"x": 99}}, - {Path: Normalized(Name("x"), Name("x")), Node: 99}, - }, - rand: true, - }, - { - name: "root_index", - seg: Descendant(Index(1)), - src: []any{1, 3, []any{3}}, - exp: []any{3}, - loc: []*LocatedNode{ - {Path: Normalized(Index(1)), Node: 3}, - }, - }, - { - name: "index", - seg: Descendant(Index(1)), - src: []any{1, 3, []any{3, 5, []any{42, 98.6, true}}}, - exp: []any{3, 5, 98.6}, - loc: []*LocatedNode{ - {Path: Normalized(Index(1)), Node: 3}, - {Path: Normalized(Index(2), Index(1)), Node: 5}, - {Path: Normalized(Index(2), Index(2), Index(1)), Node: 98.6}, - }, - }, - { - name: "two_indexes", - seg: Descendant(Index(1), Index(0)), - src: []any{1, 3, []any{3, 5, []any{42, 98.6, true}}}, - exp: []any{3, 1, 5, 3, 98.6, 42}, - loc: []*LocatedNode{ - {Path: Normalized(Index(1)), Node: 3}, - 
{Path: Normalized(Index(0)), Node: 1}, - {Path: Normalized(Index(2), Index(1)), Node: 5}, - {Path: Normalized(Index(2), Index(0)), Node: 3}, - {Path: Normalized(Index(2), Index(2), Index(1)), Node: 98.6}, - {Path: Normalized(Index(2), Index(2), Index(0)), Node: 42}, - }, - }, - { - name: "index_under_object", - seg: Descendant(Index(1)), - src: map[string]any{"x": 1, "y": 3, "z": []any{3, 5, []any{42, 98.6, true}}}, - exp: []any{5, 98.6}, - loc: []*LocatedNode{ - {Path: Normalized(Name("z"), Index(1)), Node: 5}, - {Path: Normalized(Name("z"), Index(2), Index(1)), Node: 98.6}, - }, - }, - { - name: "root_slice", - seg: Descendant(Slice(1, 3)), - src: []any{1, 3, 4, []any{3}}, - exp: []any{3, 4}, - loc: []*LocatedNode{ - {Path: Normalized(Index(1)), Node: 3}, - {Path: Normalized(Index(2)), Node: 4}, - }, - }, - { - name: "slice", - seg: Descendant(Slice(1, 2)), - src: []any{1, 3, 4, []any{3, 5, "x", []any{42, 98.6, "y", "z", true}}}, - exp: []any{3, 5, 98.6}, - loc: []*LocatedNode{ - {Path: Normalized(Index(1)), Node: 3}, - {Path: Normalized(Index(3), Index(1)), Node: 5}, - {Path: Normalized(Index(3), Index(3), Index(1)), Node: 98.6}, - }, - }, - { - name: "two_more_slices", - seg: Descendant(Slice(1, 2), Slice(3, 4)), - src: []any{1, 3, 4, []any{3, 5, "x", []any{42, 98.6, "y", "z", true}}}, - exp: []any{ - 3, - []any{3, 5, "x", []any{42, 98.6, "y", "z", true}}, - 5, - []any{42, 98.6, "y", "z", true}, - 98.6, - "z", - }, - loc: []*LocatedNode{ - {Path: Normalized(Index(1)), Node: 3}, - {Path: Normalized(Index(3)), Node: []any{3, 5, "x", []any{42, 98.6, "y", "z", true}}}, - {Path: Normalized(Index(3), Index(1)), Node: 5}, - {Path: Normalized(Index(3), Index(3)), Node: []any{42, 98.6, "y", "z", true}}, - {Path: Normalized(Index(3), Index(3), Index(1)), Node: 98.6}, - {Path: Normalized(Index(3), Index(3), Index(3)), Node: "z"}, - }, - }, - { - name: "slice_and_index", - seg: Descendant(Slice(1, 2), Index(3)), - src: []any{1, 3, 4, []any{3, 5, "x", []any{42, 98.6, "y", "z", true}}}, - exp: []any{ - 3, - []any{3, 5, "x", []any{42, 98.6, "y", "z", true}}, - 5, - []any{42, 98.6, "y", "z", true}, - 98.6, - "z", - }, - loc: []*LocatedNode{ - {Path: Normalized(Index(1)), Node: 3}, - {Path: Normalized(Index(3)), Node: []any{3, 5, "x", []any{42, 98.6, "y", "z", true}}}, - {Path: Normalized(Index(3), Index(1)), Node: 5}, - {Path: Normalized(Index(3), Index(3)), Node: []any{42, 98.6, "y", "z", true}}, - {Path: Normalized(Index(3), Index(3), Index(1)), Node: 98.6}, - {Path: Normalized(Index(3), Index(3), Index(3)), Node: "z"}, - }, - }, - { - name: "slice_under_object", - seg: Descendant(Slice(1, 2)), - src: map[string]any{"x": []any{3, 5, "x", []any{42, 98.6, "y", "z", true}}}, - exp: []any{5, 98.6}, - loc: []*LocatedNode{ - {Path: Normalized(Name("x"), Index(1)), Node: 5}, - {Path: Normalized(Name("x"), Index(3), Index(1)), Node: 98.6}, - }, - }, - { - name: "root_wildcard_array", - seg: Descendant(Wildcard()), - src: []any{1, 3, 4}, - exp: []any{1, 3, 4}, - loc: []*LocatedNode{ - {Path: Normalized(Index(0)), Node: 1}, - {Path: Normalized(Index(1)), Node: 3}, - {Path: Normalized(Index(2)), Node: 4}, - }, - }, - { - name: "root_wildcard_object", - seg: Descendant(Wildcard()), - src: map[string]any{"x": 42, "y": true}, - exp: []any{42, true}, - loc: []*LocatedNode{ - {Path: Normalized(Name("x")), Node: 42}, - {Path: Normalized(Name("y")), Node: true}, - }, - rand: true, - }, - { - name: "wildcard_nested_array", - seg: Descendant(Wildcard()), - src: []any{1, 3, []any{4, 5}}, - exp: []any{1, 3, []any{4, 5}, 
4, 5}, - loc: []*LocatedNode{ - {Path: Normalized(Index(0)), Node: 1}, - {Path: Normalized(Index(1)), Node: 3}, - {Path: Normalized(Index(2)), Node: []any{4, 5}}, - {Path: Normalized(Index(2), Index(0)), Node: 4}, - {Path: Normalized(Index(2), Index(1)), Node: 5}, - }, - }, - { - name: "wildcard_nested_object", - seg: Descendant(Wildcard()), - src: map[string]any{"x": 42, "y": map[string]any{"z": "hi"}}, - exp: []any{42, map[string]any{"z": "hi"}, "hi"}, - loc: []*LocatedNode{ - {Path: Normalized(Name("x")), Node: 42}, - {Path: Normalized(Name("y")), Node: map[string]any{"z": "hi"}}, - {Path: Normalized(Name("y"), Name("z")), Node: "hi"}, - }, - rand: true, - }, - { - name: "wildcard_mixed", - seg: Descendant(Wildcard()), - src: []any{1, 3, map[string]any{"z": "hi"}}, - exp: []any{1, 3, map[string]any{"z": "hi"}, "hi"}, - loc: []*LocatedNode{ - {Path: Normalized(Index(0)), Node: 1}, - {Path: Normalized(Index(1)), Node: 3}, - {Path: Normalized(Index(2)), Node: map[string]any{"z": "hi"}}, - {Path: Normalized(Index(2), Name("z")), Node: "hi"}, - }, - rand: true, - }, - { - name: "wildcard_mixed_index", - seg: Descendant(Wildcard(), Index(0)), - src: []any{1, 3, map[string]any{"z": "hi"}}, - exp: []any{1, 3, map[string]any{"z": "hi"}, 1, "hi"}, - loc: []*LocatedNode{ - {Path: Normalized(Index(0)), Node: 1}, - {Path: Normalized(Index(1)), Node: 3}, - {Path: Normalized(Index(2)), Node: map[string]any{"z": "hi"}}, - {Path: Normalized(Index(0)), Node: 1}, - {Path: Normalized(Index(2), Name("z")), Node: "hi"}, - }, - }, - { - name: "wildcard_mixed_name", - seg: Descendant(Wildcard(), Name("z")), - src: []any{1, 3, map[string]any{"z": "hi", "y": "x"}}, - exp: []any{1, 3, map[string]any{"z": "hi", "y": "x"}, "hi", "x", "hi"}, - loc: []*LocatedNode{ - {Path: Normalized(Index(0)), Node: 1}, - {Path: Normalized(Index(1)), Node: 3}, - {Path: Normalized(Index(2)), Node: map[string]any{"z": "hi", "y": "x"}}, - {Path: Normalized(Index(2), Name("z")), Node: "hi"}, - {Path: Normalized(Index(2), Name("y")), Node: "x"}, - {Path: Normalized(Index(2), Name("z")), Node: "hi"}, - }, - rand: true, - }, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - a.False(tc.seg.isSingular()) - a.True(tc.seg.IsDescendant()) - if tc.rand { - a.ElementsMatch(tc.exp, tc.seg.Select(tc.src, nil)) - a.ElementsMatch(tc.loc, tc.seg.SelectLocated(tc.src, nil, Normalized())) - } else { - a.Equal(tc.exp, tc.seg.Select(tc.src, nil)) - a.Equal(tc.loc, tc.seg.SelectLocated(tc.src, nil, Normalized())) - } - }) - } -} diff --git a/spec/selector.go b/spec/selector.go deleted file mode 100644 index 635f0a8..0000000 --- a/spec/selector.go +++ /dev/null @@ -1,531 +0,0 @@ -package spec - -import ( - "fmt" - "math" - "strconv" - "strings" -) - -// stringWriter defines the interface for JSONPath objects to write string -// representations of themselves to a string buffer. -type stringWriter interface { - fmt.Stringer - // writeTo writes a string to buf. - writeTo(buf *strings.Builder) -} - -// Selector represents a single Selector in an RFC 9535 JSONPath query. -type Selector interface { - stringWriter - - // Select selects values from current and/or root and returns them. - Select(current, root any) []any - - // SelectLocated selects values from current and/or root and returns them - // in [LocatedNode] values with their located normalized paths - SelectLocated(current, root any, parent NormalizedPath) []*LocatedNode - - // isSingular returns true for selectors that can only return a single - // value. 
- isSingular() bool -} - -// Name is a key name selector, e.g., .name or ["name"], as defined by [RFC -// 9535 Section 2.3.1]. Interfaces implemented: -// - [Selector] -// - [fmt.Stringer] -// - [NormalSelector] -// -// [RFC 9535 Section 2.3.1]: https://www.rfc-editor.org/rfc/rfc9535.html#name-name-selector -type Name string - -// isSingular returns true because Name selects a single value from an object. -// Defined by the [Selector] interface. -func (Name) isSingular() bool { return true } - -// String returns the quoted string representation of n. -func (n Name) String() string { - return strconv.Quote(string(n)) -} - -// writeTo writes a quoted string representation of i to buf. Defined by -// [stringWriter]. -func (n Name) writeTo(buf *strings.Builder) { - buf.WriteString(n.String()) -} - -// Select selects n from input and returns it as a single value in a slice. -// Returns an empty slice if input is not a map[string]any or if it does not -// contain n. Defined by the [Selector] interface. -func (n Name) Select(input, _ any) []any { - if obj, ok := input.(map[string]any); ok { - if val, ok := obj[string(n)]; ok { - return []any{val} - } - } - return make([]any, 0) -} - -// SelectLocated selects n from input and returns it with its normalized path -// as a single [LocatedNode] in a slice. Returns an empty slice if input is -// not a map[string]any or if it does not contain n. Defined by the [Selector] -// interface. -func (n Name) SelectLocated(input, _ any, parent NormalizedPath) []*LocatedNode { - if obj, ok := input.(map[string]any); ok { - if val, ok := obj[string(n)]; ok { - return []*LocatedNode{newLocatedNode(append(parent, n), val)} - } - } - return make([]*LocatedNode, 0) -} - -// writeNormalizedTo writes n to buf formatted as a [normalized path] element. -// Defined by [NormalSelector]. -// -// [normalized path]: https://www.rfc-editor.org/rfc/rfc9535#section-2.7 -func (n Name) writeNormalizedTo(buf *strings.Builder) { - // https://www.rfc-editor.org/rfc/rfc9535#section-2.7 - buf.WriteString("['") - for _, r := range string(n) { - switch r { - case '\b': // b BS backspace U+0008 - buf.WriteString(`\b`) - case '\f': // f FF form feed U+000C - buf.WriteString(`\f`) - case '\n': // n LF line feed U+000A - buf.WriteString(`\n`) - case '\r': // r CR carriage return U+000D - buf.WriteString(`\r`) - case '\t': // t HT horizontal tab U+0009 - buf.WriteString(`\t`) - case '\'': // ' apostrophe U+0027 - buf.WriteString(`\'`) - case '\\': // \ backslash (reverse solidus) U+005C - buf.WriteString(`\\`) - case '\x00', '\x01', '\x02', '\x03', '\x04', '\x05', '\x06', '\x07', '\x0b', '\x0e', '\x0f': - // "00"-"07", "0b", "0e"-"0f" - fmt.Fprintf(buf, `\u000%x`, r) - default: - buf.WriteRune(r) - } - } - buf.WriteString("']") -} - -// writePointerTo writes n to buf formatted as a [JSON Pointer] reference -// token. Defined by [NormalSelector]. -// -// [JSON Pointer]: https://www.rfc-editor.org/rfc/rfc6901 -func (n Name) writePointerTo(buf *strings.Builder) { - buf.WriteString(strings.ReplaceAll( - strings.ReplaceAll(string(n), "~", "~0"), - "/", "~1", - )) -} - -// WildcardSelector is a wildcard selector, e.g., * or [*], as defined by [RFC -// 9535 Section 2.3.2]. Interfaces implemented: -// - [Selector] -// - [fmt.Stringer] -// -// [RFC 9535 Section 2.3.2]: https://www.rfc-editor.org/rfc/rfc9535.html#name-wildcard-selector -type WildcardSelector struct{} - -//nolint:gochecknoglobals -var wc = WildcardSelector{} - -// Wildcard returns a [WildcardSelector] singleton. 
-func Wildcard() WildcardSelector { return wc } 
-
-// writeTo writes "*" to buf. Defined by [stringWriter]. 
-func (WildcardSelector) writeTo(buf *strings.Builder) { buf.WriteByte('*') } 
-
-// String returns "*". 
-func (WildcardSelector) String() string { return "*" } 
-
-// isSingular returns false because a wildcard can select more than one value 
-// from an object or array. Defined by the [Selector] interface. 
-func (WildcardSelector) isSingular() bool { return false } 
-
-// Select selects the values from input and returns them in a slice. Returns 
-// an empty slice if input is not []any or map[string]any. Defined by the 
-// [Selector] interface. 
-func (WildcardSelector) Select(input, _ any) []any { 
- switch val := input.(type) { 
- case []any: 
- return val 
- case map[string]any: 
- vals := make([]any, 0, len(val)) 
- for _, v := range val { 
- vals = append(vals, v) 
- } 
- return vals 
- } 
- return make([]any, 0) 
-} 
-
-// SelectLocated selects the values from input and returns them with their 
-// normalized paths in a slice of [LocatedNode] values. Returns an empty 
-// slice if input is not []any or map[string]any. Defined by the [Selector] 
-// interface. 
-func (WildcardSelector) SelectLocated(input, _ any, parent NormalizedPath) []*LocatedNode { 
- switch val := input.(type) { 
- case []any: 
- vals := make([]*LocatedNode, len(val)) 
- for i, v := range val { 
- vals[i] = newLocatedNode(append(parent, Index(i)), v) 
- } 
- return vals 
- case map[string]any: 
- vals := make([]*LocatedNode, 0, len(val)) 
- for k, v := range val { 
- vals = append(vals, newLocatedNode(append(parent, Name(k)), v)) 
- } 
- return vals 
- } 
- return make([]*LocatedNode, 0) 
-} 
-
-// Index is an array index selector, e.g., [3], as defined by [RFC 
-// 9535 Section 2.3.3]. Interfaces 
-// implemented: 
-// - [Selector] 
-// - [fmt.Stringer] 
-// - [NormalSelector] 
-// 
-// [RFC 9535 Section 2.3.3]: https://www.rfc-editor.org/rfc/rfc9535.html#name-index-selector 
-type Index int 
-
-// isSingular returns true because Index selects a single value from an array. 
-// Defined by the [Selector] interface. 
-func (Index) isSingular() bool { return true } 
-
-// writeTo writes a string representation of i to buf. Defined by 
-// [stringWriter]. 
-func (i Index) writeTo(buf *strings.Builder) { 
- buf.WriteString(i.String()) 
-} 
-
-// String returns a string representation of i. 
-func (i Index) String() string { return strconv.FormatInt(int64(i), 10) } 
-
-// Select selects i from input and returns it as a single value in a slice. 
-// Returns an empty slice if input is not a slice or if i is outside the 
-// bounds of input. Defined by the [Selector] interface. 
-func (i Index) Select(input, _ any) []any { 
- if val, ok := input.([]any); ok { 
- idx := int(i) 
- if idx < 0 { 
- if idx = len(val) + idx; idx >= 0 { 
- return []any{val[idx]} 
- } 
- } else if idx < len(val) { 
- return []any{val[idx]} 
- } 
- } 
- return make([]any, 0) 
-} 
-
-// SelectLocated selects i from input and returns it with its normalized path 
-// as a single [LocatedNode] in a slice. Returns an empty slice if input is 
-// not a slice or if i is outside the bounds of input. Defined by the 
-// [Selector] interface. 
-func (i Index) SelectLocated(input, _ any, parent NormalizedPath) []*LocatedNode { 
- if val, ok := input.([]any); ok { 
- idx := int(i) 
- if idx < 0 { 
- if idx = len(val) + idx; idx >= 0 { 
- return []*LocatedNode{newLocatedNode(append(parent, Index(idx)), val[idx])} 
- } 
- } else if idx < len(val) { 
- return []*LocatedNode{newLocatedNode(append(parent, Index(idx)), val[idx])} 
- } 
- } 
- return make([]*LocatedNode, 0) 
-} 
-
-// writeNormalizedTo writes i to buf formatted as a [normalized path] element. 
-// Implements [NormalSelector]. 
-// 
-// [normalized path]: https://www.rfc-editor.org/rfc/rfc9535#section-2.7 
-func (i Index) writeNormalizedTo(buf *strings.Builder) { 
- buf.WriteRune('[') 
- buf.WriteString(strconv.FormatInt(int64(i), 10)) 
- buf.WriteRune(']') 
-} 
-
-// writePointerTo writes i to buf formatted as a [JSON Pointer] reference 
-// token. Defined by [NormalSelector]. 
-// 
-// [JSON Pointer]: https://www.rfc-editor.org/rfc/rfc6901 
-func (i Index) writePointerTo(buf *strings.Builder) { 
- buf.WriteString(strconv.FormatInt(int64(i), 10)) 
-} 
-
-// SliceSelector is a slice selector, e.g., [0:100:5], as defined by [RFC 
-// 9535 Section 2.3.4]. Interfaces implemented: 
-// - [Selector] 
-// - [fmt.Stringer] 
-// 
-// [RFC 9535 Section 2.3.4]: https://www.rfc-editor.org/rfc/rfc9535.html#name-array-slice-selector 
-type SliceSelector struct { 
- // Start of the slice; defaults to 0. 
- start int 
- // End of the slice; defaults to math.MaxInt. 
- end int 
- // Steps between start and end; defaults to 1. 
- step int 
-} 
-
-// isSingular returns false because a slice selector can select more than one 
-// value from an array. Defined by the [Selector] interface. 
-func (SliceSelector) isSingular() bool { return false } 
-
-// Slice creates a new [SliceSelector]. Pass up to three integers or nils for 
-// the start, end, and step arguments. Subsequent arguments are ignored. 
-func Slice(args ...any) SliceSelector { 
- const ( 
- startArg = 0 
- endArg = 1 
- stepArg = 2 
- ) 
- // Set defaults. 
- s := SliceSelector{0, math.MaxInt, 1} 
- switch len(args) - 1 { 
- case stepArg: 
- //nolint:gosec // disable G602 https://github.com/securego/gosec/issues/1250 
- switch step := args[stepArg].(type) { 
- case int: 
- s.step = step 
- case nil: 
- // Nothing to do 
- default: 
- panic("Third value passed to Slice is not an integer") 
- } 
- fallthrough 
- case endArg: 
- //nolint:gosec // disable G602 https://github.com/securego/gosec/issues/1250 
- switch end := args[endArg].(type) { 
- case int: 
- s.end = end 
- case nil: 
- // Negative step: end with minimum int. 
- if s.step < 0 { 
- s.end = math.MinInt 
- } 
- default: 
- panic("Second value passed to Slice is not an integer") 
- } 
- fallthrough 
- case startArg: 
- switch start := args[startArg].(type) { 
- case int: 
- s.start = start 
- case nil: 
- // Negative step: start with maximum int. 
- if s.step < 0 { 
- s.start = math.MaxInt 
- } 
- default: 
- panic("First value passed to Slice is not an integer") 
- } 
- } 
- return s 
-} 
-
-// writeTo writes a string representation of s to buf. Defined by 
-// [stringWriter]. 
-func (s SliceSelector) writeTo(buf *strings.Builder) { 
- if s.start != 0 && (s.step >= 0 || s.start != math.MaxInt) { 
- buf.WriteString(strconv.FormatInt(int64(s.start), 10)) 
- } 
- buf.WriteByte(':') 
- if s.end != math.MaxInt && (s.step >= 0 || s.end != math.MinInt) { 
- buf.WriteString(strconv.FormatInt(int64(s.end), 10)) 
- } 
- if s.step != 1 { 
- buf.WriteByte(':') 
- buf.WriteString(strconv.FormatInt(int64(s.step), 10)) 
- } 
-} 
-
-// String returns a string representation of s. 
-func (s SliceSelector) String() string { - buf := new(strings.Builder) - s.writeTo(buf) - return buf.String() -} - -// Select selects and returns the values from input for the indexes specified -// by s. Returns an empty slice if input is not a slice. Indexes outside the -// bounds of input will not be included in the return value. Defined by the -// [Selector] interface. -func (s SliceSelector) Select(input, _ any) []any { - if val, ok := input.([]any); ok { - lower, upper := s.Bounds(len(val)) - res := make([]any, 0, len(val)) - switch { - case s.step > 0: - for i := lower; i < upper; i += s.step { - res = append(res, val[i]) - } - case s.step < 0: - for i := upper; lower < i; i += s.step { - res = append(res, val[i]) - } - } - return res - } - return make([]any, 0) -} - -// SelectLocated selects values from input for the indexes specified by s and -// returns thm with their normalized paths as [LocatedNode] values. Returns -// an empty slice if input is not a slice. Indexes outside the bounds of input -// will not be included in the return value. Defined by the [Selector] -// interface. -func (s SliceSelector) SelectLocated(input, _ any, parent NormalizedPath) []*LocatedNode { - if val, ok := input.([]any); ok { - lower, upper := s.Bounds(len(val)) - res := make([]*LocatedNode, 0, len(val)) - switch { - case s.step > 0: - for i := lower; i < upper; i += s.step { - res = append(res, newLocatedNode(append(parent, Index(i)), val[i])) - } - case s.step < 0: - for i := upper; lower < i; i += s.step { - res = append(res, newLocatedNode(append(parent, Index(i)), val[i])) - } - } - return res - } - return make([]*LocatedNode, 0) -} - -// Start returns the start position. -func (s SliceSelector) Start() int { - return s.start -} - -// End returns the end position. -func (s SliceSelector) End() int { - return s.end -} - -// Step returns the step value. -func (s SliceSelector) Step() int { - return s.step -} - -// Bounds returns the lower and upper bounds for selecting from a slice of -// length. -func (s SliceSelector) Bounds(length int) (int, int) { - start := normalize(s.start, length) - end := normalize(s.end, length) - switch { - case s.step > 0: - return max(min(start, length), 0), max(min(end, length), 0) - case s.step < 0: - return max(min(end, length-1), -1), max(min(start, length-1), -1) - default: - return 0, 0 - } -} - -// normalize normalizes index i relative to a slice of length. -func normalize(i, length int) int { - if i >= 0 { - return i - } - - return length + i -} - -// FilterSelector is a filter selector, e.g., ?(), as defined by [RFC -// 9535 Section 2.3.5]. Interfaces implemented: -// - [Selector] -// - [fmt.Stringer] -// -// [RFC 9535 Section 2.3.5]: https://www.rfc-editor.org/rfc/rfc9535.html#name-filter-selector -type FilterSelector struct { - LogicalOr -} - -// Filter returns a new [FilterSelector] that ORs the evaluation of each expr. -func Filter(expr ...LogicalAnd) *FilterSelector { - return &FilterSelector{LogicalOr: expr} -} - -// String returns a string representation of f. -func (f *FilterSelector) String() string { - buf := new(strings.Builder) - f.writeTo(buf) - return buf.String() -} - -// writeTo writes a string representation of f to buf. Defined by -// [stringWriter]. -func (f *FilterSelector) writeTo(buf *strings.Builder) { - buf.WriteRune('?') - f.LogicalOr.writeTo(buf) -} - -// Select selects and returns values that f filters from current. Filter -// expressions may evaluate the current value (@), the root value ($), or any -// path expression. 
Defined by the [Selector] interface. -func (f *FilterSelector) Select(current, root any) []any { - ret := []any{} - switch current := current.(type) { - case []any: - for _, v := range current { - if f.Eval(v, root) { - ret = append(ret, v) - } - } - case map[string]any: - for _, v := range current { - if f.Eval(v, root) { - ret = append(ret, v) - } - } - } - - return ret -} - -// SelectLocated selects and returns [LocatedNode] values with values that f -// filters from current. Filter expressions may evaluate the current value -// (@), the root value ($), or any path expression. Defined by the [Selector] -// interface. -func (f *FilterSelector) SelectLocated(current, root any, parent NormalizedPath) []*LocatedNode { - ret := []*LocatedNode{} - switch current := current.(type) { - case []any: - for i, v := range current { - if f.Eval(v, root) { - ret = append(ret, newLocatedNode(append(parent, Index(i)), v)) - } - } - case map[string]any: - for k, v := range current { - if f.Eval(v, root) { - ret = append(ret, newLocatedNode(append(parent, Name(k)), v)) - } - } - } - - return ret -} - -// Eval evaluates the f's [LogicalOr] expression against node and root. Uses -// [FilterSelector.Select] as it iterates over nodes, and always passes the -// root value($) for filter expressions that reference it. -func (f *FilterSelector) Eval(node, root any) bool { - return f.testFilter(node, root) -} - -// isSingular returns false because Filters can return more than one value. -// Defined by the [Selector] interface. -func (f *FilterSelector) isSingular() bool { return false } diff --git a/spec/selector_test.go b/spec/selector_test.go deleted file mode 100644 index b6d4d51..0000000 --- a/spec/selector_test.go +++ /dev/null @@ -1,779 +0,0 @@ -package spec - -import ( - "fmt" - "math" - "strings" - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestSelectorInterface(t *testing.T) { - t.Parallel() - a := assert.New(t) - - for _, tc := range []struct { - name string - tok any - }{ - {"name", Name("hi")}, - {"index", Index(42)}, - {"slice", Slice()}, - {"wildcard", Wildcard()}, - {"filter", Filter(nil)}, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - a.Implements((*Selector)(nil), tc.tok) - }) - } -} - -func TestSelectorString(t *testing.T) { - t.Parallel() - a := assert.New(t) - - for _, tc := range []struct { - name string - tok Selector - str string - sing bool - }{ - { - name: "name", - tok: Name("hi"), - str: `"hi"`, - sing: true, - }, - { - name: "name_space", - tok: Name("hi there"), - str: `"hi there"`, - sing: true, - }, - { - name: "name_quote", - tok: Name(`hi "there"`), - str: `"hi \"there\""`, - sing: true, - }, - { - name: "name_unicode", - tok: Name(`hi πŸ˜€`), - str: `"hi πŸ˜€"`, - sing: true, - }, - { - name: "name_digits", - tok: Name(`42`), - str: `"42"`, - sing: true, - }, - { - name: "index", - tok: Index(42), - str: "42", - sing: true, - }, - { - name: "index_big", - tok: Index(math.MaxUint32), - str: "4294967295", - sing: true, - }, - { - name: "index_zero", - tok: Index(0), - str: "0", - sing: true, - }, - { - name: "slice_0_4", - tok: Slice(0, 4), - str: ":4", - }, - { - name: "slice_4_5", - tok: Slice(4, 5), - str: "4:5", - }, - { - name: "slice_end_42", - tok: Slice(nil, 42), - str: ":42", - }, - { - name: "slice_start_4", - tok: Slice(4), - str: "4:", - }, - { - name: "slice_start_end_step", - tok: Slice(4, 7, 2), - str: "4:7:2", - }, - { - name: "slice_start_step", - tok: Slice(4, nil, 2), - str: "4::2", - }, - { - name: "slice_end_step", - tok: 
Slice(nil, 4, 2), - str: ":4:2", - }, - { - name: "slice_step", - tok: Slice(nil, nil, 3), - str: "::3", - }, - { - name: "slice_neg_step", - tok: Slice(nil, nil, -1), - str: "::-1", - }, - { - name: "slice_max_start", - tok: Slice(math.MaxInt), - str: fmt.Sprintf("%v:", math.MaxInt), - }, - { - name: "slice_max_start_neg_step", - tok: Slice(math.MaxInt, nil, -1), - str: "::-1", - }, - { - name: "slice_min_start", - tok: Slice(math.MinInt), - str: fmt.Sprintf("%v:", math.MinInt), - }, - { - name: "slice_min_start_neg_step", - tok: Slice(math.MinInt, nil, -1), - str: fmt.Sprintf("%v::-1", math.MinInt), - }, - { - name: "slice_max_end", - tok: Slice(0, math.MaxInt), - str: ":", - }, - { - name: "slice_max_end_neg_step", - tok: Slice(0, math.MaxInt, -1), - str: "::-1", - }, - { - name: "slice_min_end", - tok: Slice(0, math.MinInt), - str: fmt.Sprintf(":%v", math.MinInt), - }, - { - name: "slice_min_end_neg_step", - tok: Slice(0, math.MinInt, -1), - str: "::-1", - }, - { - name: "wildcard", - tok: Wildcard(), - str: "*", - }, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - a.Equal(tc.sing, tc.tok.isSingular()) - buf := new(strings.Builder) - tc.tok.writeTo(buf) - a.Equal(tc.str, buf.String()) - a.Equal(tc.str, tc.tok.String()) - }) - } -} - -func TestSliceBounds(t *testing.T) { - t.Parallel() - a := assert.New(t) - - json := []any{"a", "b", "c", "d", "e", "f", "g"} - - extract := func(s SliceSelector) []any { - lower, upper := s.Bounds(len(json)) - res := make([]any, 0, len(json)) - switch { - case s.step > 0: - for i := lower; i < upper; i += s.step { - res = append(res, json[i]) - } - case s.step < 0: - for i := upper; lower < i; i += s.step { - res = append(res, json[i]) - } - } - return res - } - - type lenCase struct { - length int - lower int - upper int - } - - for _, tc := range []struct { - name string - slice SliceSelector - cases []lenCase - exp []any - }{ - { - name: "defaults", - slice: Slice(), - exp: json, - cases: []lenCase{ - {10, 0, 10}, - {3, 0, 3}, - {99, 0, 99}, - }, - }, - { - name: "step_0", - slice: Slice(nil, nil, 0), - exp: []any{}, - cases: []lenCase{ - {10, 0, 0}, - {3, 0, 0}, - {99, 0, 0}, - }, - }, - { - name: "nil_nil_nil", - slice: Slice(nil, nil, nil), - exp: json, - cases: []lenCase{ - {10, 0, 10}, - {3, 0, 3}, - {99, 0, 99}, - }, - }, - { - name: "3_8_2", - slice: Slice(3, 8, 2), - exp: []any{"d", "f"}, - cases: []lenCase{ - {10, 3, 8}, - {3, 3, 3}, - {99, 3, 8}, - }, - }, - { - name: "1_3_1", - slice: Slice(1, 3, 1), - exp: []any{"b", "c"}, - cases: []lenCase{ - {10, 1, 3}, - {2, 1, 2}, - {99, 1, 3}, - }, - }, - { - name: "5_defaults", - slice: Slice(5), - exp: []any{"f", "g"}, - cases: []lenCase{ - {10, 5, 10}, - {8, 5, 8}, - {99, 5, 99}, - }, - }, - { - name: "1_5_2", - slice: Slice(1, 5, 2), - exp: []any{"b", "d"}, - cases: []lenCase{ - {10, 1, 5}, - {4, 1, 4}, - {99, 1, 5}, - }, - }, - { - name: "5_1_neg2", - slice: Slice(5, 1, -2), - exp: []any{"f", "d"}, - cases: []lenCase{ - {10, 1, 5}, - {4, 1, 3}, - {99, 1, 5}, - }, - }, - { - name: "def_def_neg1", - slice: Slice(nil, nil, -1), - exp: []any{"g", "f", "e", "d", "c", "b", "a"}, - cases: []lenCase{ - {10, -1, 9}, - {4, -1, 3}, - {99, -1, 98}, - }, - }, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - a.False(tc.slice.isSingular()) - for _, lc := range tc.cases { - lower, upper := tc.slice.Bounds(lc.length) - a.Equal(lc.lower, lower) - a.Equal(lc.upper, upper) - } - a.Equal(tc.exp, extract(tc.slice)) - a.Equal(tc.slice.start, tc.slice.Start()) - a.Equal(tc.slice.end, 
tc.slice.End()) - a.Equal(tc.slice.step, tc.slice.Step()) - }) - } -} - -func TestSlicePanic(t *testing.T) { - t.Parallel() - a := assert.New(t) - - a.PanicsWithValue( - "First value passed to Slice is not an integer", - func() { Slice("hi") }, - ) - a.PanicsWithValue( - "Second value passed to Slice is not an integer", - func() { Slice(nil, "hi") }, - ) - a.PanicsWithValue( - "Third value passed to Slice is not an integer", - func() { Slice(nil, 42, "hi") }, - ) -} - -func TestNameSelect(t *testing.T) { - t.Parallel() - a := assert.New(t) - - for _, tc := range []struct { - name string - sel Name - src any - exp []any - loc []*LocatedNode - }{ - { - name: "got_name", - sel: Name("hi"), - src: map[string]any{"hi": 42}, - exp: []any{42}, - loc: []*LocatedNode{{Path: Normalized(Name("hi")), Node: 42}}, - }, - { - name: "got_name_array", - sel: Name("hi"), - src: map[string]any{"hi": []any{42, true}}, - exp: []any{[]any{42, true}}, - loc: []*LocatedNode{{Path: Normalized(Name("hi")), Node: []any{42, true}}}, - }, - { - name: "no_name", - sel: Name("hi"), - src: map[string]any{"oy": []any{42, true}}, - exp: []any{}, - loc: []*LocatedNode{}, - }, - { - name: "src_array", - sel: Name("hi"), - src: []any{42, true}, - exp: []any{}, - loc: []*LocatedNode{}, - }, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - a.Equal(tc.exp, tc.sel.Select(tc.src, nil)) - a.Equal(tc.loc, tc.sel.SelectLocated(tc.src, nil, Normalized())) - }) - } -} - -func TestIndexSelect(t *testing.T) { - t.Parallel() - a := assert.New(t) - - for _, tc := range []struct { - name string - sel Index - src any - exp []any - loc []*LocatedNode - }{ - { - name: "index_zero", - sel: Index(0), - src: []any{42, true, "hi"}, - exp: []any{42}, - loc: []*LocatedNode{{Path: Normalized(Index(0)), Node: 42}}, - }, - { - name: "index_two", - sel: Index(2), - src: []any{42, true, "hi"}, - exp: []any{"hi"}, - loc: []*LocatedNode{{Path: Normalized(Index(2)), Node: "hi"}}, - }, - { - name: "index_neg_one", - sel: Index(-1), - src: []any{42, true, "hi"}, - exp: []any{"hi"}, - loc: []*LocatedNode{{Path: Normalized(Index(2)), Node: "hi"}}, - }, - { - name: "index_neg_two", - sel: Index(-2), - src: []any{42, true, "hi"}, - exp: []any{true}, - loc: []*LocatedNode{{Path: Normalized(Index(1)), Node: true}}, - }, - { - name: "out_of_range", - sel: Index(4), - src: []any{42, true, "hi"}, - exp: []any{}, - loc: []*LocatedNode{}, - }, - { - name: "neg_out_of_range", - sel: Index(-4), - src: []any{42, true, "hi"}, - exp: []any{}, - loc: []*LocatedNode{}, - }, - { - name: "src_object", - sel: Index(0), - src: map[string]any{"hi": 42}, - exp: []any{}, - loc: []*LocatedNode{}, - }, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - a.Equal(tc.exp, tc.sel.Select(tc.src, nil)) - a.Equal(tc.loc, tc.sel.SelectLocated(tc.src, nil, Normalized())) - }) - } -} - -func TestWildcardSelect(t *testing.T) { - t.Parallel() - a := assert.New(t) - - for _, tc := range []struct { - name string - src any - exp []any - loc []*LocatedNode - }{ - { - name: "object", - src: map[string]any{"x": true, "y": []any{true}}, - exp: []any{true, []any{true}}, - loc: []*LocatedNode{ - {Path: Normalized(Name("x")), Node: true}, - {Path: Normalized(Name("y")), Node: []any{true}}, - }, - }, - { - name: "array", - src: []any{true, 42, map[string]any{"x": 6}}, - exp: []any{true, 42, map[string]any{"x": 6}}, - loc: []*LocatedNode{ - {Path: Normalized(Index(0)), Node: true}, - {Path: Normalized(Index(1)), Node: 42}, - {Path: Normalized(Index(2)), Node: map[string]any{"x": 6}}, - }, 
- }, - { - name: "something_else", - src: 42, - exp: []any{}, - loc: []*LocatedNode{}, - }, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - if _, ok := tc.src.(map[string]any); ok { - a.ElementsMatch(tc.exp, Wildcard().Select(tc.src, nil)) - a.ElementsMatch(tc.loc, Wildcard().SelectLocated(tc.src, nil, Normalized())) - } else { - a.Equal(tc.exp, Wildcard().Select(tc.src, nil)) - a.Equal(tc.loc, Wildcard().SelectLocated(tc.src, nil, Normalized())) - } - }) - } -} - -func TestSliceSelect(t *testing.T) { - t.Parallel() - a := assert.New(t) - - for _, tc := range []struct { - name string - sel SliceSelector - src any - exp []any - loc []*LocatedNode - }{ - { - name: "0_2", - sel: Slice(0, 2), - src: []any{42, true, "hi"}, - exp: []any{42, true}, - loc: []*LocatedNode{ - {Path: Normalized(Index(0)), Node: 42}, - {Path: Normalized(Index(1)), Node: true}, - }, - }, - { - name: "0_1", - sel: Slice(0, 1), - src: []any{[]any{42, false}, true, "hi"}, - exp: []any{[]any{42, false}}, - loc: []*LocatedNode{ - {Path: Normalized(Index(0)), Node: []any{42, false}}, - }, - }, - { - name: "2_5", - sel: Slice(2, 5), - src: []any{[]any{42, false}, true, "hi", 98.6, 73, "hi", 22}, - exp: []any{"hi", 98.6, 73}, - loc: []*LocatedNode{ - {Path: Normalized(Index(2)), Node: "hi"}, - {Path: Normalized(Index(3)), Node: 98.6}, - {Path: Normalized(Index(4)), Node: 73}, - }, - }, - { - name: "2_5_over_len", - sel: Slice(2, 5), - src: []any{"x", true, "y"}, - exp: []any{"y"}, - loc: []*LocatedNode{ - {Path: Normalized(Index(2)), Node: "y"}, - }, - }, - { - name: "defaults", - sel: Slice(), - src: []any{"x", nil, "y", 42}, - exp: []any{"x", nil, "y", 42}, - loc: []*LocatedNode{ - {Path: Normalized(Index(0)), Node: "x"}, - {Path: Normalized(Index(1)), Node: nil}, - {Path: Normalized(Index(2)), Node: "y"}, - {Path: Normalized(Index(3)), Node: 42}, - }, - }, - { - name: "default_start", - sel: Slice(nil, 3), - src: []any{"x", nil, "y", 42, 98.6, 54}, - exp: []any{"x", nil, "y"}, - loc: []*LocatedNode{ - {Path: Normalized(Index(0)), Node: "x"}, - {Path: Normalized(Index(1)), Node: nil}, - {Path: Normalized(Index(2)), Node: "y"}, - }, - }, - { - name: "default_end", - sel: Slice(2), - src: []any{"x", true, "y", 42, 98.6, 54}, - exp: []any{"y", 42, 98.6, 54}, - loc: []*LocatedNode{ - {Path: Normalized(Index(2)), Node: "y"}, - {Path: Normalized(Index(3)), Node: 42}, - {Path: Normalized(Index(4)), Node: 98.6}, - {Path: Normalized(Index(5)), Node: 54}, - }, - }, - { - name: "step_2", - sel: Slice(nil, nil, 2), - src: []any{"x", true, "y", 42, 98.6, 54}, - exp: []any{"x", "y", 98.6}, - loc: []*LocatedNode{ - {Path: Normalized(Index(0)), Node: "x"}, - {Path: Normalized(Index(2)), Node: "y"}, - {Path: Normalized(Index(4)), Node: 98.6}, - }, - }, - { - name: "step_3", - sel: Slice(nil, nil, 3), - src: []any{"x", true, "y", 42, 98.6, 54, 98, 73}, - exp: []any{"x", 42, 98}, - loc: []*LocatedNode{ - {Path: Normalized(Index(0)), Node: "x"}, - {Path: Normalized(Index(3)), Node: 42}, - {Path: Normalized(Index(6)), Node: 98}, - }, - }, - { - name: "negative_step", - sel: Slice(nil, nil, -1), - src: []any{"x", true, "y", []any{1, 2}}, - exp: []any{[]any{1, 2}, "y", true, "x"}, - loc: []*LocatedNode{ - {Path: Normalized(Index(3)), Node: []any{1, 2}}, - {Path: Normalized(Index(2)), Node: "y"}, - {Path: Normalized(Index(1)), Node: true}, - {Path: Normalized(Index(0)), Node: "x"}, - }, - }, - { - name: "5_0_neg2", - sel: Slice(5, 0, -2), - src: []any{"x", true, "y", 8, 13, 25, 23, 78, 13}, - exp: []any{25, 8, true}, - loc: 
[]*LocatedNode{ - {Path: Normalized(Index(5)), Node: 25}, - {Path: Normalized(Index(3)), Node: 8}, - {Path: Normalized(Index(1)), Node: true}, - }, - }, - { - name: "src_object", - sel: Slice(0, 2), - src: map[string]any{"hi": 42}, - exp: []any{}, - loc: []*LocatedNode{}, - }, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - a.Equal(tc.exp, tc.sel.Select(tc.src, nil)) - a.Equal(tc.loc, tc.sel.SelectLocated(tc.src, nil, Normalized())) - }) - } -} - -func TestFilterSelector(t *testing.T) { - t.Parallel() - a := assert.New(t) - - for _, tc := range []struct { - name string - filter *FilterSelector - root any - current any - exp []any - loc []*LocatedNode - str string - rand bool - }{ - { - name: "no_filter", - filter: Filter(), - exp: []any{}, - loc: []*LocatedNode{}, - str: "?", - }, - { - name: "array_root", - filter: Filter(And(Existence(Query(true, Child(Index(0)))))), - root: []any{42, true, "hi"}, - current: map[string]any{"x": 2}, - exp: []any{2}, - loc: []*LocatedNode{ - {Path: Normalized(Name("x")), Node: 2}, - }, - str: `?$[0]`, - }, - { - name: "array_root_false", - filter: Filter(And(Existence(Query(true, Child(Index(4)))))), - root: []any{42, true, "hi"}, - current: map[string]any{"x": 2}, - exp: []any{}, - loc: []*LocatedNode{}, - str: `?$[4]`, - }, - { - name: "object_root", - filter: Filter(And(Existence(Query(true, Child(Name("y")))))), - root: map[string]any{"x": 42, "y": "hi"}, - current: map[string]any{"a": 2, "b": 3}, - exp: []any{2, 3}, - loc: []*LocatedNode{ - {Path: Normalized(Name("a")), Node: 2}, - {Path: Normalized(Name("b")), Node: 3}, - }, - str: `?$["y"]`, - rand: true, - }, - { - name: "object_root_false", - filter: Filter(And(Existence(Query(true, Child(Name("z")))))), - root: map[string]any{"x": 42, "y": "hi"}, - current: map[string]any{"a": 2, "b": 3}, - exp: []any{}, - loc: []*LocatedNode{}, - str: `?$["z"]`, - rand: true, - }, - { - name: "array_current", - filter: Filter(And(Existence(Query(false, Child(Index(0)))))), - current: []any{[]any{42}}, - exp: []any{[]any{42}}, - loc: []*LocatedNode{ - {Path: Normalized(Index(0)), Node: []any{42}}, - }, - str: `?@[0]`, - }, - { - name: "array_current_false", - filter: Filter(And(Existence(Query(false, Child(Index(1)))))), - current: []any{[]any{42}}, - exp: []any{}, - loc: []*LocatedNode{}, - str: `?@[1]`, - }, - { - name: "object_current", - filter: Filter(And(Existence(Query(false, Child(Name("x")))))), - current: []any{map[string]any{"x": 42}}, - exp: []any{map[string]any{"x": 42}}, - loc: []*LocatedNode{ - {Path: Normalized(Index(0)), Node: map[string]any{"x": 42}}, - }, - str: `?@["x"]`, - }, - { - name: "object_current_false", - filter: Filter(And(Existence(Query(false, Child(Name("y")))))), - current: []any{map[string]any{"x": 42}}, - exp: []any{}, - loc: []*LocatedNode{}, - str: `?@["y"]`, - }, - } { - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - if tc.rand { - a.ElementsMatch(tc.exp, tc.filter.Select(tc.current, tc.root)) - a.ElementsMatch(tc.loc, tc.filter.SelectLocated(tc.current, tc.root, Normalized())) - } else { - a.Equal(tc.exp, tc.filter.Select(tc.current, tc.root)) - a.Equal(tc.loc, tc.filter.SelectLocated(tc.current, tc.root, Normalized())) - } - a.Equal(tc.str, tc.filter.String()) - a.Equal(tc.str, bufString(tc.filter)) - a.False(tc.filter.isSingular()) - }) - } -} diff --git a/spec/spec.go b/spec/spec.go deleted file mode 100644 index 11e9c6a..0000000 --- a/spec/spec.go +++ /dev/null @@ -1,26 +0,0 @@ -// Package spec provides the [RFC 9535 JSONPath] [AST] and execution for 
-// [github.com/theory/jsonpath]. It will mainly be of interest to those -// wishing to implement their own parsers, to convert from other JSON -// path-style languages, and to implement functions for -// [github.com/theory/jsonpath/registry]. -// -// # Stability -// -// The following types and constructors are considered stable: -// -// - [Index] -// - [Name] -// - [SliceSelector] and [Slice] -// - [WildcardSelector] and [Wildcard] -// - [FilterSelector] -// - [Segment] and [Child] and [Descendant] -// - [PathQuery] and [Query] -// - [LocatedNode] -// - [NormalizedPath] -// -// The rest of the structs, constructors, and methods in this package remain -// subject to change, although we anticipate no significant revisions. -// -// [RFC 9535 JSONPath]: https://www.rfc-editor.org/rfc/rfc9535.html -// [AST]: https://en.wikipedia.org/wiki/Abstract_syntax_tree -package spec diff --git a/spec/spec_example_test.go b/spec/spec_example_test.go deleted file mode 100644 index 680939e..0000000 --- a/spec/spec_example_test.go +++ /dev/null @@ -1,497 +0,0 @@ -package spec_test - -import ( - "encoding/json" - "fmt" - "log" - - "github.com/theory/jsonpath" - "github.com/theory/jsonpath/registry" - "github.com/theory/jsonpath/spec" -) - -// Select all the authors of the books in a bookstore object. -func Example() { - // Construct a jsonpath query. - p := jsonpath.New(spec.Query( - true, - spec.Child(spec.Name("store")), - spec.Child(spec.Name("book")), - spec.Child(spec.Wildcard()), - spec.Child(spec.Name("author")), - )) - - // Select values from unmarshaled JSON input. - store := bookstore() - nodes := p.Select(store) - - // Show the selected values. - items, err := json.Marshal(nodes) - if err != nil { - log.Fatal(err) - } - fmt.Printf("%s\n", items) - - // Output: ["Nigel Rees","Evelyn Waugh","Herman Melville","J. R. R. Tolkien"] -} - -// Construct a couple of different path queries. -func ExamplePathQuery() { - // Create a query and its segments. - q := spec.Query( - true, - spec.Child(spec.Name("store")), - spec.Child(spec.Name("book")), - spec.Child(spec.Wildcard()), - spec.Child(spec.Name("author")), - ) - fmt.Printf("%v\n", q) - - // Create a query with multi-selector segments. - q = spec.Query( - true, - spec.Child(spec.Name("profile")), - spec.Descendant(spec.Name("email"), spec.Name("phone")), - ) - fmt.Printf("%v\n", q) - - // Output: - // $["store"]["book"][*]["author"] - // $["profile"]..["email","phone"] -} - -// Create a child segment that selects the name "hi", th index 2, or slice -// 1-3. -func ExampleSegment_child() { - child := spec.Child( - spec.Name("hi"), - spec.Index(2), - spec.Slice(1, 3, 1), - ) - fmt.Printf("%v\n", child) - // Output: ["hi",2,1:3] -} - -// Create a descendant segment that selects the name "email" or array index -// zero from a node or any of its descendant nodes. -func ExampleSegment_descendant() { - child := spec.Descendant( - spec.Name("email"), - spec.Index(0), - ) - fmt.Printf("%v\n", child) - // Output: ..["email",0] -} - -// Create a few slice selectors. -func ExampleSliceSelector() { - // Select all values in a slice. - for _, sliceSelector := range []any{ - spec.Slice(), // full slice - spec.Slice(1, 4), // items 1-3 - spec.Slice(nil, 8), // items 0-7 - spec.Slice(4, -1, 3), // items 4-last step by 3 - spec.Slice(5, 1, -2), // items 5-2, step by -2 - - } { - fmt.Printf("%v\n", sliceSelector) - } - // Output: - // : - // 1:4 - // :8 - // 4:-1:3 - // 5:1:-2 -} - -// Create a few name selectors. 
-func ExampleName() { - for _, nameSelector := range []any{ - spec.Name("hello"), // ascii - spec.Name(`Charlie "Bird" Parker`), // quoted - spec.Name("އައްސަލާމް ޒަލައިކήͺή‰ή°"), // Unicode - spec.Name("πŸ“‘πŸͺ›πŸͺ€"), // emoji - } { - fmt.Printf("%v\n", nameSelector) - } - // Output: - // "hello" - // "Charlie \"Bird\" Parker" - // "އައްސަލާމް ޒަލައިކήͺή‰ή°" - // "πŸ“‘πŸͺ›πŸͺ€" -} - -// Create a few index selectors. -func ExampleIndex() { - for _, indexSelector := range []any{ - spec.Index(0), // first item - spec.Index(3), // fourth item - spec.Index(-1), // last item - } { - fmt.Printf("%v\n", indexSelector) - } - // Output: - // 0 - // 3 - // -1 -} - -func ExampleWildcard() { - fmt.Printf("%v\n", spec.Wildcard()) - // Output: * -} - -// Create a filter selector that selects nodes with a descendant that contains -// an object field named "x". -func ExampleFilterSelector() { - f := spec.Filter(spec.And( - spec.Existence( - spec.Query(false, spec.Descendant(spec.Name("x"))), - ), - )) - fmt.Printf("%v\n", f) - // Output: ?@..["x"] -} - -// Create a comparison expression that compares a literal value to a path -// expression. -func ExampleCompExpr() { - cmp := spec.Comparison( - spec.Literal(42), - spec.EqualTo, - spec.SingularQuery(false, spec.Name("age")), - ) - fmt.Printf("%v\n", cmp) - // Output: 42 == @["age"] -} - -// Create an existence expression as a filter expression. -func ExampleExistExpr() { - filter := spec.Filter(spec.And( - spec.Existence( - spec.Query(false, spec.Child(spec.Name("x"))), - ), - )) - fmt.Printf("%v\n", filter) - // Output: ?@["x"] -} - -// Create a nonexistence expression as a filter expression. -func ExampleNonExistExpr() { - filter := spec.Filter(spec.And( - spec.Nonexistence( - spec.Query(false, spec.Child(spec.Name("x"))), - ), - )) - fmt.Printf("%v\n", filter) - // Output: ?!@["x"] -} - -// Print the [spec.FuncType] for each [spec.PathValue] -// implementation. -func ExamplePathValue() { - fmt.Printf("Implementation FuncType\n") - fmt.Printf("----------------- --------\n") - for _, jv := range []spec.PathValue{ - spec.Value(nil), - spec.Nodes(1, 2), - spec.Logical(true), - } { - fmt.Printf("%-17T %v\n", jv, jv.FuncType()) - } - // Output: - // Implementation FuncType - // ----------------- -------- - // *spec.ValueType Value - // spec.NodesType Nodes - // spec.LogicalType Logical -} - -// Use the standard match() function in a function expression. -func ExampleFuncExpr() { - reg := registry.New() - fe := spec.Function( - reg.Get("match"), - spec.Query(false, spec.Child(spec.Name("rating"))), - spec.Literal("good$"), - ) - fmt.Printf("%v\n", fe) - // Output: match(@["rating"], "good$") -} - -// Use the standard count() function in a function expression. -func ExampleNotFuncExpr() { - reg := registry.New() - nf := spec.NotFunction(spec.Function( - reg.Get("length"), - spec.Query(false, spec.Child(spec.Index(0))), - )) - fmt.Printf("%v\n", nf) - // Output: !length(@[0]) -} - -// Pass a LiteralArg to a function. -func ExampleLiteralArg() { - reg := registry.New() - fe := spec.Function( - reg.Get("length"), - spec.Literal("some string"), - ) - fmt.Printf("%v\n", fe) - // Output: length("some string") -} - -// Assemble a LogicalAnd consisting of multiple expressions. -func ExampleLogicalAnd() { - and := spec.And( - spec.Value(42), - spec.Existence( - spec.Query(false, spec.Child(spec.Name("answer"))), - ), - ) - fmt.Printf("%v\n", and) - // Output: 42 && @["answer"] -} - -// Assemble a LogicalOr consisting of multiple expressions. 
-func ExampleLogicalOr() { - and := spec.Or( - spec.And(spec.Existence( - spec.Query(false, spec.Child(spec.Name("answer"))), - )), - spec.And(spec.Value(42)), - ) - fmt.Printf("%v\n", and) - // Output: @["answer"] || 42 -} - -func ExampleLogicalType() { - fmt.Printf("%v\n", spec.Logical(true)) - fmt.Printf("%v\n", spec.Logical(false)) - // Output: - // true - // false -} - -// Use a [spec.NodesType] to create a list of nodes, a.k.a. a JSON array. -func ExampleNodesType() { - fmt.Printf("%v\n", spec.Nodes("hi", 42, true)) - // Output: [hi 42 true] -} - -// Use a [spec.ParenExpr] to group the result of a [LogicalOr] expression. -func ExampleParenExpr() { - paren := spec.Paren( - spec.And( - spec.Existence( - spec.Query(false, spec.Child(spec.Name("answer"))), - ), - ), - spec.And( - spec.Existence( - spec.Query(false, spec.Child(spec.Name("question"))), - ), - ), - ) - fmt.Printf("%v\n", paren) - // Output: (@["answer"] || @["question"]) -} - -// Use a [spec.NotParenExpr] to negate the result of a [LogicalOr] expression. -func ExampleNotParenExpr() { - not := spec.NotParen( - spec.And( - spec.Existence( - spec.Query(false, spec.Child(spec.Name("answer"))), - ), - ), - spec.And( - spec.Existence( - spec.Query(false, spec.Child(spec.Name("question"))), - ), - ), - ) - fmt.Printf("%v\n", not) - // Output: !(@["answer"] || @["question"]) -} - -// Compare a normalized JSONPath and its JSON Pointer equivalent. -func ExampleNormalizedPath() { - norm := spec.Normalized( - spec.Name("x"), - spec.Index(1), - spec.Name("y"), - ) - fmt.Printf("%v\n", norm) - fmt.Printf("%v\n", norm.Pointer()) - // Output: - // $['x'][1]['y'] - // /x/1/y -} - -func ExampleSingularQueryExpr() { - singular := spec.SingularQuery( - true, - spec.Name("profile"), - spec.Name("contacts"), - spec.Index(0), - spec.Name("email"), - ) - fmt.Printf("%v\n", singular) - // Output: $["profile"]["contacts"][0]["email"] -} - -// Create A [spec.ValueType] for each supported JSON type. -func ExampleValueType() { - for _, val := range []any{ - "hello", - 42, - 98.6, - json.Number("1024"), - true, - nil, - map[string]any{"x": true}, - []any{1, 2, false}, - } { - fmt.Printf("%v\n", spec.Value(val)) - } - // Output: - // hello - // 42 - // 98.6 - // 1024 - // true - // - // map[x:true] - // [1 2 false] -} - -// Of the implementations of [PathValue], only [spec.ValueType] and nil -// can be converted to [spec.ValueType]. -func ExampleValueFrom() { - for _, val := range []spec.PathValue{ - spec.Value("hello"), // no conversion - nil, // equivalent to no value - } { - fmt.Printf("val: %v\n", spec.ValueFrom(val)) - } - // Output: - // val: hello - // val: -} - -// Of the implementations of [PathValue], only [spec.NodesType] and nil -// can be converted to [spec.NodesType]. -func ExampleNodesFrom() { - for _, val := range []spec.PathValue{ - spec.Nodes(1, 2, 3), // no conversion - nil, // converts to empty NodesType - } { - fmt.Printf("nodes: %v\n", spec.NodesFrom(val)) - } - // Output: - // nodes: [1 2 3] - // nodes: [] -} - -// Of the implementations of [PathValue], [spec.LogicalType], -// [spec.NodesType], and nil can be converted to [spec.NodesType]. -func ExampleLogicalFrom() { - for _, val := range []spec.PathValue{ - spec.Nodes(1, 2, 3), // converts to logical (existence) - spec.Logical(false), // converts to logical - nil, - } { - fmt.Printf("logical: %v\n", spec.LogicalFrom(val)) - } - // Output: - // logical: true - // logical: false - // logical: false -} - -func ExampleLocatedNode() { - // Query all "author" object properties. 
- p := jsonpath.New(spec.Query( - true, - spec.Descendant(spec.Name("author")), - )) - - // Select LocatedNodes from unmarshaled JSON input. - store := bookstore() - nodes := p.SelectLocated(store) - - // Show the LocatedNodes. - items, err := json.MarshalIndent(nodes, "", " ") - if err != nil { - log.Fatal(err) - } - fmt.Printf("%s\n", items) - - // Output: - // [ - // { - // "node": "Nigel Rees", - // "path": "$['store']['book'][0]['author']" - // }, - // { - // "node": "Evelyn Waugh", - // "path": "$['store']['book'][1]['author']" - // }, - // { - // "node": "Herman Melville", - // "path": "$['store']['book'][2]['author']" - // }, - // { - // "node": "J. R. R. Tolkien", - // "path": "$['store']['book'][3]['author']" - // } - // ] -} - -// bookstore returns an unmarshaled JSON object. -func bookstore() any { - src := []byte(`{ - "store": { - "book": [ - { - "category": "reference", - "author": "Nigel Rees", - "title": "Sayings of the Century", - "price": 8.95 - }, - { - "category": "fiction", - "author": "Evelyn Waugh", - "title": "Sword of Honour", - "price": 12.99 - }, - { - "category": "fiction", - "author": "Herman Melville", - "title": "Moby Dick", - "isbn": "0-553-21311-3", - "price": 8.99 - }, - { - "category": "fiction", - "author": "J. R. R. Tolkien", - "title": "The Lord of the Rings", - "isbn": "0-395-19395-8", - "price": 22.99 - } - ], - "bicycle": { - "color": "red", - "price": 399 - } - } - }`) - - // Parse the JSON. - var value any - if err := json.Unmarshal(src, &value); err != nil { - log.Fatal(err) - } - return value -} diff --git a/src/index.html b/src/index.html new file mode 100644 index 0000000..2cdd48a --- /dev/null +++ b/src/index.html @@ -0,0 +1,319 @@ + + + + Go JSONPath Playground + + + + + + + +
+    Go JSONPath Playground
+
+    About
+
+    This is the playground for {{version}} of github.com/theory/jsonpath, a Go package that
+    executes an RFC 9535 JSONPath query to select values from JSON data.
+
+    The playground runs entirely in the browser thanks to TinyGo, which compiles the
+    github.com/theory/jsonpath package into Web Assembly.
+
+    Learn more about JSONPath by reading the IETF JSONPath Standard, and more about
+    github.com/theory/jsonpath by following the links at the bottom of the page. The code
+    for this website can be found on GitHub.
+
+    Related Playgrounds:
+
+    JSONPath Expressions
+
+    Syntax Element      Description
+    $                   root node identifier
+    @                   current node identifier (valid only within filter selectors)
+    [<selectors>]       child segment: selects zero or more children of a node
+    .name               shorthand for ['name']
+    .*                  shorthand for [*]
+    ..[<selectors>]     descendant segment: selects zero or more descendants of a node
+    ..name              shorthand for ..['name']
+    ..*                 shorthand for ..[*]
+    'name' or "name"    name selector: selects a named child of an object
+    *                   wildcard selector: selects all children of a node
+    3                   index selector: selects an indexed child of an array (from 0)
+    0:100:5             array slice selector: start:end:step for arrays
+    ?<logical-expr>     filter selector: selects particular children using a logical expression
+    length(@.foo)       function extension: invokes a function in a filter expression
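The playground page above describes running an RFC 9535 JSONPath query against JSON data with github.com/theory/jsonpath, the same package the Wasm build wraps. A minimal sketch of that flow outside the browser, using the package's Parse and Select calls that appear elsewhere in this diff (the sample document and query are illustrative, not part of the playground):

    package main

    import (
    	"encoding/json"
    	"fmt"
    	"log"

    	"github.com/theory/jsonpath"
    )

    func main() {
    	// Parse a query using the syntax summarized in the table above.
    	path, err := jsonpath.Parse(`$.store.book[*].author`)
    	if err != nil {
    		log.Fatal(err)
    	}

    	// Unmarshal a JSON document and select matching nodes from it.
    	var doc any
    	src := []byte(`{"store":{"book":[{"author":"Nigel Rees"}]}}`)
    	if err := json.Unmarshal(src, &doc); err != nil {
    		log.Fatal(err)
    	}
    	fmt.Println(path.Select(doc)) // [Nigel Rees]
    }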
+ + + + diff --git a/src/main.go b/src/main.go new file mode 100644 index 0000000..5c1a1ee --- /dev/null +++ b/src/main.go @@ -0,0 +1,65 @@ +// package main provides the Wasm app. +package main + +import ( + "bytes" + "encoding/json" + "fmt" + "syscall/js" + + "github.com/theory/jsonpath" +) + +const ( + optLocated int = 1 << iota +) + +func main() { + stream := make(chan struct{}) + + js.Global().Set("query", js.FuncOf(query)) + js.Global().Set("optLocated", js.ValueOf(optLocated)) + + <-stream +} + +func query(_ js.Value, args []js.Value) any { + query := args[0].String() + target := args[1].String() + opts := args[2].Int() + + return execute(query, target, opts) +} + +func execute(query, target string, opts int) string { + // Parse the JSON. + var value any + if err := json.Unmarshal([]byte(target), &value); err != nil { + return fmt.Sprintf("Error parsing JSON: %v", err) + } + + // Parse the JSONPath query. + path, err := jsonpath.Parse(query) + if err != nil { + return fmt.Sprintf("Error parsing %v", err) + } + + // Execute the query against the JSON. + var res any + if opts&optLocated == optLocated { + res = path.SelectLocated(value) + } else { + res = path.Select(value) + } + + // Serialize the result + var buf bytes.Buffer + enc := json.NewEncoder(&buf) + enc.SetEscapeHTML(false) + enc.SetIndent("", " ") + if err := enc.Encode(res); err != nil { + return fmt.Sprintf("Error parsing results: %v", err) + } + + return buf.String() +} diff --git a/src/play.css b/src/play.css new file mode 100644 index 0000000..0dbb728 --- /dev/null +++ b/src/play.css @@ -0,0 +1,426 @@ +@media (prefers-color-scheme: dark) { + body { + color-scheme: dark; + --color-fg-default: #c9d1d9; + --color-fg-muted: #8b949e; + --color-fg-subtle: #484f58; + --color-canvas-default: #0d1117; + --color-canvas-subtle: #212222; + --color-canvas-muted: #161c1c; + --color-border-default: #30363d; + --color-border-muted: #21262d; + --color-neutral-muted: rgba(110,118,129,0.4); + --color-accent-fg: #118a7e; + --color-strong-fg: #118a7e; + --color-accent-emphasis: #1f6feb; + --color-attention-subtle: rgba(187,128,9,0.15); + --color-danger-fg: #f85149; + --color-button-lite: #666; + --color-button-lite-hover: #161c1c; + --color-input-bg: #1e1e1e; + --color-input-border: #555; + } +} + +@media (prefers-color-scheme: light) { + body { + /* color-scheme: light; */ + --color-fg-default: #24292f; + --color-fg-muted: #57606a; + --color-fg-subtle: #6e7781; + --color-canvas-default: #fff; + --color-canvas-muted: #ededed; + --color-canvas-subtle: #f6f8fa; + --color-border-default: #d0d7de; + --color-border-muted: hsla(210,18%,87%,1); + --color-neutral-muted: rgba(175,184,193,0.2); + --color-accent-fg: #118a7e; + --color-strong-fg: #118a7e; + --color-accent-emphasis: #0969da; + --color-attention-subtle: #fff8c5; + --color-danger-fg: #cf222e; + --color-button-lite: #bbb; + --color-button-lite-hover: #eee; + --color-input-bg: #fff; + --color-input-border: #ccc; + } +} + +body { + -ms-text-size-adjust: 100%; + -webkit-text-size-adjust: 100%; + --font: ui-monospace,SFMono-Regular,SF Mono,Menlo,Consolas,Liberation Mono,monospace; + --color-button: #eee; + --color-button-bg: #118a7e; + --color-button-hover-bg: #0f776c; + --color-button-disabled-bg: #485871; + margin: 2rem; + color: var(--color-fg-default); + background-color: var(--color-canvas-subtle); + font-family: var(--font); + line-height: 1.5; + word-wrap: break-word; +} + +body [hidden] { + display: none !important; +} + +body a { + background-color: transparent; + color: 
var(--color-accent-fg); + text-decoration: underline; + text-underline-offset: 3px; +} + +body a:active, +body a:hover { + outline-width: 0; +} + +body a.disable{ + pointer-events: none; + color:var(--color-fg-subtle); +} + +body b, +body strong { + font-weight: 600; + color: var(--color-strong-fg); +} + +body dfn { + font-style: italic; +} + +body h1 { + margin: .67em 0; + font-weight: 600; + font-size: 2em; +} + +body header { + width: 100%; + display:flex; + flex-wrap: wrap; + align-items: center; + padding-bottom: .3em; + color: var(--color-strong-fg); +} + +body header h1 { + flex-direction: column; + width: 100%; + flex: 1 0; + font-weight: 600; +} + +body header button { + flex: 0 0 auto; + padding: .5rem; + line-height: 0; + color: var(--color-button-lite); + background-color: var(--color-canvas-subtle); + border: 1px solid var(--color-button-lite); + border-radius: .5rem; +} + +body header button:hover { + background-color: var(--color-button-lite-hover); +} + +body hr { + box-sizing: content-box; + overflow: hidden; + background: transparent; + border-bottom: 1px solid var(--color-border-muted); + height: .25em; + padding: 0; + margin: 24px 0; + background-color: var(--color-border-default); + border: 0; +} + +body input, body button, body table { + font: inherit; + margin: 0; + overflow: scroll; + font-family: inherit; + font-size: inherit; + line-height: inherit; +} + +body h1, +body h2, +body h3, +body h4, +body h5, +body h6 { + margin-top: 24px; + margin-bottom: 16px; + font-weight: 400; + line-height: 1.25; +} + +body h2 { + color: var(--color-strong-fg); + font-weight: 400; + padding-bottom: .3em; + font-size: 1.5em; +} + +body h3 { + font-weight: 400; + font-size: 1.25em; +} + +body h4 { + font-weight: 400; + font-size: 1em; +} + +body h5 { + font-weight: 400; + font-size: .875em; +} + +body h6 { + font-weight: 400; + font-size: .85em; + color: var(--color-fg-muted); +} + +body p { + margin-top: 0; + margin-bottom: 10px; +} + +body textarea, body input, body tt, body code { + font-family: var(--font); + font-size: inherit; + width:100%; +} + +body ::placeholder { + color: var(--color-fg-subtle); + opacity: 1; +} + +body p, +body blockquote, +body ul, +body ol, +body dl, +body table, +body pre, +body details, .warn { + margin-top: 0; + margin-bottom: 16px; +} + +body ul, body ol { + list-style-position: inside; + padding-left: 0; +} + +#about ul { + list-style: none; +} + +@media screen and (min-width: 768px) { + #container { + display: flex; + flex-direction: row; + } + + #container textarea { + max-height: 70vh !important; + } + + #input { + flex-direction: column; + padding-top: 1em; + width: 50%; + } + + label { + font-weight: bold; + } + + #output { + flex-direction: column; + width: 50%; + height: 100%; + padding: 1rem 0 .5rem 1rem; + } +} + +@media screen and (max-width: 768px) { + body { + margin: 1rem; + } + #container { + display: flex; + flex-direction: column; + } + + #input, #output { + flex-direction: column; + width: 100%; + padding: 1rem 0; + } +} + +#input div { + margin-bottom: 1em; +} + +#clear { + background-color: transparent; + color: var(--color-accent-fg); + text-decoration: none; + cursor: pointer; +} + +#clear:hover { + text-decoration: underline; +} + +#fluid { + width: 100%; + display:flex; + flex-wrap: wrap; +} + +#path { + width: 100%; + flex: 1 0; + padding-left: .5rem; + background-color: var(--color-input-bg); + border: 1px solid var(--color-input-border); + border-radius: .5rem; + box-sizing: border-box; +} + +#go { + display: flex; +} + 
+#go button { + line-height: 1.5rem; + margin-left: .5rem; + font-size: inherit; + padding: 0 1rem; + color: var(--color-button); + background-color: var(--color-button-bg); + border: 1px solid var(--color-button-bg); + border-radius: .5rem; +} + +#go button:hover { + background-color: var(--color-button-hover-bg) +} + +#go button:disabled { + background-color: var(--color-disabled-hover-bg) +} + +@media screen and (max-width:576px) { + #fluid { + flex-flow: column; + } + #go { + padding-top: .5rem; + flex: 0 0 auto; + height: 2.5rem; + } + #go button { + flex: 1 0; + } + #go button:first-child { + margin: 0; + } +} + +.nowrap { + white-space: nowrap; +} + +#actions > *, fieldset input { + margin-right: 0.5em; +} + +#output input { + width: auto; +} + +#output label.opt { + float: right; +} + +#container textarea { + margin-top: .3rem; + background-color: var(--color-canvas-subtle); + border: 1px solid var(--color-input-border); + padding: .5rem; + border-radius: .5rem; + max-height: 40vh; + box-sizing: border-box; +} + +footer { + margin: 1rem; + display: flex; + flex-direction: row; + flex-wrap: wrap; + justify-content: center; + align-items: center; + text-align: center; +} + +footer > * { + width: 100%; +} + +#refs a { + text-decoration: none; +} + +details[open], .warn { + background-color: var(--color-canvas-muted); + padding: .75rem; + border: 1px solid var(--color-input-border); + border-radius: .5rem; +} + +#about h2 { + margin-top: 0; +} + +details summary { + display: none; +} + +body table { + width: 100%; + border-collapse: collapse; +} + +body thead th { + border-bottom: 1px solid var(--color-fg-default); + padding-bottom: .5rem; + font-weight: bold; +} + +td, th { + vertical-align:top; +} + +body tbody td { + border-bottom: 1px solid var(--color-border-default); + padding: .25rem; +} + +body tbody tr td:first-child{ + font-weight: bold; +}
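The execute function in src/main.go earlier in this diff switches between Select and SelectLocated based on the optLocated bit flag before JSON-encoding the result. A minimal sketch of what that difference looks like to a caller, with output shapes based on the LocatedNode example removed above (error handling elided and the sample document chosen for illustration):

    package main

    import (
    	"encoding/json"
    	"fmt"
    )

    import "github.com/theory/jsonpath"

    func main() {
    	path, _ := jsonpath.Parse(`$..author`)
    	var doc any
    	_ = json.Unmarshal([]byte(`{"book":[{"author":"Evelyn Waugh"}]}`), &doc)

    	// Plain selection returns only the matched values.
    	fmt.Println(path.Select(doc)) // [Evelyn Waugh]

    	// Located selection pairs each value with its normalized path,
    	// which is what the playground serializes when optLocated is set.
    	located, _ := json.Marshal(path.SelectLocated(doc))
    	fmt.Println(string(located)) // [{"node":"Evelyn Waugh","path":"$['book'][0]['author']"}]
    }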