diff --git a/.cursorignore b/.cursorignore deleted file mode 100644 index 32948baf..00000000 --- a/.cursorignore +++ /dev/null @@ -1,3 +0,0 @@ -# Add directories or file patterns to ignore during indexing (e.g. foo/ or *.csv) -libpg_query/ -lib/tree_sitter_sql/tree-sitter-sql/ \ No newline at end of file diff --git a/.env b/.env deleted file mode 100644 index dda71743..00000000 --- a/.env +++ /dev/null @@ -1 +0,0 @@ -DATABASE_URL=postgresql://postgres:postgres@127.0.0.1:5432/postgres diff --git a/.github/ISSUE_TEMPLATE/1.Bug_report.md b/.github/ISSUE_TEMPLATE/1.Bug_report.md deleted file mode 100644 index 239bef02..00000000 --- a/.github/ISSUE_TEMPLATE/1.Bug_report.md +++ /dev/null @@ -1,51 +0,0 @@ ---- -name: Bug report -about: Create a bug report for one of the supabase repos or examples. -labels: bug ---- - -# Bug report - - - -- [ ] I confirm this is a bug with Supabase, not with my own application. -- [ ] I confirm I have searched the [Docs](https://docs.supabase.com), GitHub [Discussions](https://github.com/supabase/supabase/discussions), and [Discord](https://discord.supabase.com). - -## Describe the bug - -A clear and concise description of what the bug is. - -## To Reproduce - -Steps to reproduce the behavior, please provide code snippets or a repository: - -1. Go to '…' -2. Click on '…' -3. Scroll down to '…' -4. See error - -## Expected behavior - -A clear and concise description of what you expected to happen. - -## Screenshots - -If applicable, add screenshots to help explain your problem. - -## System information - -- OS: [e.g. macOS, Windows] - -## Additional context - -Add any other context about the problem here. diff --git a/.github/ISSUE_TEMPLATE/2.Improve_docs.md b/.github/ISSUE_TEMPLATE/2.Improve_docs.md deleted file mode 100644 index b7001e33..00000000 --- a/.github/ISSUE_TEMPLATE/2.Improve_docs.md +++ /dev/null @@ -1,23 +0,0 @@ ---- -name: Improve documentation -about: Suggest an improvement to our documentation -labels: documentation ---- - -# Improve documentation - -## Link - -Add a link to the page which needs improvement (if relevant) - -## Describe the problem - -Is the documentation missing? Or is it confusing? Why is it confusing? - -## Describe the improvement - -A clear and concise description of the improvement. - -## Additional context - -Add any other context or screenshots that help clarify your question. \ No newline at end of file diff --git a/.github/ISSUE_TEMPLATE/3.Create_a_chore.md b/.github/ISSUE_TEMPLATE/3.Create_a_chore.md deleted file mode 100644 index 249d1793..00000000 --- a/.github/ISSUE_TEMPLATE/3.Create_a_chore.md +++ /dev/null @@ -1,15 +0,0 @@ ---- -name: Create a chore -about: Changes to build processes, tools, refactors. -labels: chore ---- - -# Chore - -## Describe the chore - -A clear and concise description of what the chore is. - -## Additional context - -Add any other context or screenshots that help clarify the task. \ No newline at end of file diff --git a/.github/ISSUE_TEMPLATE/4.Planned_work.md b/.github/ISSUE_TEMPLATE/4.Planned_work.md deleted file mode 100644 index dc08e5b2..00000000 --- a/.github/ISSUE_TEMPLATE/4.Planned_work.md +++ /dev/null @@ -1,7 +0,0 @@ ---- -name: Planned work -about: Planned feature work. Limited to contributors only. 
-labels: planned --- - -# Planned Work diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml deleted file mode 100644 index 49086d4a..00000000 --- a/.github/ISSUE_TEMPLATE/config.yml +++ /dev/null @@ -1,11 +0,0 @@ -blank_issues_enabled: false -contact_links: - - name: Feature request - url: https://github.com/supabase/postgres_lsp/discussions/categories/ideas - about: Request a new feature or example. - - name: Ask a question - url: https://github.com/supabase/postgres_lsp/discussions/categories/q-a - about: Ask questions and discuss with other community members. - - name: Want to work with us? - url: https://supabase.io/humans.txt - about: Want to work with us? Get in touch! diff --git a/.github/actions/free-disk-space/action.yml b/.github/actions/free-disk-space/action.yml deleted file mode 100644 index c55bf403..00000000 --- a/.github/actions/free-disk-space/action.yml +++ /dev/null @@ -1,16 +0,0 @@ -name: Free Disk Space -description: Free up disk space on the runner -runs: - using: composite - steps: - - name: Free Disk Space (Ubuntu) - if: runner.os == 'Linux' - uses: jlumbroso/free-disk-space@main - with: - # We need to reclaim some space, but uninstalling everything takes - # too long. So we'll just remove some of the larger packages. - # https://github.com/jlumbroso/free-disk-space/pull/26 - android: true - dotnet: true - haskell: true - large-packages: false diff --git a/.github/workflows/deploy_docs.yml b/.github/workflows/deploy_docs.yml deleted file mode 100644 index aabd75cd..00000000 --- a/.github/workflows/deploy_docs.yml +++ /dev/null @@ -1,55 +0,0 @@ -name: Build and Deploy Documentation - -on: - release: - types: [released] - workflow_dispatch: - -permissions: - contents: write - pages: write - id-token: write - -jobs: - build: - runs-on: ubuntu-latest - - steps: - - name: Checkout code - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - - name: Install uv - uses: astral-sh/setup-uv@v5 - with: - enable-cache: true - - - name: Set up Python - run: uv python install - - - name: Install the project - run: uv sync --all-extras --dev - - - run: uv run mkdocs gh-deploy --force - - deploy: - needs: build - environment: - name: github-pages - url: ${{ steps.deployment.outputs.page_url }} - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: actions/checkout@v4 - with: - ref: gh-pages - - name: Setup Pages - uses: actions/configure-pages@v5 - - name: Upload artifact - uses: actions/upload-pages-artifact@v3 - with: - path: '.'
- - name: Deploy to GitHub Pages - id: deployment - uses: actions/deploy-pages@v4 diff --git a/.github/workflows/publish.dispatch.yml b/.github/workflows/publish.dispatch.yml deleted file mode 100644 index 0f1b3d5f..00000000 --- a/.github/workflows/publish.dispatch.yml +++ /dev/null @@ -1,72 +0,0 @@ -name: Publish NPM (Manual) - -on: - workflow_dispatch: - inputs: - release-tag: - type: string - required: true - description: Release Tag to Publish - -jobs: - validate_tag: - runs-on: ubuntu-latest - outputs: - is-prerelease: ${{ steps.validate-release.outputs.is-prerelease }} - steps: - - uses: actions/github-script@v7 - id: validate-release - with: - script: | - /** the "core" module does not have access to workflow_dispatch inputs */ - const tag = '${{ inputs.release-tag }}'; - - /** Releases don't have a guaranteed order, so we'll have to paginate */ - let exhausted = false; - let page = 1; - while (!exhausted) { - const releases = await github.rest.repos.listReleases({ - owner: context.repo.owner, - repo: context.repo.repo, - page, - per_page: 100, - }).then(r => r.data); - - const matchingRelease = releases.find(r => r.tag_name === tag); - if (matchingRelease) { - core.setOutput('has-release', 'true'); - core.setOutput('is-prerelease', matchingRelease.prerelease.toString()); - return; - } - - if (releases.length < 100) { - exhausted = true; - } else if (page >= 10) { - throw new Error("We iterated over 10 pages. Does the script work?"); - } else { - page++ - } - - } - - core.setOutput('has-release', 'false'); - core.setOutput('is-prerelease', 'false'); - - - name: Abort - if: steps.validate-release.outputs.has-release != 'true' - run: | - { - echo "Tag ${{ github.event.inputs.release-tag }} not found." - exit 1 - } - - publish_npm: - needs: validate_tag - uses: ./.github/workflows/publish.reusable.yml - permissions: - contents: write - id-token: write - with: - release-tag: ${{ github.event.inputs.release-tag }} - is-prerelease: ${{ needs.validate_tag.outputs.is-prerelease }} - secrets: inherit diff --git a/.github/workflows/publish.reusable.yml b/.github/workflows/publish.reusable.yml deleted file mode 100644 index 31e625d7..00000000 --- a/.github/workflows/publish.reusable.yml +++ /dev/null @@ -1,66 +0,0 @@ -name: Publish to NPM & Brew - -on: - workflow_call: - inputs: - release-tag: - type: string - required: true - is-prerelease: - type: string - required: true - -jobs: - publish: - name: Publish All the Things - runs-on: ubuntu-latest - permissions: - contents: write - id-token: write - steps: - - uses: actions/checkout@v4 - - - name: Install Node - uses: actions/setup-node@v4 - with: - node-version: lts/* - registry-url: "https://registry.npmjs.org" - - - name: Generate Packages - id: generate-packages - run: node packages/@postgrestools/postgrestools/scripts/generate-packages.mjs - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - RELEASE_TAG: ${{ inputs.release-tag }} - PRERELEASE: ${{ inputs.is-prerelease }} - - - name: Verify NPM TOKEN exists - run: | - if [ -z "${{ secrets.NPM_TOKEN }}" ]; then - echo "Secret is not defined" - exit 1 - else - echo "Secret is defined" - fi - - - name: Print package.json - run: | - cat packages/@postgrestools/postgrestools/package.json - - - name: Publish npm packages as nightly - if: inputs.is-prerelease == 'true' - run: | - for package in packages/@postgrestools/*; do - npm publish "$package" --tag nightly --access public --provenance - done - env: - NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} # - - - name: Publish npm packages as latest - 
if: inputs.is-prerelease != 'true' - run: | - for package in packages/@postgrestools/*; do - npm publish "$package" --tag latest --access public --provenance - done - env: - NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} diff --git a/.github/workflows/publish.trigger.yml b/.github/workflows/publish.trigger.yml deleted file mode 100644 index b2a102a5..00000000 --- a/.github/workflows/publish.trigger.yml +++ /dev/null @@ -1,16 +0,0 @@ -name: Publish NPM (Automatic) - -on: - release: - types: [released, prereleased] - -jobs: - publish_npm: - uses: ./.github/workflows/publish.reusable.yml - permissions: - contents: write - id-token: write - with: - release-tag: ${{ github.event.release.tag_name }} - is-prerelease: ${{ github.event.release.prerelease }} - secrets: inherit diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml deleted file mode 100644 index f79392b7..00000000 --- a/.github/workflows/pull_request.yml +++ /dev/null @@ -1,243 +0,0 @@ -name: Pull Request - -on: - workflow_dispatch: - pull_request: - paths: # Only run when changes are made to rust code or root Cargo - - "crates/**" - - "lib/**" - - "fuzz/**" - - "xtask/**" - - "Cargo.toml" - - "Cargo.lock" - - "rust-toolchain.toml" - - "rustfmt.toml" - # or in js packages - - "packages/**" - # or in workflows - - ".github/workflows/**" - -concurrency: - group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.ref }} - cancel-in-progress: true - -env: - RUST_LOG: info - RUST_BACKTRACE: 1 - RUSTUP_WINDOWS_PATH_ADD_BIN: 1 - -jobs: - format: - name: Format - runs-on: ubuntu-latest - steps: - - name: Checkout PR branch - uses: actions/checkout@v4 - - - name: Free Disk Space - uses: ./.github/actions/free-disk-space - - - name: Install toolchain - uses: moonrepo/setup-rust@v1 - with: - components: rustfmt - bins: taplo-cli - cache-base: main - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - - name: Setup Bun - uses: oven-sh/setup-bun@v2 - - - name: Install JS dependencies - run: bun install - - - name: Setup Just - uses: extractions/setup-just@v3 - - - name: Echo Tool Versions - run: | - just format-ci-versions - - - name: Run format - run: | - just format-ci - - actionlint: - name: Lint GitHub Actions - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - name: Download actionlint - id: get_actionlint - run: bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/main/scripts/download-actionlint.bash) - shell: bash - - name: Check workflow files - run: ${{ steps.get_actionlint.outputs.executable }} -color - shell: bash - - lint: - name: Lint Project - runs-on: ubuntu-latest - services: - postgres: - image: postgres:latest - env: - POSTGRES_USER: postgres - POSTGRES_PASSWORD: postgres - POSTGRES_DB: postgres - ports: - - 5432:5432 - steps: - - name: Checkout PR Branch - uses: actions/checkout@v4 - with: - submodules: true - - name: Free Disk Space - uses: ./.github/actions/free-disk-space - - name: Install toolchain - uses: moonrepo/setup-rust@v1 - with: - components: clippy - cache-base: main - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - - name: Setup sqlx-cli - run: cargo install sqlx-cli - - - name: Setup Bun - uses: oven-sh/setup-bun@v2 - - - name: Install JS dependencies - run: bun install - - - name: Setup Just - uses: extractions/setup-just@v3 - - - name: Echo Tool Versions - run: | - just lint-ci-versions - - - name: Run Lints - run: | - just lint-ci - - - name: Check for changes - run: | - if [[ $(git status --porcelain) ]]; then - git status - git diff - exit 1 - 
fi - - test: - name: Test - runs-on: ${{ matrix.os }} - strategy: - matrix: - include: - # use the same images we use for compiling - - os: windows-2022 - - os: ubuntu-22.04 - steps: - - name: Checkout PR branch - uses: actions/checkout@v4 - with: - submodules: true - - name: Free Disk Space - uses: ./.github/actions/free-disk-space - - name: Install toolchain - uses: moonrepo/setup-rust@v1 - with: - cache-base: main - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - # running containers via `services` only works on linux - # https://github.com/actions/runner/issues/1866 - - name: Setup postgres - uses: ikalnytskyi/action-setup-postgres@v7 - - name: Run tests - run: cargo test --workspace - - test-js-bindings: - name: - Test JS Bindings - # use the same image we use for compiling - runs-on: ubuntu-22.04 - services: - postgres: - image: postgres:latest - env: - POSTGRES_USER: postgres - POSTGRES_PASSWORD: postgres - POSTGRES_DB: postgres - ports: - - 5432:5432 - steps: - - name: Checkout PR branch - uses: actions/checkout@v4 - with: - submodules: true - - name: Free Disk Space - uses: ./.github/actions/free-disk-space - - name: Install toolchain - uses: moonrepo/setup-rust@v1 - with: - cache-base: main - - name: Build main binary - run: cargo build -p pgt_cli --release - - name: Setup Bun - uses: oven-sh/setup-bun@v2 - - name: Install JS dependencies - run: bun install - - name: Build TypeScript code - working-directory: packages/@postgrestools/backend-jsonrpc - run: bun run build - - name: Run JS tests - working-directory: packages/@postgrestools/backend-jsonrpc - run: bun run test - - codegen: - name: Check Codegen - runs-on: ubuntu-22.04 - services: - postgres: - image: postgres:latest - env: - POSTGRES_USER: postgres - POSTGRES_PASSWORD: postgres - POSTGRES_DB: postgres - ports: - - 5432:5432 - steps: - - name: Checkout PR branch - uses: actions/checkout@v4 - with: - submodules: true - - name: Free Disk Space - uses: ./.github/actions/free-disk-space - - name: Install toolchain - uses: moonrepo/setup-rust@v1 - with: - cache-base: main - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - name: Ensure RustFMT on nightly toolchain - run: rustup component add rustfmt --toolchain nightly - - name: echo toolchain - run: rustup show - - name: Run the analyser codegen - run: cargo run -p xtask_codegen -- analyser - - name: Run the configuration codegen - run: cargo run -p xtask_codegen -- configuration - - name: Run the docs codegen - run: cargo run -p docs_codegen - - name: Check for git diff -- run "just ready" if you see an error - run: | - if [[ $(git status --porcelain) ]]; then - git status - git diff - exit 1 - fi diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml deleted file mode 100644 index 934edba9..00000000 --- a/.github/workflows/release.yml +++ /dev/null @@ -1,168 +0,0 @@ -name: Release Binary - -on: - workflow_dispatch: - -permissions: - contents: write - -env: - # Need these guys for cross-compilation - CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER: aarch64-linux-gnu-gcc - CARGO_TARGET_AARCH64_UNKNOWN_LINUX_MUSL_LINKER: aarch64-linux-gnu-gcc - -jobs: - # windows does not run git cliff so we need to do it here - extract_version: - name: Extract Version - runs-on: ubuntu-latest - outputs: - version: ${{ steps.set_version.outputs.version }} - - steps: - - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - - name: Set up git-cliff - uses: kenji-miyake/setup-git-cliff@v1 - - - name: Set version name - id: set_version - run: echo "version=$(git cliff 
--bumped-version)" >> "$GITHUB_OUTPUT" - - build_and_test: - name: Build & Test for ${{ matrix.config.target }} - needs: extract_version - strategy: - matrix: - config: - - { os: ubuntu-22.04, target: x86_64-unknown-linux-gnu } - - { os: ubuntu-22.04, target: aarch64-unknown-linux-gnu } - - { os: macos-14, target: x86_64-apple-darwin } - - { os: macos-14, target: aarch64-apple-darwin } - - { os: windows-2022, target: x86_64-pc-windows-msvc } - - { os: windows-2022, target: aarch64-pc-windows-msvc } - - runs-on: ${{ matrix.config.os }} - - outputs: - artifact_url: ${{ steps.upload-artifacts.outputs.artifact-url }} - - steps: - - uses: actions/checkout@v4 - with: - submodules: true - - uses: actions-rust-lang/setup-rust-toolchain@v1 - with: - target: ${{ matrix.config.target }} - - - uses: Swatinem/rust-cache@v2 - id: rust-cache - - # The Aarch64 Linux is a special snowflake, we need to install its toolchain - - name: Install arm64 toolchain - if: matrix.config.target == 'aarch64-unknown-linux-gnu' - run: | - sudo apt-get update - sudo apt-get install -y gcc-aarch64-linux-gnu - - # running containers via `services` only works on linux - # https://github.com/actions/runner/issues/1866 - - name: 🐘 Setup postgres - uses: ikalnytskyi/action-setup-postgres@v7 - - - name: 🧪 Run Tests - run: cargo test --release - env: - DATABASE_URL: postgres://postgres:postgres@localhost:5432/postgres - - - name: 🛠️ Run Build - run: cargo build -p pgt_cli --release --target ${{ matrix.config.target }} - env: - # Strip all debug symbols from the resulting binaries - RUSTFLAGS: "-C strip=symbols -C codegen-units=1" - # Inline the version in the CLI binary - PGT_VERSION: ${{ needs.extract_version.outputs.version }} - - # windows is a special snowflake too, it saves binaries as .exe - - name: 👦 Name the Binary - if: matrix.config.os == 'windows-2022' - run: | - mkdir dist - cp target/${{ matrix.config.target }}/release/postgrestools.exe ./dist/postgrestools_${{ matrix.config.target }} - - name: 👦 Name the Binary - if: matrix.config.os != 'windows-2022' - run: | - mkdir dist - cp target/${{ matrix.config.target }}/release/postgrestools ./dist/postgrestools_${{ matrix.config.target }} - - # It is not possible to return the artifacts from the matrix jobs individually: Matrix outputs overwrite each other. - # A common workaround is to upload and download the resulting artifacts. - - name: 👆 Upload Artifacts - id: upload-artifacts - uses: actions/upload-artifact@v4 - with: - name: postgrestools_${{ matrix.config.target }} - path: ./dist/postgrestools_* - # The default compression level is 6; this took the binary down from 350 to 330MB. - # It is recommended to use a lower level for binaries, since the compressed result is not much smaller, - # and the higher levels of compression take much longer. 
- compression-level: 2 - if-no-files-found: error - - create_changelog_and_release: - runs-on: ubuntu-latest - needs: [extract_version, build_and_test] # make sure that tests & build work correctly - steps: - - name: Checkout Repo - uses: actions/checkout@v4 - with: - # we need all commits to create a changelog - fetch-depth: 0 - - - name: 📝 Create Changelog - uses: orhun/git-cliff-action@v3 - id: create_changelog - with: - config: cliff.toml - args: --bump --unreleased - env: - GITHUB_REPO: ${{ github.repository }} - - - name: Ensure tag matches - if: steps.create_changelog.outputs.version != needs.extract_version.outputs.version - run: exit 1 - - - name: 👇 Download Artifacts - uses: actions/download-artifact@v4 - id: download - with: - merge-multiple: true - pattern: postgrestools_* - - - name: 📂 Create Release - uses: softprops/action-gh-release@v2 - id: create-release - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - token: ${{ secrets.GITHUB_TOKEN }} - body: ${{ steps.create_changelog.outputs.content }} - tag_name: ${{ steps.create_changelog.outputs.version }} - files: | - postgrestools_* - docs/schemas/latest/schema.json - fail_on_unmatched_files: true - draft: true - - - name: ✅ Output Link to Workflow Summary - run: | - { - echo "# 🚀 Release completed!" - echo "" - echo "Here is the URL to the Release Draft:" - echo "" - echo "[Link](${{ steps.create-release.outputs.url }})" - echo "" - } >> "$GITHUB_STEP_SUMMARY" diff --git a/.gitignore b/.gitignore deleted file mode 100644 index b16dfab6..00000000 --- a/.gitignore +++ /dev/null @@ -1,25 +0,0 @@ -# Generated by Cargo -# will have compiled files and executables -debug/ -target/ - -# These are backup files generated by rustfmt -**/*.rs.bk - -# MSVC Windows builds of rustc generate these, which store debugging information -*.pdb - - -# Added by cargo - -/target - -# File system -.DS_Store -desktop.ini - -*.log - -node_modules/ - -**/dist/ diff --git a/.gitmodules b/.gitmodules deleted file mode 100644 index 4b56d748..00000000 --- a/.gitmodules +++ /dev/null @@ -1,8 +0,0 @@ -[submodule "libpg_query"] - path = libpg_query - url = https://github.com/pganalyze/libpg_query.git - branch = 17-latest -[submodule "crates/tree_sitter_sql/tree-sitter-sql"] - path = lib/tree_sitter_sql/tree-sitter-sql - url = https://github.com/DerekStride/tree-sitter-sql - branch = gh-pages diff --git a/.nojekyll b/.nojekyll new file mode 100644 index 00000000..e69de29b diff --git a/.sqlx/query-1c29eca62591ae2597581be806dd572b3d79c7b8b9b7ffa8915806c947095a96.json b/.sqlx/query-1c29eca62591ae2597581be806dd572b3d79c7b8b9b7ffa8915806c947095a96.json deleted file mode 100644 index fcd4901e..00000000 --- a/.sqlx/query-1c29eca62591ae2597581be806dd572b3d79c7b8b9b7ffa8915806c947095a96.json +++ /dev/null @@ -1,56 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "select\n t.oid :: int8 as \"id!\",\n t.typname as name,\n n.nspname as \"schema!\",\n format_type (t.oid, null) as \"format!\",\n coalesce(t_enums.enums, '[]') as enums,\n coalesce(t_attributes.attributes, '[]') as attributes,\n obj_description (t.oid, 'pg_type') as comment\nfrom\n pg_type t\n left join pg_namespace n on n.oid = t.typnamespace\n left join (\n select\n enumtypid,\n jsonb_agg(\n enumlabel\n order by\n enumsortorder\n ) as enums\n from\n pg_enum\n group by\n enumtypid\n ) as t_enums on t_enums.enumtypid = t.oid\n left join (\n select\n oid,\n jsonb_agg(\n jsonb_build_object('name', a.attname, 'type_id', a.atttypid :: int8)\n order by\n a.attnum asc\n ) as attributes\n from\n pg_class c\n 
join pg_attribute a on a.attrelid = c.oid\n where\n c.relkind = 'c'\n and not a.attisdropped\n group by\n c.oid\n ) as t_attributes on t_attributes.oid = t.typrelid\nwhere\n (\n t.typrelid = 0\n or (\n select\n c.relkind = 'c'\n from\n pg_class c\n where\n c.oid = t.typrelid\n )\n );", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "id!", - "type_info": "Int8" - }, - { - "ordinal": 1, - "name": "name", - "type_info": "Name" - }, - { - "ordinal": 2, - "name": "schema!", - "type_info": "Name" - }, - { - "ordinal": 3, - "name": "format!", - "type_info": "Text" - }, - { - "ordinal": 4, - "name": "enums", - "type_info": "Jsonb" - }, - { - "ordinal": 5, - "name": "attributes", - "type_info": "Jsonb" - }, - { - "ordinal": 6, - "name": "comment", - "type_info": "Text" - } - ], - "parameters": { - "Left": [] - }, - "nullable": [ - null, - false, - true, - null, - null, - null, - null - ] - }, - "hash": "1c29eca62591ae2597581be806dd572b3d79c7b8b9b7ffa8915806c947095a96" -} diff --git a/.sqlx/query-36862f7f9d2d1c50ba253b28a7648e76ff7e255960a4ce5466674ff35a97b151.json b/.sqlx/query-36862f7f9d2d1c50ba253b28a7648e76ff7e255960a4ce5466674ff35a97b151.json deleted file mode 100644 index 6255c9b9..00000000 --- a/.sqlx/query-36862f7f9d2d1c50ba253b28a7648e76ff7e255960a4ce5466674ff35a97b151.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "select\n n.oid :: int8 as \"id!\",\n n.nspname as name,\n u.rolname as \"owner!\"\nfrom\n pg_namespace n,\n pg_roles u\nwhere\n n.nspowner = u.oid\n and (\n pg_has_role(n.nspowner, 'USAGE')\n or has_schema_privilege(n.oid, 'CREATE, USAGE')\n )\n and not pg_catalog.starts_with(n.nspname, 'pg_temp_')\n and not pg_catalog.starts_with(n.nspname, 'pg_toast_temp_');", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "id!", - "type_info": "Int8" - }, - { - "ordinal": 1, - "name": "name", - "type_info": "Name" - }, - { - "ordinal": 2, - "name": "owner!", - "type_info": "Name" - } - ], - "parameters": { - "Left": [] - }, - "nullable": [ - null, - false, - true - ] - }, - "hash": "36862f7f9d2d1c50ba253b28a7648e76ff7e255960a4ce5466674ff35a97b151" -} diff --git a/.sqlx/query-47bbad9dc2cec0231ef726790a9b0a5d9c628c6a2704f5523fb9ee45414350c7.json b/.sqlx/query-47bbad9dc2cec0231ef726790a9b0a5d9c628c6a2704f5523fb9ee45414350c7.json deleted file mode 100644 index 01d857f8..00000000 --- a/.sqlx/query-47bbad9dc2cec0231ef726790a9b0a5d9c628c6a2704f5523fb9ee45414350c7.json +++ /dev/null @@ -1,62 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "select \n schemaname as \"schema_name!\", \n tablename as \"table_name!\", \n policyname as \"name!\", \n permissive as \"is_permissive!\", \n roles as \"role_names!\", \n cmd as \"command!\", \n qual as \"security_qualification\", \n with_check\nfrom \n pg_catalog.pg_policies;", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "schema_name!", - "type_info": "Name" - }, - { - "ordinal": 1, - "name": "table_name!", - "type_info": "Name" - }, - { - "ordinal": 2, - "name": "name!", - "type_info": "Name" - }, - { - "ordinal": 3, - "name": "is_permissive!", - "type_info": "Text" - }, - { - "ordinal": 4, - "name": "role_names!", - "type_info": "NameArray" - }, - { - "ordinal": 5, - "name": "command!", - "type_info": "Text" - }, - { - "ordinal": 6, - "name": "security_qualification", - "type_info": "Text" - }, - { - "ordinal": 7, - "name": "with_check", - "type_info": "Text" - } - ], - "parameters": { - "Left": [] - }, - "nullable": [ - true, - true, - true, - true, - true, - true, - true, - true - ] - 
}, - "hash": "47bbad9dc2cec0231ef726790a9b0a5d9c628c6a2704f5523fb9ee45414350c7" -} diff --git a/.sqlx/query-5e12c1d242ea9fcc68c20807b72300a7e131d2fb17fc74bd7f40a60b68df56c0.json b/.sqlx/query-5e12c1d242ea9fcc68c20807b72300a7e131d2fb17fc74bd7f40a60b68df56c0.json deleted file mode 100644 index fa456c71..00000000 --- a/.sqlx/query-5e12c1d242ea9fcc68c20807b72300a7e131d2fb17fc74bd7f40a60b68df56c0.json +++ /dev/null @@ -1,44 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "select \n rolname as \"name!\", \n rolsuper as \"is_super_user!\", \n rolcreatedb as \"can_create_db!\", \n rolcanlogin as \"can_login!\",\n rolbypassrls as \"can_bypass_rls!\"\nfrom pg_catalog.pg_roles;", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "name!", - "type_info": "Name" - }, - { - "ordinal": 1, - "name": "is_super_user!", - "type_info": "Bool" - }, - { - "ordinal": 2, - "name": "can_create_db!", - "type_info": "Bool" - }, - { - "ordinal": 3, - "name": "can_login!", - "type_info": "Bool" - }, - { - "ordinal": 4, - "name": "can_bypass_rls!", - "type_info": "Bool" - } - ], - "parameters": { - "Left": [] - }, - "nullable": [ - true, - true, - true, - true, - true - ] - }, - "hash": "5e12c1d242ea9fcc68c20807b72300a7e131d2fb17fc74bd7f40a60b68df56c0" -} diff --git a/.sqlx/query-64d9718b07516f3d2720cb7aa79e496f5337cadbad7a3fb03ccd3e3c21b71389.json b/.sqlx/query-64d9718b07516f3d2720cb7aa79e496f5337cadbad7a3fb03ccd3e3c21b71389.json deleted file mode 100644 index 43d63459..00000000 --- a/.sqlx/query-64d9718b07516f3d2720cb7aa79e496f5337cadbad7a3fb03ccd3e3c21b71389.json +++ /dev/null @@ -1,104 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "with functions as (\n select\n oid,\n proname,\n prosrc,\n prorettype,\n proretset,\n provolatile,\n prosecdef,\n prolang,\n pronamespace,\n proconfig,\n -- proargmodes is null when all arg modes are IN\n coalesce(\n p.proargmodes,\n array_fill(\n 'i' :: text,\n array [cardinality(coalesce(p.proallargtypes, p.proargtypes))]\n )\n ) as arg_modes,\n -- proargnames is null when all args are unnamed\n coalesce(\n p.proargnames,\n array_fill(\n '' :: text,\n array [cardinality(coalesce(p.proallargtypes, p.proargtypes))]\n )\n ) as arg_names,\n -- proallargtypes is null when all arg modes are IN\n coalesce(p.proallargtypes, p.proargtypes) as arg_types,\n array_cat(\n array_fill(false, array [pronargs - pronargdefaults]),\n array_fill(true, array [pronargdefaults])\n ) as arg_has_defaults\n from\n pg_proc as p\n where\n p.prokind = 'f'\n)\nselect\n f.oid :: int8 as \"id!\",\n n.nspname as \"schema!\",\n f.proname as \"name!\",\n l.lanname as \"language!\",\n case\n when l.lanname = 'internal' then null\n else f.prosrc\n end as body,\n case\n when l.lanname = 'internal' then null\n else pg_get_functiondef(f.oid)\n end as definition,\n coalesce(f_args.args, '[]') as args,\n nullif(pg_get_function_arguments(f.oid), '') as argument_types,\n nullif(pg_get_function_identity_arguments(f.oid), '') as identity_argument_types,\n f.prorettype :: int8 as \"return_type_id!\",\n pg_get_function_result(f.oid) as \"return_type!\",\n nullif(rt.typrelid :: int8, 0) as return_type_relation_id,\n f.proretset as is_set_returning_function,\n case\n when f.provolatile = 'i' then 'IMMUTABLE'\n when f.provolatile = 's' then 'STABLE'\n when f.provolatile = 'v' then 'VOLATILE'\n end as behavior,\n f.prosecdef as security_definer\nfrom\n functions f\n left join pg_namespace n on f.pronamespace = n.oid\n left join pg_language l on f.prolang = l.oid\n left join pg_type rt on rt.oid = f.prorettype\n left join 
(\n select\n oid,\n jsonb_object_agg(param, value) filter (\n where\n param is not null\n ) as config_params\n from\n (\n select\n oid,\n (string_to_array(unnest(proconfig), '=')) [1] as param,\n (string_to_array(unnest(proconfig), '=')) [2] as value\n from\n functions\n ) as t\n group by\n oid\n ) f_config on f_config.oid = f.oid\n left join (\n select\n oid,\n jsonb_agg(\n jsonb_build_object(\n 'mode',\n t2.mode,\n 'name',\n name,\n 'type_id',\n type_id,\n 'has_default',\n has_default\n )\n ) as args\n from\n (\n select\n oid,\n unnest(arg_modes) as mode,\n unnest(arg_names) as name,\n unnest(arg_types) :: int8 as type_id,\n unnest(arg_has_defaults) as has_default\n from\n functions\n ) as t1,\n lateral (\n select\n case\n when t1.mode = 'i' then 'in'\n when t1.mode = 'o' then 'out'\n when t1.mode = 'b' then 'inout'\n when t1.mode = 'v' then 'variadic'\n else 'table'\n end as mode\n ) as t2\n group by\n t1.oid\n ) f_args on f_args.oid = f.oid;", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "id!", - "type_info": "Int8" - }, - { - "ordinal": 1, - "name": "schema!", - "type_info": "Name" - }, - { - "ordinal": 2, - "name": "name!", - "type_info": "Name" - }, - { - "ordinal": 3, - "name": "language!", - "type_info": "Name" - }, - { - "ordinal": 4, - "name": "body", - "type_info": "Text" - }, - { - "ordinal": 5, - "name": "definition", - "type_info": "Text" - }, - { - "ordinal": 6, - "name": "args", - "type_info": "Jsonb" - }, - { - "ordinal": 7, - "name": "argument_types", - "type_info": "Text" - }, - { - "ordinal": 8, - "name": "identity_argument_types", - "type_info": "Text" - }, - { - "ordinal": 9, - "name": "return_type_id!", - "type_info": "Int8" - }, - { - "ordinal": 10, - "name": "return_type!", - "type_info": "Text" - }, - { - "ordinal": 11, - "name": "return_type_relation_id", - "type_info": "Int8" - }, - { - "ordinal": 12, - "name": "is_set_returning_function", - "type_info": "Bool" - }, - { - "ordinal": 13, - "name": "behavior", - "type_info": "Text" - }, - { - "ordinal": 14, - "name": "security_definer", - "type_info": "Bool" - } - ], - "parameters": { - "Left": [] - }, - "nullable": [ - null, - true, - false, - true, - null, - null, - null, - null, - null, - null, - null, - null, - false, - null, - false - ] - }, - "hash": "64d9718b07516f3d2720cb7aa79e496f5337cadbad7a3fb03ccd3e3c21b71389" -} diff --git a/.sqlx/query-66d92238c94b5f1c99fbf068a0b5cf4c296b594fe9e6cebbdc382acde73f4fb9.json b/.sqlx/query-66d92238c94b5f1c99fbf068a0b5cf4c296b594fe9e6cebbdc382acde73f4fb9.json deleted file mode 100644 index 447ba93b..00000000 --- a/.sqlx/query-66d92238c94b5f1c99fbf068a0b5cf4c296b594fe9e6cebbdc382acde73f4fb9.json +++ /dev/null @@ -1,86 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "select\n c.oid :: int8 as \"id!\",\n nc.nspname as schema,\n c.relname as name,\n c.relkind as table_kind,\n c.relrowsecurity as rls_enabled,\n c.relforcerowsecurity as rls_forced,\n case\n when c.relreplident = 'd' then 'DEFAULT'\n when c.relreplident = 'i' then 'INDEX'\n when c.relreplident = 'f' then 'FULL'\n else 'NOTHING'\n end as \"replica_identity!\",\n pg_total_relation_size(format('%I.%I', nc.nspname, c.relname)) :: int8 as \"bytes!\",\n pg_size_pretty(\n pg_total_relation_size(format('%I.%I', nc.nspname, c.relname))\n ) as \"size!\",\n pg_stat_get_live_tuples(c.oid) as \"live_rows_estimate!\",\n pg_stat_get_dead_tuples(c.oid) as \"dead_rows_estimate!\",\n obj_description(c.oid) as comment\nfrom\n pg_namespace nc\n join pg_class c on nc.oid = c.relnamespace\nwhere\n c.relkind in ('r', 
'p', 'v', 'm')\n and not pg_is_other_temp_schema(nc.oid)\n and (\n pg_has_role(c.relowner, 'USAGE')\n or has_table_privilege(\n c.oid,\n 'SELECT, INSERT, UPDATE, DELETE, TRUNCATE, REFERENCES, TRIGGER'\n )\n or has_any_column_privilege(c.oid, 'SELECT, INSERT, UPDATE, REFERENCES')\n )\ngroup by\n c.oid,\n c.relname,\n c.relrowsecurity,\n c.relforcerowsecurity,\n c.relreplident,\n nc.nspname;", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "id!", - "type_info": "Int8" - }, - { - "ordinal": 1, - "name": "schema", - "type_info": "Name" - }, - { - "ordinal": 2, - "name": "name", - "type_info": "Name" - }, - { - "ordinal": 3, - "name": "table_kind", - "type_info": "Char" - }, - { - "ordinal": 4, - "name": "rls_enabled", - "type_info": "Bool" - }, - { - "ordinal": 5, - "name": "rls_forced", - "type_info": "Bool" - }, - { - "ordinal": 6, - "name": "replica_identity!", - "type_info": "Text" - }, - { - "ordinal": 7, - "name": "bytes!", - "type_info": "Int8" - }, - { - "ordinal": 8, - "name": "size!", - "type_info": "Text" - }, - { - "ordinal": 9, - "name": "live_rows_estimate!", - "type_info": "Int8" - }, - { - "ordinal": 10, - "name": "dead_rows_estimate!", - "type_info": "Int8" - }, - { - "ordinal": 11, - "name": "comment", - "type_info": "Text" - } - ], - "parameters": { - "Left": [] - }, - "nullable": [ - null, - false, - false, - false, - false, - false, - null, - null, - null, - null, - null, - null - ] - }, - "hash": "66d92238c94b5f1c99fbf068a0b5cf4c296b594fe9e6cebbdc382acde73f4fb9" -} diff --git a/.sqlx/query-d61f2f56ce777c99593df240b3a126cacb3c9ed5f915b7e98052d58df98d480b.json b/.sqlx/query-d61f2f56ce777c99593df240b3a126cacb3c9ed5f915b7e98052d58df98d480b.json deleted file mode 100644 index d1766e30..00000000 --- a/.sqlx/query-d61f2f56ce777c99593df240b3a126cacb3c9ed5f915b7e98052d58df98d480b.json +++ /dev/null @@ -1,38 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "select\n version(),\n current_setting('server_version_num') :: int8 AS version_num,\n (\n select\n count(*) :: int8 AS active_connections\n FROM\n pg_stat_activity\n ) AS active_connections,\n current_setting('max_connections') :: int8 AS max_connections;", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "version", - "type_info": "Text" - }, - { - "ordinal": 1, - "name": "version_num", - "type_info": "Int8" - }, - { - "ordinal": 2, - "name": "active_connections", - "type_info": "Int8" - }, - { - "ordinal": 3, - "name": "max_connections", - "type_info": "Int8" - } - ], - "parameters": { - "Left": [] - }, - "nullable": [ - null, - null, - null, - null - ] - }, - "hash": "d61f2f56ce777c99593df240b3a126cacb3c9ed5f915b7e98052d58df98d480b" -} diff --git a/.sqlx/query-df57cc22f7d63847abce1d0d15675ba8951faa1be2ea6b2bf6714b1aa9127a6f.json b/.sqlx/query-df57cc22f7d63847abce1d0d15675ba8951faa1be2ea6b2bf6714b1aa9127a6f.json deleted file mode 100644 index b6fd2fc8..00000000 --- a/.sqlx/query-df57cc22f7d63847abce1d0d15675ba8951faa1be2ea6b2bf6714b1aa9127a6f.json +++ /dev/null @@ -1,44 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "-- we need to join tables from the pg_catalog since \"TRUNCATE\" triggers are \n-- not available in the information_schema.trigger table.\nselect \n t.tgname as \"name!\",\n c.relname as \"table_name!\",\n p.proname as \"proc_name!\",\n n.nspname as \"schema_name!\",\n t.tgtype as \"details_bitmask!\"\nfrom \n pg_catalog.pg_trigger t \n left join pg_catalog.pg_proc p on t.tgfoid = p.oid\n left join pg_catalog.pg_class c on t.tgrelid = c.oid\n left join pg_catalog.pg_namespace n on 
c.relnamespace = n.oid\nwhere \n -- triggers enforcing constraints (e.g. unique fields) should not be included.\n t.tgisinternal = false and \n t.tgconstraint = 0;\n", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "name!", - "type_info": "Name" - }, - { - "ordinal": 1, - "name": "table_name!", - "type_info": "Name" - }, - { - "ordinal": 2, - "name": "proc_name!", - "type_info": "Name" - }, - { - "ordinal": 3, - "name": "schema_name!", - "type_info": "Name" - }, - { - "ordinal": 4, - "name": "details_bitmask!", - "type_info": "Int2" - } - ], - "parameters": { - "Left": [] - }, - "nullable": [ - false, - true, - true, - true, - false - ] - }, - "hash": "df57cc22f7d63847abce1d0d15675ba8951faa1be2ea6b2bf6714b1aa9127a6f" -} diff --git a/.sqlx/query-fa065a78ad10eeace15f64083f4d944b9d45eb6c60d7eece8878ed26b3530484.json b/.sqlx/query-fa065a78ad10eeace15f64083f4d944b9d45eb6c60d7eece8878ed26b3530484.json deleted file mode 100644 index 36723330..00000000 --- a/.sqlx/query-fa065a78ad10eeace15f64083f4d944b9d45eb6c60d7eece8878ed26b3530484.json +++ /dev/null @@ -1,92 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "with\n available_tables as (\n select\n c.relname as table_name,\n c.oid as table_oid,\n c.relkind as class_kind,\n n.nspname as schema_name\n from\n pg_catalog.pg_class c\n join pg_catalog.pg_namespace n on n.oid = c.relnamespace\n where\n -- r: normal tables\n -- v: views\n -- m: materialized views\n -- f: foreign tables\n -- p: partitioned tables\n c.relkind in ('r', 'v', 'm', 'f', 'p')\n ),\n available_indexes as (\n select\n unnest (ix.indkey) as attnum,\n ix.indisprimary as is_primary,\n ix.indisunique as is_unique,\n ix.indrelid as table_oid\n from\n pg_catalog.pg_class c\n join pg_catalog.pg_index ix on c.oid = ix.indexrelid\n where\n c.relkind = 'i'\n )\nselect\n atts.attname as name,\n ts.table_name,\n ts.table_oid :: int8 as \"table_oid!\",\n ts.class_kind :: char as \"class_kind!\",\n ts.schema_name,\n atts.atttypid :: int8 as \"type_id!\",\n tps.typname as \"type_name\",\n not atts.attnotnull as \"is_nullable!\",\n nullif(\n information_schema._pg_char_max_length (atts.atttypid, atts.atttypmod),\n -1\n ) as varchar_length,\n pg_get_expr (def.adbin, def.adrelid) as default_expr,\n coalesce(ix.is_primary, false) as \"is_primary_key!\",\n coalesce(ix.is_unique, false) as \"is_unique!\",\n pg_catalog.col_description (ts.table_oid, atts.attnum) as comment\nfrom\n pg_catalog.pg_attribute atts\n join available_tables ts on atts.attrelid = ts.table_oid\n left join available_indexes ix on atts.attrelid = ix.table_oid\n and atts.attnum = ix.attnum\n left join pg_catalog.pg_attrdef def on atts.attrelid = def.adrelid\n and atts.attnum = def.adnum\n left join pg_catalog.pg_type tps on atts.atttypid = tps.oid\nwhere\n -- system columns, such as `cmax` or `tableoid`, have negative `attnum`s\n atts.attnum >= 0 and atts.atttypid is not null and tps.oid is not null\norder by\n schema_name desc,\n table_name,\n atts.attnum;", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "name", - "type_info": "Name" - }, - { - "ordinal": 1, - "name": "table_name", - "type_info": "Name" - }, - { - "ordinal": 2, - "name": "table_oid!", - "type_info": "Int8" - }, - { - "ordinal": 3, - "name": "class_kind!", - "type_info": "Bpchar" - }, - { - "ordinal": 4, - "name": "schema_name", - "type_info": "Name" - }, - { - "ordinal": 5, - "name": "type_id!", - "type_info": "Int8" - }, - { - "ordinal": 6, - "name": "type_name", - "type_info": "Name" - }, - { - "ordinal": 7, - "name": "is_nullable!", 
- "type_info": "Bool" - }, - { - "ordinal": 8, - "name": "varchar_length", - "type_info": "Int4" - }, - { - "ordinal": 9, - "name": "default_expr", - "type_info": "Text" - }, - { - "ordinal": 10, - "name": "is_primary_key!", - "type_info": "Bool" - }, - { - "ordinal": 11, - "name": "is_unique!", - "type_info": "Bool" - }, - { - "ordinal": 12, - "name": "comment", - "type_info": "Text" - } - ], - "parameters": { - "Left": [] - }, - "nullable": [ - false, - false, - null, - null, - false, - null, - false, - null, - null, - null, - null, - null, - null - ] - }, - "hash": "fa065a78ad10eeace15f64083f4d944b9d45eb6c60d7eece8878ed26b3530484" -} diff --git a/.vscode/settings.json b/.vscode/settings.json deleted file mode 100644 index 43994f87..00000000 --- a/.vscode/settings.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "postgrestools.bin": "./target/debug/postgrestools" -} diff --git a/404.html b/404.html new file mode 100644 index 00000000..3e5728e8 --- /dev/null +++ b/404.html @@ -0,0 +1,122 @@ + + + + + + + + Postgres Language Server + + + + + + + + + + + +
+ + +
+ +
+
+
    +
  • +
  • +
  • +
+
+
+
+
+ + +

404

+ +

Page not found

+ + +
+
+ +
+
+ +
+ +
+ +
+ + + + supabase-community/postgres-language-server + + + + + +
diff --git a/ARCHITECTURE.md b/ARCHITECTURE.md deleted file mode 100644 index 5a10be2d..00000000 --- a/ARCHITECTURE.md +++ /dev/null @@ -1,71 +0,0 @@ -## Architecture - -This document describes the high-level architecture of postgres_lsp. If you want to familiarize yourself with the code base, you are just in the right place! - -> Since the project still evolves rapidly, this document may not be up-to-date. If you find any inconsistency, please let us know by creating an issue. - -### Bird's Eye View - -At the highest level, the postgres language server accepts source code as input, cuts it into individual SQL statements, and parses and analyses each of them. In addition, it connects to a postgres database and stores an in-memory schema cache with all required type information, such as tables, columns and functions. The result of the parsing is used alongside the schema cache to answer queries about a statement. - -The client can submit a delta of input data (typically, a change to a single file), and the server will update the affected statements and their analysis accordingly. The underlying engine makes sure that we only re-parse and re-analyse what is necessary.
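To make the bird's-eye view concrete, here is a minimal sketch of the split-then-analyse pipeline it describes. All names in it (`SchemaCache`, `split_statements`, `check_source`, `analyse`) are invented for illustration and are not the actual crate APIs:

```rust
/// Illustrative stand-in for the in-memory schema cache described above.
struct SchemaCache {
    // tables, columns, functions, ...
}

struct Diagnostic {
    message: String,
}

/// Cut the input source into individual statements (the statement splitter's job).
/// A naive split on `;` is enough for this sketch; the real splitter is smarter.
fn split_statements(source: &str) -> Vec<&str> {
    source
        .split_inclusive(';')
        .map(str::trim)
        .filter(|stmt| !stmt.is_empty())
        .collect()
}

/// Parse and analyse every statement against the schema cache.
fn check_source(source: &str, cache: &SchemaCache) -> Vec<Diagnostic> {
    split_statements(source)
        .into_iter()
        .flat_map(|stmt| analyse(stmt, cache))
        .collect()
}

/// Per-statement features (lint, typecheck, hover, ...) would plug in here.
fn analyse(_stmt: &str, _cache: &SchemaCache) -> Vec<Diagnostic> {
    Vec::new()
}
```

The shape matters for the incremental behaviour described above: on an edit, only the statements whose text actually changed need to be pushed through `analyse` again.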
-### Entry Points - -The main entry point is currently the `pg_lsp` crate, specifically the `main` function in `main.rs`. It spawns the language server and starts listening for incoming messages. The server is implemented in the `server` module. - -There might be an additional entry point for a CLI tool in the future. - -### Code Map - -This section talks briefly about various important directories and data structures. - -#### `lib/` - -Independent libraries that are used by the project but are not specific to postgres. - -#### `crates/pg_lsp` - -The main entry point of the language server. It contains the server implementation and the main loop. - -#### `crates/pg_workspace` - -> This crate will grow significantly in the near future. The current implementation just contains the base data structures and stores the diagnostic results from various features. - -The main API for consumers of the IDE. It stores the internal state of the workspace, such as the schema cache and the parsed statements and their analysis. - -#### `crates/pg_lexer` - -Simple lexer that tokenizes the input source code. Enhances the output of the `pg_query` tokenizer with the missing whitespace tokens. - -#### `crates/pg_statement_splitter` - -Implements the statement splitter, which cuts the input source code into individual statements. - -#### `crates/pg_base_db` - -Implements the base data structures and defines how documents and statements are stored and updated efficiently. - -#### `crates/pg_schema_cache` - -We store an in-memory representation of the database schema to efficiently resolve types. - -#### `crates/pg_query_ext` - -Simple wrapper crate for `pg_query` to expose types and a function to get the root node for an SQL statement. It also hosts any "extensions" to the `pg_query` crate that are not yet contributed upstream. Once all extensions are contributed upstream, this crate will be removed. - -#### `crates/pg_query_proto_parser` - -We use procedural macros a lot to generate repetitive code from the protobuf definition provided by `libpg_query`. The `pg_query_proto_parser` crate is used to parse the proto file into a more usable data structure. - -#### `crates/pg_syntax` - -Implements the CST parser and AST enhancer. The CST parser is what is described in [this blog post](https://supabase.com/blog/postgres-language-server-implementing-parser). The AST enhancer takes in the CST and enriches the AST returned by `pg_query` with a range for each node. - -#### `crates/pg_type_resolver` - -Utility crate used by the feature crates listed below to resolve the source types to the actual types in the schema cache. - -#### `crates/pg_commands`, `crates/pg_completions`, `crates/pg_hover`, `crates/pg_inlay_hints`, `crates/pg_lint`, `crates/pg_typecheck` - -These crates implement the various features of the language server. They are all independent of each other and always operate on the schema cache and a single statement and its parse results. They are intentionally implemented in separate crates and without any language server flavour to make them reusable, e.g. in a later CLI. diff --git a/Cargo.lock b/Cargo.lock deleted file mode 100644 index 4771c8a1..00000000 --- a/Cargo.lock +++ /dev/null @@ -1,5229 +0,0 @@ -# This file is automatically @generated by Cargo. -# It is not intended for manual editing. -version = 4 - -[[package]] -name = "addr2line" -version = "0.24.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1" -dependencies = [ - "gimli", -] - -[[package]] -name = "adler2" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627" - -[[package]] -name = "ahash" -version = "0.8.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" -dependencies = [ - "cfg-if", - "getrandom", - "once_cell", - "version_check", - "zerocopy", -] - -[[package]] -name = "aho-corasick" -version = "1.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" -dependencies = [ - "memchr", -] - -[[package]] -name = "allocator-api2" -version = "0.2.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" - -[[package]] -name = "anes" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299" - -[[package]] -name = "anstream" -version = "0.6.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8acc5369981196006228e28809f761875c0327210a891e941f4c683b3a99529b" -dependencies = [ - "anstyle", - "anstyle-parse", - "anstyle-query", - "anstyle-wincon", - "colorchoice", - "is_terminal_polyfill", - "utf8parse", -] - -[[package]] -name = "anstyle" -version = "1.0.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55cc3b69f167a1ef2e161439aa98aed94e6028e5f9a59be9a6ffb47aef1651f9" - -[[package]] -name = "anstyle-parse" -version = "0.2.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b2d16507662817a6a20a9ea92df6652ee4f94f914589377d69f3b21bc5798a9" -dependencies = [ - "utf8parse", -] - -[[package]] -name = "anstyle-query" -version = "1.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79947af37f4177cfead1110013d678905c37501914fba0efea834c3fe9a8d60c" -dependencies = [ - "windows-sys 0.59.0", -] - -[[package]] -name = "anstyle-wincon" -version = "3.0.6"
-source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2109dbce0e72be3ec00bed26e6a7479ca384ad226efdd66db8fa2e3a38c83125" -dependencies = [ - "anstyle", - "windows-sys 0.59.0", -] - -[[package]] -name = "anyhow" -version = "1.0.94" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1fd03a028ef38ba2276dce7e33fcd6369c158a1bca17946c4b1b701891c1ff7" - -[[package]] -name = "assert_cmd" -version = "2.0.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc1835b7f27878de8525dc71410b5a31cdcc5f230aed5ba5df968e09c201b23d" -dependencies = [ - "anstyle", - "bstr", - "doc-comment", - "libc", - "predicates", - "predicates-core", - "predicates-tree", - "wait-timeout", -] - -[[package]] -name = "async-channel" -version = "1.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81953c529336010edd6d8e358f886d9581267795c61b19475b71314bffa46d35" -dependencies = [ - "concurrent-queue", - "event-listener 2.5.3", - "futures-core", -] - -[[package]] -name = "async-channel" -version = "2.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89b47800b0be77592da0afd425cc03468052844aff33b84e33cc696f64e77b6a" -dependencies = [ - "concurrent-queue", - "event-listener-strategy", - "futures-core", - "pin-project-lite", -] - -[[package]] -name = "async-executor" -version = "1.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30ca9a001c1e8ba5149f91a74362376cc6bc5b919d92d988668657bd570bdcec" -dependencies = [ - "async-task", - "concurrent-queue", - "fastrand 2.3.0", - "futures-lite 2.5.0", - "slab", -] - -[[package]] -name = "async-global-executor" -version = "2.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05b1b633a2115cd122d73b955eadd9916c18c8f510ec9cd1686404c60ad1c29c" -dependencies = [ - "async-channel 2.3.1", - "async-executor", - "async-io 2.4.0", - "async-lock 3.4.0", - "blocking", - "futures-lite 2.5.0", - "once_cell", -] - -[[package]] -name = "async-io" -version = "1.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fc5b45d93ef0529756f812ca52e44c221b35341892d3dcc34132ac02f3dd2af" -dependencies = [ - "async-lock 2.8.0", - "autocfg", - "cfg-if", - "concurrent-queue", - "futures-lite 1.13.0", - "log", - "parking", - "polling 2.8.0", - "rustix 0.37.28", - "slab", - "socket2 0.4.10", - "waker-fn", -] - -[[package]] -name = "async-io" -version = "2.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43a2b323ccce0a1d90b449fd71f2a06ca7faa7c54c2751f06c9bd851fc061059" -dependencies = [ - "async-lock 3.4.0", - "cfg-if", - "concurrent-queue", - "futures-io", - "futures-lite 2.5.0", - "parking", - "polling 3.7.4", - "rustix 0.38.42", - "slab", - "tracing", - "windows-sys 0.59.0", -] - -[[package]] -name = "async-lock" -version = "2.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "287272293e9d8c41773cec55e365490fe034813a2f172f502d6ddcf75b2f582b" -dependencies = [ - "event-listener 2.5.3", -] - -[[package]] -name = "async-lock" -version = "3.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff6e472cdea888a4bd64f342f09b3f50e1886d32afe8df3d663c01140b811b18" -dependencies = [ - "event-listener 5.3.1", - "event-listener-strategy", - "pin-project-lite", -] - -[[package]] -name = "async-std" -version = "1.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"c634475f29802fde2b8f0b505b1bd00dfe4df7d4a000f0b36f7671197d5c3615" -dependencies = [ - "async-channel 1.9.0", - "async-global-executor", - "async-io 2.4.0", - "async-lock 3.4.0", - "crossbeam-utils", - "futures-channel", - "futures-core", - "futures-io", - "futures-lite 2.5.0", - "gloo-timers", - "kv-log-macro", - "log", - "memchr", - "once_cell", - "pin-project-lite", - "pin-utils", - "slab", - "wasm-bindgen-futures", -] - -[[package]] -name = "async-task" -version = "4.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b75356056920673b02621b35afd0f7dda9306d03c79a30f5c56c44cf256e3de" - -[[package]] -name = "async-trait" -version = "0.1.83" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "721cae7de5c34fbb2acd27e21e6d2cf7b886dce0c27388d46c4e6c47ea4318dd" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.90", -] - -[[package]] -name = "atoi" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f28d99ec8bfea296261ca1af174f24225171fea9664ba9003cbebee704810528" -dependencies = [ - "num-traits", -] - -[[package]] -name = "atomic-waker" -version = "1.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" - -[[package]] -name = "auto_impl" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c87f3f15e7794432337fc718554eaa4dc8f04c9677a950ffe366f20a162ae42" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.90", -] - -[[package]] -name = "autocfg" -version = "1.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" - -[[package]] -name = "backtrace" -version = "0.3.74" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d82cb332cdfaed17ae235a638438ac4d4839913cc2af585c3c6746e8f8bee1a" -dependencies = [ - "addr2line", - "cfg-if", - "libc", - "miniz_oxide", - "object", - "rustc-demangle", - "windows-targets 0.52.6", -] - -[[package]] -name = "base64" -version = "0.22.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" - -[[package]] -name = "base64ct" -version = "1.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" - -[[package]] -name = "bindgen" -version = "0.66.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2b84e06fc203107bfbad243f4aba2af864eb7db3b1cf46ea0a023b0b433d2a7" -dependencies = [ - "bitflags 2.6.0", - "cexpr", - "clang-sys", - "lazy_static", - "lazycell", - "log", - "peeking_take_while", - "prettyplease", - "proc-macro2", - "quote", - "regex", - "rustc-hash 1.1.0", - "shlex", - "syn 2.0.90", - "which", -] - -[[package]] -name = "biome_console" -version = "0.5.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c672a9e31e47f8df74549a570ea3245a93ce3404115c724bb16762fcbbfe17e1" -dependencies = [ - "biome_markup", - "biome_text_size", - "schemars", - "serde", - "termcolor", - "unicode-segmentation", - "unicode-width", -] - -[[package]] -name = "biome_deserialize" -version = "0.5.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b4443260d505148169f5fb35634c2a60d8489882f8c9c3f1db8b7cf0cb57632" -dependencies = [ - "biome_console", 
- "biome_deserialize_macros 0.5.7", - "biome_diagnostics", - "biome_json_parser", - "biome_json_syntax", - "biome_rowan", - "bitflags 2.6.0", - "indexmap 1.9.3", - "serde", - "serde_json", - "tracing", -] - -[[package]] -name = "biome_deserialize" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d6f619dc8ca0595ed8850d729ebc71722d4233aba68c5aec7d9993a53e59f3fe" -dependencies = [ - "biome_console", - "biome_deserialize_macros 0.6.0", - "biome_diagnostics", - "biome_json_parser", - "biome_json_syntax", - "biome_rowan", - "bitflags 2.6.0", - "indexmap 2.7.0", - "schemars", - "serde", -] - -[[package]] -name = "biome_deserialize_macros" -version = "0.5.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fc1244cc5f0cc267bd26b601e9ccd6851c6a4d395bba07e27c2de641dc84479" -dependencies = [ - "convert_case", - "proc-macro-error", - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "biome_deserialize_macros" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07c12826fff87ac09f63bbacf8bdf5225dfdf890da04d426f758cbcacf068e3e" -dependencies = [ - "biome_string_case", - "proc-macro-error", - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "biome_diagnostics" -version = "0.5.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe1317b6d610541c4e6a0e1f803a946f153ace3468bbc77a8f273dcb04ee526f" -dependencies = [ - "backtrace", - "biome_console", - "biome_diagnostics_categories", - "biome_diagnostics_macros", - "biome_rowan", - "biome_text_edit", - "biome_text_size", - "bitflags 2.6.0", - "bpaf", - "oxc_resolver", - "serde", - "termcolor", - "unicode-width", -] - -[[package]] -name = "biome_diagnostics_categories" -version = "0.5.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "832080d68a2ee2f198d98ff5d26fc0f5c2566907f773d105a4a049ee07664d19" -dependencies = [ - "quote", - "serde", -] - -[[package]] -name = "biome_diagnostics_macros" -version = "0.5.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "540fec04d2e789fb992128c63d111b650733274afffff1cb3f26c8dff5167d3b" -dependencies = [ - "proc-macro-error", - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "biome_formatter" -version = "0.5.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d351a9dc49ae024220a83c44329ab14a9e66887a7ca51fc7ae875e9e56f626c" -dependencies = [ - "biome_console", - "biome_deserialize 0.5.7", - "biome_deserialize_macros 0.5.7", - "biome_diagnostics", - "biome_rowan", - "cfg-if", - "countme", - "drop_bomb", - "indexmap 1.9.3", - "rustc-hash 1.1.0", - "tracing", - "unicode-width", -] - -[[package]] -name = "biome_js_factory" -version = "0.5.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c9847f4dfd16ee242d12b90f96f6b2eb33238dfc4eac7b5c045e14eebe717b7" -dependencies = [ - "biome_js_syntax", - "biome_rowan", -] - -[[package]] -name = "biome_js_formatter" -version = "0.5.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8bc1f8b67a8fa45555a7a9ea1004eca73c159b7f1941050311d35e312cff3bb8" -dependencies = [ - "biome_console", - "biome_deserialize 0.5.7", - "biome_deserialize_macros 0.5.7", - "biome_diagnostics_categories", - "biome_formatter", - "biome_js_factory", - "biome_js_syntax", - "biome_json_syntax", - "biome_rowan", - "biome_text_size", - "biome_unicode_table", - 
"cfg-if", - "smallvec", - "tracing", - "unicode-width", -] - -[[package]] -name = "biome_js_syntax" -version = "0.5.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38a524bd8b1f5f7b3355dfe2744196227ee15e9aa3446d562deb9ed511cf2015" -dependencies = [ - "biome_console", - "biome_diagnostics", - "biome_rowan", - "serde", -] - -[[package]] -name = "biome_json_factory" -version = "0.5.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e409eb289040f3660689dad178b00b6ac8cfa9a7fffd8225f35cb6b3d36437cf" -dependencies = [ - "biome_json_syntax", - "biome_rowan", -] - -[[package]] -name = "biome_json_parser" -version = "0.5.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c6d23fb9b683e6356c094b4a0cb38f8aa0acee60ce9c3ef24628d21a204de4d" -dependencies = [ - "biome_console", - "biome_diagnostics", - "biome_json_factory", - "biome_json_syntax", - "biome_parser", - "biome_rowan", - "biome_unicode_table", - "tracing", - "unicode-bom", -] - -[[package]] -name = "biome_json_syntax" -version = "0.5.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2645ca57f75680d3d390b2482c35db5850b1d849e1f96151a12f15f4abdb097" -dependencies = [ - "biome_rowan", - "serde", -] - -[[package]] -name = "biome_markup" -version = "0.5.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4a7f11cf91599594528e97d216044ef4e410a103327212d909f215cbafe2fd9c" -dependencies = [ - "proc-macro-error", - "proc-macro2", - "quote", -] - -[[package]] -name = "biome_parser" -version = "0.5.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "955dd999f32c086371d5c0e64b4ea1a50f50c98f1f31a3b9fe17ef47198de19b" -dependencies = [ - "biome_console", - "biome_diagnostics", - "biome_rowan", - "bitflags 2.6.0", - "drop_bomb", -] - -[[package]] -name = "biome_rowan" -version = "0.5.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3c2dc25a7ba6ae89526340034abed6c89fac35b79060786771e32ed4aac77e7" -dependencies = [ - "biome_text_edit", - "biome_text_size", - "countme", - "hashbrown 0.12.3", - "memoffset", - "rustc-hash 1.1.0", - "tracing", -] - -[[package]] -name = "biome_string_case" -version = "0.5.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5868798da491b19a5b27a0bad5d8727e1e65060fa2dac360b382df00ff520774" - -[[package]] -name = "biome_text_edit" -version = "0.5.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d486fdd96d5dad6428213ce64e6b9eb5bfb2fce6387fe901e844d386283de509" -dependencies = [ - "biome_text_size", - "serde", - "similar", -] - -[[package]] -name = "biome_text_size" -version = "0.5.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ec604d15cefdced636255400359aeacfdea5d1e79445efc7aa32a0de7f0319b" -dependencies = [ - "serde", -] - -[[package]] -name = "biome_unicode_table" -version = "0.5.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87e8604d34b02180a58af1dbdaac166f1805f27f5370934142a3246f83870952" - -[[package]] -name = "bitflags" -version = "1.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" - -[[package]] -name = "bitflags" -version = "2.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de" 
-dependencies = [ - "serde", -] - -[[package]] -name = "block-buffer" -version = "0.10.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" -dependencies = [ - "generic-array", -] - -[[package]] -name = "blocking" -version = "1.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "703f41c54fc768e63e091340b424302bb1c29ef4aa0c7f10fe849dfb114d29ea" -dependencies = [ - "async-channel 2.3.1", - "async-task", - "futures-io", - "futures-lite 2.5.0", - "piper", -] - -[[package]] -name = "bpaf" -version = "0.9.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50fd5174866dc2fa2ddc96e8fb800852d37f064f32a45c7b7c2f8fa2c64c77fa" -dependencies = [ - "bpaf_derive", - "owo-colors", - "supports-color", -] - -[[package]] -name = "bpaf_derive" -version = "0.5.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf95d9c7e6aba67f8fc07761091e93254677f4db9e27197adecebc7039a58722" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.90", -] - -[[package]] -name = "bstr" -version = "1.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a68f1f47cdf0ec8ee4b941b2eee2a80cb796db73118c0dd09ac63fbe405be22" -dependencies = [ - "memchr", - "regex-automata 0.4.9", - "serde", -] - -[[package]] -name = "bumpalo" -version = "3.16.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" - -[[package]] -name = "byteorder" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" - -[[package]] -name = "bytes" -version = "1.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "325918d6fe32f23b19878fe4b34794ae41fc19ddbe53b10571a4874d44ffd39b" - -[[package]] -name = "cast" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" - -[[package]] -name = "cc" -version = "1.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "27f657647bcff5394bf56c7317665bbf790a137a50eaaa5c6bfbb9e27a518f2d" -dependencies = [ - "shlex", -] - -[[package]] -name = "cexpr" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6fac387a98bb7c37292057cffc56d62ecb629900026402633ae9160df93a8766" -dependencies = [ - "nom", -] - -[[package]] -name = "cfg-if" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" - -[[package]] -name = "chrono" -version = "0.4.39" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e36cc9d416881d2e24f9a963be5fb1cd90966419ac844274161d10488b3e825" -dependencies = [ - "num-traits", -] - -[[package]] -name = "ciborium" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42e69ffd6f0917f5c029256a24d0161db17cea3997d185db0d35926308770f0e" -dependencies = [ - "ciborium-io", - "ciborium-ll", - "serde", -] - -[[package]] -name = "ciborium-io" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05afea1e0a06c9be33d539b876f1ce3692f4afea2cb41f740e7743225ed1c757" - -[[package]] -name = "ciborium-ll" 
-version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57663b653d948a338bfb3eeba9bb2fd5fcfaecb9e199e87e1eda4d9e8b240fd9" -dependencies = [ - "ciborium-io", - "half", -] - -[[package]] -name = "clang-sys" -version = "1.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b023947811758c97c59bf9d1c188fd619ad4718dcaa767947df1cadb14f39f4" -dependencies = [ - "glob", - "libc", - "libloading", -] - -[[package]] -name = "clap" -version = "4.5.23" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3135e7ec2ef7b10c6ed8950f0f792ed96ee093fa088608f1c76e569722700c84" -dependencies = [ - "clap_builder", - "clap_derive", -] - -[[package]] -name = "clap_builder" -version = "4.5.23" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30582fc632330df2bd26877bde0c1f4470d57c582bbc070376afcd04d8cb4838" -dependencies = [ - "anstream", - "anstyle", - "clap_lex", - "strsim", -] - -[[package]] -name = "clap_derive" -version = "4.5.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ac6a0c7b1a9e9a5186361f67dfa1b88213572f427fb9ab038efb2bd8c582dab" -dependencies = [ - "heck", - "proc-macro2", - "quote", - "syn 2.0.90", -] - -[[package]] -name = "clap_lex" -version = "0.7.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6" - -[[package]] -name = "colorchoice" -version = "1.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990" - -[[package]] -name = "concurrent-queue" -version = "2.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ca0197aee26d1ae37445ee532fefce43251d24cc7c166799f4d46817f1d3973" -dependencies = [ - "crossbeam-utils", -] - -[[package]] -name = "console" -version = "0.15.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e1f83fc076bd6dd27517eacdf25fef6c4dfe5f1d7448bafaaf3a26f13b5e4eb" -dependencies = [ - "encode_unicode", - "lazy_static", - "libc", - "windows-sys 0.52.0", -] - -[[package]] -name = "const-oid" -version = "0.9.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8" - -[[package]] -name = "convert_case" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec182b0ca2f35d8fc196cf3404988fd8b8c739a4d270ff118a398feb0cbec1ca" -dependencies = [ - "unicode-segmentation", -] - -[[package]] -name = "countme" -version = "3.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7704b5fdd17b18ae31c4c1da5a2e0305a2bf17b5249300a9ee9ed7b72114c636" - -[[package]] -name = "cpufeatures" -version = "0.2.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16b80225097f2e5ae4e7179dd2266824648f3e2f49d9134d584b76389d31c4c3" -dependencies = [ - "libc", -] - -[[package]] -name = "crc" -version = "3.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69e6e4d7b33a94f0991c26729976b10ebde1d34c3ee82408fb536164fa10d636" -dependencies = [ - "crc-catalog", -] - -[[package]] -name = "crc-catalog" -version = "2.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5" - -[[package]] -name = "crc32fast" 
-version = "1.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a97769d94ddab943e4510d138150169a2758b5ef3eb191a9ee688de3e23ef7b3" -dependencies = [ - "cfg-if", -] - -[[package]] -name = "criterion" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2b12d017a929603d80db1831cd3a24082f8137ce19c69e6447f54f5fc8d692f" -dependencies = [ - "anes", - "cast", - "ciborium", - "clap", - "criterion-plot", - "is-terminal", - "itertools 0.10.5", - "num-traits", - "once_cell", - "oorandom", - "plotters", - "rayon", - "regex", - "serde", - "serde_derive", - "serde_json", - "tinytemplate", - "walkdir", -] - -[[package]] -name = "criterion-plot" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b50826342786a51a89e2da3a28f1c32b06e387201bc2d19791f622c673706b1" -dependencies = [ - "cast", - "itertools 0.10.5", -] - -[[package]] -name = "crossbeam" -version = "0.8.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1137cd7e7fc0fb5d3c5a8678be38ec56e819125d8d7907411fe24ccb943faca8" -dependencies = [ - "crossbeam-channel", - "crossbeam-deque", - "crossbeam-epoch", - "crossbeam-queue", - "crossbeam-utils", -] - -[[package]] -name = "crossbeam-channel" -version = "0.5.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33480d6946193aa8033910124896ca395333cae7e2d1113d1fef6c3272217df2" -dependencies = [ - "crossbeam-utils", -] - -[[package]] -name = "crossbeam-deque" -version = "0.8.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "613f8cc01fe9cf1a3eb3d7f488fd2fa8388403e97039e2f73692932e291a770d" -dependencies = [ - "crossbeam-epoch", - "crossbeam-utils", -] - -[[package]] -name = "crossbeam-epoch" -version = "0.9.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e" -dependencies = [ - "crossbeam-utils", -] - -[[package]] -name = "crossbeam-queue" -version = "0.3.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df0346b5d5e76ac2fe4e327c5fd1118d6be7c51dfb18f9b7922923f287471e35" -dependencies = [ - "crossbeam-utils", -] - -[[package]] -name = "crossbeam-utils" -version = "0.8.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22ec99545bb0ed0ea7bb9b8e1e9122ea386ff8a48c0922e43f36d45ab09e0e80" - -[[package]] -name = "crunchy" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43da5946c66ffcc7745f48db692ffbb10a83bfe0afd96235c5c2a4fb23994929" - -[[package]] -name = "crypto-common" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" -dependencies = [ - "generic-array", - "typenum", -] - -[[package]] -name = "dashmap" -version = "5.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "978747c1d849a7d2ee5e8adc0159961c48fb7e5db2f06af6723b80123bb53856" -dependencies = [ - "cfg-if", - "hashbrown 0.14.5", - "lock_api", - "once_cell", - "parking_lot_core", -] - -[[package]] -name = "dashmap" -version = "6.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5041cc499144891f3790297212f32a74fb938e5136a14943f338ef9e0ae276cf" -dependencies = [ - "cfg-if", - "crossbeam-utils", - "hashbrown 0.14.5", - "lock_api", - "once_cell", - 
"parking_lot_core", -] - -[[package]] -name = "der" -version = "0.7.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f55bf8e7b65898637379c1b74eb1551107c8294ed26d855ceb9fd1a09cfc9bc0" -dependencies = [ - "const-oid", - "pem-rfc7468", - "zeroize", -] - -[[package]] -name = "deranged" -version = "0.3.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4" -dependencies = [ - "powerfmt", -] - -[[package]] -name = "difflib" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6184e33543162437515c2e2b48714794e37845ec9851711914eec9d308f6ebe8" - -[[package]] -name = "digest" -version = "0.10.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" -dependencies = [ - "block-buffer", - "const-oid", - "crypto-common", - "subtle", -] - -[[package]] -name = "directories" -version = "5.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a49173b84e034382284f27f1af4dcbbd231ffa358c0fe316541a7337f376a35" -dependencies = [ - "dirs-sys", -] - -[[package]] -name = "dirs-sys" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "520f05a5cbd335fae5a99ff7a6ab8627577660ee5cfd6a94a6a929b52ff0321c" -dependencies = [ - "libc", - "option-ext", - "redox_users", - "windows-sys 0.48.0", -] - -[[package]] -name = "displaydoc" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.90", -] - -[[package]] -name = "doc-comment" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fea41bba32d969b513997752735605054bc0dfa92b4c56bf1189f2e174be7a10" - -[[package]] -name = "docs_codegen" -version = "0.0.0" -dependencies = [ - "anyhow", - "biome_string_case", - "bpaf", - "pgt_analyse", - "pgt_analyser", - "pgt_cli", - "pgt_configuration", - "pgt_console", - "pgt_diagnostics", - "pgt_flags", - "pgt_query_ext", - "pgt_statement_splitter", - "pgt_workspace", - "pulldown-cmark", - "regex", - "schemars", - "serde", - "serde_json", -] - -[[package]] -name = "dotenv" -version = "0.15.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77c90badedccf4105eca100756a0b1289e191f6fcbdadd3cee1d2f614f97da8f" - -[[package]] -name = "dotenvy" -version = "0.15.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b" - -[[package]] -name = "drop_bomb" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9bda8e21c04aca2ae33ffc2fd8c23134f3cac46db123ba97bd9d3f3b8a4a85e1" - -[[package]] -name = "dunce" -version = "1.0.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92773504d58c093f6de2459af4af33faa518c13451eb8f2b5698ed3d36e7c813" - -[[package]] -name = "dyn-clone" -version = "1.0.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d6ef0072f8a535281e4876be788938b528e9a1d43900b82c2569af7da799125" - -[[package]] -name = "either" -version = "1.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0" 
-dependencies = [ - "serde", -] - -[[package]] -name = "encode_unicode" -version = "0.3.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f" - -[[package]] -name = "enumflags2" -version = "0.7.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba2f4b465f5318854c6f8dd686ede6c0a9dc67d4b1ac241cf0eb51521a309147" -dependencies = [ - "enumflags2_derive", -] - -[[package]] -name = "enumflags2_derive" -version = "0.7.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc4caf64a58d7a6d65ab00639b046ff54399a39f5f2554728895ace4b297cd79" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.90", -] - -[[package]] -name = "env_filter" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "186e05a59d4c50738528153b83b0b0194d3a29507dfec16eccd4b342903397d0" -dependencies = [ - "log", -] - -[[package]] -name = "env_logger" -version = "0.11.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3716d7a920fb4fac5d84e9d4bce8ceb321e9414b4409da61b07b75c1e3d0697" -dependencies = [ - "anstream", - "anstyle", - "env_filter", - "log", -] - -[[package]] -name = "equivalent" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" - -[[package]] -name = "errno" -version = "0.3.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d" -dependencies = [ - "libc", - "windows-sys 0.59.0", -] - -[[package]] -name = "etcetera" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "136d1b5283a1ab77bd9257427ffd09d8667ced0570b6f938942bc7568ed5b943" -dependencies = [ - "cfg-if", - "home", - "windows-sys 0.48.0", -] - -[[package]] -name = "event-listener" -version = "2.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" - -[[package]] -name = "event-listener" -version = "5.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6032be9bd27023a771701cc49f9f053c751055f71efb2e0ae5c15809093675ba" -dependencies = [ - "concurrent-queue", - "parking", - "pin-project-lite", -] - -[[package]] -name = "event-listener-strategy" -version = "0.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c3e4e0dd3673c1139bf041f3008816d9cf2946bbfac2945c09e523b8d7b05b2" -dependencies = [ - "event-listener 5.3.1", - "pin-project-lite", -] - -[[package]] -name = "fastrand" -version = "1.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e51093e27b0797c359783294ca4f0a911c270184cb10f85783b118614a1501be" -dependencies = [ - "instant", -] - -[[package]] -name = "fastrand" -version = "2.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" - -[[package]] -name = "fixedbitset" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" - -[[package]] -name = "flate2" -version = "1.0.35" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"c936bfdafb507ebbf50b8074c54fa31c5be9a1e7e5f467dd659697041407d07c" -dependencies = [ - "crc32fast", - "miniz_oxide", -] - -[[package]] -name = "float-cmp" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b09cf3155332e944990140d967ff5eceb70df778b34f77d8075db46e4704e6d8" -dependencies = [ - "num-traits", -] - -[[package]] -name = "flume" -version = "0.11.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da0e4dd2a88388a1f4ccc7c9ce104604dab68d9f408dc34cd45823d5a9069095" -dependencies = [ - "futures-core", - "futures-sink", - "spin", -] - -[[package]] -name = "form_urlencoded" -version = "1.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" -dependencies = [ - "percent-encoding", -] - -[[package]] -name = "fs_extra" -version = "1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42703706b716c37f96a77aea830392ad231f44c9e9a67872fa5548707e11b11c" - -[[package]] -name = "futures" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876" -dependencies = [ - "futures-channel", - "futures-core", - "futures-executor", - "futures-io", - "futures-sink", - "futures-task", - "futures-util", -] - -[[package]] -name = "futures-channel" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10" -dependencies = [ - "futures-core", - "futures-sink", -] - -[[package]] -name = "futures-core" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" - -[[package]] -name = "futures-executor" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f" -dependencies = [ - "futures-core", - "futures-task", - "futures-util", -] - -[[package]] -name = "futures-intrusive" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d930c203dd0b6ff06e0201a4a2fe9149b43c684fd4420555b26d21b1a02956f" -dependencies = [ - "futures-core", - "lock_api", - "parking_lot", -] - -[[package]] -name = "futures-io" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" - -[[package]] -name = "futures-lite" -version = "1.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49a9d51ce47660b1e808d3c990b4709f2f415d928835a17dfd16991515c46bce" -dependencies = [ - "fastrand 1.9.0", - "futures-core", - "futures-io", - "memchr", - "parking", - "pin-project-lite", - "waker-fn", -] - -[[package]] -name = "futures-lite" -version = "2.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cef40d21ae2c515b51041df9ed313ed21e572df340ea58a922a0aefe7e8891a1" -dependencies = [ - "fastrand 2.3.0", - "futures-core", - "futures-io", - "parking", - "pin-project-lite", -] - -[[package]] -name = "futures-macro" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" -dependencies = [ - "proc-macro2", - "quote", - "syn 
2.0.90", -] - -[[package]] -name = "futures-sink" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7" - -[[package]] -name = "futures-task" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" - -[[package]] -name = "futures-util" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" -dependencies = [ - "futures-channel", - "futures-core", - "futures-io", - "futures-macro", - "futures-sink", - "futures-task", - "memchr", - "pin-project-lite", - "pin-utils", - "slab", -] - -[[package]] -name = "fuzzy-matcher" -version = "0.3.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54614a3312934d066701a80f20f15fa3b56d67ac7722b39eea5b4c9dd1d66c94" -dependencies = [ - "thread_local", -] - -[[package]] -name = "generic-array" -version = "0.14.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" -dependencies = [ - "typenum", - "version_check", -] - -[[package]] -name = "gethostname" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1ebd34e35c46e00bb73e81363248d627782724609fe1b6396f553f68fe3862e" -dependencies = [ - "libc", - "winapi", -] - -[[package]] -name = "getopts" -version = "0.2.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14dbbfd5c71d70241ecf9e6f13737f7b5ce823821063188d7e46c41d371eebd5" -dependencies = [ - "unicode-width", -] - -[[package]] -name = "getrandom" -version = "0.2.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" -dependencies = [ - "cfg-if", - "libc", - "wasi", -] - -[[package]] -name = "gimli" -version = "0.31.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" - -[[package]] -name = "glob" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" - -[[package]] -name = "globset" -version = "0.4.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54a1028dfc5f5df5da8a56a73e6c153c9a9708ec57232470703592a3f18e49f5" -dependencies = [ - "aho-corasick", - "bstr", - "log", - "regex-automata 0.4.9", - "regex-syntax 0.8.5", -] - -[[package]] -name = "globwalk" -version = "0.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0bf760ebf69878d9fd8f110c89703d90ce35095324d1f1edcb595c63945ee757" -dependencies = [ - "bitflags 2.6.0", - "ignore", - "walkdir", -] - -[[package]] -name = "gloo-timers" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbb143cf96099802033e0d4f4963b19fd2e0b728bcf076cd9cf7f6634f092994" -dependencies = [ - "futures-channel", - "futures-core", - "js-sys", - "wasm-bindgen", -] - -[[package]] -name = "half" -version = "2.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "459196ed295495a68f7d7fe1d84f6c4b7ff0e21fe3017b2f283c6fac3ad803c9" -dependencies = [ - "cfg-if", - "crunchy", -] - -[[package]] 
-name = "hashbrown" -version = "0.12.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" - -[[package]] -name = "hashbrown" -version = "0.14.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" -dependencies = [ - "ahash", - "allocator-api2", -] - -[[package]] -name = "hashbrown" -version = "0.15.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289" - -[[package]] -name = "hashlink" -version = "0.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ba4ff7128dee98c7dc9794b6a411377e1404dba1c97deb8d1a55297bd25d8af" -dependencies = [ - "hashbrown 0.14.5", -] - -[[package]] -name = "hdrhistogram" -version = "7.5.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "765c9198f173dd59ce26ff9f95ef0aafd0a0fe01fb9d72841bc5066a4c06511d" -dependencies = [ - "byteorder", - "num-traits", -] - -[[package]] -name = "heck" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" - -[[package]] -name = "hermit-abi" -version = "0.3.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" - -[[package]] -name = "hermit-abi" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fbf6a919d6cf397374f7dfeeea91d974c7c0a7221d0d0f4f20d859d329e53fcc" - -[[package]] -name = "hermit-abi" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fbd780fe5cc30f81464441920d82ac8740e2e46b29a6fad543ddd075229ce37e" - -[[package]] -name = "hex" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" - -[[package]] -name = "hkdf" -version = "0.12.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b5f8eb2ad728638ea2c7d47a21db23b7b58a72ed6a38256b8a1849f15fbbdf7" -dependencies = [ - "hmac", -] - -[[package]] -name = "hmac" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" -dependencies = [ - "digest", -] - -[[package]] -name = "home" -version = "0.5.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3d1354bf6b7235cb4a0576c2619fd4ed18183f689b12b006a0ee7329eeff9a5" -dependencies = [ - "windows-sys 0.52.0", -] - -[[package]] -name = "httparse" -version = "1.9.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d71d3574edd2771538b901e6549113b4006ece66150fb69c0fb6d9a2adae946" - -[[package]] -name = "icu_collections" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db2fa452206ebee18c4b5c2274dbf1de17008e874b4dc4f0aea9d01ca79e4526" -dependencies = [ - "displaydoc", - "yoke", - "zerofrom", - "zerovec", -] - -[[package]] -name = "icu_locid" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13acbb8371917fc971be86fc8057c41a64b521c184808a698c02acc242dbf637" -dependencies = [ - "displaydoc", - "litemap", - "tinystr", - 
"writeable", - "zerovec", -] - -[[package]] -name = "icu_locid_transform" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01d11ac35de8e40fdeda00d9e1e9d92525f3f9d887cdd7aa81d727596788b54e" -dependencies = [ - "displaydoc", - "icu_locid", - "icu_locid_transform_data", - "icu_provider", - "tinystr", - "zerovec", -] - -[[package]] -name = "icu_locid_transform_data" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fdc8ff3388f852bede6b579ad4e978ab004f139284d7b28715f773507b946f6e" - -[[package]] -name = "icu_normalizer" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19ce3e0da2ec68599d193c93d088142efd7f9c5d6fc9b803774855747dc6a84f" -dependencies = [ - "displaydoc", - "icu_collections", - "icu_normalizer_data", - "icu_properties", - "icu_provider", - "smallvec", - "utf16_iter", - "utf8_iter", - "write16", - "zerovec", -] - -[[package]] -name = "icu_normalizer_data" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8cafbf7aa791e9b22bec55a167906f9e1215fd475cd22adfcf660e03e989516" - -[[package]] -name = "icu_properties" -version = "1.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93d6020766cfc6302c15dbbc9c8778c37e62c14427cb7f6e601d849e092aeef5" -dependencies = [ - "displaydoc", - "icu_collections", - "icu_locid_transform", - "icu_properties_data", - "icu_provider", - "tinystr", - "zerovec", -] - -[[package]] -name = "icu_properties_data" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67a8effbc3dd3e4ba1afa8ad918d5684b8868b3b26500753effea8d2eed19569" - -[[package]] -name = "icu_provider" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ed421c8a8ef78d3e2dbc98a973be2f3770cb42b606e3ab18d6237c4dfde68d9" -dependencies = [ - "displaydoc", - "icu_locid", - "icu_provider_macros", - "stable_deref_trait", - "tinystr", - "writeable", - "yoke", - "zerofrom", - "zerovec", -] - -[[package]] -name = "icu_provider_macros" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.90", -] - -[[package]] -name = "idna" -version = "1.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e" -dependencies = [ - "idna_adapter", - "smallvec", - "utf8_iter", -] - -[[package]] -name = "idna_adapter" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "daca1df1c957320b2cf139ac61e7bd64fed304c5040df000a745aa1de3b4ef71" -dependencies = [ - "icu_normalizer", - "icu_properties", -] - -[[package]] -name = "ignore" -version = "0.4.23" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d89fd380afde86567dfba715db065673989d6253f42b88179abd3eae47bda4b" -dependencies = [ - "crossbeam-deque", - "globset", - "log", - "memchr", - "regex-automata 0.4.9", - "same-file", - "walkdir", - "winapi-util", -] - -[[package]] -name = "indexmap" -version = "1.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" -dependencies = [ - "autocfg", - "hashbrown 0.12.3", - "serde", -] - -[[package]] -name = 
"indexmap" -version = "2.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62f822373a4fe84d4bb149bf54e584a7f4abec90e072ed49cda0edea5b95471f" -dependencies = [ - "equivalent", - "hashbrown 0.15.2", - "serde", -] - -[[package]] -name = "insta" -version = "1.42.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "71c1b125e30d93896b365e156c33dadfffab45ee8400afcbba4752f59de08a86" -dependencies = [ - "console", - "linked-hash-map", - "once_cell", - "pin-project", - "similar", -] - -[[package]] -name = "instant" -version = "0.1.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0242819d153cba4b4b05a5a8f2a7e9bbf97b6055b2a002b395c96b5ff3c0222" -dependencies = [ - "cfg-if", -] - -[[package]] -name = "io-lifetimes" -version = "1.0.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eae7b9aee968036d54dce06cebaefd919e4472e753296daccd6d344e3e2df0c2" -dependencies = [ - "hermit-abi 0.3.9", - "libc", - "windows-sys 0.48.0", -] - -[[package]] -name = "is-terminal" -version = "0.4.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e04d7f318608d35d4b61ddd75cbdaee86b023ebe2bd5a66ee0915f0bf93095a9" -dependencies = [ - "hermit-abi 0.5.0", - "libc", - "windows-sys 0.59.0", -] - -[[package]] -name = "is_ci" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7655c9839580ee829dfacba1d1278c2b7883e50a277ff7541299489d6bdfdc45" - -[[package]] -name = "is_terminal_polyfill" -version = "1.70.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf" - -[[package]] -name = "itertools" -version = "0.10.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" -dependencies = [ - "either", -] - -[[package]] -name = "itertools" -version = "0.14.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b192c782037fadd9cfa75548310488aabdbf3d2da73885b31bd0abd03351285" -dependencies = [ - "either", -] - -[[package]] -name = "itoa" -version = "1.0.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d75a2a4b1b190afb6f5425f10f6a8f959d2ea0b9c2b1d79553551850539e4674" - -[[package]] -name = "js-sys" -version = "0.3.76" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6717b6b5b077764fb5966237269cb3c64edddde4b14ce42647430a78ced9e7b7" -dependencies = [ - "once_cell", - "wasm-bindgen", -] - -[[package]] -name = "json-strip-comments" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b271732a960335e715b6b2ae66a086f115c74eb97360e996d2bd809bfc063bba" -dependencies = [ - "memchr", -] - -[[package]] -name = "kv-log-macro" -version = "1.0.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0de8b303297635ad57c9f5059fd9cee7a47f8e8daa09df0fcd07dd39fb22977f" -dependencies = [ - "log", -] - -[[package]] -name = "lazy_static" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" -dependencies = [ - "spin", -] - -[[package]] -name = "lazycell" -version = "1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" - 
-[[package]] -name = "libc" -version = "0.2.168" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5aaeb2981e0606ca11d79718f8bb01164f1d6ed75080182d3abf017e6d244b6d" - -[[package]] -name = "libloading" -version = "0.8.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc2f4eb4bc735547cfed7c0a4922cbd04a4655978c09b54f1f7b228750664c34" -dependencies = [ - "cfg-if", - "windows-targets 0.52.6", -] - -[[package]] -name = "libm" -version = "0.2.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8355be11b20d696c8f18f6cc018c4e372165b1fa8126cef092399c9951984ffa" - -[[package]] -name = "libmimalloc-sys" -version = "0.1.39" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23aa6811d3bd4deb8a84dde645f943476d13b248d818edcf8ce0b2f37f036b44" -dependencies = [ - "cc", - "libc", -] - -[[package]] -name = "libredox" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" -dependencies = [ - "bitflags 2.6.0", - "libc", -] - -[[package]] -name = "libsqlite3-sys" -version = "0.30.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e99fb7a497b1e3339bc746195567ed8d3e24945ecd636e3619d20b9de9e9149" -dependencies = [ - "cc", - "pkg-config", - "vcpkg", -] - -[[package]] -name = "linked-hash-map" -version = "0.5.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" - -[[package]] -name = "linux-raw-sys" -version = "0.3.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519" - -[[package]] -name = "linux-raw-sys" -version = "0.4.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" - -[[package]] -name = "litemap" -version = "0.7.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ee93343901ab17bd981295f2cf0026d4ad018c7c31ba84549a4ddbb47a45104" - -[[package]] -name = "lock_api" -version = "0.4.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17" -dependencies = [ - "autocfg", - "scopeguard", -] - -[[package]] -name = "log" -version = "0.4.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24" -dependencies = [ - "value-bag", -] - -[[package]] -name = "lsp-types" -version = "0.94.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c66bfd44a06ae10647fe3f8214762e9369fd4248df1350924b4ef9e770a85ea1" -dependencies = [ - "bitflags 1.3.2", - "serde", - "serde_json", - "serde_repr", - "url", -] - -[[package]] -name = "matchers" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" -dependencies = [ - "regex-automata 0.1.10", -] - -[[package]] -name = "md-5" -version = "0.10.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d89e7ee0cfbedfc4da3340218492196241d89eefb6dab27de5df917a6d2e78cf" -dependencies = [ - "cfg-if", - "digest", -] - -[[package]] -name = "memchr" -version = "2.7.4" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" - -[[package]] -name = "memoffset" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d61c719bcfbcf5d62b3a09efa6088de8c54bc0bfcd3ea7ae39fcc186108b8de1" -dependencies = [ - "autocfg", -] - -[[package]] -name = "mimalloc" -version = "0.1.43" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68914350ae34959d83f732418d51e2427a794055d0b9529f48259ac07af65633" -dependencies = [ - "libmimalloc-sys", -] - -[[package]] -name = "minimal-lexical" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" - -[[package]] -name = "miniz_oxide" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2d80299ef12ff69b16a84bb182e3b9df68b5a91574d3d4fa6e41b65deec4df1" -dependencies = [ - "adler2", -] - -[[package]] -name = "mio" -version = "1.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2886843bf800fba2e3377cff24abf6379b4c4d5c6681eaf9ea5b0d15090450bd" -dependencies = [ - "libc", - "wasi", - "windows-sys 0.52.0", -] - -[[package]] -name = "multimap" -version = "0.8.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5ce46fe64a9d73be07dcbe690a38ce1b293be448fd8ce1e6c1b8062c9f72c6a" - -[[package]] -name = "newtype-uuid" -version = "1.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c8781e2ef64806278a55ad223f0bc875772fd40e1fe6e73e8adbf027817229d" -dependencies = [ - "uuid", -] - -[[package]] -name = "nom" -version = "7.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a" -dependencies = [ - "memchr", - "minimal-lexical", -] - -[[package]] -name = "normalize-line-endings" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61807f77802ff30975e01f4f071c8ba10c022052f98b3294119f3e615d13e5be" - -[[package]] -name = "ntest" -version = "0.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb183f0a1da7a937f672e5ee7b7edb727bf52b8a52d531374ba8ebb9345c0330" -dependencies = [ - "ntest_test_cases", - "ntest_timeout", -] - -[[package]] -name = "ntest_test_cases" -version = "0.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16d0d3f2a488592e5368ebbe996e7f1d44aa13156efad201f5b4d84e150eaa93" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "ntest_timeout" -version = "0.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fcc7c92f190c97f79b4a332f5e81dcf68c8420af2045c936c9be0bc9de6f63b5" -dependencies = [ - "proc-macro-crate", - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "nu-ansi-term" -version = "0.46.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84" -dependencies = [ - "overload", - "winapi", -] - -[[package]] -name = "nu-ansi-term" -version = "0.50.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4a28e057d01f97e61255210fcff094d74ed0466038633e95017f5beb68e4399" -dependencies = [ - "windows-sys 0.52.0", -] - -[[package]] -name = "num-bigint-dig" 
-version = "0.8.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc84195820f291c7697304f3cbdadd1cb7199c0efc917ff5eafd71225c136151" -dependencies = [ - "byteorder", - "lazy_static", - "libm", - "num-integer", - "num-iter", - "num-traits", - "rand", - "smallvec", - "zeroize", -] - -[[package]] -name = "num-conv" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" - -[[package]] -name = "num-integer" -version = "0.1.46" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f" -dependencies = [ - "num-traits", -] - -[[package]] -name = "num-iter" -version = "0.1.45" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1429034a0490724d0075ebb2bc9e875d6503c3cf69e235a8941aa757d83ef5bf" -dependencies = [ - "autocfg", - "num-integer", - "num-traits", -] - -[[package]] -name = "num-traits" -version = "0.2.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" -dependencies = [ - "autocfg", - "libm", -] - -[[package]] -name = "num_threads" -version = "0.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c7398b9c8b70908f6371f47ed36737907c87c52af34c268fed0bf0ceb92ead9" -dependencies = [ - "libc", -] - -[[package]] -name = "object" -version = "0.36.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aedf0a2d09c573ed1d8d85b30c119153926a2b36dce0ab28322c09a117a4683e" -dependencies = [ - "memchr", -] - -[[package]] -name = "once_cell" -version = "1.20.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775" - -[[package]] -name = "oorandom" -version = "11.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d6790f58c7ff633d8771f42965289203411a5e5c68388703c06e14f24770b41e" - -[[package]] -name = "option-ext" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d" - -[[package]] -name = "overload" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" - -[[package]] -name = "owo-colors" -version = "4.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb37767f6569cd834a413442455e0f066d0d522de8630436e2a1761d9726ba56" - -[[package]] -name = "oxc_resolver" -version = "1.12.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c20bb345f290c46058ba650fef7ca2b579612cf2786b927ebad7b8bec0845a7" -dependencies = [ - "cfg-if", - "dashmap 6.1.0", - "dunce", - "indexmap 2.7.0", - "json-strip-comments", - "once_cell", - "rustc-hash 2.1.0", - "serde", - "serde_json", - "simdutf8", - "thiserror 1.0.69", - "tracing", -] - -[[package]] -name = "parking" -version = "2.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba" - -[[package]] -name = "parking_lot" -version = "0.12.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27" -dependencies 
= [ - "lock_api", - "parking_lot_core", -] - -[[package]] -name = "parking_lot_core" -version = "0.9.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" -dependencies = [ - "cfg-if", - "libc", - "redox_syscall", - "smallvec", - "windows-targets 0.52.6", -] - -[[package]] -name = "paste" -version = "1.0.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" - -[[package]] -name = "path-absolutize" -version = "3.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e4af381fe79fa195b4909485d99f73a80792331df0625188e707854f0b3383f5" -dependencies = [ - "path-dedot", -] - -[[package]] -name = "path-dedot" -version = "3.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07ba0ad7e047712414213ff67533e6dd477af0a4e1d14fb52343e53d30ea9397" -dependencies = [ - "once_cell", -] - -[[package]] -name = "peeking_take_while" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19b17cddbe7ec3f8bc800887bab5e717348c95ea2ca0b1bf0837fb964dc67099" - -[[package]] -name = "pem-rfc7468" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "88b39c9bfcfc231068454382784bb460aae594343fb030d46e9f50a645418412" -dependencies = [ - "base64ct", -] - -[[package]] -name = "percent-encoding" -version = "2.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" - -[[package]] -name = "petgraph" -version = "0.6.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4c5cc86750666a3ed20bdaf5ca2a0344f9c67674cae0515bec2da16fbaa47db" -dependencies = [ - "fixedbitset", - "indexmap 2.7.0", -] - -[[package]] -name = "pg_query" -version = "6.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f71c7c56dfe299ec6f98aa210aa23458be3b0610c485be60a5873c2f3627c40e" -dependencies = [ - "bindgen", - "cc", - "fs_extra", - "glob", - "itertools 0.10.5", - "prost", - "prost-build", - "serde", - "serde_json", - "thiserror 1.0.69", -] - -[[package]] -name = "pgt_analyse" -version = "0.0.0" -dependencies = [ - "biome_deserialize 0.6.0", - "biome_deserialize_macros 0.6.0", - "enumflags2", - "pgt_console", - "pgt_diagnostics", - "pgt_query_ext", - "pgt_text_size", - "rustc-hash 2.1.0", - "schemars", - "serde", -] - -[[package]] -name = "pgt_analyser" -version = "0.0.0" -dependencies = [ - "insta", - "pgt_analyse", - "pgt_console", - "pgt_diagnostics", - "pgt_query_ext", - "pgt_test_macros", - "serde", - "termcolor", -] - -[[package]] -name = "pgt_cli" -version = "0.0.0" -dependencies = [ - "anyhow", - "assert_cmd", - "biome_deserialize 0.6.0", - "biome_deserialize_macros 0.6.0", - "bpaf", - "crossbeam", - "dashmap 5.5.3", - "hdrhistogram", - "libc", - "mimalloc", - "path-absolutize", - "pgt_analyse", - "pgt_configuration", - "pgt_console", - "pgt_diagnostics", - "pgt_flags", - "pgt_fs", - "pgt_lsp", - "pgt_text_edit", - "pgt_workspace", - "predicates", - "quick-junit", - "rayon", - "rustc-hash 2.1.0", - "serde", - "serde_json", - "tikv-jemallocator", - "tokio", - "tracing", - "tracing-appender", - "tracing-bunyan-formatter", - "tracing-subscriber", - "tracing-tree", -] - -[[package]] -name = "pgt_completions" -version = "0.0.0" -dependencies = [ - "async-std", - 
"criterion", - "fuzzy-matcher", - "pgt_schema_cache", - "pgt_test_utils", - "pgt_text_size", - "pgt_treesitter_queries", - "schemars", - "serde", - "serde_json", - "sqlx", - "tokio", - "tracing", - "tree-sitter", - "tree_sitter_sql", -] - -[[package]] -name = "pgt_configuration" -version = "0.0.0" -dependencies = [ - "biome_deserialize 0.6.0", - "biome_deserialize_macros 0.6.0", - "bpaf", - "indexmap 2.7.0", - "pgt_analyse", - "pgt_analyser", - "pgt_console", - "pgt_diagnostics", - "pgt_text_size", - "rustc-hash 2.1.0", - "schemars", - "serde", - "serde_json", -] - -[[package]] -name = "pgt_console" -version = "0.0.0" -dependencies = [ - "pgt_markup", - "pgt_text_size", - "schemars", - "serde", - "termcolor", - "trybuild", - "unicode-segmentation", - "unicode-width", -] - -[[package]] -name = "pgt_diagnostics" -version = "0.0.0" -dependencies = [ - "backtrace", - "bpaf", - "enumflags2", - "pgt_console", - "pgt_diagnostics_categories", - "pgt_diagnostics_macros", - "pgt_text_edit", - "pgt_text_size", - "schemars", - "serde", - "serde_json", - "termcolor", - "unicode-width", -] - -[[package]] -name = "pgt_diagnostics_categories" -version = "0.0.0" -dependencies = [ - "quote", - "schemars", - "serde", -] - -[[package]] -name = "pgt_diagnostics_macros" -version = "0.0.0" -dependencies = [ - "proc-macro-error", - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "pgt_flags" -version = "0.0.0" -dependencies = [ - "pgt_console", -] - -[[package]] -name = "pgt_fs" -version = "0.0.0" -dependencies = [ - "crossbeam", - "directories", - "enumflags2", - "parking_lot", - "pgt_diagnostics", - "rayon", - "rustc-hash 2.1.0", - "schemars", - "serde", - "smallvec", - "tracing", -] - -[[package]] -name = "pgt_lexer" -version = "0.0.0" -dependencies = [ - "insta", - "pg_query", - "pgt_diagnostics", - "pgt_lexer_codegen", - "pgt_text_size", - "regex", -] - -[[package]] -name = "pgt_lexer_codegen" -version = "0.0.0" -dependencies = [ - "pgt_query_proto_parser", - "proc-macro2", - "quote", -] - -[[package]] -name = "pgt_lsp" -version = "0.0.0" -dependencies = [ - "anyhow", - "biome_deserialize 0.6.0", - "futures", - "pgt_analyse", - "pgt_completions", - "pgt_configuration", - "pgt_console", - "pgt_diagnostics", - "pgt_fs", - "pgt_test_utils", - "pgt_text_edit", - "pgt_text_size", - "pgt_workspace", - "rustc-hash 2.1.0", - "serde", - "serde_json", - "sqlx", - "strum", - "test-log", - "tokio", - "tower", - "tower-lsp", - "tracing", -] - -[[package]] -name = "pgt_markup" -version = "0.0.0" -dependencies = [ - "proc-macro-error", - "proc-macro2", - "quote", -] - -[[package]] -name = "pgt_query_ext" -version = "0.0.0" -dependencies = [ - "petgraph", - "pg_query", - "pgt_diagnostics", - "pgt_lexer", - "pgt_query_ext_codegen", - "pgt_text_size", -] - -[[package]] -name = "pgt_query_ext_codegen" -version = "0.0.0" -dependencies = [ - "pgt_query_proto_parser", - "proc-macro2", - "quote", -] - -[[package]] -name = "pgt_query_proto_parser" -version = "0.0.0" -dependencies = [ - "convert_case", - "protobuf", - "protobuf-parse", -] - -[[package]] -name = "pgt_schema_cache" -version = "0.0.0" -dependencies = [ - "anyhow", - "async-std", - "futures-util", - "pgt_console", - "pgt_diagnostics", - "pgt_test_utils", - "serde", - "serde_json", - "sqlx", - "strum", - "tokio", -] - -[[package]] -name = "pgt_statement_splitter" -version = "0.0.0" -dependencies = [ - "ntest", - "pgt_diagnostics", - "pgt_lexer", - "pgt_query_ext", - "pgt_text_size", - "regex", -] - -[[package]] -name = "pgt_test_macros" -version = 
"0.0.0" -dependencies = [ - "globwalk", - "proc-macro-error", - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "pgt_test_utils" -version = "0.0.0" -dependencies = [ - "anyhow", - "clap", - "dotenv", - "sqlx", - "tree-sitter", - "tree_sitter_sql", - "uuid", -] - -[[package]] -name = "pgt_text_edit" -version = "0.0.0" -dependencies = [ - "pgt_text_size", - "schemars", - "serde", - "similar", -] - -[[package]] -name = "pgt_text_size" -version = "0.0.0" -dependencies = [ - "schemars", - "serde", - "serde_test", - "static_assertions", -] - -[[package]] -name = "pgt_treesitter_queries" -version = "0.0.0" -dependencies = [ - "clap", - "tree-sitter", - "tree_sitter_sql", -] - -[[package]] -name = "pgt_type_resolver" -version = "0.0.0" -dependencies = [ - "pgt_query_ext", - "pgt_schema_cache", -] - -[[package]] -name = "pgt_typecheck" -version = "0.0.0" -dependencies = [ - "insta", - "pgt_console", - "pgt_diagnostics", - "pgt_query_ext", - "pgt_schema_cache", - "pgt_test_utils", - "pgt_text_size", - "pgt_treesitter_queries", - "sqlx", - "tokio", - "tree-sitter", - "tree_sitter_sql", -] - -[[package]] -name = "pgt_workspace" -version = "0.0.0" -dependencies = [ - "biome_deserialize 0.6.0", - "biome_js_factory", - "biome_js_syntax", - "biome_rowan", - "dashmap 5.5.3", - "futures", - "globset", - "ignore", - "pgt_analyse", - "pgt_analyser", - "pgt_completions", - "pgt_configuration", - "pgt_console", - "pgt_diagnostics", - "pgt_fs", - "pgt_lexer", - "pgt_query_ext", - "pgt_schema_cache", - "pgt_statement_splitter", - "pgt_text_size", - "pgt_typecheck", - "rustc-hash 2.1.0", - "schemars", - "serde", - "serde_json", - "sqlx", - "strum", - "tempfile", - "tokio", - "tracing", - "tree-sitter", - "tree_sitter_sql", -] - -[[package]] -name = "pin-project" -version = "1.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be57f64e946e500c8ee36ef6331845d40a93055567ec57e8fae13efd33759b95" -dependencies = [ - "pin-project-internal", -] - -[[package]] -name = "pin-project-internal" -version = "1.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c0f5fad0874fc7abcd4d750e76917eaebbecaa2c20bde22e1dbeeba8beb758c" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.90", -] - -[[package]] -name = "pin-project-lite" -version = "0.2.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "915a1e146535de9163f3987b8944ed8cf49a18bb0056bcebcdcece385cece4ff" - -[[package]] -name = "pin-utils" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" - -[[package]] -name = "piper" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96c8c490f422ef9a4efd2cb5b42b76c8613d7e7dfc1caf667b8a3350a5acc066" -dependencies = [ - "atomic-waker", - "fastrand 2.3.0", - "futures-io", -] - -[[package]] -name = "pkcs1" -version = "0.7.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8ffb9f10fa047879315e6625af03c164b16962a5368d724ed16323b68ace47f" -dependencies = [ - "der", - "pkcs8", - "spki", -] - -[[package]] -name = "pkcs8" -version = "0.10.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7" -dependencies = [ - "der", - "spki", -] - -[[package]] -name = "pkg-config" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "953ec861398dccce10c670dfeaf3ec4911ca479e9c02154b3a215178c5f566f2" - -[[package]] -name = "plotters" -version = "0.3.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5aeb6f403d7a4911efb1e33402027fc44f29b5bf6def3effcc22d7bb75f2b747" -dependencies = [ - "num-traits", - "plotters-backend", - "plotters-svg", - "wasm-bindgen", - "web-sys", -] - -[[package]] -name = "plotters-backend" -version = "0.3.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df42e13c12958a16b3f7f4386b9ab1f3e7933914ecea48da7139435263a4172a" - -[[package]] -name = "plotters-svg" -version = "0.3.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51bae2ac328883f7acdfea3d66a7c35751187f870bc81f94563733a154d7a670" -dependencies = [ - "plotters-backend", -] - -[[package]] -name = "polling" -version = "2.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b2d323e8ca7996b3e23126511a523f7e62924d93ecd5ae73b333815b0eb3dce" -dependencies = [ - "autocfg", - "bitflags 1.3.2", - "cfg-if", - "concurrent-queue", - "libc", - "log", - "pin-project-lite", - "windows-sys 0.48.0", -] - -[[package]] -name = "polling" -version = "3.7.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a604568c3202727d1507653cb121dbd627a58684eb09a820fd746bee38b4442f" -dependencies = [ - "cfg-if", - "concurrent-queue", - "hermit-abi 0.4.0", - "pin-project-lite", - "rustix 0.38.42", - "tracing", - "windows-sys 0.59.0", -] - -[[package]] -name = "powerfmt" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" - -[[package]] -name = "ppv-lite86" -version = "0.2.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77957b295656769bb8ad2b6a6b09d897d94f05c41b069aede1fcdaa675eaea04" -dependencies = [ - "zerocopy", -] - -[[package]] -name = "predicates" -version = "3.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a5d19ee57562043d37e82899fade9a22ebab7be9cef5026b07fda9cdd4293573" -dependencies = [ - "anstyle", - "difflib", - "float-cmp", - "normalize-line-endings", - "predicates-core", - "regex", -] - -[[package]] -name = "predicates-core" -version = "1.0.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "727e462b119fe9c93fd0eb1429a5f7647394014cf3c04ab2c0350eeb09095ffa" - -[[package]] -name = "predicates-tree" -version = "1.0.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72dd2d6d381dfb73a193c7fca536518d7caee39fc8503f74e7dc0be0531b425c" -dependencies = [ - "predicates-core", - "termtree", -] - -[[package]] -name = "prettyplease" -version = "0.2.25" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64d1ec885c64d0457d564db4ec299b2dae3f9c02808b8ad9c3a089c591b18033" -dependencies = [ - "proc-macro2", - "syn 2.0.90", -] - -[[package]] -name = "proc-macro-crate" -version = "3.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ecf48c7ca261d60b74ab1a7b20da18bede46776b2e55535cb958eb595c5fa7b" -dependencies = [ - "toml_edit", -] - -[[package]] -name = "proc-macro-error" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" -dependencies = [ - "proc-macro-error-attr", - "proc-macro2", - "quote", - "syn 
1.0.109", - "version_check", -] - -[[package]] -name = "proc-macro-error-attr" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" -dependencies = [ - "proc-macro2", - "quote", - "version_check", -] - -[[package]] -name = "proc-macro2" -version = "1.0.93" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60946a68e5f9d28b0dc1c21bb8a97ee7d018a8b322fa57838ba31cc878e22d99" -dependencies = [ - "unicode-ident", -] - -[[package]] -name = "prost" -version = "0.13.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2796faa41db3ec313a31f7624d9286acf277b52de526150b7e69f3debf891ee5" -dependencies = [ - "bytes", - "prost-derive", -] - -[[package]] -name = "prost-build" -version = "0.13.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be769465445e8c1474e9c5dac2018218498557af32d9ed057325ec9a41ae81bf" -dependencies = [ - "heck", - "itertools 0.14.0", - "log", - "multimap", - "once_cell", - "petgraph", - "prettyplease", - "prost", - "prost-types", - "regex", - "syn 2.0.90", - "tempfile", -] - -[[package]] -name = "prost-derive" -version = "0.13.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a56d757972c98b346a9b766e3f02746cde6dd1cd1d1d563472929fdd74bec4d" -dependencies = [ - "anyhow", - "itertools 0.14.0", - "proc-macro2", - "quote", - "syn 2.0.90", -] - -[[package]] -name = "prost-types" -version = "0.13.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "52c2c1bf36ddb1a1c396b3601a3cec27c2462e45f07c386894ec3ccf5332bd16" -dependencies = [ - "prost", -] - -[[package]] -name = "protobuf" -version = "3.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a3a7c64d9bf75b1b8d981124c14c179074e8caa7dfe7b6a12e6222ddcd0c8f72" -dependencies = [ - "once_cell", - "protobuf-support", - "thiserror 1.0.69", -] - -[[package]] -name = "protobuf-parse" -version = "3.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "322330e133eab455718444b4e033ebfac7c6528972c784fcde28d2cc783c6257" -dependencies = [ - "anyhow", - "indexmap 2.7.0", - "log", - "protobuf", - "protobuf-support", - "tempfile", - "thiserror 1.0.69", - "which", -] - -[[package]] -name = "protobuf-support" -version = "3.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b088fd20b938a875ea00843b6faf48579462630015c3788d397ad6a786663252" -dependencies = [ - "thiserror 1.0.69", -] - -[[package]] -name = "pulldown-cmark" -version = "0.12.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f86ba2052aebccc42cbbb3ed234b8b13ce76f75c3551a303cb2bcffcff12bb14" -dependencies = [ - "bitflags 2.6.0", - "getopts", - "memchr", - "pulldown-cmark-escape", - "unicase", -] - -[[package]] -name = "pulldown-cmark-escape" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "007d8adb5ddab6f8e3f491ac63566a7d5002cc7ed73901f72057943fa71ae1ae" - -[[package]] -name = "quick-junit" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ed1a693391a16317257103ad06a88c6529ac640846021da7c435a06fffdacd7" -dependencies = [ - "chrono", - "indexmap 2.7.0", - "newtype-uuid", - "quick-xml", - "strip-ansi-escapes", - "thiserror 2.0.6", - "uuid", -] - -[[package]] -name = "quick-xml" -version = "0.37.1" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "f22f29bdff3987b4d8632ef95fd6424ec7e4e0a57e2f4fc63e489e75357f6a03" -dependencies = [ - "memchr", -] - -[[package]] -name = "quote" -version = "1.0.37" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5b9d34b8991d19d98081b46eacdd8eb58c6f2b201139f7c5f643cc155a633af" -dependencies = [ - "proc-macro2", -] - -[[package]] -name = "rand" -version = "0.8.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" -dependencies = [ - "libc", - "rand_chacha", - "rand_core", -] - -[[package]] -name = "rand_chacha" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" -dependencies = [ - "ppv-lite86", - "rand_core", -] - -[[package]] -name = "rand_core" -version = "0.6.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" -dependencies = [ - "getrandom", -] - -[[package]] -name = "rayon" -version = "1.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b418a60154510ca1a002a752ca9714984e21e4241e804d32555251faf8b78ffa" -dependencies = [ - "either", - "rayon-core", -] - -[[package]] -name = "rayon-core" -version = "1.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1465873a3dfdaa8ae7cb14b4383657caab0b3e8a0aa9ae8e04b044854c8dfce2" -dependencies = [ - "crossbeam-deque", - "crossbeam-utils", -] - -[[package]] -name = "redox_syscall" -version = "0.5.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b6dfecf2c74bce2466cabf93f6664d6998a69eb21e39f4207930065b27b771f" -dependencies = [ - "bitflags 2.6.0", -] - -[[package]] -name = "redox_users" -version = "0.4.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba009ff324d1fc1b900bd1fdb31564febe58a8ccc8a6fdbb93b543d33b13ca43" -dependencies = [ - "getrandom", - "libredox", - "thiserror 1.0.69", -] - -[[package]] -name = "regex" -version = "1.11.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" -dependencies = [ - "aho-corasick", - "memchr", - "regex-automata 0.4.9", - "regex-syntax 0.8.5", -] - -[[package]] -name = "regex-automata" -version = "0.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" -dependencies = [ - "regex-syntax 0.6.29", -] - -[[package]] -name = "regex-automata" -version = "0.4.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908" -dependencies = [ - "aho-corasick", - "memchr", - "regex-syntax 0.8.5", -] - -[[package]] -name = "regex-syntax" -version = "0.6.29" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" - -[[package]] -name = "regex-syntax" -version = "0.8.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" - -[[package]] -name = "rsa" -version = "0.9.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"47c75d7c5c6b673e58bf54d8544a9f432e3a925b0e80f7cd3602ab5c50c55519" -dependencies = [ - "const-oid", - "digest", - "num-bigint-dig", - "num-integer", - "num-traits", - "pkcs1", - "pkcs8", - "rand_core", - "signature", - "spki", - "subtle", - "zeroize", -] - -[[package]] -name = "rules_check" -version = "0.0.0" -dependencies = [ - "anyhow", - "pgt_analyse", - "pgt_analyser", - "pgt_console", - "pgt_diagnostics", - "pgt_query_ext", - "pgt_statement_splitter", - "pgt_workspace", - "pulldown-cmark", -] - -[[package]] -name = "rustc-demangle" -version = "0.1.24" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" - -[[package]] -name = "rustc-hash" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" - -[[package]] -name = "rustc-hash" -version = "2.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7fb8039b3032c191086b10f11f319a6e99e1e82889c5cc6046f515c9db1d497" - -[[package]] -name = "rustix" -version = "0.37.28" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "519165d378b97752ca44bbe15047d5d3409e875f39327546b42ac81d7e18c1b6" -dependencies = [ - "bitflags 1.3.2", - "errno", - "io-lifetimes", - "libc", - "linux-raw-sys 0.3.8", - "windows-sys 0.48.0", -] - -[[package]] -name = "rustix" -version = "0.38.42" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f93dc38ecbab2eb790ff964bb77fa94faf256fd3e73285fd7ba0903b76bedb85" -dependencies = [ - "bitflags 2.6.0", - "errno", - "libc", - "linux-raw-sys 0.4.14", - "windows-sys 0.59.0", -] - -[[package]] -name = "rustversion" -version = "1.0.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eded382c5f5f786b989652c49544c4877d9f015cc22e145a5ea8ea66c2921cd2" - -[[package]] -name = "ryu" -version = "1.0.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f" - -[[package]] -name = "same-file" -version = "1.0.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" -dependencies = [ - "winapi-util", -] - -[[package]] -name = "schemars" -version = "0.8.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fbf2ae1b8bc8e02df939598064d22402220cd5bbcca1c76f7d6a310974d5615" -dependencies = [ - "dyn-clone", - "indexmap 1.9.3", - "indexmap 2.7.0", - "schemars_derive", - "serde", - "serde_json", - "smallvec", -] - -[[package]] -name = "schemars_derive" -version = "0.8.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32e265784ad618884abaea0600a9adf15393368d840e0222d101a072f3f7534d" -dependencies = [ - "proc-macro2", - "quote", - "serde_derive_internals", - "syn 2.0.90", -] - -[[package]] -name = "scopeguard" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" - -[[package]] -name = "serde" -version = "1.0.215" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6513c1ad0b11a9376da888e3e0baa0077f1aed55c17f50e7b2397136129fb88f" -dependencies = [ - "serde_derive", -] - -[[package]] -name = "serde_derive" -version = "1.0.215" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad1e866f866923f252f05c889987993144fb74e722403468a4ebd70c3cd756c0" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.90", -] - -[[package]] -name = "serde_derive_internals" -version = "0.29.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "18d26a20a969b9e3fdf2fc2d9f21eda6c40e2de84c9408bb5d3b05d499aae711" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.90", -] - -[[package]] -name = "serde_json" -version = "1.0.133" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7fceb2473b9166b2294ef05efcb65a3db80803f0b03ef86a5fc88a2b85ee377" -dependencies = [ - "indexmap 2.7.0", - "itoa", - "memchr", - "ryu", - "serde", -] - -[[package]] -name = "serde_repr" -version = "0.1.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c64451ba24fc7a6a2d60fc75dd9c83c90903b19028d4eff35e88fc1e86564e9" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.90", -] - -[[package]] -name = "serde_spanned" -version = "0.6.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87607cb1398ed59d48732e575a4c28a7a8ebf2454b964fe3f224f2afc07909e1" -dependencies = [ - "serde", -] - -[[package]] -name = "serde_test" -version = "1.0.177" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f901ee573cab6b3060453d2d5f0bae4e6d628c23c0a962ff9b5f1d7c8d4f1ed" -dependencies = [ - "serde", -] - -[[package]] -name = "serde_urlencoded" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" -dependencies = [ - "form_urlencoded", - "itoa", - "ryu", - "serde", -] - -[[package]] -name = "sha1" -version = "0.10.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" -dependencies = [ - "cfg-if", - "cpufeatures", - "digest", -] - -[[package]] -name = "sha2" -version = "0.10.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" -dependencies = [ - "cfg-if", - "cpufeatures", - "digest", -] - -[[package]] -name = "sharded-slab" -version = "0.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" -dependencies = [ - "lazy_static", -] - -[[package]] -name = "shlex" -version = "1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" - -[[package]] -name = "signal-hook-registry" -version = "1.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a9e9e0b4211b72e7b8b6e85c807d36c212bdb33ea8587f7569562a84df5465b1" -dependencies = [ - "libc", -] - -[[package]] -name = "signature" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de" -dependencies = [ - "digest", - "rand_core", -] - -[[package]] -name = "simdutf8" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3a9fe34e3e7a50316060351f37187a3f546bce95496156754b601a5fa71b76e" - -[[package]] -name = "similar" -version = "2.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"1de1d4f81173b03af4c0cbed3c898f6bff5b870e4a7f5d6f4057d62a7a4b686e" -dependencies = [ - "bstr", - "unicode-segmentation", -] - -[[package]] -name = "slab" -version = "0.4.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" -dependencies = [ - "autocfg", -] - -[[package]] -name = "smallvec" -version = "1.13.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" -dependencies = [ - "serde", -] - -[[package]] -name = "socket2" -version = "0.4.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f7916fc008ca5542385b89a3d3ce689953c143e9304a9bf8beec1de48994c0d" -dependencies = [ - "libc", - "winapi", -] - -[[package]] -name = "socket2" -version = "0.5.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c970269d99b64e60ec3bd6ad27270092a5394c4e309314b18ae3fe575695fbe8" -dependencies = [ - "libc", - "windows-sys 0.52.0", -] - -[[package]] -name = "spin" -version = "0.9.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" -dependencies = [ - "lock_api", -] - -[[package]] -name = "spki" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d" -dependencies = [ - "base64ct", - "der", -] - -[[package]] -name = "sqlformat" -version = "0.2.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7bba3a93db0cc4f7bdece8bb09e77e2e785c20bfebf79eb8340ed80708048790" -dependencies = [ - "nom", - "unicode_categories", -] - -[[package]] -name = "sqlx" -version = "0.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93334716a037193fac19df402f8571269c84a00852f6a7066b5d2616dcd64d3e" -dependencies = [ - "sqlx-core", - "sqlx-macros", - "sqlx-mysql", - "sqlx-postgres", - "sqlx-sqlite", -] - -[[package]] -name = "sqlx-core" -version = "0.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4d8060b456358185f7d50c55d9b5066ad956956fddec42ee2e8567134a8936e" -dependencies = [ - "async-io 1.13.0", - "async-std", - "atoi", - "byteorder", - "bytes", - "crc", - "crossbeam-queue", - "either", - "event-listener 5.3.1", - "futures-channel", - "futures-core", - "futures-intrusive", - "futures-io", - "futures-util", - "hashbrown 0.14.5", - "hashlink", - "hex", - "indexmap 2.7.0", - "log", - "memchr", - "once_cell", - "paste", - "percent-encoding", - "serde", - "serde_json", - "sha2", - "smallvec", - "sqlformat", - "thiserror 1.0.69", - "tokio", - "tokio-stream", - "tracing", - "url", -] - -[[package]] -name = "sqlx-macros" -version = "0.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cac0692bcc9de3b073e8d747391827297e075c7710ff6276d9f7a1f3d58c6657" -dependencies = [ - "proc-macro2", - "quote", - "sqlx-core", - "sqlx-macros-core", - "syn 2.0.90", -] - -[[package]] -name = "sqlx-macros-core" -version = "0.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1804e8a7c7865599c9c79be146dc8a9fd8cc86935fa641d3ea58e5f0688abaa5" -dependencies = [ - "async-std", - "dotenvy", - "either", - "heck", - "hex", - "once_cell", - "proc-macro2", - "quote", - "serde", - "serde_json", - "sha2", - "sqlx-core", - "sqlx-mysql", - "sqlx-postgres", - 
"sqlx-sqlite", - "syn 2.0.90", - "tempfile", - "tokio", - "url", -] - -[[package]] -name = "sqlx-mysql" -version = "0.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64bb4714269afa44aef2755150a0fc19d756fb580a67db8885608cf02f47d06a" -dependencies = [ - "atoi", - "base64", - "bitflags 2.6.0", - "byteorder", - "bytes", - "crc", - "digest", - "dotenvy", - "either", - "futures-channel", - "futures-core", - "futures-io", - "futures-util", - "generic-array", - "hex", - "hkdf", - "hmac", - "itoa", - "log", - "md-5", - "memchr", - "once_cell", - "percent-encoding", - "rand", - "rsa", - "serde", - "sha1", - "sha2", - "smallvec", - "sqlx-core", - "stringprep", - "thiserror 1.0.69", - "tracing", - "whoami", -] - -[[package]] -name = "sqlx-postgres" -version = "0.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6fa91a732d854c5d7726349bb4bb879bb9478993ceb764247660aee25f67c2f8" -dependencies = [ - "atoi", - "base64", - "bitflags 2.6.0", - "byteorder", - "crc", - "dotenvy", - "etcetera", - "futures-channel", - "futures-core", - "futures-io", - "futures-util", - "hex", - "hkdf", - "hmac", - "home", - "itoa", - "log", - "md-5", - "memchr", - "once_cell", - "rand", - "serde", - "serde_json", - "sha2", - "smallvec", - "sqlx-core", - "stringprep", - "thiserror 1.0.69", - "tracing", - "whoami", -] - -[[package]] -name = "sqlx-sqlite" -version = "0.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d5b2cf34a45953bfd3daaf3db0f7a7878ab9b7a6b91b422d24a7a9e4c857b680" -dependencies = [ - "atoi", - "flume", - "futures-channel", - "futures-core", - "futures-executor", - "futures-intrusive", - "futures-util", - "libsqlite3-sys", - "log", - "percent-encoding", - "serde", - "serde_urlencoded", - "sqlx-core", - "tracing", - "url", -] - -[[package]] -name = "stable_deref_trait" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" - -[[package]] -name = "static_assertions" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" - -[[package]] -name = "stringprep" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b4df3d392d81bd458a8a621b8bffbd2302a12ffe288a9d931670948749463b1" -dependencies = [ - "unicode-bidi", - "unicode-normalization", - "unicode-properties", -] - -[[package]] -name = "strip-ansi-escapes" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55ff8ef943b384c414f54aefa961dd2bd853add74ec75e7ac74cf91dba62bcfa" -dependencies = [ - "vte", -] - -[[package]] -name = "strsim" -version = "0.11.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" - -[[package]] -name = "strum" -version = "0.27.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f64def088c51c9510a8579e3c5d67c65349dcf755e5479ad3d010aa6454e2c32" -dependencies = [ - "strum_macros", -] - -[[package]] -name = "strum_macros" -version = "0.27.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c77a8c5abcaf0f9ce05d62342b7d298c346515365c36b673df4ebe3ced01fde8" -dependencies = [ - "heck", - "proc-macro2", - "quote", - "rustversion", - "syn 2.0.90", -] - -[[package]] -name = "subtle" -version = "2.6.1" 
-source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" - -[[package]] -name = "supports-color" -version = "3.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c64fc7232dd8d2e4ac5ce4ef302b1d81e0b80d055b9d77c7c4f51f6aa4c867d6" -dependencies = [ - "is_ci", -] - -[[package]] -name = "syn" -version = "1.0.109" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" -dependencies = [ - "proc-macro2", - "quote", - "unicode-ident", -] - -[[package]] -name = "syn" -version = "2.0.90" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "919d3b74a5dd0ccd15aeb8f93e7006bd9e14c295087c9896a110f490752bcf31" -dependencies = [ - "proc-macro2", - "quote", - "unicode-ident", -] - -[[package]] -name = "synstructure" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.90", -] - -[[package]] -name = "target-triple" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42a4d50cdb458045afc8131fd91b64904da29548bcb63c7236e0844936c13078" - -[[package]] -name = "tempfile" -version = "3.15.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a8a559c81686f576e8cd0290cd2a24a2a9ad80c98b3478856500fcbd7acd704" -dependencies = [ - "cfg-if", - "fastrand 2.3.0", - "getrandom", - "once_cell", - "rustix 0.38.42", - "windows-sys 0.59.0", -] - -[[package]] -name = "termcolor" -version = "1.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06794f8f6c5c898b3275aebefa6b8a1cb24cd2c6c79397ab15774837a0bc5755" -dependencies = [ - "winapi-util", -] - -[[package]] -name = "termtree" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f50febec83f5ee1df3015341d8bd429f2d1cc62bcba7ea2076759d315084683" - -[[package]] -name = "test-log" -version = "0.2.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7f46083d221181166e5b6f6b1e5f1d499f3a76888826e6cb1d057554157cd0f" -dependencies = [ - "env_logger", - "test-log-macros", - "tracing-subscriber", -] - -[[package]] -name = "test-log-macros" -version = "0.2.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "888d0c3c6db53c0fdab160d2ed5e12ba745383d3e85813f2ea0f2b1475ab553f" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.90", -] - -[[package]] -name = "thiserror" -version = "1.0.69" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" -dependencies = [ - "thiserror-impl 1.0.69", -] - -[[package]] -name = "thiserror" -version = "2.0.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8fec2a1820ebd077e2b90c4df007bebf344cd394098a13c563957d0afc83ea47" -dependencies = [ - "thiserror-impl 2.0.6", -] - -[[package]] -name = "thiserror-impl" -version = "1.0.69" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.90", -] - -[[package]] -name = "thiserror-impl" -version = "2.0.6" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "d65750cab40f4ff1929fb1ba509e9914eb756131cef4210da8d5d700d26f6312" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.90", -] - -[[package]] -name = "thread_local" -version = "1.1.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b9ef9bad013ada3808854ceac7b46812a6465ba368859a37e2100283d2d719c" -dependencies = [ - "cfg-if", - "once_cell", -] - -[[package]] -name = "tikv-jemalloc-sys" -version = "0.6.0+5.3.0-1-ge13ca993e8ccb9ba9847cc330696e02839f328f7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd3c60906412afa9c2b5b5a48ca6a5abe5736aec9eb48ad05037a677e52e4e2d" -dependencies = [ - "cc", - "libc", -] - -[[package]] -name = "tikv-jemallocator" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4cec5ff18518d81584f477e9bfdf957f5bb0979b0bac3af4ca30b5b3ae2d2865" -dependencies = [ - "libc", - "tikv-jemalloc-sys", -] - -[[package]] -name = "time" -version = "0.3.37" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "35e7868883861bd0e56d9ac6efcaaca0d6d5d82a2a7ec8209ff492c07cf37b21" -dependencies = [ - "deranged", - "itoa", - "libc", - "num-conv", - "num_threads", - "powerfmt", - "serde", - "time-core", - "time-macros", -] - -[[package]] -name = "time-core" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" - -[[package]] -name = "time-macros" -version = "0.2.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2834e6017e3e5e4b9834939793b282bc03b37a3336245fa820e35e233e2a85de" -dependencies = [ - "num-conv", - "time-core", -] - -[[package]] -name = "tinystr" -version = "0.7.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9117f5d4db391c1cf6927e7bea3db74b9a1c1add8f7eda9ffd5364f40f57b82f" -dependencies = [ - "displaydoc", - "zerovec", -] - -[[package]] -name = "tinytemplate" -version = "1.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be4d6b5f19ff7664e8c98d03e2139cb510db9b0a60b55f8e8709b689d939b6bc" -dependencies = [ - "serde", - "serde_json", -] - -[[package]] -name = "tinyvec" -version = "1.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "445e881f4f6d382d5f27c034e25eb92edd7c784ceab92a0937db7f2e9471b938" -dependencies = [ - "tinyvec_macros", -] - -[[package]] -name = "tinyvec_macros" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" - -[[package]] -name = "tokio" -version = "1.42.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5cec9b21b0450273377fc97bd4c33a8acffc8c996c987a7c5b319a0083707551" -dependencies = [ - "backtrace", - "bytes", - "libc", - "mio", - "parking_lot", - "pin-project-lite", - "signal-hook-registry", - "socket2 0.5.8", - "tokio-macros", - "windows-sys 0.52.0", -] - -[[package]] -name = "tokio-macros" -version = "2.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.90", -] - -[[package]] -name = "tokio-stream" -version = "0.1.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"eca58d7bba4a75707817a2c44174253f9236b2d5fbd055602e9d5c07c139a047" -dependencies = [ - "futures-core", - "pin-project-lite", - "tokio", -] - -[[package]] -name = "tokio-util" -version = "0.7.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7fcaa8d55a2bdd6b83ace262b016eca0d79ee02818c5c1bcdf0305114081078" -dependencies = [ - "bytes", - "futures-core", - "futures-sink", - "pin-project-lite", - "tokio", -] - -[[package]] -name = "toml" -version = "0.8.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1ed1f98e3fdc28d6d910e6737ae6ab1a93bf1985935a1193e68f93eeb68d24e" -dependencies = [ - "serde", - "serde_spanned", - "toml_datetime", - "toml_edit", -] - -[[package]] -name = "toml_datetime" -version = "0.6.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0dd7358ecb8fc2f8d014bf86f6f638ce72ba252a2c3a2572f2a795f1d23efb41" -dependencies = [ - "serde", -] - -[[package]] -name = "toml_edit" -version = "0.22.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ae48d6208a266e853d946088ed816055e556cc6028c5e8e2b84d9fa5dd7c7f5" -dependencies = [ - "indexmap 2.7.0", - "serde", - "serde_spanned", - "toml_datetime", - "winnow", -] - -[[package]] -name = "tower" -version = "0.4.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" -dependencies = [ - "futures-core", - "futures-util", - "pin-project", - "pin-project-lite", - "tokio", - "tower-layer", - "tower-service", - "tracing", -] - -[[package]] -name = "tower-layer" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" - -[[package]] -name = "tower-lsp" -version = "0.20.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4ba052b54a6627628d9b3c34c176e7eda8359b7da9acd497b9f20998d118508" -dependencies = [ - "async-trait", - "auto_impl", - "bytes", - "dashmap 5.5.3", - "futures", - "httparse", - "lsp-types", - "memchr", - "serde", - "serde_json", - "tokio", - "tokio-util", - "tower", - "tower-lsp-macros", - "tracing", -] - -[[package]] -name = "tower-lsp-macros" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "84fd902d4e0b9a4b27f2f440108dc034e1758628a9b702f8ec61ad66355422fa" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.90", -] - -[[package]] -name = "tower-service" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" - -[[package]] -name = "tracing" -version = "0.1.41" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0" -dependencies = [ - "log", - "pin-project-lite", - "tracing-attributes", - "tracing-core", -] - -[[package]] -name = "tracing-appender" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3566e8ce28cc0a3fe42519fc80e6b4c943cc4c8cef275620eb8dac2d3d4e06cf" -dependencies = [ - "crossbeam-channel", - "thiserror 1.0.69", - "time", - "tracing-subscriber", -] - -[[package]] -name = "tracing-attributes" -version = "0.1.28" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "395ae124c09f9e6918a2310af6038fba074bcf474ac352496d5910dd59a2226d" -dependencies = [ - 
"proc-macro2", - "quote", - "syn 2.0.90", -] - -[[package]] -name = "tracing-bunyan-formatter" -version = "0.3.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d637245a0d8774bd48df6482e086c59a8b5348a910c3b0579354045a9d82411" -dependencies = [ - "ahash", - "gethostname", - "log", - "serde", - "serde_json", - "time", - "tracing", - "tracing-core", - "tracing-log 0.1.4", - "tracing-subscriber", -] - -[[package]] -name = "tracing-core" -version = "0.1.33" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e672c95779cf947c5311f83787af4fa8fffd12fb27e4993211a84bdfd9610f9c" -dependencies = [ - "once_cell", - "valuable", -] - -[[package]] -name = "tracing-log" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f751112709b4e791d8ce53e32c4ed2d353565a795ce84da2285393f41557bdf2" -dependencies = [ - "log", - "once_cell", - "tracing-core", -] - -[[package]] -name = "tracing-log" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" -dependencies = [ - "log", - "once_cell", - "tracing-core", -] - -[[package]] -name = "tracing-serde" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "704b1aeb7be0d0a84fc9828cae51dab5970fee5088f83d1dd7ee6f6246fc6ff1" -dependencies = [ - "serde", - "tracing-core", -] - -[[package]] -name = "tracing-subscriber" -version = "0.3.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8189decb5ac0fa7bc8b96b7cb9b2701d60d48805aca84a238004d665fcc4008" -dependencies = [ - "matchers", - "nu-ansi-term 0.46.0", - "once_cell", - "regex", - "serde", - "serde_json", - "sharded-slab", - "smallvec", - "thread_local", - "tracing", - "tracing-core", - "tracing-log 0.2.0", - "tracing-serde", -] - -[[package]] -name = "tracing-tree" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f459ca79f1b0d5f71c54ddfde6debfc59c8b6eeb46808ae492077f739dc7b49c" -dependencies = [ - "nu-ansi-term 0.50.1", - "time", - "tracing-core", - "tracing-log 0.2.0", - "tracing-subscriber", -] - -[[package]] -name = "tree-sitter" -version = "0.20.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e747b1f9b7b931ed39a548c1fae149101497de3c1fc8d9e18c62c1a66c683d3d" -dependencies = [ - "cc", - "regex", -] - -[[package]] -name = "tree_sitter_sql" -version = "0.0.0" -dependencies = [ - "cc", - "tree-sitter", -] - -[[package]] -name = "trybuild" -version = "1.0.101" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8dcd332a5496c026f1e14b7f3d2b7bd98e509660c04239c58b0ba38a12daded4" -dependencies = [ - "glob", - "serde", - "serde_derive", - "serde_json", - "target-triple", - "termcolor", - "toml", -] - -[[package]] -name = "typenum" -version = "1.17.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" - -[[package]] -name = "unicase" -version = "2.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e51b68083f157f853b6379db119d1c1be0e6e4dec98101079dec41f6f5cf6df" - -[[package]] -name = "unicode-bidi" -version = "0.3.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ab17db44d7388991a428b2ee655ce0c212e862eff1768a455c58f9aad6e7893" - -[[package]] -name = "unicode-bom" -version = 
"2.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7eec5d1121208364f6793f7d2e222bf75a915c19557537745b195b253dd64217" - -[[package]] -name = "unicode-ident" -version = "1.0.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "adb9e6ca4f869e1180728b7950e35922a7fc6397f7b641499e8f3ef06e50dc83" - -[[package]] -name = "unicode-normalization" -version = "0.1.24" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5033c97c4262335cded6d6fc3e5c18ab755e1a3dc96376350f3d8e9f009ad956" -dependencies = [ - "tinyvec", -] - -[[package]] -name = "unicode-properties" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e70f2a8b45122e719eb623c01822704c4e0907e7e426a05927e1a1cfff5b75d0" - -[[package]] -name = "unicode-segmentation" -version = "1.12.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493" - -[[package]] -name = "unicode-width" -version = "0.1.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af" - -[[package]] -name = "unicode_categories" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39ec24b3121d976906ece63c9daad25b85969647682eee313cb5779fdd69e14e" - -[[package]] -name = "url" -version = "2.5.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60" -dependencies = [ - "form_urlencoded", - "idna", - "percent-encoding", - "serde", -] - -[[package]] -name = "utf16_iter" -version = "1.0.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8232dd3cdaed5356e0f716d285e4b40b932ac434100fe9b7e0e8e935b9e6246" - -[[package]] -name = "utf8_iter" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" - -[[package]] -name = "utf8parse" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" - -[[package]] -name = "uuid" -version = "1.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8c5f0a0af699448548ad1a2fbf920fb4bee257eae39953ba95cb84891a0446a" -dependencies = [ - "getrandom", -] - -[[package]] -name = "valuable" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" - -[[package]] -name = "value-bag" -version = "1.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ef4c4aa54d5d05a279399bfa921ec387b7aba77caf7a682ae8d86785b8fdad2" - -[[package]] -name = "vcpkg" -version = "0.2.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" - -[[package]] -name = "version_check" -version = "0.9.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" - -[[package]] -name = "vte" -version = "0.11.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f5022b5fbf9407086c180e9557be968742d839e68346af7792b8592489732197" -dependencies = [ - 
"utf8parse", - "vte_generate_state_changes", -] - -[[package]] -name = "vte_generate_state_changes" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e369bee1b05d510a7b4ed645f5faa90619e05437111783ea5848f28d97d3c2e" -dependencies = [ - "proc-macro2", - "quote", -] - -[[package]] -name = "wait-timeout" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09ac3b126d3914f9849036f826e054cbabdc8519970b8998ddaf3b5bd3c65f11" -dependencies = [ - "libc", -] - -[[package]] -name = "waker-fn" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "317211a0dc0ceedd78fb2ca9a44aed3d7b9b26f81870d485c07122b4350673b7" - -[[package]] -name = "walkdir" -version = "2.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" -dependencies = [ - "same-file", - "winapi-util", -] - -[[package]] -name = "wasi" -version = "0.11.0+wasi-snapshot-preview1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" - -[[package]] -name = "wasite" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b" - -[[package]] -name = "wasm-bindgen" -version = "0.2.99" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a474f6281d1d70c17ae7aa6a613c87fce69a127e2624002df63dcb39d6cf6396" -dependencies = [ - "cfg-if", - "once_cell", - "wasm-bindgen-macro", -] - -[[package]] -name = "wasm-bindgen-backend" -version = "0.2.99" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f89bb38646b4f81674e8f5c3fb81b562be1fd936d84320f3264486418519c79" -dependencies = [ - "bumpalo", - "log", - "proc-macro2", - "quote", - "syn 2.0.90", - "wasm-bindgen-shared", -] - -[[package]] -name = "wasm-bindgen-futures" -version = "0.4.49" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38176d9b44ea84e9184eff0bc34cc167ed044f816accfe5922e54d84cf48eca2" -dependencies = [ - "cfg-if", - "js-sys", - "once_cell", - "wasm-bindgen", - "web-sys", -] - -[[package]] -name = "wasm-bindgen-macro" -version = "0.2.99" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2cc6181fd9a7492eef6fef1f33961e3695e4579b9872a6f7c83aee556666d4fe" -dependencies = [ - "quote", - "wasm-bindgen-macro-support", -] - -[[package]] -name = "wasm-bindgen-macro-support" -version = "0.2.99" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30d7a95b763d3c45903ed6c81f156801839e5ee968bb07e534c44df0fcd330c2" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.90", - "wasm-bindgen-backend", - "wasm-bindgen-shared", -] - -[[package]] -name = "wasm-bindgen-shared" -version = "0.2.99" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "943aab3fdaaa029a6e0271b35ea10b72b943135afe9bffca82384098ad0e06a6" - -[[package]] -name = "web-sys" -version = "0.3.76" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04dd7223427d52553d3702c004d3b2fe07c148165faa56313cb00211e31c12bc" -dependencies = [ - "js-sys", - "wasm-bindgen", -] - -[[package]] -name = "which" -version = "4.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"87ba24419a2078cd2b0f2ede2691b6c66d8e47836da3b6db8265ebad47afbfc7" -dependencies = [ - "either", - "home", - "once_cell", - "rustix 0.38.42", -] - -[[package]] -name = "whoami" -version = "1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "372d5b87f58ec45c384ba03563b03544dc5fadc3983e434b286913f5b4a9bb6d" -dependencies = [ - "redox_syscall", - "wasite", -] - -[[package]] -name = "winapi" -version = "0.3.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" -dependencies = [ - "winapi-i686-pc-windows-gnu", - "winapi-x86_64-pc-windows-gnu", -] - -[[package]] -name = "winapi-i686-pc-windows-gnu" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" - -[[package]] -name = "winapi-util" -version = "0.1.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" -dependencies = [ - "windows-sys 0.59.0", -] - -[[package]] -name = "winapi-x86_64-pc-windows-gnu" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" - -[[package]] -name = "windows-sys" -version = "0.48.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" -dependencies = [ - "windows-targets 0.48.5", -] - -[[package]] -name = "windows-sys" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" -dependencies = [ - "windows-targets 0.52.6", -] - -[[package]] -name = "windows-sys" -version = "0.59.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" -dependencies = [ - "windows-targets 0.52.6", -] - -[[package]] -name = "windows-targets" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" -dependencies = [ - "windows_aarch64_gnullvm 0.48.5", - "windows_aarch64_msvc 0.48.5", - "windows_i686_gnu 0.48.5", - "windows_i686_msvc 0.48.5", - "windows_x86_64_gnu 0.48.5", - "windows_x86_64_gnullvm 0.48.5", - "windows_x86_64_msvc 0.48.5", -] - -[[package]] -name = "windows-targets" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" -dependencies = [ - "windows_aarch64_gnullvm 0.52.6", - "windows_aarch64_msvc 0.52.6", - "windows_i686_gnu 0.52.6", - "windows_i686_gnullvm", - "windows_i686_msvc 0.52.6", - "windows_x86_64_gnu 0.52.6", - "windows_x86_64_gnullvm 0.52.6", - "windows_x86_64_msvc 0.52.6", -] - -[[package]] -name = "windows_aarch64_gnullvm" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" - -[[package]] -name = "windows_aarch64_gnullvm" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" - -[[package]] -name = "windows_aarch64_msvc" -version = "0.48.5" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" - -[[package]] -name = "windows_aarch64_msvc" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" - -[[package]] -name = "windows_i686_gnu" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" - -[[package]] -name = "windows_i686_gnu" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" - -[[package]] -name = "windows_i686_gnullvm" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" - -[[package]] -name = "windows_i686_msvc" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" - -[[package]] -name = "windows_i686_msvc" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" - -[[package]] -name = "windows_x86_64_gnu" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" - -[[package]] -name = "windows_x86_64_gnu" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" - -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" - -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" - -[[package]] -name = "windows_x86_64_msvc" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" - -[[package]] -name = "windows_x86_64_msvc" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" - -[[package]] -name = "winnow" -version = "0.6.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36c1fec1a2bb5866f07c25f68c26e565c4c200aebb96d7e55710c19d3e8ac49b" -dependencies = [ - "memchr", -] - -[[package]] -name = "write-json" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23f6174b2566cc4a74f95e1367ec343e7fa80c93cc8087f5c4a3d6a1088b2118" - -[[package]] -name = "write16" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d1890f4022759daae28ed4fe62859b1236caebfc61ede2f63ed4e695f3f6d936" - -[[package]] -name = "writeable" -version = "0.5.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51" - -[[package]] -name = "xflags" -version = "0.3.2" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d9e15fbb3de55454b0106e314b28e671279009b363e6f1d8e39fdc3bf048944" -dependencies = [ - "xflags-macros", -] - -[[package]] -name = "xflags-macros" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "672423d4fea7ffa2f6c25ba60031ea13dc6258070556f125cc4d790007d4a155" - -[[package]] -name = "xshell" -version = "0.2.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e7290c623014758632efe00737145b6867b66292c42167f2ec381eb566a373d" -dependencies = [ - "xshell-macros", -] - -[[package]] -name = "xshell-macros" -version = "0.2.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32ac00cd3f8ec9c1d33fb3e7958a82df6989c42d747bd326c822b1d625283547" - -[[package]] -name = "xtask" -version = "0.0.0" -dependencies = [ - "anyhow", - "flate2", - "time", - "write-json", - "xflags", - "xshell", - "zip", -] - -[[package]] -name = "xtask_codegen" -version = "0.0.0" -dependencies = [ - "anyhow", - "biome_js_factory", - "biome_js_formatter", - "biome_js_syntax", - "biome_rowan", - "biome_string_case", - "bpaf", - "pgt_analyse", - "pgt_analyser", - "pgt_workspace", - "proc-macro2", - "pulldown-cmark", - "quote", - "xtask", -] - -[[package]] -name = "yoke" -version = "0.7.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "120e6aef9aa629e3d4f52dc8cc43a015c7724194c97dfaf45180d2daf2b77f40" -dependencies = [ - "serde", - "stable_deref_trait", - "yoke-derive", - "zerofrom", -] - -[[package]] -name = "yoke-derive" -version = "0.7.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.90", - "synstructure", -] - -[[package]] -name = "zerocopy" -version = "0.7.35" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0" -dependencies = [ - "byteorder", - "zerocopy-derive", -] - -[[package]] -name = "zerocopy-derive" -version = "0.7.35" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.90", -] - -[[package]] -name = "zerofrom" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cff3ee08c995dee1859d998dea82f7374f2826091dd9cd47def953cae446cd2e" -dependencies = [ - "zerofrom-derive", -] - -[[package]] -name = "zerofrom-derive" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "595eed982f7d355beb85837f651fa22e90b3c044842dc7f2c2842c086f295808" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.90", - "synstructure", -] - -[[package]] -name = "zeroize" -version = "1.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde" - -[[package]] -name = "zerovec" -version = "0.10.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa2b893d79df23bfb12d5461018d408ea19dfafe76c2c7ef6d4eba614f8ff079" -dependencies = [ - "yoke", - "zerofrom", - "zerovec-derive", -] - -[[package]] -name = "zerovec-derive" -version = "0.10.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.90", -] - -[[package]] -name = "zip" -version = "0.6.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "760394e246e4c28189f19d488c058bf16f564016aefac5d32bb1f3b51d5e9261" -dependencies = [ - "byteorder", - "crc32fast", - "crossbeam-utils", - "flate2", - "time", -] diff --git a/Cargo.toml b/Cargo.toml deleted file mode 100644 index aaaa9035..00000000 --- a/Cargo.toml +++ /dev/null @@ -1,87 +0,0 @@ -[workspace] -members = ["crates/*", "lib/*", "xtask/codegen", "xtask/rules_check", "docs/codegen"] -resolver = "2" - -[workspace.package] -authors = ["Supabase Communnity"] -categories = ["development-tools", "postgres", "supabase"] -edition = "2024" -homepage = "https://supabase.com/" -keywords = ["linter", "typechecker", "postgres", "language-server"] -license = "MIT" -repository = "https://github.com/supabase-community/postgres-language-server" -rust-version = "1.86.0" - -[workspace.dependencies] -# supporting crates unrelated to postgres -anyhow = "1.0.92" -biome_deserialize = "0.6.0" -biome_deserialize_macros = "0.6.0" -biome_js_factory = "0.5.7" -biome_js_formatter = "0.5.7" -biome_js_syntax = "0.5.7" -biome_rowan = "0.5.7" -biome_string_case = "0.5.8" -bpaf = { version = "0.9.15", features = ["derive"] } -crossbeam = "0.8.4" -enumflags2 = "0.7.11" -ignore = "0.4.23" -indexmap = { version = "2.6.0", features = ["serde"] } -insta = "1.31.0" -pg_query = "6.1.0" -proc-macro2 = "1.0.66" -quote = "1.0.33" -rayon = "1.10.0" -regex = "1.11.1" -rustc-hash = "2.0.0" -schemars = { version = "0.8.22", features = ["indexmap2", "smallvec"] } -serde = "1.0.195" -serde_json = "1.0.114" -similar = "2.6.0" -smallvec = { version = "1.13.2", features = ["union", "const_new", "serde"] } -strum = { version = "0.27.1", features = ["derive"] } -# this will use tokio if available, otherwise async-std -sqlx = { version = "0.8.2", features = ["runtime-tokio", "runtime-async-std", "postgres", "json"] } -syn = "1.0.109" -termcolor = "1.4.1" -test-log = "0.2.17" -tokio = { version = "1.40.0", features = ["full"] } -tracing = { version = "0.1.40", default-features = false, features = ["std"] } -tracing-bunyan-formatter = { version = "0.3.10 " } -tracing-subscriber = "0.3.18" -tree-sitter = "0.20.10" -tree_sitter_sql = { path = "./lib/tree_sitter_sql", version = "0.0.0" } -unicode-width = "0.1.12" - -# postgres specific crates -pgt_analyse = { path = "./crates/pgt_analyse", version = "0.0.0" } -pgt_analyser = { path = "./crates/pgt_analyser", version = "0.0.0" } -pgt_cli = { path = "./crates/pgt_cli", version = "0.0.0" } -pgt_completions = { path = "./crates/pgt_completions", version = "0.0.0" } -pgt_configuration = { path = "./crates/pgt_configuration", version = "0.0.0" } -pgt_console = { path = "./crates/pgt_console", version = "0.0.0" } -pgt_diagnostics = { path = "./crates/pgt_diagnostics", version = "0.0.0" } -pgt_diagnostics_categories = { path = "./crates/pgt_diagnostics_categories", version = "0.0.0" } -pgt_diagnostics_macros = { path = "./crates/pgt_diagnostics_macros", version = "0.0.0" } -pgt_flags = { path = "./crates/pgt_flags", version = "0.0.0" } -pgt_fs = { path = "./crates/pgt_fs", version = "0.0.0" } -pgt_lexer = { path = "./crates/pgt_lexer", version = "0.0.0" } -pgt_lexer_codegen = { path = "./crates/pgt_lexer_codegen", version = "0.0.0" } -pgt_lsp = { path = "./crates/pgt_lsp", version = "0.0.0" } -pgt_markup = { path = "./crates/pgt_markup", 
version = "0.0.0" } -pgt_query_ext = { path = "./crates/pgt_query_ext", version = "0.0.0" } -pgt_query_ext_codegen = { path = "./crates/pgt_query_ext_codegen", version = "0.0.0" } -pgt_query_proto_parser = { path = "./crates/pgt_query_proto_parser", version = "0.0.0" } -pgt_schema_cache = { path = "./crates/pgt_schema_cache", version = "0.0.0" } -pgt_statement_splitter = { path = "./crates/pgt_statement_splitter", version = "0.0.0" } -pgt_text_edit = { path = "./crates/pgt_text_edit", version = "0.0.0" } -pgt_text_size = { path = "./crates/pgt_text_size", version = "0.0.0" } -pgt_treesitter_queries = { path = "./crates/pgt_treesitter_queries", version = "0.0.0" } -pgt_typecheck = { path = "./crates/pgt_typecheck", version = "0.0.0" } -pgt_workspace = { path = "./crates/pgt_workspace", version = "0.0.0" } - -pgt_test_macros = { path = "./crates/pgt_test_macros" } -pgt_test_utils = { path = "./crates/pgt_test_utils" } - -[profile.dev.package] -insta.opt-level = 3 diff --git a/LICENSE b/LICENSE deleted file mode 100644 index 065fae0f..00000000 --- a/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2023 Philipp Steinrötter - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/README.md b/README.md deleted file mode 100644 index 162bb9c0..00000000 --- a/README.md +++ /dev/null @@ -1,42 +0,0 @@ -![Postgres Language Server](/docs/images/pls-github.png) - -# Postgres Language Server - -A collection of language tools and a Language Server Protocol (LSP) implementation for Postgres, focusing on developer experience and reliable SQL tooling. - -Docs: [pgtools.dev](https://pgtools.dev/) - -Install: [instructions](https://pgtools.dev/#installation) - -- [CLI releases](https://github.com/supabase-community/postgres-language-server/releases) -- [VSCode](https://marketplace.visualstudio.com/items?itemName=Supabase.postgrestools) -- [Neovim](https://github.com/neovim/nvim-lspconfig/blob/master/doc/configs.md#postgres_lsp) -- [Zed](https://github.com/LoamStudios/zed-postgres-language-server) - -## Overview -LSP Demo | CLI Demo -:-------------------------:|:-------------------------: -![LSP Demo](/docs/images/lsp-demo.gif) | ![CLI Demo](/docs/images/cli-demo.png) - -This project provides a toolchain for Postgres development, built on Postgres' own parser `libpg_query` to ensure 100% syntax compatibility. It is built on a Server-Client architecture with a transport-agnostic design. 
This means all features can be accessed not only through the [Language Server Protocol](https://microsoft.github.io/language-server-protocol/), but also through other interfaces like a CLI, HTTP APIs, or a WebAssembly module. The goal is to make all the great Postgres tooling out there as accessible as possible, and to build anything that is missing ourselves. - -The following features are implemented: -- Autocompletion -- Syntax Error Highlighting -- Type-checking (via `EXPLAIN` error insights) -- Linter, inspired by [Squawk](https://squawkhq.com) - -Our current focus is on refining and enhancing these core features while building a robust and easily accessible infrastructure. For future plans and opportunities to contribute, please check out the issues and discussions. Any contributions are welcome! - -## Contributors - -- [psteinroe](https://github.com/psteinroe) -- [juleswritescode](https://github.com/juleswritescode) - -## Acknowledgements - -A big thanks to the following projects, without which this project wouldn't have been possible: - -- [libpg_query](https://github.com/pganalyze/libpg_query): For extracting the Postgres' parser -- [Biome](https://github.com/biomejs/biome): For implementing a toolchain infrastructure we could copy from -- [Squawk](https://github.com/sbdchd/squawk): For the linter inspiration diff --git a/biome.jsonc b/biome.jsonc deleted file mode 100644 index 582bee9b..00000000 --- a/biome.jsonc +++ /dev/null @@ -1,31 +0,0 @@ -{ - "$schema": "./node_modules/@biomejs/biome/configuration_schema.json", - "vcs": { - "enabled": false, - "clientKind": "git", - "useIgnoreFile": true - }, - "files": { - "ignoreUnknown": false, - "ignore": [], - "include": ["/packages/**/*"] - }, - "formatter": { - "enabled": true, - "indentStyle": "tab" - }, - "organizeImports": { - "enabled": true - }, - "linter": { - "enabled": true, - "rules": { - "recommended": true - } - }, - "javascript": { - "formatter": { - "quoteStyle": "double" - } - } -} diff --git a/bun.lock b/bun.lock deleted file mode 100644 index 705686b7..00000000 --- a/bun.lock +++ /dev/null @@ -1,166 +0,0 @@ -{ - "lockfileVersion": 1, - "workspaces": { - "": { - "name": "postgres_lsp", - "devDependencies": { - "@biomejs/biome": "1.9.4", - "@types/bun": "latest" - }, - "peerDependencies": { - "typescript": "^5" - } - }, - "packages/@postgrestools/backend-jsonrpc": { - "name": "@postgrestools/backend-jsonrpc", - "optionalDependencies": { - "@postgrestools/cli-darwin-arm64": "", - "@postgrestools/cli-darwin-x64": "", - "@postgrestools/cli-linux-arm64": "", - "@postgrestools/cli-linux-x64": "", - "@postgrestools/cli-win32-arm64": "", - "@postgrestools/cli-win32-x64": "" - } - }, - "packages/@postgrestools/postgrestools": { - "name": "@postgrestools/postgrestools", - "bin": { - "postgrestools": "bin/postgrestools" - }, - "optionalDependencies": { - "@postgrestools/cli-aarch64-apple-darwin": "", - "@postgrestools/cli-aarch64-linux-gnu": "", - "@postgrestools/cli-aarch64-windows-msvc": "", - "@postgrestools/cli-x86_64-apple-darwin": "", - "@postgrestools/cli-x86_64-linux-gnu": "", - "@postgrestools/cli-x86_64-windows-msvc": "" - } - } - }, - "packages": { - "@biomejs/biome": [ - "@biomejs/biome@1.9.4", - "", - { - "optionalDependencies": { - "@biomejs/cli-darwin-arm64": "1.9.4", - "@biomejs/cli-darwin-x64": "1.9.4", - "@biomejs/cli-linux-arm64": "1.9.4", - "@biomejs/cli-linux-arm64-musl": "1.9.4", - "@biomejs/cli-linux-x64": "1.9.4", - "@biomejs/cli-linux-x64-musl": "1.9.4", - "@biomejs/cli-win32-arm64": "1.9.4", - 
"@biomejs/cli-win32-x64": "1.9.4" - }, - "bin": { "biome": "bin/biome" } - }, - "sha512-1rkd7G70+o9KkTn5KLmDYXihGoTaIGO9PIIN2ZB7UJxFrWw04CZHPYiMRjYsaDvVV7hP1dYNRLxSANLaBFGpog==" - ], - - "@biomejs/cli-darwin-arm64": [ - "@biomejs/cli-darwin-arm64@1.9.4", - "", - { "os": "darwin", "cpu": "arm64" }, - "sha512-bFBsPWrNvkdKrNCYeAp+xo2HecOGPAy9WyNyB/jKnnedgzl4W4Hb9ZMzYNbf8dMCGmUdSavlYHiR01QaYR58cw==" - ], - - "@biomejs/cli-darwin-x64": [ - "@biomejs/cli-darwin-x64@1.9.4", - "", - { "os": "darwin", "cpu": "x64" }, - "sha512-ngYBh/+bEedqkSevPVhLP4QfVPCpb+4BBe2p7Xs32dBgs7rh9nY2AIYUL6BgLw1JVXV8GlpKmb/hNiuIxfPfZg==" - ], - - "@biomejs/cli-linux-arm64": [ - "@biomejs/cli-linux-arm64@1.9.4", - "", - { "os": "linux", "cpu": "arm64" }, - "sha512-fJIW0+LYujdjUgJJuwesP4EjIBl/N/TcOX3IvIHJQNsAqvV2CHIogsmA94BPG6jZATS4Hi+xv4SkBBQSt1N4/g==" - ], - - "@biomejs/cli-linux-arm64-musl": [ - "@biomejs/cli-linux-arm64-musl@1.9.4", - "", - { "os": "linux", "cpu": "arm64" }, - "sha512-v665Ct9WCRjGa8+kTr0CzApU0+XXtRgwmzIf1SeKSGAv+2scAlW6JR5PMFo6FzqqZ64Po79cKODKf3/AAmECqA==" - ], - - "@biomejs/cli-linux-x64": [ - "@biomejs/cli-linux-x64@1.9.4", - "", - { "os": "linux", "cpu": "x64" }, - "sha512-lRCJv/Vi3Vlwmbd6K+oQ0KhLHMAysN8lXoCI7XeHlxaajk06u7G+UsFSO01NAs5iYuWKmVZjmiOzJ0OJmGsMwg==" - ], - - "@biomejs/cli-linux-x64-musl": [ - "@biomejs/cli-linux-x64-musl@1.9.4", - "", - { "os": "linux", "cpu": "x64" }, - "sha512-gEhi/jSBhZ2m6wjV530Yy8+fNqG8PAinM3oV7CyO+6c3CEh16Eizm21uHVsyVBEB6RIM8JHIl6AGYCv6Q6Q9Tg==" - ], - - "@biomejs/cli-win32-arm64": [ - "@biomejs/cli-win32-arm64@1.9.4", - "", - { "os": "win32", "cpu": "arm64" }, - "sha512-tlbhLk+WXZmgwoIKwHIHEBZUwxml7bRJgk0X2sPyNR3S93cdRq6XulAZRQJ17FYGGzWne0fgrXBKpl7l4M87Hg==" - ], - - "@biomejs/cli-win32-x64": [ - "@biomejs/cli-win32-x64@1.9.4", - "", - { "os": "win32", "cpu": "x64" }, - "sha512-8Y5wMhVIPaWe6jw2H+KlEm4wP/f7EW3810ZLmDlrEEy5KvBsb9ECEfu/kMWD484ijfQ8+nIi0giMgu9g1UAuuA==" - ], - - "@postgrestools/backend-jsonrpc": [ - "@postgrestools/backend-jsonrpc@workspace:packages/@postgrestools/backend-jsonrpc" - ], - - "@postgrestools/postgrestools": [ - "@postgrestools/postgrestools@workspace:packages/@postgrestools/postgrestools" - ], - - "@types/bun": [ - "@types/bun@1.2.5", - "", - { "dependencies": { "bun-types": "1.2.5" } }, - "sha512-w2OZTzrZTVtbnJew1pdFmgV99H0/L+Pvw+z1P67HaR18MHOzYnTYOi6qzErhK8HyT+DB782ADVPPE92Xu2/Opg==" - ], - - "@types/node": [ - "@types/node@22.13.10", - "", - { "dependencies": { "undici-types": "~6.20.0" } }, - "sha512-I6LPUvlRH+O6VRUqYOcMudhaIdUVWfsjnZavnsraHvpBwaEyMN29ry+0UVJhImYL16xsscu0aske3yA+uPOWfw==" - ], - - "@types/ws": [ - "@types/ws@8.5.14", - "", - { "dependencies": { "@types/node": "*" } }, - "sha512-bd/YFLW+URhBzMXurx7lWByOu+xzU9+kb3RboOteXYDfW+tr+JZa99OyNmPINEGB/ahzKrEuc8rcv4gnpJmxTw==" - ], - - "bun-types": [ - "bun-types@1.2.5", - "", - { "dependencies": { "@types/node": "*", "@types/ws": "~8.5.10" } }, - "sha512-3oO6LVGGRRKI4kHINx5PIdIgnLRb7l/SprhzqXapmoYkFl5m4j6EvALvbDVuuBFaamB46Ap6HCUxIXNLCGy+tg==" - ], - - "typescript": [ - "typescript@5.8.2", - "", - { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, - "sha512-aJn6wq13/afZp/jT9QZmwEjDqqvSGp1VT5GVg+f/t6/oVyrgXM6BY1h9BRh/O5p3PlUPAe+WuiEZOmb/49RqoQ==" - ], - - "undici-types": [ - "undici-types@6.20.0", - "", - {}, - "sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg==" - ] - } -} diff --git a/checking_migrations/index.html b/checking_migrations/index.html new file mode 100644 index 00000000..61d626e7 --- /dev/null +++ 
b/checking_migrations/index.html @@ -0,0 +1,155 @@

Linting Migrations - Postgres Language Server

Linting Migrations

Postgres Language Tools comes with a check command that can be integrated into your development workflow to catch problematic schema changes and encourage best practices.

To run it, point it at your migrations directory:

postgrestools check supabase/migrations

When setting it up in an existing project, you might want to ignore all migrations that have already been applied. To do so, add migrationsDir and after to your postgrestools.jsonc file:

{
    "migrations": {
        "migrationsDir": "supabase/migrations",
        "after": 1740868021
    }
}

Alternatively, pass them directly:

postgrestools check supabase/migrations --migrations-dir="supabase/migrations" --after=1740868021

This will only check migrations after the specified timestamp.
For pre-commit hooks and when working locally, use --staged to only lint files that have been staged. In CI environments, you most likely want to use --changed to only lint files that have been changed compared to your vcs.default_branch configuration. If default_branch is not set in your postgrestools.jsonc, use --since=REF to specify the base branch to compare against.
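
For example, the following invocations sketch both workflows; the supabase/migrations path and the main base branch are illustrative:

postgrestools check --staged supabase/migrations

postgrestools check --changed --since=main supabase/migrations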

diff --git a/cli_reference/index.html b/cli_reference/index.html new file mode 100644 index 00000000..eb684311 --- /dev/null +++ b/cli_reference/index.html @@ -0,0 +1,376 @@

CLI - Postgres Language Server

CLI Reference

Command summary

postgrestools

PostgresTools official CLI. Use it to check the health of your project, or run it to check single files.

Usage: postgrestools COMMAND ...

Available options:

• -h, --help — Prints help information
• -V, --version — Prints version information

Available commands:

• version — Shows the version information and quits.
• check — Runs all checks on the requested files.
• start — Starts the daemon server process.
• stop — Stops the daemon server process.
• init — Bootstraps a new project. Creates a configuration file with some defaults.
• lsp-proxy — Acts as a server for the Language Server Protocol over stdin/stdout.
• clean — Cleans the logs emitted by the daemon.

postgrestools version

Shows the version information and quits.

Usage: postgrestools version

Global options applied to all commands

• --colors=<off|force> — Set the formatting mode for markup: "off" prints everything as plain text, "force" forces the formatting of markup using ANSI even if the console output is determined to be incompatible.
• --use-server — Connect to a running instance of the daemon server.
• --skip-db — Skip connecting to the database and only run checks that don't require a database connection.
• --verbose — Print additional diagnostics, and some diagnostics show more information. Also, print out what files were processed and which ones were modified.
• --config-path=PATH — Set the file path to the configuration file, or the directory path to find postgrestools.jsonc. If used, it disables the default configuration file resolution.
• --max-diagnostics=<none|<NUMBER>> — Cap the number of diagnostics displayed. When none is provided, the limit is lifted. [default: 20]
• --skip-errors — Skip over files containing syntax errors instead of emitting an error diagnostic.
• --no-errors-on-unmatched — Silence errors that would be emitted in case no files were processed during the execution of the command.
• --error-on-warnings — Tell Postgres Tools to exit with an error code if some diagnostics emit warnings.
• --reporter=<json|json-pretty|github|junit|summary|gitlab> — Allows changing how diagnostics and the summary are reported.
• --log-level=<none|debug|info|warn|error> — The level of logging. In order, from the most verbose to the least verbose: debug, info, warn, error. The value none won't show any logging. [default: none]
• --log-kind=<pretty|compact|json> — How the logs should look. [default: pretty]
• --diagnostic-level=<info|warn|error> — The level of diagnostics to show. In order, from the lowest to the most important: info, warn, error. Passing --diagnostic-level=error will cause Postgres Tools to print only diagnostics that contain errors. [default: info]
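
As an illustrative combination of these global options, a CI run that wants GitHub-annotated output, an uncapped diagnostic list, and no database connection might look like this (the path is a placeholder):

postgrestools check --reporter=github --max-diagnostics=none --skip-db supabase/migrations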

Available options:

• -h, --help — Prints help information

postgrestools check

Runs all checks on the requested files.

Usage: postgrestools check [--staged] [--changed] [--since=REF] [PATH]...

These options mirror the configuration contained inside the configuration file:

• --vcs-enabled=<true|false> — Whether to integrate with the VCS client
• --vcs-client-kind=<git> — The kind of client.
• --vcs-use-ignore-file=<true|false> — Whether to use the VCS ignore file. When true, we will ignore the files specified in the ignore file.
• --vcs-root=PATH — The folder where we should check for VCS files. By default, we will use the same folder where postgrestools.jsonc was found.

If we can't find the configuration, we will attempt to use the current working directory. If no current working directory can be found, we won't use the VCS integration, and a diagnostic will be emitted.

• --vcs-default-branch=BRANCH — The main branch of the project
• --files-max-size=NUMBER — The maximum allowed size for source code files in bytes. Files above this limit will be ignored for performance reasons. Defaults to 1 MiB.
• --migrations-dir=ARG — The directory where the migration files are stored
• --after=ARG — Ignore any migrations before this timestamp
• --host=ARG — The host of the database.
• --port=ARG — The port of the database.
• --username=ARG — The username to connect to the database.
• --password=ARG — The password to connect to the database.
• --database=ARG — The name of the database.
• --conn_timeout_secs=ARG — The connection timeout in seconds. [default: 10]
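
As a sketch, the connection options above can also be passed directly on the command line; the host, credentials, and path here are placeholders:

postgrestools check --host=127.0.0.1 --port=5432 --username=postgres --database=postgres supabase/migrations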

Global options applied to all commands

• --colors=<off|force> — Set the formatting mode for markup: "off" prints everything as plain text, "force" forces the formatting of markup using ANSI even if the console output is determined to be incompatible.
• --use-server — Connect to a running instance of the daemon server.
• --skip-db — Skip connecting to the database and only run checks that don't require a database connection.
• --verbose — Print additional diagnostics, and some diagnostics show more information. Also, print out what files were processed and which ones were modified.
• --config-path=PATH — Set the file path to the configuration file, or the directory path to find postgrestools.jsonc. If used, it disables the default configuration file resolution.
• --max-diagnostics=<none|<NUMBER>> — Cap the number of diagnostics displayed. When none is provided, the limit is lifted. [default: 20]
• --skip-errors — Skip over files containing syntax errors instead of emitting an error diagnostic.
• --no-errors-on-unmatched — Silence errors that would be emitted in case no files were processed during the execution of the command.
• --error-on-warnings — Tell Postgres Tools to exit with an error code if some diagnostics emit warnings.
• --reporter=<json|json-pretty|github|junit|summary|gitlab> — Allows changing how diagnostics and the summary are reported.
• --log-level=<none|debug|info|warn|error> — The level of logging. In order, from the most verbose to the least verbose: debug, info, warn, error. The value none won't show any logging. [default: none]
• --log-kind=<pretty|compact|json> — How the logs should look. [default: pretty]
• --diagnostic-level=<info|warn|error> — The level of diagnostics to show. In order, from the lowest to the most important: info, warn, error. Passing --diagnostic-level=error will cause Postgres Tools to print only diagnostics that contain errors. [default: info]

Available positional items:

• PATH — Single file, single path or list of paths

Available options:

• --stdin-file-path=PATH — Use this option when you want to check code piped from stdin, and print the output to stdout. The file doesn't need to exist on disk; what matters is the extension of the file. Based on the extension, we know how to check the code.

Example: echo 'select 1;' | pgt_cli check --stdin-file-path=test.sql

• --staged — When set to true, only the files that have been staged (the ones prepared to be committed) will be linted. This option should be used when working locally.
• --changed — When set to true, only the files that have been changed compared to your defaultBranch configuration will be linted. This option should be used in CI environments.
• --since=REF — Use this to specify the base branch to compare against when you're using the --changed flag and the defaultBranch is not set in your postgrestools.jsonc.
• -h, --help — Prints help information

postgrestools start

Starts the daemon server process.

Usage: postgrestools start [--config-path=PATH]

Available options:

• --log-prefix-name=STRING — Allows changing the prefix applied to the file name of the logs. Uses environment variable PGT_LOG_PREFIX_NAME. [default: server.log]
• --log-path=PATH — Allows changing the folder where logs are stored. Uses environment variable PGT_LOG_PATH.
• --config-path=PATH — Allows setting a custom file path to the configuration file, or a custom directory path to find postgrestools.jsonc. Uses environment variable PGT_CONFIG_PATH.
• -h, --help — Prints help information
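
A minimal daemon round trip might look like the following sketch: it starts the server, runs a check through it via the global --use-server flag (the path is illustrative), and stops it again:

postgrestools start

postgrestools check --use-server supabase/migrations

postgrestools stop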

postgrestools stop

Stops the daemon server process.

Usage: postgrestools stop

Available options:

• -h, --help — Prints help information

postgrestools init

Bootstraps a new project. Creates a configuration file with some defaults.

Usage: postgrestools init

Available options:

• -h, --help — Prints help information

postgrestools lsp-proxy

Acts as a server for the Language Server Protocol over stdin/stdout.

Usage: postgrestools lsp-proxy [--config-path=PATH]

Available options:

• --log-prefix-name=STRING — Allows changing the prefix applied to the file name of the logs. Uses environment variable PGT_LOG_PREFIX_NAME. [default: server.log]
• --log-path=PATH — Allows changing the folder where logs are stored. Uses environment variable PGT_LOG_PATH.
• --config-path=PATH — Allows setting a custom file path to the configuration file, or a custom directory path to find postgrestools.jsonc. Uses environment variable PGT_CONFIG_PATH.
• -h, --help — Prints help information

postgrestools clean

Cleans the logs emitted by the daemon.

Usage: postgrestools clean

Available options:

• -h, --help — Prints help information
+ + + + + + + + + + diff --git a/cliff.toml b/cliff.toml deleted file mode 100644 index 3406b80c..00000000 --- a/cliff.toml +++ /dev/null @@ -1,91 +0,0 @@ -# git-cliff ~ default configuration file -# https://git-cliff.org/docs/configuration -# -# Lines starting with "#" are comments. -# Configuration options are organized into tables and keys. -# See documentation for more information on available options. - -[changelog] -# template for the changelog header -header = """ -# Postgres Language Server\n -A collection of language tools and a Language Server Protocol (LSP) implementation for Postgres, focusing on developer experience and reliable SQL tooling.\n -""" -# template for the changelog body -# https://keats.github.io/tera/docs/#introduction -body = """ -{% if version %}\ - ## {{ version | trim_start_matches(pat="v") }} -{% else %}\ - ## [unreleased] -{% endif %}\ -{% for group, commits in commits | group_by(attribute="group") %} - ### {{ group | striptags | trim | upper_first }} - {% for commit in commits %} - - {% if commit.scope %}*({{ commit.scope }})* {% endif %}\ - {% if commit.breaking %}[**breaking**] {% endif %}\ - {{ commit.message | upper_first }}\ - {% endfor %} -{% endfor %}\n - -## Contributors -{% for contributor in github.contributors | filter(attribute="is_first_time", value=true) %} - * @{{ contributor.username }} made their first contribution in #{{ contributor.pr_number }} 🎉 -{%- endfor -%} -{% for contributor in github.contributors | filter(attribute="is_first_time", value=false) %} - * @{{ contributor.username }} -{%- endfor -%}\n -""" -# template for the changelog footer -footer = """ -""" -# remove the leading and trailing s -trim = true -# postprocessors -postprocessors = [ - # { pattern = '', replace = "https://github.com/orhun/git-cliff" }, # replace repository URL -] -# render body even when there are no releases to process -# render_always = true -# output file path -# output = "test.md" - -[git] -# parse the commits based on https://www.conventionalcommits.org -conventional_commits = true -# filter out the commits that are not conventional -filter_unconventional = true -# process each line of a commit as an individual commit -split_commits = false -# regex for preprocessing the commit messages -commit_preprocessors = [ - # Replace issue numbers - #{ pattern = '\((\w+\s)?#([0-9]+)\)', replace = "([#${2}](/issues/${2}))"}, - # Check spelling of the commit with https://github.com/crate-ci/typos - # If the spelling is incorrect, it will be automatically fixed. 
- #{ pattern = '.*', replace_command = 'typos --write-changes -' }, -] -# regex for parsing and grouping commits -commit_parsers = [ - { message = "^feat", group = "🚀 Features" }, - { message = "^fix", group = "🐛 Bug Fixes" }, - { message = "^doc", group = "📚 Documentation" }, - { message = "^perf", group = "⚡ Performance" }, - { message = "^refactor", group = "🚜 Refactor" }, - { message = "^style", group = "🎨 Styling" }, - { message = "^test", group = "🧪 Testing" }, - { message = "^chore\\(release\\): prepare for", skip = true }, - { message = "^chore\\(deps.*\\)", skip = true }, - { message = "^chore\\(pr\\)", skip = true }, - { message = "^chore\\(pull\\)", skip = true }, - { message = "^chore|^ci", group = "⚙️ Miscellaneous Tasks" }, - { body = ".*security", group = "🛡️ Security" }, - { message = "^revert", group = "◀️ Revert" }, - { message = ".*", group = "💼 Other" }, -] -# filter out the commits that are not matched by commit parsers -filter_commits = false -# sort the tags topologically -topo_order = false -# sort the commits inside sections by oldest/newest order -sort_commits = "oldest" diff --git a/clippy.toml b/clippy.toml deleted file mode 100644 index 4296655a..00000000 --- a/clippy.toml +++ /dev/null @@ -1 +0,0 @@ -allow-dbg-in-tests = true diff --git a/docs/codegen/Cargo.toml b/codegen/Cargo.toml similarity index 100% rename from docs/codegen/Cargo.toml rename to codegen/Cargo.toml diff --git a/docs/codegen/src/cli_doc.rs b/codegen/src/cli_doc.rs similarity index 100% rename from docs/codegen/src/cli_doc.rs rename to codegen/src/cli_doc.rs diff --git a/docs/codegen/src/default_configuration.rs b/codegen/src/default_configuration.rs similarity index 100% rename from docs/codegen/src/default_configuration.rs rename to codegen/src/default_configuration.rs diff --git a/docs/codegen/src/env_variables.rs b/codegen/src/env_variables.rs similarity index 100% rename from docs/codegen/src/env_variables.rs rename to codegen/src/env_variables.rs diff --git a/docs/codegen/src/lib.rs b/codegen/src/lib.rs similarity index 100% rename from docs/codegen/src/lib.rs rename to codegen/src/lib.rs diff --git a/docs/codegen/src/main.rs b/codegen/src/main.rs similarity index 100% rename from docs/codegen/src/main.rs rename to codegen/src/main.rs diff --git a/docs/codegen/src/rules_docs.rs b/codegen/src/rules_docs.rs similarity index 100% rename from docs/codegen/src/rules_docs.rs rename to codegen/src/rules_docs.rs diff --git a/docs/codegen/src/rules_index.rs b/codegen/src/rules_index.rs similarity index 100% rename from docs/codegen/src/rules_index.rs rename to codegen/src/rules_index.rs diff --git a/docs/codegen/src/rules_sources.rs b/codegen/src/rules_sources.rs similarity index 100% rename from docs/codegen/src/rules_sources.rs rename to codegen/src/rules_sources.rs diff --git a/docs/codegen/src/schema.rs b/codegen/src/schema.rs similarity index 100% rename from docs/codegen/src/schema.rs rename to codegen/src/schema.rs diff --git a/docs/codegen/src/utils.rs b/codegen/src/utils.rs similarity index 100% rename from docs/codegen/src/utils.rs rename to codegen/src/utils.rs diff --git a/crates/pgt_analyse/Cargo.toml b/crates/pgt_analyse/Cargo.toml deleted file mode 100644 index 75eb0211..00000000 --- a/crates/pgt_analyse/Cargo.toml +++ /dev/null @@ -1,30 +0,0 @@ - -[package] -authors.workspace = true -categories.workspace = true -description = "" -edition.workspace = true -homepage.workspace = true -keywords.workspace = true -license.workspace = true -name = "pgt_analyse" -repository.workspace = 
true -version = "0.0.0" - - -[dependencies] -pgt_console.workspace = true -pgt_diagnostics.workspace = true -pgt_query_ext.workspace = true -rustc-hash = { workspace = true } - -biome_deserialize = { workspace = true, optional = true } -biome_deserialize_macros = { workspace = true, optional = true } -enumflags2.workspace = true -pgt_text_size.workspace = true -schemars = { workspace = true, optional = true } -serde = { workspace = true, features = ["derive"], optional = true } - -[features] -schema = ["dep:schemars"] -serde = ["dep:serde", "dep:biome_deserialize", "dep:biome_deserialize_macros"] diff --git a/crates/pgt_analyse/src/categories.rs b/crates/pgt_analyse/src/categories.rs deleted file mode 100644 index e5dd51c2..00000000 --- a/crates/pgt_analyse/src/categories.rs +++ /dev/null @@ -1,327 +0,0 @@ -use enumflags2::{BitFlags, bitflags}; -use std::borrow::Cow; - -#[derive(Copy, Clone, Debug, Eq, PartialEq)] -#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] -#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] -pub enum RuleCategory { - /// This rule performs static analysis of the source code to detect - /// invalid or error-prone patterns, and emits diagnostics along with - /// proposed fixes - Lint, - /// This rule detects refactoring opportunities and emits code action - /// signals - Action, - /// This rule detects transformations that should be applied to the code - Transformation, -} - -/// Actions that suppress rules should start with this string -pub const SUPPRESSION_ACTION_CATEGORY: &str = "quickfix.suppressRule"; - -/// The category of a code action, this type maps directly to the -/// [CodeActionKind] type in the Language Server Protocol specification -/// -/// [CodeActionKind]: https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#codeActionKind -#[derive(Clone, Debug, PartialEq, Eq)] -#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] -#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] -pub enum ActionCategory { - /// Base kind for quickfix actions: 'quickfix'. - /// - /// This action provides a fix to the diagnostic emitted by the same signal - QuickFix(Cow<'static, str>), - /// Base kind for refactoring actions: 'refactor'. - /// - /// This action provides an optional refactor opportunity - Refactor(RefactorKind), - /// Base kind for source actions: `source`. - /// - /// Source code actions apply to the entire file. 
- Source(SourceActionKind), - /// This action is using a base kind not covered by any of the previous - /// variants - Other(Cow<'static, str>), -} - -impl ActionCategory { - /// Returns true if this category matches the provided filter - /// - /// ## Examples - /// - /// ``` - /// use std::borrow::Cow; - /// use pgt_analyse::{ActionCategory, RefactorKind}; - /// - /// assert!(ActionCategory::QuickFix(Cow::from("quickfix")).matches("quickfix")); - /// - /// assert!(ActionCategory::Refactor(RefactorKind::None).matches("refactor")); - /// assert!(!ActionCategory::Refactor(RefactorKind::None).matches("refactor.extract")); - /// - /// assert!(ActionCategory::Refactor(RefactorKind::Extract).matches("refactor")); - /// assert!(ActionCategory::Refactor(RefactorKind::Extract).matches("refactor.extract")); - /// ``` - pub fn matches(&self, filter: &str) -> bool { - self.to_str().starts_with(filter) - } - - /// Returns the representation of this [ActionCategory] as a `CodeActionKind` string - pub fn to_str(&self) -> Cow<'static, str> { - match self { - ActionCategory::QuickFix(tag) => { - if tag.is_empty() { - Cow::Borrowed("quickfix.pgt") - } else { - Cow::Owned(format!("quickfix.pgt.{tag}")) - } - } - - ActionCategory::Refactor(RefactorKind::None) => Cow::Borrowed("refactor.pgt"), - ActionCategory::Refactor(RefactorKind::Extract) => { - Cow::Borrowed("refactor.extract.pgt") - } - ActionCategory::Refactor(RefactorKind::Inline) => Cow::Borrowed("refactor.inline.pgt"), - ActionCategory::Refactor(RefactorKind::Rewrite) => { - Cow::Borrowed("refactor.rewrite.pgt") - } - ActionCategory::Refactor(RefactorKind::Other(tag)) => { - Cow::Owned(format!("refactor.{tag}.pgt")) - } - - ActionCategory::Source(SourceActionKind::None) => Cow::Borrowed("source.pgt"), - ActionCategory::Source(SourceActionKind::FixAll) => Cow::Borrowed("source.fixAll.pgt"), - ActionCategory::Source(SourceActionKind::OrganizeImports) => { - Cow::Borrowed("source.organizeImports.pgt") - } - ActionCategory::Source(SourceActionKind::Other(tag)) => { - Cow::Owned(format!("source.{tag}.pgt")) - } - - ActionCategory::Other(tag) => Cow::Owned(format!("{tag}.pgt")), - } - } -} - -/// The sub-category of a refactor code action. -/// -/// [Check the LSP spec](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#codeActionKind) for more information: -#[derive(Clone, Debug, PartialEq, Eq)] -#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] -#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] -pub enum RefactorKind { - /// This action describes a refactor with no particular sub-category - None, - /// Base kind for refactoring extraction actions: 'refactor.extract'. - /// - /// Example extract actions: - /// - Extract method - /// - Extract function - /// - Extract variable - /// - Extract interface from class - Extract, - /// Base kind for refactoring inline actions: 'refactor.inline'. - /// - /// Example inline actions: - /// - Inline function - /// - Inline variable - /// - Inline constant - /// - ... - Inline, - /// Base kind for refactoring rewrite actions: 'refactor.rewrite'. - /// - /// Example rewrite actions: - /// - Convert JavaScript function to class - /// - Add or remove parameter - /// - Encapsulate field - /// - Make method static - /// - Move method to base class - /// - ... 
- Rewrite, - /// This action is using a refactor kind not covered by any of the previous - /// variants - Other(Cow<'static, str>), -} - -/// The sub-category of a source code action -#[derive(Clone, Debug, PartialEq, Eq)] -#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] -#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] -pub enum SourceActionKind { - /// This action describes a source action with no particular sub-category - None, - // Base kind for a 'fix all' source action: `source.fixAll`. - // - // 'Fix all' actions automatically fix errors that have a clear fix that - // do not require user input. They should not suppress errors or perform - // unsafe fixes such as generating new types or classes. - FixAll, - /// Base kind for an organize imports source action: `source.organizeImports`. - OrganizeImports, - /// This action is using a source action kind not covered by any of the - /// previous variants - Other(Cow<'static, str>), -} - -#[derive(Debug, Copy, Clone, Eq, PartialEq)] -#[bitflags] -#[repr(u8)] -pub(crate) enum Categories { - Lint = 1 << RuleCategory::Lint as u8, - Action = 1 << RuleCategory::Action as u8, - Transformation = 1 << RuleCategory::Transformation as u8, -} - -#[derive(Debug, Copy, Clone)] -/// The categories supported by the analyser. -/// -/// The default implementation of this type returns an instance with all the categories. -/// -/// Use [RuleCategoriesBuilder] to generate the categories you want to query. -pub struct RuleCategories(BitFlags); - -impl RuleCategories { - pub fn empty() -> Self { - let empty: BitFlags = BitFlags::empty(); - Self(empty) - } - - pub fn all() -> Self { - let empty: BitFlags = BitFlags::all(); - Self(empty) - } - - /// Checks whether the current categories contain a specific [RuleCategories] - pub fn contains(&self, other: impl Into) -> bool { - self.0.contains(other.into().0) - } -} - -impl Default for RuleCategories { - fn default() -> Self { - Self::all() - } -} - -impl From for RuleCategories { - fn from(input: RuleCategory) -> Self { - match input { - RuleCategory::Lint => RuleCategories(BitFlags::from_flag(Categories::Lint)), - RuleCategory::Action => RuleCategories(BitFlags::from_flag(Categories::Action)), - RuleCategory::Transformation => { - RuleCategories(BitFlags::from_flag(Categories::Transformation)) - } - } - } -} - -#[cfg(feature = "serde")] -impl serde::Serialize for RuleCategories { - fn serialize(&self, serializer: S) -> Result - where - S: serde::Serializer, - { - let mut flags = Vec::new(); - - if self.0.contains(Categories::Lint) { - flags.push(RuleCategory::Lint); - } - - if self.0.contains(Categories::Action) { - flags.push(RuleCategory::Action); - } - - if self.0.contains(Categories::Transformation) { - flags.push(RuleCategory::Transformation); - } - - serializer.collect_seq(flags) - } -} - -#[cfg(feature = "serde")] -impl<'de> serde::Deserialize<'de> for RuleCategories { - fn deserialize(deserializer: D) -> Result - where - D: serde::Deserializer<'de>, - { - use serde::de::{self, SeqAccess}; - use std::fmt::{self, Formatter}; - - struct Visitor; - - impl<'de> de::Visitor<'de> for Visitor { - type Value = RuleCategories; - - fn expecting(&self, formatter: &mut Formatter) -> fmt::Result { - write!(formatter, "RuleCategories") - } - - fn visit_seq(self, mut seq: A) -> Result - where - A: SeqAccess<'de>, - { - let mut result = RuleCategories::empty(); - - while let Some(item) = seq.next_element::()? 
{ - result.0 |= RuleCategories::from(item).0; - } - - Ok(result) - } - } - - deserializer.deserialize_seq(Visitor) - } -} - -#[cfg(feature = "schema")] -impl schemars::JsonSchema for RuleCategories { - fn schema_name() -> String { - String::from("RuleCategories") - } - - fn json_schema(r#gen: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema { - >::json_schema(r#gen) - } -} - -#[derive(Debug, Default)] -/// A convenient type create a [RuleCategories] type -/// -/// ``` -/// use pgt_analyse::{RuleCategoriesBuilder, RuleCategory}; -/// let mut categories = RuleCategoriesBuilder::default().with_lint().build(); -/// -/// assert!(categories.contains(RuleCategory::Lint)); -/// assert!(!categories.contains(RuleCategory::Action)); -/// assert!(!categories.contains(RuleCategory::Transformation)); -/// ``` -pub struct RuleCategoriesBuilder { - flags: BitFlags, -} - -impl RuleCategoriesBuilder { - pub fn with_lint(mut self) -> Self { - self.flags.insert(Categories::Lint); - self - } - - pub fn with_action(mut self) -> Self { - self.flags.insert(Categories::Action); - self - } - - pub fn with_transformation(mut self) -> Self { - self.flags.insert(Categories::Transformation); - self - } - - pub fn all(mut self) -> Self { - self.flags = BitFlags::all(); - self - } - - pub fn build(self) -> RuleCategories { - RuleCategories(self.flags) - } -} diff --git a/crates/pgt_analyse/src/context.rs b/crates/pgt_analyse/src/context.rs deleted file mode 100644 index cd069657..00000000 --- a/crates/pgt_analyse/src/context.rs +++ /dev/null @@ -1,73 +0,0 @@ -use crate::{ - categories::RuleCategory, - rule::{GroupCategory, Rule, RuleGroup, RuleMetadata}, -}; - -pub struct RuleContext<'a, R: Rule> { - stmt: &'a pgt_query_ext::NodeEnum, - options: &'a R::Options, -} - -impl<'a, R> RuleContext<'a, R> -where - R: Rule + Sized + 'static, -{ - #[allow(clippy::too_many_arguments)] - pub fn new(stmt: &'a pgt_query_ext::NodeEnum, options: &'a R::Options) -> Self { - Self { stmt, options } - } - - /// Returns the group that belongs to the current rule - pub fn group(&self) -> &'static str { - ::NAME - } - - /// Returns the category that belongs to the current rule - pub fn category(&self) -> RuleCategory { - <::Category as GroupCategory>::CATEGORY - } - - /// Returns the AST root - pub fn stmt(&self) -> &pgt_query_ext::NodeEnum { - self.stmt - } - - /// Returns the metadata of the rule - /// - /// The metadata contains information about the rule, such as the name, version, language, and whether it is recommended. - /// - /// ## Examples - /// ```rust,ignore - /// declare_lint_rule! { - /// /// Some doc - /// pub(crate) Foo { - /// version: "0.0.0", - /// name: "foo", - /// recommended: true, - /// } - /// } - /// - /// impl Rule for Foo { - /// const CATEGORY: RuleCategory = RuleCategory::Lint; - /// type State = (); - /// type Signals = (); - /// type Options = (); - /// - /// fn run(ctx: &RuleContext) -> Self::Signals { - /// assert_eq!(ctx.metadata().name, "foo"); - /// } - /// } - /// ``` - pub fn metadata(&self) -> &RuleMetadata { - &R::METADATA - } - - /// It retrieves the options that belong to a rule, if they exist. 
- /// - /// In order to retrieve a typed data structure, you have to create a deserializable - /// data structure and define it inside the generic type `type Options` of the [Rule] - /// - pub fn options(&self) -> &R::Options { - self.options - } -} diff --git a/crates/pgt_analyse/src/filter.rs b/crates/pgt_analyse/src/filter.rs deleted file mode 100644 index 02844d3e..00000000 --- a/crates/pgt_analyse/src/filter.rs +++ /dev/null @@ -1,189 +0,0 @@ -use std::fmt::{Debug, Display, Formatter}; - -use crate::{ - categories::RuleCategories, - rule::{GroupCategory, Rule, RuleGroup}, -}; - -/// Allow filtering a single rule or group of rules by their names -#[derive(Clone, Copy, Eq, Hash, Ord, PartialEq, PartialOrd)] -pub enum RuleFilter<'a> { - Group(&'a str), - Rule(&'a str, &'a str), -} - -/// Allows filtering the list of rules that will be executed in a run of the analyser, -/// and at what source code range signals (diagnostics or actions) may be raised -#[derive(Debug, Default, Clone, Copy)] -pub struct AnalysisFilter<'a> { - /// Only allow rules with these categories to emit signals - pub categories: RuleCategories, - /// Only allow rules matching these names to emit signals - /// If `enabled_rules` is set to `None`, then all rules are enabled. - pub enabled_rules: Option<&'a [RuleFilter<'a>]>, - /// Do not allow rules matching these names to emit signals - pub disabled_rules: &'a [RuleFilter<'a>], -} - -impl<'analysis> AnalysisFilter<'analysis> { - /// It creates a new filter with the set of [enabled rules](RuleFilter) passed as argument - pub fn from_enabled_rules(enabled_rules: &'analysis [RuleFilter<'analysis>]) -> Self { - Self { - enabled_rules: Some(enabled_rules), - ..AnalysisFilter::default() - } - } - - /// Return `true` if the category `C` matches this filter - pub fn match_category(&self) -> bool { - self.categories.contains(C::CATEGORY) - } - - /// Return `true` if the group `G` matches this filter - pub fn match_group(&self) -> bool { - self.match_category::() - && self.enabled_rules.is_none_or(|enabled_rules| { - enabled_rules.iter().any(|filter| filter.match_group::()) - }) - && !self - .disabled_rules - .iter() - .any(|filter| matches!(filter, RuleFilter::Group(_)) && filter.match_group::()) - } - - /// Return `true` if the rule `R` matches this filter - pub fn match_rule(&self) -> bool { - self.match_category::<::Category>() - && self.enabled_rules.is_none_or(|enabled_rules| { - enabled_rules.iter().any(|filter| filter.match_rule::()) - }) - && !self - .disabled_rules - .iter() - .any(|filter| filter.match_rule::()) - } -} - -impl<'a> RuleFilter<'a> { - // Returns the group name of this filter. 
- pub fn group(self) -> &'a str { - match self { - RuleFilter::Group(group) => group, - RuleFilter::Rule(group, _) => group, - } - } - /// Return `true` if the group `G` matches this filter - pub fn match_group(self) -> bool { - match self { - RuleFilter::Group(group) => group == G::NAME, - RuleFilter::Rule(group, _) => group == G::NAME, - } - } - - /// Return `true` if the rule `R` matches this filter - pub fn match_rule(self) -> bool - where - R: Rule, - { - match self { - RuleFilter::Group(group) => group == ::NAME, - RuleFilter::Rule(group, rule) => { - group == ::NAME && rule == R::METADATA.name - } - } - } -} - -impl Debug for RuleFilter<'_> { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - Display::fmt(self, f) - } -} - -impl Display for RuleFilter<'_> { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - match self { - RuleFilter::Group(group) => { - write!(f, "{group}") - } - RuleFilter::Rule(group, rule) => { - write!(f, "{group}/{rule}") - } - } - } -} - -impl pgt_console::fmt::Display for RuleFilter<'_> { - fn fmt(&self, fmt: &mut pgt_console::fmt::Formatter) -> std::io::Result<()> { - match self { - RuleFilter::Group(group) => { - write!(fmt, "{group}") - } - RuleFilter::Rule(group, rule) => { - write!(fmt, "{group}/{rule}") - } - } - } -} - -/// Opaque identifier for a group of rule -#[derive(Copy, Clone, Debug, PartialEq, Eq)] -pub struct GroupKey { - group: &'static str, -} - -impl GroupKey { - pub(crate) fn new(group: &'static str) -> Self { - Self { group } - } - - pub fn group() -> Self { - Self::new(G::NAME) - } -} - -impl From for RuleFilter<'static> { - fn from(key: GroupKey) -> Self { - RuleFilter::Group(key.group) - } -} - -/// Opaque identifier for a single rule -#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] -pub struct RuleKey { - group: &'static str, - rule: &'static str, -} - -impl RuleKey { - pub fn new(group: &'static str, rule: &'static str) -> Self { - Self { group, rule } - } - - pub fn rule() -> Self { - Self::new(::NAME, R::METADATA.name) - } - - pub fn group(&self) -> &'static str { - self.group - } - - pub fn rule_name(&self) -> &'static str { - self.rule - } -} - -impl From for RuleFilter<'static> { - fn from(key: RuleKey) -> Self { - RuleFilter::Rule(key.group, key.rule) - } -} - -impl PartialEq for RuleFilter<'static> { - fn eq(&self, other: &RuleKey) -> bool { - match *self { - RuleFilter::Group(group) => group == other.group, - RuleFilter::Rule(group, rule) => group == other.group && rule == other.rule, - } - } -} diff --git a/crates/pgt_analyse/src/lib.rs b/crates/pgt_analyse/src/lib.rs deleted file mode 100644 index f312de45..00000000 --- a/crates/pgt_analyse/src/lib.rs +++ /dev/null @@ -1,23 +0,0 @@ -mod categories; -pub mod context; -mod filter; -pub mod macros; -pub mod options; -mod registry; -mod rule; - -// Re-exported for use in the `declare_group` macro -pub use pgt_diagnostics::category_concat; - -pub use crate::categories::{ - ActionCategory, RefactorKind, RuleCategories, RuleCategoriesBuilder, RuleCategory, - SUPPRESSION_ACTION_CATEGORY, SourceActionKind, -}; -pub use crate::filter::{AnalysisFilter, GroupKey, RuleFilter, RuleKey}; -pub use crate::options::{AnalyserOptions, AnalyserRules}; -pub use crate::registry::{ - MetadataRegistry, RegistryRuleParams, RegistryVisitor, RuleRegistry, RuleRegistryBuilder, -}; -pub use crate::rule::{ - GroupCategory, Rule, RuleDiagnostic, RuleGroup, RuleMeta, RuleMetadata, RuleSource, -}; diff --git a/crates/pgt_analyse/src/macros.rs 
b/crates/pgt_analyse/src/macros.rs deleted file mode 100644 index d9f70ed3..00000000 --- a/crates/pgt_analyse/src/macros.rs +++ /dev/null @@ -1,120 +0,0 @@ -/// This macro is used to declare an analyser rule type, and implement the -// [RuleMeta] trait for it -/// # Example -/// -/// The macro itself expect the following syntax: -/// -/// ```rust,ignore -///use pgt_analyse::declare_rule; -/// -/// declare_lint_rule! { -/// /// Documentation -/// pub(crate) ExampleRule { -/// version: "1.0.0", -/// name: "rule-name", -/// recommended: false, -/// } -/// } -/// ``` -/// -/// Check [crate](module documentation) for a better -/// understanding of how the macro works -#[macro_export] -macro_rules! declare_lint_rule { - ( $( #[doc = $doc:literal] )+ $vis:vis $id:ident { - version: $version:literal, - name: $name:tt, - $( $key:ident: $value:expr_2021, )* - } ) => { - - pgt_analyse::declare_rule!( - $( #[doc = $doc] )* - $vis $id { - version: $version, - name: $name, - $( $key: $value, )* - } - ); - - // Declare a new `rule_category!` macro in the module context that - // expands to the category of this rule - // This is implemented by calling the `group_category!` macro from the - // parent module (that should be declared by a call to `declare_group!`) - // and providing it with the name of this rule as a string literal token - #[allow(unused_macros)] - macro_rules! rule_category { - () => { super::group_category!( $name ) }; - } - }; -} - -#[macro_export] -macro_rules! declare_rule { - ( $( #[doc = $doc:literal] )+ $vis:vis $id:ident { - version: $version:literal, - name: $name:tt, - $( $key:ident: $value:expr_2021, )* - } ) => { - $( #[doc = $doc] )* - $vis enum $id {} - - impl $crate::RuleMeta for $id { - type Group = super::Group; - const METADATA: $crate::RuleMetadata = - $crate::RuleMetadata::new($version, $name, concat!( $( $doc, "\n", )* )) $( .$key($value) )*; - } - } -} - -/// This macro is used by the codegen script to declare an analyser rule group, -/// and implement the [RuleGroup] trait for it -#[macro_export] -macro_rules! declare_lint_group { - ( $vis:vis $id:ident { name: $name:tt, rules: [ $( $( $rule:ident )::* , )* ] } ) => { - $vis enum $id {} - - impl $crate::RuleGroup for $id { - type Category = super::Category; - - const NAME: &'static str = $name; - - fn record_rules(registry: &mut V) { - $( registry.record_rule::<$( $rule )::*>(); )* - } - } - - pub(self) use $id as Group; - - // Declare a `group_category!` macro in the context of this module (and - // all its children). This macro takes the name of a rule as a string - // literal token and expands to the category of the lint rule with this - // name within this group. - // This is implemented by calling the `category_concat!` macro with the - // "lint" prefix, the name of this group, and the rule name argument - #[allow(unused_macros)] - macro_rules! group_category { - ( $rule_name:tt ) => { $crate::category_concat!( "lint", $name, $rule_name ) }; - } - - // Re-export the macro for child modules, so `declare_rule!` can access - // the category of its parent group by using the `super` module - pub(self) use group_category; - }; -} - -#[macro_export] -macro_rules! 
declare_category { - ( $vis:vis $id:ident { kind: $kind:ident, groups: [ $( $( $group:ident )::* , )* ] } ) => { - $vis enum $id {} - - impl $crate::GroupCategory for $id { - const CATEGORY: $crate::RuleCategory = $crate::RuleCategory::$kind; - - fn record_groups(registry: &mut V) { - $( registry.record_group::<$( $group )::*>(); )* - } - } - - pub(self) use $id as Category; - }; -} diff --git a/crates/pgt_analyse/src/options.rs b/crates/pgt_analyse/src/options.rs deleted file mode 100644 index 211cb1dc..00000000 --- a/crates/pgt_analyse/src/options.rs +++ /dev/null @@ -1,61 +0,0 @@ -use rustc_hash::FxHashMap; - -use crate::{Rule, RuleKey}; -use std::any::{Any, TypeId}; -use std::fmt::Debug; - -/// A convenient new type data structure to store the options that belong to a rule -#[derive(Debug)] -pub struct RuleOptions(TypeId, Box); - -impl RuleOptions { - /// Creates a new [RuleOptions] - pub fn new(options: O) -> Self { - Self(TypeId::of::(), Box::new(options)) - } - - /// It returns the deserialized rule option - pub fn value(&self) -> &O { - let RuleOptions(type_id, value) = &self; - let current_id = TypeId::of::(); - debug_assert_eq!(type_id, ¤t_id); - // SAFETY: the code should fail when asserting the types. - // If the code throws an error here, it means that the developer didn't test - // the rule with the options - value.downcast_ref::().unwrap() - } -} - -/// A convenient new type data structure to insert and get rules -#[derive(Debug, Default)] -pub struct AnalyserRules(FxHashMap); - -impl AnalyserRules { - /// It tracks the options of a specific rule - pub fn push_rule(&mut self, rule_key: RuleKey, options: RuleOptions) { - self.0.insert(rule_key, options); - } - - /// It retrieves the options of a stored rule, given its name - pub fn get_rule_options(&self, rule_key: &RuleKey) -> Option<&O> { - self.0.get(rule_key).map(|o| o.value::()) - } -} - -/// A set of information useful to the analyser infrastructure -#[derive(Debug, Default)] -pub struct AnalyserOptions { - /// A data structured derived from the [`postgrestools.jsonc`] file - pub rules: AnalyserRules, -} - -impl AnalyserOptions { - pub fn rule_options(&self) -> Option - where - R: Rule + 'static, - { - self.rules - .get_rule_options::(&RuleKey::rule::()) - .cloned() - } -} diff --git a/crates/pgt_analyse/src/registry.rs b/crates/pgt_analyse/src/registry.rs deleted file mode 100644 index 48b73b15..00000000 --- a/crates/pgt_analyse/src/registry.rs +++ /dev/null @@ -1,189 +0,0 @@ -use std::{borrow, collections::BTreeSet}; - -use crate::{ - AnalyserOptions, - context::RuleContext, - filter::{AnalysisFilter, GroupKey, RuleKey}, - rule::{GroupCategory, Rule, RuleDiagnostic, RuleGroup}, -}; - -pub trait RegistryVisitor { - /// Record the category `C` to this visitor - fn record_category(&mut self) { - C::record_groups(self); - } - - /// Record the group `G` to this visitor - fn record_group(&mut self) { - G::record_rules(self); - } - - /// Record the rule `R` to this visitor - fn record_rule(&mut self) - where - R: Rule + 'static; -} - -/// Key struct for a rule in the metadata map, sorted alphabetically -#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] -pub struct MetadataKey { - inner: (&'static str, &'static str), -} - -impl MetadataKey { - fn into_group_key(self) -> GroupKey { - let (group, _) = self.inner; - GroupKey::new(group) - } - - fn into_rule_key(self) -> RuleKey { - let (group, rule) = self.inner; - RuleKey::new(group, rule) - } -} - -impl<'a> borrow::Borrow<(&'a str, &'a str)> for MetadataKey { - fn 
borrow(&self) -> &(&'a str, &'a str) { - &self.inner - } -} - -impl borrow::Borrow for MetadataKey { - fn borrow(&self) -> &str { - self.inner.0 - } -} - -/// Stores metadata information for all the rules in the registry, sorted -/// alphabetically -#[derive(Debug, Default)] -pub struct MetadataRegistry { - inner: BTreeSet, -} - -impl MetadataRegistry { - /// Return a unique identifier for a rule group if it's known by this registry - pub fn find_group(&self, group: &str) -> Option { - let key = self.inner.get(group)?; - Some(key.into_group_key()) - } - - /// Return a unique identifier for a rule if it's known by this registry - pub fn find_rule(&self, group: &str, rule: &str) -> Option { - let key = self.inner.get(&(group, rule))?; - Some(key.into_rule_key()) - } - - pub(crate) fn insert_rule(&mut self, group: &'static str, rule: &'static str) { - self.inner.insert(MetadataKey { - inner: (group, rule), - }); - } -} - -impl RegistryVisitor for MetadataRegistry { - fn record_rule(&mut self) - where - R: Rule + 'static, - { - self.insert_rule(::NAME, R::METADATA.name); - } -} - -pub struct RuleRegistryBuilder<'a> { - filter: &'a AnalysisFilter<'a>, - // Rule Registry - registry: RuleRegistry, -} - -impl RegistryVisitor for RuleRegistryBuilder<'_> { - fn record_category(&mut self) { - if self.filter.match_category::() { - C::record_groups(self); - } - } - - fn record_group(&mut self) { - if self.filter.match_group::() { - G::record_rules(self); - } - } - - /// Add the rule `R` to the list of rules stored in this registry instance - fn record_rule(&mut self) - where - R: Rule + 'static, - { - if !self.filter.match_rule::() { - return; - } - - let rule = RegistryRule::new::(); - - self.registry.rules.push(rule); - } -} - -/// The rule registry holds type-erased instances of all active analysis rules -pub struct RuleRegistry { - pub rules: Vec, -} - -impl IntoIterator for RuleRegistry { - type Item = RegistryRule; - type IntoIter = std::vec::IntoIter; - - fn into_iter(self) -> Self::IntoIter { - self.rules.into_iter() - } -} - -/// Internal representation of a single rule in the registry -#[derive(Copy, Clone)] -pub struct RegistryRule { - pub run: RuleExecutor, -} - -impl RuleRegistry { - pub fn builder<'a>(filter: &'a AnalysisFilter<'a>) -> RuleRegistryBuilder<'a> { - RuleRegistryBuilder { - filter, - registry: RuleRegistry { - rules: Default::default(), - }, - } - } -} - -pub struct RegistryRuleParams<'a> { - pub root: &'a pgt_query_ext::NodeEnum, - pub options: &'a AnalyserOptions, -} - -/// Executor for rule as a generic function pointer -type RuleExecutor = fn(&RegistryRuleParams) -> Vec; - -impl RegistryRule { - fn new() -> Self - where - R: Rule + 'static, - { - /// Generic implementation of RuleExecutor for any rule type R - fn run(params: &RegistryRuleParams) -> Vec - where - R: Rule + 'static, - { - let options = params.options.rule_options::().unwrap_or_default(); - let ctx = RuleContext::new(params.root, &options); - R::run(&ctx) - } - - Self { run: run:: } - } -} - -impl RuleRegistryBuilder<'_> { - pub fn build(self) -> RuleRegistry { - self.registry - } -} diff --git a/crates/pgt_analyse/src/rule.rs b/crates/pgt_analyse/src/rule.rs deleted file mode 100644 index f135705e..00000000 --- a/crates/pgt_analyse/src/rule.rs +++ /dev/null @@ -1,331 +0,0 @@ -use pgt_console::fmt::Display; -use pgt_console::{MarkupBuf, markup}; -use pgt_diagnostics::advice::CodeSuggestionAdvice; -use pgt_diagnostics::{ - Advices, Category, Diagnostic, DiagnosticTags, Location, LogCategory, 
MessageAndDescription, - Visit, -}; -use pgt_text_size::TextRange; -use std::cmp::Ordering; -use std::fmt::Debug; - -use crate::{categories::RuleCategory, context::RuleContext, registry::RegistryVisitor}; - -#[derive(Clone, Debug)] -#[cfg_attr( - feature = "serde", - derive(serde::Serialize), - serde(rename_all = "camelCase") -)] -/// Static metadata containing information about a rule -pub struct RuleMetadata { - /// It marks if a rule is deprecated, and if so a reason has to be provided. - pub deprecated: Option<&'static str>, - /// The version when the rule was implemented - pub version: &'static str, - /// The name of this rule, displayed in the diagnostics it emits - pub name: &'static str, - /// The content of the documentation comments for this rule - pub docs: &'static str, - /// Whether a rule is recommended or not - pub recommended: bool, - /// The sources of the rule - pub sources: &'static [RuleSource], -} - -impl RuleMetadata { - pub const fn new(version: &'static str, name: &'static str, docs: &'static str) -> Self { - Self { - deprecated: None, - version, - name, - docs, - sources: &[], - recommended: false, - } - } - - pub const fn recommended(mut self, recommended: bool) -> Self { - self.recommended = recommended; - self - } - - pub const fn deprecated(mut self, deprecated: &'static str) -> Self { - self.deprecated = Some(deprecated); - self - } - - pub const fn sources(mut self, sources: &'static [RuleSource]) -> Self { - self.sources = sources; - self - } -} - -pub trait RuleMeta { - type Group: RuleGroup; - const METADATA: RuleMetadata; -} - -/// A rule group is a collection of rules under a given name, serving as a -/// "namespace" for lint rules and allowing the entire set of rules to be -/// disabled at once -pub trait RuleGroup { - type Category: GroupCategory; - /// The name of this group, displayed in the diagnostics emitted by its rules - const NAME: &'static str; - /// Register all the rules belonging to this group into `registry` - fn record_rules<V: RegistryVisitor + ?Sized>(registry: &mut V); -} - -/// A group category is a collection of rule groups under a given category ID, -/// serving as a broad classification on the kind of diagnostic or code action -/// these rules emit, and allowing whole categories of rules to be disabled at -/// once depending on the kind of analysis being performed -pub trait GroupCategory { - /// The category ID used for all groups and rules belonging to this category - const CATEGORY: RuleCategory; - /// Register all the groups belonging to this category into `registry` - fn record_groups<V: RegistryVisitor + ?Sized>(registry: &mut V); -} - -/// Trait implemented by all analysis rules: declares interest in a certain AstNode type, -/// and a callback function to be executed on all nodes matching the query to possibly -/// raise an analysis event -pub trait Rule: RuleMeta + Sized { - type Options: Default + Clone + Debug; - - fn run(ctx: &RuleContext<Self>) -> Vec<RuleDiagnostic>; -} - -/// Diagnostic object returned by a single analysis rule -#[derive(Debug, Diagnostic, PartialEq)] -pub struct RuleDiagnostic { - #[category] - pub(crate) category: &'static Category, - #[location(span)] - pub(crate) span: Option<TextRange>, - #[message] - #[description] - pub(crate) message: MessageAndDescription, - #[tags] - pub(crate) tags: DiagnosticTags, - #[advice] - pub(crate) rule_advice: RuleAdvice, -} - -#[derive(Debug, Default, PartialEq)] -/// It contains possible advices to show when printing a diagnostic that belong to the rule -pub struct RuleAdvice { - pub(crate) details: Vec<Detail>, - pub(crate) notes: Vec<(LogCategory,
MarkupBuf)>, - pub(crate) suggestion_list: Option<SuggestionList>, - pub(crate) code_suggestion_list: Vec<CodeSuggestionAdvice<MarkupBuf>>, -} - -#[derive(Debug, Default, PartialEq)] -pub struct SuggestionList { - pub(crate) message: MarkupBuf, - pub(crate) list: Vec<MarkupBuf>, -} - -impl Advices for RuleAdvice { - fn record(&self, visitor: &mut dyn Visit) -> std::io::Result<()> { - for detail in &self.details { - visitor.record_log( - detail.log_category, - &markup! { {detail.message} }.to_owned(), - )?; - visitor.record_frame(Location::builder().span(&detail.range).build())?; - } - // we then print notes - for (log_category, note) in &self.notes { - visitor.record_log(*log_category, &markup! { {note} }.to_owned())?; - } - - if let Some(suggestion_list) = &self.suggestion_list { - visitor.record_log( - LogCategory::Info, - &markup! { {suggestion_list.message} }.to_owned(), - )?; - let list: Vec<_> = suggestion_list - .list - .iter() - .map(|suggestion| suggestion as &dyn Display) - .collect(); - visitor.record_list(&list)?; - } - - // finally, we print possible code suggestions on how to fix the issue - for suggestion in &self.code_suggestion_list { - suggestion.record(visitor)?; - } - - Ok(()) - } -} - -#[derive(Debug, PartialEq)] -pub struct Detail { - pub log_category: LogCategory, - pub message: MarkupBuf, - pub range: Option<TextRange>, -} - -impl RuleDiagnostic { - /// Creates a new [`RuleDiagnostic`] with a severity and title that will be - /// used in a builder-like way to modify labels. - pub fn new(category: &'static Category, span: Option<TextRange>, title: impl Display) -> Self { - let message = markup!({ title }).to_owned(); - Self { - category, - span, - message: MessageAndDescription::from(message), - tags: DiagnosticTags::empty(), - rule_advice: RuleAdvice::default(), - } - } - - /// Set an explicit plain-text summary for this diagnostic. - pub fn description(mut self, summary: impl Into<String>) -> Self { - self.message.set_description(summary.into()); - self - } - - /// Marks this diagnostic as deprecated code, which will - /// be displayed in the language server. - /// - /// This does not have any influence on the diagnostic rendering. - pub fn deprecated(mut self) -> Self { - self.tags |= DiagnosticTags::DEPRECATED_CODE; - self - } - - /// Marks this diagnostic as unnecessary code, which will - /// be displayed in the language server. - /// - /// This does not have any influence on the diagnostic rendering. - pub fn unnecessary(mut self) -> Self { - self.tags |= DiagnosticTags::UNNECESSARY_CODE; - self - } - - /// Attaches a label to this [`RuleDiagnostic`]. - /// - /// The given span has to be in the file that was provided while creating this [`RuleDiagnostic`]. - pub fn label(mut self, span: Option<TextRange>, msg: impl Display) -> Self { - self.rule_advice.details.push(Detail { - log_category: LogCategory::Info, - message: markup!({ msg }).to_owned(), - range: span, - }); - self - } - - /// Attaches a detailed message to this [`RuleDiagnostic`]. - pub fn detail(self, span: Option<TextRange>, msg: impl Display) -> Self { - self.label(span, msg) - } - - /// Adds a footer to this [`RuleDiagnostic`], which will be displayed under the actual error. - fn footer(mut self, log_category: LogCategory, msg: impl Display) -> Self { - self.rule_advice - .notes - .push((log_category, markup!({ msg }).to_owned())); - self - } - - /// Adds a footer to this [`RuleDiagnostic`], with the `Info` log category.
- pub fn note(self, msg: impl Display) -> Self { - self.footer(LogCategory::Info, msg) - } - - /// It creates a new footer note which contains a message and a list of possible suggestions. - /// Useful when there's a need to suggest a list of things inside a diagnostic. - pub fn footer_list(mut self, message: impl Display, list: &[impl Display]) -> Self { - if !list.is_empty() { - self.rule_advice.suggestion_list = Some(SuggestionList { - message: markup! { {message} }.to_owned(), - list: list - .iter() - .map(|msg| markup! { {msg} }.to_owned()) - .collect(), - }); - } - - self - } - - /// Adds a footer to this [`RuleDiagnostic`], with the `Warn` severity. - pub fn warning(self, msg: impl Display) -> Self { - self.footer(LogCategory::Warn, msg) - } - - pub fn advices(&self) -> &RuleAdvice { - &self.rule_advice - } - - /// Will return the rule's category name as defined via `define_categories! { .. }`. - pub fn get_category_name(&self) -> &'static str { - self.category.name() - } -} - -#[derive(Debug, Clone, Eq)] -#[cfg_attr(feature = "serde", derive(serde::Serialize))] -#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] -pub enum RuleSource { - /// Rules from [Squawk](https://squawkhq.com) - Squawk(&'static str), -} - -impl PartialEq for RuleSource { - fn eq(&self, other: &Self) -> bool { - std::mem::discriminant(self) == std::mem::discriminant(other) - } -} - -impl std::fmt::Display for RuleSource { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - Self::Squawk(_) => write!(f, "Squawk"), - } - } -} - -impl PartialOrd for RuleSource { - fn partial_cmp(&self, other: &Self) -> Option<Ordering> { - Some(self.cmp(other)) - } -} - -impl Ord for RuleSource { - fn cmp(&self, other: &Self) -> Ordering { - let self_rule = self.as_rule_name(); - let other_rule = other.as_rule_name(); - self_rule.cmp(other_rule) - } -} - -impl RuleSource { - pub fn as_rule_name(&self) -> &'static str { - match self { - Self::Squawk(rule_name) => rule_name, - } - } - - pub fn to_namespaced_rule_name(&self) -> String { - match self { - Self::Squawk(rule_name) => format!("squawk/{rule_name}"), - } - } - - pub fn to_rule_url(&self) -> String { - match self { - Self::Squawk(rule_name) => format!("https://squawkhq.com/docs/{rule_name}"), - } - } - - pub fn as_url_and_rule_name(&self) -> (String, &'static str) { - (self.to_rule_url(), self.as_rule_name()) - } -} diff --git a/crates/pgt_analyser/CONTRIBUTING.md b/crates/pgt_analyser/CONTRIBUTING.md deleted file mode 100644 index 50327d5e..00000000 --- a/crates/pgt_analyser/CONTRIBUTING.md +++ /dev/null @@ -1,358 +0,0 @@ -# Analyser - -## Creating a rule - -When creating or updating a lint rule, you need to be aware that there's a lot of generated code inside our toolchain. -Our CI ensures that this code is not out of sync and fails otherwise. -See the [code generation section](#code-generation) for more details. - -To create a new rule, you have to create and update several files. -Because it is a bit tedious, we provide an easy way to create and test your rule using [Just](https://just.systems/man/en/). -_Just_ is not part of the Rust toolchain; you have to install it with [a package manager](https://just.systems/man/en/chapter_4.html). - -### Choose a name - -We follow a naming convention according to what the rule does: - -1. Forbid a concept - - ```block - ban - ``` - - When a rule's sole intention is to **forbid a single concept**, the rule should be named using the `ban` prefix. - - Example: "banDropColumn" - -2.
Mandate a concept - - ```block - use - ``` - - When a rule's sole intention is to **mandate a single concept**, the rule should be named using the `use` prefix. - -### Explain a rule to the user - -A rule should be informative to the user, and give as much explanation as possible. - -When writing a rule, you must adhere to the following **pillars**: - -1. Explain to the user the error. Generally, this is the message of the diagnostic. -2. Explain to the user **why** the error is triggered. Generally, this is implemented with an additional node. -3. Tell the user what they should do. Generally, this is implemented using a code action. If a code action is not applicable, a note should tell the user what they should do to fix the error. - -### Create and implement the rule - -> [!TIP] -> As a developer, you aren't forced to make a rule perfect in one PR. Instead, you are encouraged to lay out a plan and to split the work into multiple PRs. -> -> If you aren't familiar with the APIs, this is an option that you have. If you decide to use this option, you should make sure to describe your plan in an issue. - -Let's say we want to create a new **lint** rule called `useMyRuleName`; follow these steps: - -1. Run the command - - ```shell - just new-lintrule safety useMyRuleName - ``` - - The script will generate a bunch of files inside the `pgt_analyser` crate. - Among the other files, you'll find a file called `use_my_rule_name.rs` inside the `pgt_analyser/lib/src/lint/safety` folder. You'll implement your rule in this file. - -1. The `Options` type doesn't have to be used, so it can be considered optional. However, it has to be defined as `type Options = ()`. -1. Implement the `run` function: The function is called for every statement, and should return zero or more diagnostics. Follow the [pillars](#explain-a-rule-to-the-user) when writing the message of a diagnostic. - -Don't forget to format your code with `just f` and lint with `just l`. - -That's it! Now, let's test the rule. - -### Rule configuration - -Some rules may allow customization using options. -We try to keep rule options to a minimum and add them only when needed. -Before adding an option, it's worth having a discussion. - -Let's assume that the rule we implement supports the following options: - -- `behavior`: a string among `"A"`, `"B"`, and `"C"`; -- `threshold`: an integer between 0 and 255; -- `behaviorExceptions`: an array of strings. - -We would like to set the options in the `postgrestools.jsonc` configuration file: - -```json -{ - "linter": { - "rules": { - "safety": { - "myRule": { - "level": "warn", - "options": { - "behavior": "A", - "threshold": 20, - "behaviorExceptions": ["one", "two"] - } - } - } - } - } -} -``` - -The first step is to create the Rust data representation of the rule's options. - -```rust -#[derive(Clone, Debug, Default)] -pub struct MyRuleOptions { - behavior: Behavior, - threshold: u8, - behavior_exceptions: Box<[Box<str>]> -} - -#[derive(Clone, Debug, Default)] -pub enum Behavior { - #[default] - A, - B, - C, -} -``` - -Note that we use a boxed slice `Box<[Box<str>]>` instead of `Vec<String>`. -This allows saving memory: [boxed slices and boxed str use two instead of three words](https://nnethercote.github.io/perf-book/type-sizes.html#boxed-slices).
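To make the memory claim concrete, here is a minimal, self-contained sketch (our own illustration, not taken from the codebase) that checks the header sizes with `std::mem::size_of`:

```rust
use std::mem::size_of;

fn main() {
    let word = size_of::<usize>();
    // A Vec<String> header is (pointer, capacity, length): three words.
    assert_eq!(size_of::<Vec<String>>(), 3 * word);
    // A boxed slice is just a fat pointer (pointer, length): two words.
    assert_eq!(size_of::<Box<[Box<str>]>>(), 2 * word);
}
```

The same saving applies per element: a `Box<str>` is two words where a `String` is three, because the capacity field is dropped.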
- -With these types in place, you can set the associated type `Options` of the rule: - -```rust -impl Rule for MyRule { - type Options = MyRuleOptions; -} -``` - -A rule can retrieve its options with: - -```rust -let options = ctx.options(); -``` - -The compiler should warn you that `MyRuleOptions` does not implement some required traits. -We currently require implementing _serde_'s traits `Deserialize`/`Serialize`. - -Also, we use other `serde` macros to adjust the JSON configuration: - -- `rename_all = "camelCase"`: it renames all fields in camel-case, so they are in line with the naming style of the `postgrestools.jsonc`. -- `deny_unknown_fields`: it raises an error if the configuration contains extraneous fields. -- `default`: it uses the `Default` value when the field is missing from `postgrestools.jsonc`. This macro makes the field optional. - -You can simply use the derive macros: - -```rust -#[derive(Debug, Default, Clone, Serialize, Deserialize)] -#[cfg_attr(feature = "schemars", derive(JsonSchema))] -#[serde(rename_all = "camelCase", deny_unknown_fields, default)] -pub struct MyRuleOptions { - #[serde(default, skip_serializing_if = "is_default")] - main_behavior: Behavior, - - #[serde(default, skip_serializing_if = "is_default")] - extra_behaviors: Vec<Behavior>, -} - -#[derive(Debug, Default, Clone)] -#[cfg_attr(feature = "schemars", derive(JsonSchema))] -pub enum Behavior { - #[default] - A, - B, - C, -} -``` - -### Coding the rule - -Below, there are many tips and guidelines on how to create a lint rule using our infrastructure. - -#### `declare_lint_rule` - -This macro is used to declare an analyzer rule type, and implement the [RuleMeta] trait for it. - -The macro itself expects the following syntax: - -```rust -use pgt_analyse::declare_lint_rule; - -declare_lint_rule! { - /// Documentation - pub(crate) ExampleRule { - version: "next", - name: "myRuleName", - recommended: false, - } -} -``` - -##### Lint rules inspired by other lint rules - -If a **lint** rule is inspired by an existing rule from other ecosystems (Squawk etc.), you can add a new metadata field to the macro called `sources`. Its value is `&'static [RuleSource]`, which is a reference to a slice of `RuleSource` elements, each representing a different source. - -If you're implementing a lint rule that matches the behaviour of the Squawk rule `ban-drop-column`, you'll use the variant `RuleSource::Squawk` and pass the name of the rule: - -```rust -use pgt_analyse::{declare_lint_rule, RuleSource}; - -declare_lint_rule! { - /// Documentation - pub(crate) ExampleRule { - version: "next", - name: "myRuleName", - recommended: false, - sources: &[RuleSource::Squawk("ban-drop-column")], - } -} -``` - -#### Category Macro - -Declaring a rule using `declare_lint_rule!` will cause a new `rule_category!` -macro to be declared in the surrounding module. This macro can be used to -refer to the corresponding diagnostic category for this lint rule, if it -has one. Using this macro instead of getting the category for a diagnostic -by dynamically parsing its string name has the advantage of statically -injecting the category at compile time and checking that it is correctly -registered to the `pgt_diagnostics` library. - -```rust -declare_lint_rule!
{ - /// Documentation - pub(crate) ExampleRule { - version: "next", - name: "myRuleName", - recommended: false, - } -} - -impl Rule for ExampleRule { - type Options = (); - - fn run(ctx: &RuleContext<Self>) -> Vec<RuleDiagnostic> { - vec![RuleDiagnostic::new( - rule_category!(), - None, - "message", - )] - } -} -``` - -### Document the rule - -The documentation needs to adhere to the following rules: - -- The **first** paragraph of the documentation is used as a brief description of the rule, and it **must** be written in one single line. Breaking the paragraph into multiple lines will break the table content of the rules page. -- The next paragraphs can be used to further document the rule with as many details as you see fit. -- The documentation must have a `## Examples` header, followed by two headers: `### Invalid` and `### Valid`. `### Invalid` must go first because we need to show when the rule is triggered. -- Rule options, if any, must be documented in the `## Options` section. -- Each code block must have `sql` set as its language. -- When adding _invalid_ snippets in the `### Invalid` section, you must use the `expect_diagnostic` code block property. We use this property to generate a diagnostic and attach it to the snippet. A snippet **must emit only ONE diagnostic**. -- When adding _valid_ snippets in the `### Valid` section, you can use one single snippet. -- You can use the code block property `ignore` to tell the code generation script to **not generate a diagnostic for an invalid snippet**. - -Here's an example of what the documentation could look like: - -````rust -declare_lint_rule! { - /// Dropping a column may break existing clients. - /// - /// Update your application code to no longer read or write the column. - /// - /// You can leave the column as nullable or delete the column once queries no longer select or modify the column. - /// - /// ## Examples - /// - /// ### Invalid - /// - /// ```sql,expect_diagnostic - /// alter table test drop column id; - /// ``` - /// - pub BanDropColumn { - version: "next", - name: "banDropColumn", - recommended: true, - sources: &[RuleSource::Squawk("ban-drop-column")], - } -} -```` - -This will cause the documentation generator to ensure the rule does emit -exactly one diagnostic for this code, and to include a snapshot for the -diagnostic in the resulting documentation page. - -### Testing the Rule - -#### Quick Test - -To quickly test your rule, head to the `pgt_analyser/src/lib.rs` file and modify the `debug_test` function. - -You should: - -- remove the `#[ignore]` attribute if present -- change the content of the `SQL` static `&str` to whatever you need -- pass your group and rule to `RuleFilter::Rule(..)` - -If you run the test, you'll see any diagnostics your rule created in your console. - -### Code generation - -For simplicity, use `just` to run all the commands with: - -```shell -just gen-lint -``` - -### Commit your work - -Once the rule is implemented, tested, and documented, you are ready to open a pull request! - -Stage and commit your changes: - -```shell -> git add -A -> git commit -m 'feat(pgt_analyser): myRuleName' -``` - -### Deprecate a rule - -There are occasions when a rule must be deprecated to avoid breaking changes, and there can be multiple reasons for deprecation. - -To do so, the macro allows adding an additional field that records the reason for deprecation: - -````rust -use pgt_analyse::declare_lint_rule; - -declare_lint_rule! { - /// Dropping a column may break existing clients.
- /// - /// Update your application code to no longer read or write the column. - /// - /// You can leave the column as nullable or delete the column once queries no longer select or modify the column. - /// - /// ## Examples - /// - /// ### Invalid - /// - /// ```sql,expect_diagnostic - /// alter table test drop column id; - /// ``` - /// - pub BanDropColumn { - version: "next", - name: "banDropColumn", - recommended: true, - deprecated: true, - sources: &[RuleSource::Squawk("ban-drop-column")], - } -} -```` diff --git a/crates/pgt_analyser/Cargo.toml b/crates/pgt_analyser/Cargo.toml deleted file mode 100644 index bd51c36a..00000000 --- a/crates/pgt_analyser/Cargo.toml +++ /dev/null @@ -1,24 +0,0 @@ - -[package] -authors.workspace = true -categories.workspace = true -description = "" -edition.workspace = true -homepage.workspace = true -keywords.workspace = true -license.workspace = true -name = "pgt_analyser" -repository.workspace = true -version = "0.0.0" - -[dependencies] -pgt_analyse = { workspace = true } -pgt_console = { workspace = true } -pgt_query_ext = { workspace = true } -serde = { workspace = true } - -[dev-dependencies] -insta = { version = "1.42.1" } -pgt_diagnostics = { workspace = true } -pgt_test_macros = { workspace = true } -termcolor = { workspace = true } diff --git a/crates/pgt_analyser/src/lib.rs b/crates/pgt_analyser/src/lib.rs deleted file mode 100644 index 248fe22b..00000000 --- a/crates/pgt_analyser/src/lib.rs +++ /dev/null @@ -1,126 +0,0 @@ -use std::{ops::Deref, sync::LazyLock}; - -use pgt_analyse::{ - AnalyserOptions, AnalysisFilter, MetadataRegistry, RegistryRuleParams, RuleDiagnostic, - RuleRegistry, -}; -pub use registry::visit_registry; - -mod lint; -pub mod options; -mod registry; - -pub static METADATA: LazyLock = LazyLock::new(|| { - let mut metadata = MetadataRegistry::default(); - visit_registry(&mut metadata); - metadata -}); - -/// Main entry point to the analyser. 
-pub struct Analyser<'a> { - /// Holds the metadata for all the rules statically known to the analyser - /// we need this later when we add suppression support - #[allow(dead_code)] - metadata: &'a MetadataRegistry, - - /// Holds all rule options - options: &'a AnalyserOptions, - - /// Holds all rules - registry: RuleRegistry, -} - -pub struct AnalyserContext<'a> { - pub root: &'a pgt_query_ext::NodeEnum, -} - -pub struct AnalyserConfig<'a> { - pub options: &'a AnalyserOptions, - pub filter: AnalysisFilter<'a>, -} - -impl<'a> Analyser<'a> { - pub fn new(conf: AnalyserConfig<'a>) -> Self { - let mut builder = RuleRegistry::builder(&conf.filter); - visit_registry(&mut builder); - let registry = builder.build(); - - Self { - metadata: METADATA.deref(), - registry, - options: conf.options, - } - } - - pub fn run(&self, ctx: AnalyserContext) -> Vec { - let params = RegistryRuleParams { - root: ctx.root, - options: self.options, - }; - - self.registry - .rules - .iter() - .flat_map(|rule| (rule.run)(¶ms)) - .collect::>() - } -} - -#[cfg(test)] -mod tests { - use core::slice; - - use pgt_analyse::{AnalyserOptions, AnalysisFilter, RuleFilter}; - use pgt_console::{ - Markup, - fmt::{Formatter, Termcolor}, - markup, - }; - use pgt_diagnostics::PrintDiagnostic; - use termcolor::NoColor; - - use crate::Analyser; - - #[ignore] - #[test] - fn debug_test() { - fn markup_to_string(markup: Markup) -> String { - let mut buffer = Vec::new(); - let mut write = Termcolor(NoColor::new(&mut buffer)); - let mut fmt = Formatter::new(&mut write); - fmt.write_markup(markup).unwrap(); - - String::from_utf8(buffer).unwrap() - } - - const SQL: &str = r#"alter table test drop column id;"#; - let rule_filter = RuleFilter::Rule("safety", "banDropColumn"); - - let filter = AnalysisFilter { - enabled_rules: Some(slice::from_ref(&rule_filter)), - ..Default::default() - }; - - let ast = pgt_query_ext::parse(SQL).expect("failed to parse SQL"); - - let options = AnalyserOptions::default(); - - let analyser = Analyser::new(crate::AnalyserConfig { - options: &options, - filter, - }); - - let results = analyser.run(crate::AnalyserContext { root: &ast }); - - println!("*******************"); - for result in &results { - let text = markup_to_string(markup! { - {PrintDiagnostic::simple(result)} - }); - eprintln!("{}", text); - } - println!("*******************"); - - // assert_eq!(results, vec![]); - } -} diff --git a/crates/pgt_analyser/src/lint.rs b/crates/pgt_analyser/src/lint.rs deleted file mode 100644 index cbc13c9d..00000000 --- a/crates/pgt_analyser/src/lint.rs +++ /dev/null @@ -1,4 +0,0 @@ -//! Generated file, do not edit by hand, see `xtask/codegen` - -pub mod safety; -::pgt_analyse::declare_category! { pub Lint { kind : Lint , groups : [self :: safety :: Safety ,] } } diff --git a/crates/pgt_analyser/src/lint/safety.rs b/crates/pgt_analyser/src/lint/safety.rs deleted file mode 100644 index 920326c2..00000000 --- a/crates/pgt_analyser/src/lint/safety.rs +++ /dev/null @@ -1,8 +0,0 @@ -//! Generated file, do not edit by hand, see `xtask/codegen` - -use pgt_analyse::declare_lint_group; -pub mod adding_required_field; -pub mod ban_drop_column; -pub mod ban_drop_not_null; -pub mod ban_drop_table; -declare_lint_group! 
{ pub Safety { name : "safety" , rules : [self :: adding_required_field :: AddingRequiredField , self :: ban_drop_column :: BanDropColumn , self :: ban_drop_not_null :: BanDropNotNull , self :: ban_drop_table :: BanDropTable ,] } } diff --git a/crates/pgt_analyser/src/lint/safety/adding_required_field.rs b/crates/pgt_analyser/src/lint/safety/adding_required_field.rs deleted file mode 100644 index d4f72a7f..00000000 --- a/crates/pgt_analyser/src/lint/safety/adding_required_field.rs +++ /dev/null @@ -1,67 +0,0 @@ -use pgt_analyse::{Rule, RuleDiagnostic, RuleSource, context::RuleContext, declare_lint_rule}; -use pgt_console::markup; - -declare_lint_rule! { - /// Adding a new column that is NOT NULL and has no default value to an existing table effectively makes it required. - /// - /// This will fail immediately upon running for any populated table. Furthermore, old application code that is unaware of this column will fail to INSERT to this table. - /// - /// Make new columns optional initially by omitting the NOT NULL constraint until all existing data and application code has been updated. Once no NULL values are written to or persisted in the database, set it to NOT NULL. - /// Alternatively, if using Postgres version 11 or later, add a DEFAULT value that is not volatile. This allows the column to keep its NOT NULL constraint. - /// - /// ## Invalid - /// alter table test add column count int not null; - /// - /// ## Valid in Postgres >= 11 - /// alter table test add column count int not null default 0; - pub AddingRequiredField { - version: "next", - name: "addingRequiredField", - recommended: false, - sources: &[RuleSource::Squawk("adding-required-field")], - } -} - -impl Rule for AddingRequiredField { - type Options = (); - - fn run(ctx: &RuleContext) -> Vec { - let mut diagnostics = vec![]; - - if let pgt_query_ext::NodeEnum::AlterTableStmt(stmt) = ctx.stmt() { - // We are currently lacking a way to check if a `AtAddColumn` subtype sets a - // not null constraint – so we'll need to check the plain SQL. - let plain_sql = ctx.stmt().to_ref().deparse().unwrap().to_ascii_lowercase(); - let is_nullable = !plain_sql.contains("not null"); - let has_set_default = plain_sql.contains("default"); - if is_nullable || has_set_default { - return diagnostics; - } - - for cmd in &stmt.cmds { - if let Some(pgt_query_ext::NodeEnum::AlterTableCmd(alter_table_cmd)) = &cmd.node { - if alter_table_cmd.subtype() - == pgt_query_ext::protobuf::AlterTableType::AtAddColumn - { - diagnostics.push( - RuleDiagnostic::new( - rule_category!(), - None, - markup! { - "Adding a new column that is NOT NULL and has no default value to an existing table effectively makes it required." - }, - ) - .detail( - None, - "Make new columns optional initially by omitting the NOT NULL constraint until all existing data and application code has been updated. Once no NULL values are written to or persisted in the database, set it to NOT NULL. Alternatively, if using Postgres version 11 or later, add a DEFAULT value that is not volatile. This allows the column to keep its NOT NULL constraint. 
- ", - ), - ); - } - } - } - } - - diagnostics - } -} diff --git a/crates/pgt_analyser/src/lint/safety/ban_drop_column.rs b/crates/pgt_analyser/src/lint/safety/ban_drop_column.rs deleted file mode 100644 index aab5d515..00000000 --- a/crates/pgt_analyser/src/lint/safety/ban_drop_column.rs +++ /dev/null @@ -1,51 +0,0 @@ -use pgt_analyse::{Rule, RuleDiagnostic, RuleSource, context::RuleContext, declare_lint_rule}; -use pgt_console::markup; - -declare_lint_rule! { - /// Dropping a column may break existing clients. - /// - /// Update your application code to no longer read or write the column. - /// - /// You can leave the column as nullable or delete the column once queries no longer select or modify the column. - /// - /// ## Examples - /// - /// ### Invalid - /// - /// ```sql,expect_diagnostic - /// alter table test drop column id; - /// ``` - /// - pub BanDropColumn { - version: "next", - name: "banDropColumn", - recommended: true, - sources: &[RuleSource::Squawk("ban-drop-column")], - } -} - -impl Rule for BanDropColumn { - type Options = (); - - fn run(ctx: &RuleContext) -> Vec { - let mut diagnostics = Vec::new(); - - if let pgt_query_ext::NodeEnum::AlterTableStmt(stmt) = &ctx.stmt() { - for cmd in &stmt.cmds { - if let Some(pgt_query_ext::NodeEnum::AlterTableCmd(cmd)) = &cmd.node { - if cmd.subtype() == pgt_query_ext::protobuf::AlterTableType::AtDropColumn { - diagnostics.push(RuleDiagnostic::new( - rule_category!(), - None, - markup! { - "Dropping a column may break existing clients." - }, - ).detail(None, "You can leave the column as nullable or delete the column once queries no longer select or modify the column.")); - } - } - } - } - - diagnostics - } -} diff --git a/crates/pgt_analyser/src/lint/safety/ban_drop_not_null.rs b/crates/pgt_analyser/src/lint/safety/ban_drop_not_null.rs deleted file mode 100644 index eb17f694..00000000 --- a/crates/pgt_analyser/src/lint/safety/ban_drop_not_null.rs +++ /dev/null @@ -1,51 +0,0 @@ -use pgt_analyse::{Rule, RuleDiagnostic, RuleSource, context::RuleContext, declare_lint_rule}; -use pgt_console::markup; - -declare_lint_rule! { - /// Dropping a NOT NULL constraint may break existing clients. - /// - /// Application code or code written in procedural languages like PL/SQL or PL/pgSQL may not expect NULL values for the column that was previously guaranteed to be NOT NULL and therefore may fail to process them correctly. - /// - /// You can consider using a marker value that represents NULL. Alternatively, create a new table allowing NULL values, copy the data from the old table, and create a view that filters NULL values. - /// - /// ## Examples - /// - /// ### Invalid - /// - /// ```sql,expect_diagnostic - /// alter table users alter column email drop not null; - /// ``` - pub BanDropNotNull { - version: "next", - name: "banDropNotNull", - recommended: true, - sources: &[RuleSource::Squawk("ban-drop-not-null")], - - } -} - -impl Rule for BanDropNotNull { - type Options = (); - - fn run(ctx: &RuleContext) -> Vec { - let mut diagnostics = Vec::new(); - - if let pgt_query_ext::NodeEnum::AlterTableStmt(stmt) = &ctx.stmt() { - for cmd in &stmt.cmds { - if let Some(pgt_query_ext::NodeEnum::AlterTableCmd(cmd)) = &cmd.node { - if cmd.subtype() == pgt_query_ext::protobuf::AlterTableType::AtDropNotNull { - diagnostics.push(RuleDiagnostic::new( - rule_category!(), - None, - markup! { - "Dropping a NOT NULL constraint may break existing clients." - }, - ).detail(None, "Consider using a marker value that represents NULL. 
Alternatively, create a new table allowing NULL values, copy the data from the old table, and create a view that filters NULL values.")); - } - } - } - } - - diagnostics - } -} diff --git a/crates/pgt_analyser/src/lint/safety/ban_drop_table.rs b/crates/pgt_analyser/src/lint/safety/ban_drop_table.rs deleted file mode 100644 index 4ce00a60..00000000 --- a/crates/pgt_analyser/src/lint/safety/ban_drop_table.rs +++ /dev/null @@ -1,52 +0,0 @@ -use pgt_analyse::{Rule, RuleDiagnostic, RuleSource, context::RuleContext, declare_lint_rule}; -use pgt_console::markup; - -declare_lint_rule! { - /// Dropping a table may break existing clients. - /// - /// Update your application code to no longer read or write the table. - /// - /// Once the table is no longer needed, you can delete it by running the command "DROP TABLE mytable;". - /// - /// This command will permanently remove the table from the database and all its contents. - /// Be sure to back up the table before deleting it, just in case you need to restore it in the future. - /// - /// ## Examples - /// ```sql,expect_diagnostic - /// drop table some_table; - /// ``` - pub BanDropTable { - version: "next", - name: "banDropTable", - recommended: true, - sources: &[RuleSource::Squawk("ban-drop-table")], - } -} - -impl Rule for BanDropTable { - type Options = (); - - fn run(ctx: &RuleContext) -> Vec { - let mut diagnostics = vec![]; - - if let pgt_query_ext::NodeEnum::DropStmt(stmt) = &ctx.stmt() { - if stmt.remove_type() == pgt_query_ext::protobuf::ObjectType::ObjectTable { - diagnostics.push( - RuleDiagnostic::new( - rule_category!(), - None, - markup! { - "Dropping a table may break existing clients." - }, - ) - .detail( - None, - "Update your application code to no longer read or write the table, and only then delete the table. Be sure to create a backup.", - ), - ); - } - } - - diagnostics - } -} diff --git a/crates/pgt_analyser/src/options.rs b/crates/pgt_analyser/src/options.rs deleted file mode 100644 index d78020f8..00000000 --- a/crates/pgt_analyser/src/options.rs +++ /dev/null @@ -1,10 +0,0 @@ -//! Generated file, do not edit by hand, see `xtask/codegen` - -use crate::lint; -pub type AddingRequiredField = - ::Options; -pub type BanDropColumn = - ::Options; -pub type BanDropNotNull = - ::Options; -pub type BanDropTable = ::Options; diff --git a/crates/pgt_analyser/src/registry.rs b/crates/pgt_analyser/src/registry.rs deleted file mode 100644 index fb549575..00000000 --- a/crates/pgt_analyser/src/registry.rs +++ /dev/null @@ -1,6 +0,0 @@ -//! Generated file, do not edit by hand, see `xtask/codegen` - -use pgt_analyse::RegistryVisitor; -pub fn visit_registry(registry: &mut V) { - registry.record_category::(); -} diff --git a/crates/pgt_analyser/tests/rules_tests.rs b/crates/pgt_analyser/tests/rules_tests.rs deleted file mode 100644 index 247c02b0..00000000 --- a/crates/pgt_analyser/tests/rules_tests.rs +++ /dev/null @@ -1,132 +0,0 @@ -use core::slice; -use std::{fmt::Write, fs::read_to_string, path::Path}; - -use pgt_analyse::{AnalyserOptions, AnalysisFilter, RuleDiagnostic, RuleFilter}; -use pgt_analyser::{Analyser, AnalyserConfig, AnalyserContext}; -use pgt_console::StdDisplay; -use pgt_diagnostics::PrintDiagnostic; - -pgt_test_macros::gen_tests! 
{ - "tests/specs/**/*.sql", - crate::rule_test -} - -fn rule_test(full_path: &'static str, _: &str, _: &str) { - let input_file = Path::new(full_path); - - let (group, rule, fname) = parse_test_path(input_file); - - let rule_filter = RuleFilter::Rule(group.as_str(), rule.as_str()); - let filter = AnalysisFilter { - enabled_rules: Some(slice::from_ref(&rule_filter)), - ..Default::default() - }; - - let query = - read_to_string(full_path).unwrap_or_else(|_| panic!("Failed to read file: {} ", full_path)); - - let ast = pgt_query_ext::parse(&query).expect("failed to parse SQL"); - let options = AnalyserOptions::default(); - let analyser = Analyser::new(AnalyserConfig { - options: &options, - filter, - }); - - let results = analyser.run(AnalyserContext { root: &ast }); - - let mut snapshot = String::new(); - write_snapshot(&mut snapshot, query.as_str(), results.as_slice()); - - insta::with_settings!({ - prepend_module_to_snapshot => false, - snapshot_path => input_file.parent().unwrap(), - }, { - insta::assert_snapshot!(fname, snapshot); - }); - - let expectation = Expectation::from_file(&query); - expectation.assert(results.as_slice()); -} - -fn parse_test_path(path: &Path) -> (String, String, String) { - let mut comps: Vec<&str> = path - .components() - .map(|c| c.as_os_str().to_str().unwrap()) - .collect(); - - let fname = comps.pop().unwrap(); - let rule = comps.pop().unwrap(); - let group = comps.pop().unwrap(); - - (group.into(), rule.into(), fname.into()) -} - -fn write_snapshot(snapshot: &mut String, query: &str, diagnostics: &[RuleDiagnostic]) { - writeln!(snapshot, "# Input").unwrap(); - writeln!(snapshot, "```").unwrap(); - writeln!(snapshot, "{query}").unwrap(); - writeln!(snapshot, "```").unwrap(); - writeln!(snapshot).unwrap(); - - if !diagnostics.is_empty() { - writeln!(snapshot, "# Diagnostics").unwrap(); - for diagnostic in diagnostics { - let printer = PrintDiagnostic::simple(diagnostic); - - writeln!(snapshot, "{}", StdDisplay(printer)).unwrap(); - writeln!(snapshot).unwrap(); - } - } -} - -enum Expectation { - NoDiagnostics, - AnyDiagnostics, - OnlyOne(String), -} - -impl Expectation { - fn from_file(content: &str) -> Self { - for line in content.lines() { - if line.contains("expect_no_diagnostics") { - return Self::NoDiagnostics; - } - - if line.contains("expect_only_") { - let kind = line - .splitn(3, "_") - .last() - .expect("Use pattern: `-- expect_only_`") - .trim(); - - return Self::OnlyOne(kind.into()); - } - } - - Self::AnyDiagnostics - } - - fn assert(&self, diagnostics: &[RuleDiagnostic]) { - match self { - Self::NoDiagnostics => { - if !diagnostics.is_empty() { - panic!("This test should not have any diagnostics."); - } - } - Self::OnlyOne(category) => { - let found_kinds = diagnostics - .iter() - .map(|d| d.get_category_name()) - .collect::>() - .join(", "); - - if diagnostics.len() != 1 || diagnostics[0].get_category_name() != category { - panic!( - "This test should only have one diagnostic of kind: {category}\nReceived: {found_kinds}" - ); - } - } - Self::AnyDiagnostics => {} - } - } -} diff --git a/crates/pgt_analyser/tests/specs/safety/addingRequiredField/basic.sql b/crates/pgt_analyser/tests/specs/safety/addingRequiredField/basic.sql deleted file mode 100644 index 836c295c..00000000 --- a/crates/pgt_analyser/tests/specs/safety/addingRequiredField/basic.sql +++ /dev/null @@ -1,3 +0,0 @@ --- expect_only_lint/safety/addingRequiredField -alter table test -add column c int not null; \ No newline at end of file diff --git 
a/crates/pgt_analyser/tests/specs/safety/addingRequiredField/basic.sql.snap b/crates/pgt_analyser/tests/specs/safety/addingRequiredField/basic.sql.snap deleted file mode 100644 index 559dbf53..00000000 --- a/crates/pgt_analyser/tests/specs/safety/addingRequiredField/basic.sql.snap +++ /dev/null @@ -1,17 +0,0 @@ ---- -source: crates/pgt_analyser/tests/rules_tests.rs -expression: snapshot ---- -# Input -``` --- expect_only_lint/safety/addingRequiredField -alter table test -add column c int not null; -``` - -# Diagnostics -lint/safety/addingRequiredField ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ - - × Adding a new column that is NOT NULL and has no default value to an existing table effectively makes it required. - - i Make new columns optional initially by omitting the NOT NULL constraint until all existing data and application code has been updated. Once no NULL values are written to or persisted in the database, set it to NOT NULL. Alternatively, if using Postgres version 11 or later, add a DEFAULT value that is not volatile. This allows the column to keep its NOT NULL constraint. diff --git a/crates/pgt_analyser/tests/specs/safety/addingRequiredField/with_default.sql b/crates/pgt_analyser/tests/specs/safety/addingRequiredField/with_default.sql deleted file mode 100644 index 988f0a71..00000000 --- a/crates/pgt_analyser/tests/specs/safety/addingRequiredField/with_default.sql +++ /dev/null @@ -1,3 +0,0 @@ --- expect_no_diagnostics -alter table test -add column c int not null default 0; \ No newline at end of file diff --git a/crates/pgt_analyser/tests/specs/safety/addingRequiredField/with_default.sql.snap b/crates/pgt_analyser/tests/specs/safety/addingRequiredField/with_default.sql.snap deleted file mode 100644 index 166ae7dc..00000000 --- a/crates/pgt_analyser/tests/specs/safety/addingRequiredField/with_default.sql.snap +++ /dev/null @@ -1,10 +0,0 @@ ---- -source: crates/pgt_analyser/tests/rules_tests.rs -expression: snapshot ---- -# Input -``` --- expect_no_diagnostics -alter table test -add column c int not null default 0; -``` diff --git a/crates/pgt_analyser/tests/specs/safety/addingRequiredField/without_required.sql b/crates/pgt_analyser/tests/specs/safety/addingRequiredField/without_required.sql deleted file mode 100644 index 1990edc1..00000000 --- a/crates/pgt_analyser/tests/specs/safety/addingRequiredField/without_required.sql +++ /dev/null @@ -1,3 +0,0 @@ --- expect_no_diagnostics -alter table test -add column c int; \ No newline at end of file diff --git a/crates/pgt_analyser/tests/specs/safety/addingRequiredField/without_required.sql.snap b/crates/pgt_analyser/tests/specs/safety/addingRequiredField/without_required.sql.snap deleted file mode 100644 index f62cf4cd..00000000 --- a/crates/pgt_analyser/tests/specs/safety/addingRequiredField/without_required.sql.snap +++ /dev/null @@ -1,10 +0,0 @@ ---- -source: crates/pgt_analyser/tests/rules_tests.rs -expression: snapshot ---- -# Input -``` --- expect_no_diagnostics -alter table test -add column c int; -``` diff --git a/crates/pgt_analyser/tests/specs/safety/banDropColumn/basic.sql b/crates/pgt_analyser/tests/specs/safety/banDropColumn/basic.sql deleted file mode 100644 index 16d3b476..00000000 --- a/crates/pgt_analyser/tests/specs/safety/banDropColumn/basic.sql +++ /dev/null @@ -1,3 +0,0 @@ --- expect_only_lint/safety/banDropColumn -alter table test -drop column id; \ No newline at end of file diff --git a/crates/pgt_analyser/tests/specs/safety/banDropColumn/basic.sql.snap 
b/crates/pgt_analyser/tests/specs/safety/banDropColumn/basic.sql.snap deleted file mode 100644 index 3fd80e19..00000000 --- a/crates/pgt_analyser/tests/specs/safety/banDropColumn/basic.sql.snap +++ /dev/null @@ -1,17 +0,0 @@ ---- -source: crates/pgt_analyser/tests/rules_tests.rs -expression: snapshot ---- -# Input -``` --- expect_only_lint/safety/banDropColumn -alter table test -drop column id; -``` - -# Diagnostics -lint/safety/banDropColumn ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ - - × Dropping a column may break existing clients. - - i You can leave the column as nullable or delete the column once queries no longer select or modify the column. diff --git a/crates/pgt_analyser/tests/specs/safety/banDropNotNull/basic.sql b/crates/pgt_analyser/tests/specs/safety/banDropNotNull/basic.sql deleted file mode 100644 index 1e1fc879..00000000 --- a/crates/pgt_analyser/tests/specs/safety/banDropNotNull/basic.sql +++ /dev/null @@ -1,4 +0,0 @@ --- expect_only_lint/safety/banDropNotNull -alter table users -alter column id -drop not null; \ No newline at end of file diff --git a/crates/pgt_analyser/tests/specs/safety/banDropNotNull/basic.sql.snap b/crates/pgt_analyser/tests/specs/safety/banDropNotNull/basic.sql.snap deleted file mode 100644 index e5d55267..00000000 --- a/crates/pgt_analyser/tests/specs/safety/banDropNotNull/basic.sql.snap +++ /dev/null @@ -1,18 +0,0 @@ ---- -source: crates/pgt_analyser/tests/rules_tests.rs -expression: snapshot ---- -# Input -``` --- expect_only_lint/safety/banDropNotNull -alter table users -alter column id -drop not null; -``` - -# Diagnostics -lint/safety/banDropNotNull ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ - - × Dropping a NOT NULL constraint may break existing clients. - - i Consider using a marker value that represents NULL. Alternatively, create a new table allowing NULL values, copy the data from the old table, and create a view that filters NULL values. diff --git a/crates/pgt_analyser/tests/specs/safety/banDropTable/basic.sql b/crates/pgt_analyser/tests/specs/safety/banDropTable/basic.sql deleted file mode 100644 index 16f6fd62..00000000 --- a/crates/pgt_analyser/tests/specs/safety/banDropTable/basic.sql +++ /dev/null @@ -1,2 +0,0 @@ --- expect_only_lint/safety/banDropTable -drop table test; \ No newline at end of file diff --git a/crates/pgt_analyser/tests/specs/safety/banDropTable/basic.sql.snap b/crates/pgt_analyser/tests/specs/safety/banDropTable/basic.sql.snap deleted file mode 100644 index 481b1223..00000000 --- a/crates/pgt_analyser/tests/specs/safety/banDropTable/basic.sql.snap +++ /dev/null @@ -1,16 +0,0 @@ ---- -source: crates/pgt_analyser/tests/rules_tests.rs -expression: snapshot ---- -# Input -``` --- expect_only_lint/safety/banDropTable -drop table test; -``` - -# Diagnostics -lint/safety/banDropTable ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ - - × Dropping a table may break existing clients. - - i Update your application code to no longer read or write the table, and only then delete the table. Be sure to create a backup. 
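The spec files above are what drive `rules_tests.rs`: `gen_tests!` picks up every file matching `tests/specs/**/*.sql`, and the leading comment of each file is parsed by `Expectation::from_file`. As a sketch, a hypothetical new spec for `banDropColumn` (table name and statement invented for illustration) would look like:

```sql
-- expect_only_lint/safety/banDropColumn
alter table orders
drop column total;
```

Saved under `tests/specs/safety/banDropColumn/`, the harness asserts that exactly one `lint/safety/banDropColumn` diagnostic is emitted and snapshots the rendered output with `insta`.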
diff --git a/crates/pgt_cli/Cargo.toml b/crates/pgt_cli/Cargo.toml deleted file mode 100644 index f2a3fa18..00000000 --- a/crates/pgt_cli/Cargo.toml +++ /dev/null @@ -1,63 +0,0 @@ -[package] -authors.workspace = true -categories.workspace = true -description = "" -edition.workspace = true -homepage.workspace = true -keywords.workspace = true -license.workspace = true -name = "pgt_cli" -repository.workspace = true -version = "0.0.0" - - -[dependencies] -anyhow = { workspace = true } -biome_deserialize = { workspace = true } -biome_deserialize_macros = { workspace = true } -bpaf = { workspace = true, features = ["bright-color"] } -crossbeam = { workspace = true } -dashmap = "5.5.3" -hdrhistogram = { version = "7.5.4", default-features = false } -path-absolutize = { version = "3.1.1", optional = false, features = ["use_unix_paths_on_wasm"] } -pgt_analyse = { workspace = true } -pgt_configuration = { workspace = true } -pgt_console = { workspace = true } -pgt_diagnostics = { workspace = true } -pgt_flags = { workspace = true } -pgt_fs = { workspace = true } -pgt_lsp = { workspace = true } -pgt_text_edit = { workspace = true } -pgt_workspace = { workspace = true } -quick-junit = "0.5.0" -rayon = { workspace = true } -rustc-hash = { workspace = true } -serde = { workspace = true, features = ["derive"] } -serde_json = { workspace = true } -tokio = { workspace = true, features = ["io-std", "io-util", "net", "time", "rt", "sync", "rt-multi-thread", "macros"] } -tracing = { workspace = true } -tracing-appender = "0.2.3" -tracing-bunyan-formatter = { workspace = true } -tracing-subscriber = { workspace = true, features = ["env-filter", "json"] } -tracing-tree = { version = "0.4.0", features = ["time"] } - -[target.'cfg(unix)'.dependencies] -libc = "0.2.161" -tokio = { workspace = true, features = ["process"] } - -[target.'cfg(windows)'.dependencies] -mimalloc = "0.1.43" - -[target.'cfg(all(target_family="unix", not(all(target_arch = "aarch64", target_env = "musl"))))'.dependencies] -tikv-jemallocator = "0.6.0" - -[dev-dependencies] -assert_cmd = "2.0.16" -predicates = "3.1.3" - -[lib] -doctest = false - -[[bin]] -name = "postgrestools" -path = "src/main.rs" diff --git a/crates/pgt_cli/src/changed.rs b/crates/pgt_cli/src/changed.rs deleted file mode 100644 index a18502dd..00000000 --- a/crates/pgt_cli/src/changed.rs +++ /dev/null @@ -1,43 +0,0 @@ -use crate::CliDiagnostic; -use pgt_configuration::PartialConfiguration; -use pgt_fs::FileSystem; -use pgt_workspace::DynRef; -use std::ffi::OsString; - -pub(crate) fn get_changed_files( - fs: &DynRef<'_, dyn FileSystem>, - configuration: &PartialConfiguration, - since: Option<&str>, -) -> Result, CliDiagnostic> { - let default_branch = configuration - .vcs - .as_ref() - .and_then(|v| v.default_branch.as_ref()); - - let base = match (since, default_branch) { - (Some(since), Some(_)) => since, - (Some(since), None) => since, - (None, Some(branch)) => branch, - (None, None) => { - return Err(CliDiagnostic::incompatible_end_configuration( - "The `--changed` flag was set, but couldn't determine the base to compare against. 
Either set configuration.vcs.default_branch or use the --since argument.", - )); - } - }; - - let changed_files = fs.get_changed_files(base)?; - - let filtered_changed_files = changed_files.iter().map(OsString::from).collect::>(); - - Ok(filtered_changed_files) -} - -pub(crate) fn get_staged_files( - fs: &DynRef<'_, dyn FileSystem>, -) -> Result, CliDiagnostic> { - let staged_files = fs.get_staged_files()?; - - let filtered_staged_files = staged_files.iter().map(OsString::from).collect::>(); - - Ok(filtered_staged_files) -} diff --git a/crates/pgt_cli/src/cli_options.rs b/crates/pgt_cli/src/cli_options.rs deleted file mode 100644 index 5c41c7fc..00000000 --- a/crates/pgt_cli/src/cli_options.rs +++ /dev/null @@ -1,234 +0,0 @@ -use crate::LoggingLevel; -use crate::logging::LoggingKind; -use bpaf::Bpaf; -use pgt_configuration::ConfigurationPathHint; -use pgt_diagnostics::Severity; -use std::fmt::{Display, Formatter}; -use std::path::PathBuf; -use std::str::FromStr; - -/// Global options applied to all commands -#[derive(Debug, Clone, Bpaf)] -pub struct CliOptions { - /// Set the formatting mode for markup: "off" prints everything as plain text, "force" forces the formatting of markup using ANSI even if the console output is determined to be incompatible - #[bpaf(long("colors"), argument("off|force"))] - pub colors: Option, - - /// Connect to a running instance of the daemon server. - #[bpaf(long("use-server"), switch, fallback(false))] - pub use_server: bool, - - /// Print additional diagnostics, and some diagnostics show more information. Also, print out what files were processed and which ones were modified. - #[bpaf(long("verbose"), switch, fallback(false))] - pub verbose: bool, - - /// Set the file path to the configuration file, or the directory path to find `postgrestools.jsonc`. - /// If used, it disables the default configuration file resolution. - #[bpaf(long("config-path"), argument("PATH"), optional)] - pub config_path: Option, - - /// Cap the amount of diagnostics displayed. When `none` is provided, the limit is lifted. - #[bpaf( - long("max-diagnostics"), - argument("none|"), - fallback(MaxDiagnostics::default()), - display_fallback - )] - pub max_diagnostics: MaxDiagnostics, - - /// Skip over files containing syntax errors instead of emitting an error diagnostic. - #[bpaf(long("skip-errors"), switch)] - pub skip_errors: bool, - - /// Silence errors that would be emitted in case no files were processed during the execution of the command. - #[bpaf(long("no-errors-on-unmatched"), switch)] - pub no_errors_on_unmatched: bool, - - /// Tell Postgres Tools to exit with an error code if some diagnostics emit warnings. - #[bpaf(long("error-on-warnings"), switch)] - pub error_on_warnings: bool, - - /// Allows to change how diagnostics and summary are reported. - #[bpaf( - long("reporter"), - argument("json|json-pretty|github|junit|summary|gitlab"), - fallback(CliReporter::default()) - )] - pub reporter: CliReporter, - - #[bpaf( - env("PGT_LOG_LEVEL"), - long("log-level"), - argument("none|debug|info|warn|error"), - fallback(LoggingLevel::default()), - display_fallback - )] - /// The level of logging. In order, from the most verbose to the least verbose: debug, info, warn, error. - /// - /// The value `none` won't show any logging. - pub log_level: LoggingLevel, - - /// How the log should look like. 
- #[bpaf( - long("log-kind"), - argument("pretty|compact|json"), - fallback(LoggingKind::default()), - display_fallback - )] - pub log_kind: LoggingKind, - - #[bpaf( - long("diagnostic-level"), - argument("info|warn|error"), - fallback(Severity::default()), - display_fallback - )] - /// The level of diagnostics to show. In order, from the lowest to the most important: info, warn, error. Passing `--diagnostic-level=error` will cause Postgres Tools to print only diagnostics that contain only errors. - pub diagnostic_level: Severity, -} - -impl CliOptions { - /// Computes the [ConfigurationPathHint] based on the options passed by the user - pub(crate) fn as_configuration_path_hint(&self) -> ConfigurationPathHint { - match self.config_path.as_ref() { - None => ConfigurationPathHint::default(), - Some(path) => ConfigurationPathHint::FromUser(PathBuf::from(path)), - } - } -} - -#[derive(Debug, Clone)] -pub enum ColorsArg { - Off, - Force, -} - -impl FromStr for ColorsArg { - type Err = String; - - fn from_str(s: &str) -> Result { - match s { - "off" => Ok(Self::Off), - "force" => Ok(Self::Force), - _ => Err(format!( - "value {s:?} is not valid for the --colors argument" - )), - } - } -} - -#[derive(Debug, Default, Clone)] -pub enum CliReporter { - /// The default reporter - #[default] - Default, - /// Diagnostics are printed for GitHub workflow commands - GitHub, - /// Diagnostics and summary are printed in JUnit format - Junit, - /// Reports linter diagnostics using the [GitLab Code Quality report](https://docs.gitlab.com/ee/ci/testing/code_quality.html#implement-a-custom-tool). - GitLab, -} - -impl CliReporter { - pub(crate) const fn is_default(&self) -> bool { - matches!(self, Self::Default) - } -} - -impl FromStr for CliReporter { - type Err = String; - - fn from_str(s: &str) -> Result { - match s { - "github" => Ok(Self::GitHub), - "junit" => Ok(Self::Junit), - "gitlab" => Ok(Self::GitLab), - _ => Err(format!( - "value {s:?} is not valid for the --reporter argument" - )), - } - } -} - -impl Display for CliReporter { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - match self { - CliReporter::Default => f.write_str("default"), - CliReporter::GitHub => f.write_str("github"), - CliReporter::Junit => f.write_str("junit"), - CliReporter::GitLab => f.write_str("gitlab"), - } - } -} - -#[derive(Debug, Clone, Copy, Bpaf)] -pub enum MaxDiagnostics { - None, - Limit(u32), -} - -impl MaxDiagnostics { - pub fn ok(&self) -> Option { - match self { - MaxDiagnostics::None => None, - MaxDiagnostics::Limit(value) => Some(*value), - } - } -} - -impl Default for MaxDiagnostics { - fn default() -> Self { - Self::Limit(20) - } -} - -impl Display for MaxDiagnostics { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - match self { - MaxDiagnostics::None => { - write!(f, "none") - } - MaxDiagnostics::Limit(value) => { - write!(f, "{value}") - } - } - } -} - -impl FromStr for MaxDiagnostics { - type Err = String; - fn from_str(s: &str) -> Result { - match s { - "none" => Ok(MaxDiagnostics::None), - _ => { - if let Ok(value) = s.parse::() { - Ok(MaxDiagnostics::Limit(value)) - } else { - Err(format!( - "Invalid value provided. 
Provide 'none' to lift the limit, or a number between 0 and {}.", - u32::MAX - )) - } - } - } - } -} - -impl From for u64 { - fn from(value: MaxDiagnostics) -> Self { - match value { - MaxDiagnostics::None => u64::MAX, - MaxDiagnostics::Limit(value) => value as u64, - } - } -} - -impl From for u32 { - fn from(value: MaxDiagnostics) -> Self { - match value { - MaxDiagnostics::None => u32::MAX, - MaxDiagnostics::Limit(value) => value, - } - } -} diff --git a/crates/pgt_cli/src/commands/check.rs b/crates/pgt_cli/src/commands/check.rs deleted file mode 100644 index 46819624..00000000 --- a/crates/pgt_cli/src/commands/check.rs +++ /dev/null @@ -1,76 +0,0 @@ -use crate::cli_options::CliOptions; -use crate::{CliDiagnostic, Execution, TraversalMode}; -use biome_deserialize::Merge; -use pgt_configuration::PartialConfiguration; -use pgt_console::Console; -use pgt_fs::FileSystem; -use pgt_workspace::{DynRef, Workspace, WorkspaceError, configuration::LoadedConfiguration}; -use std::ffi::OsString; - -use super::{CommandRunner, get_files_to_process_with_cli_options}; - -pub(crate) struct CheckCommandPayload { - pub(crate) configuration: Option, - pub(crate) paths: Vec, - pub(crate) stdin_file_path: Option, - pub(crate) staged: bool, - pub(crate) changed: bool, - pub(crate) since: Option, -} - -impl CommandRunner for CheckCommandPayload { - const COMMAND_NAME: &'static str = "check"; - - fn merge_configuration( - &mut self, - loaded_configuration: LoadedConfiguration, - _fs: &DynRef<'_, dyn FileSystem>, - _console: &mut dyn Console, - ) -> Result { - let LoadedConfiguration { - configuration: mut fs_configuration, - .. - } = loaded_configuration; - - if let Some(configuration) = self.configuration.clone() { - // overwrite fs config with cli args - fs_configuration.merge_with(configuration); - } - - Ok(fs_configuration) - } - - fn get_files_to_process( - &self, - fs: &DynRef<'_, dyn FileSystem>, - configuration: &PartialConfiguration, - ) -> Result, CliDiagnostic> { - let paths = get_files_to_process_with_cli_options( - self.since.as_deref(), - self.changed, - self.staged, - fs, - configuration, - )? 
diff --git a/crates/pgt_cli/src/commands/check.rs b/crates/pgt_cli/src/commands/check.rs
deleted file mode 100644
index 46819624..00000000
--- a/crates/pgt_cli/src/commands/check.rs
+++ /dev/null
@@ -1,76 +0,0 @@
-use crate::cli_options::CliOptions;
-use crate::{CliDiagnostic, Execution, TraversalMode};
-use biome_deserialize::Merge;
-use pgt_configuration::PartialConfiguration;
-use pgt_console::Console;
-use pgt_fs::FileSystem;
-use pgt_workspace::{DynRef, Workspace, WorkspaceError, configuration::LoadedConfiguration};
-use std::ffi::OsString;
-
-use super::{CommandRunner, get_files_to_process_with_cli_options};
-
-pub(crate) struct CheckCommandPayload {
-    pub(crate) configuration: Option<PartialConfiguration>,
-    pub(crate) paths: Vec<OsString>,
-    pub(crate) stdin_file_path: Option<String>,
-    pub(crate) staged: bool,
-    pub(crate) changed: bool,
-    pub(crate) since: Option<String>,
-}
-
-impl CommandRunner for CheckCommandPayload {
-    const COMMAND_NAME: &'static str = "check";
-
-    fn merge_configuration(
-        &mut self,
-        loaded_configuration: LoadedConfiguration,
-        _fs: &DynRef<'_, dyn FileSystem>,
-        _console: &mut dyn Console,
-    ) -> Result<PartialConfiguration, WorkspaceError> {
-        let LoadedConfiguration {
-            configuration: mut fs_configuration,
-            ..
-        } = loaded_configuration;
-
-        if let Some(configuration) = self.configuration.clone() {
-            // overwrite fs config with cli args
-            fs_configuration.merge_with(configuration);
-        }
-
-        Ok(fs_configuration)
-    }
-
-    fn get_files_to_process(
-        &self,
-        fs: &DynRef<'_, dyn FileSystem>,
-        configuration: &PartialConfiguration,
-    ) -> Result<Vec<OsString>, CliDiagnostic> {
-        let paths = get_files_to_process_with_cli_options(
-            self.since.as_deref(),
-            self.changed,
-            self.staged,
-            fs,
-            configuration,
-        )?
-        .unwrap_or(self.paths.clone());
-
-        Ok(paths)
-    }
-
-    fn get_stdin_file_path(&self) -> Option<&str> {
-        self.stdin_file_path.as_deref()
-    }
-
-    fn get_execution(
-        &self,
-        cli_options: &CliOptions,
-        console: &mut dyn Console,
-        _workspace: &dyn Workspace,
-    ) -> Result<Execution, CliDiagnostic> {
-        Ok(Execution::new(TraversalMode::Check {
-            stdin: self.get_stdin(console)?,
-            vcs_targeted: (self.staged, self.changed).into(),
-        })
-        .set_report(cli_options))
-    }
-}
diff --git a/crates/pgt_cli/src/commands/clean.rs b/crates/pgt_cli/src/commands/clean.rs
deleted file mode 100644
index e401f307..00000000
--- a/crates/pgt_cli/src/commands/clean.rs
+++ /dev/null
@@ -1,15 +0,0 @@
-use crate::commands::daemon::default_pgt_log_path;
-use crate::{CliDiagnostic, CliSession};
-use pgt_flags::pgt_env;
-use std::fs::{create_dir, remove_dir_all};
-use std::path::PathBuf;
-
-/// Runs the clean command
-pub fn clean(_cli_session: CliSession) -> Result<(), CliDiagnostic> {
-    let logs_path = pgt_env()
-        .pgt_log_path
-        .value()
-        .map_or(default_pgt_log_path(), PathBuf::from);
-    remove_dir_all(logs_path.clone()).and_then(|_| create_dir(logs_path))?;
-    Ok(())
-}
diff --git a/crates/pgt_cli/src/commands/daemon.rs b/crates/pgt_cli/src/commands/daemon.rs
deleted file mode 100644
index 988286f4..00000000
--- a/crates/pgt_cli/src/commands/daemon.rs
+++ /dev/null
@@ -1,295 +0,0 @@
-use crate::{
-    CliDiagnostic, CliSession, open_transport,
-    service::{self, ensure_daemon, open_socket, run_daemon},
-};
-use pgt_console::{ConsoleExt, markup};
-use pgt_lsp::ServerFactory;
-use pgt_workspace::{TransportError, WorkspaceError, workspace::WorkspaceClient};
-use std::{env, path::PathBuf};
-use tokio::io;
-use tokio::runtime::Runtime;
-use tracing::subscriber::Interest;
-use tracing::{Instrument, Metadata, debug_span, metadata::LevelFilter};
-use tracing_appender::rolling::Rotation;
-use tracing_bunyan_formatter::{BunyanFormattingLayer, JsonStorageLayer};
-use tracing_subscriber::{
-    layer::{Context, Filter},
-    prelude::*,
-    registry,
-};
-use tracing_tree::{HierarchicalLayer, time::UtcDateTime};
-
-pub(crate) fn start(
-    session: CliSession,
-    config_path: Option<PathBuf>,
-    log_path: Option<PathBuf>,
-    log_file_name_prefix: Option<String>,
-) -> Result<(), CliDiagnostic> {
-    let rt = Runtime::new()?;
-    let did_spawn = rt.block_on(ensure_daemon(
-        false,
-        config_path,
-        log_path,
-        log_file_name_prefix,
-    ))?;
-
-    if did_spawn {
-        session.app.console.log(markup! {
-            "The server was successfully started"
-        });
-    } else {
-        session.app.console.log(markup! {
-            "The server was already running"
-        });
-    }
-
-    Ok(())
-}
-
-pub(crate) fn stop(session: CliSession) -> Result<(), CliDiagnostic> {
-    let rt = Runtime::new()?;
-
-    match open_transport(rt)? {
-        Some(transport) => {
-            let client = WorkspaceClient::new(transport)?;
-            match client.shutdown() {
-                // The `ChannelClosed` error is expected since the server can
-                // shutdown before sending a response
-                Ok(()) | Err(WorkspaceError::TransportError(TransportError::ChannelClosed)) => {}
-                Err(err) => return Err(CliDiagnostic::from(err)),
-            };
-
-            session.app.console.log(markup! {
-                "The server was successfully stopped"
-            });
-        }
-        _ => {
-            session.app.console.log(markup! {
-                "The server was not running"
-            });
-        }
-    }
-
-    Ok(())
-}
{ - "The server was not running" - }); - } - } - - Ok(()) -} - -pub(crate) fn run_server( - stop_on_disconnect: bool, - config_path: Option, - log_path: Option, - log_file_name_prefix: Option, - log_level: Option, - log_kind: Option, -) -> Result<(), CliDiagnostic> { - setup_tracing_subscriber(log_path, log_file_name_prefix, log_level, log_kind); - - let rt = Runtime::new()?; - let factory = ServerFactory::new(stop_on_disconnect); - let cancellation = factory.cancellation(); - let span = debug_span!("Running Server", pid = std::process::id()); - - rt.block_on(async move { - tokio::select! { - res = run_daemon(factory, config_path).instrument(span) => { - match res { - Ok(never) => match never {}, - Err(err) => Err(err.into()), - } - } - _ = cancellation.notified() => { - tracing::info!("Received shutdown signal"); - Ok(()) - } - } - }) -} - -pub(crate) fn print_socket() -> Result<(), CliDiagnostic> { - let rt = Runtime::new()?; - rt.block_on(service::print_socket())?; - Ok(()) -} - -pub(crate) fn lsp_proxy( - config_path: Option, - log_path: Option, - log_file_name_prefix: Option, -) -> Result<(), CliDiagnostic> { - let rt = Runtime::new()?; - rt.block_on(start_lsp_proxy( - &rt, - config_path, - log_path, - log_file_name_prefix, - ))?; - - Ok(()) -} - -/// Start a proxy process. -/// Receives a process via `stdin` and then copy the content to the LSP socket. -/// Copy to the process on `stdout` when the LSP responds to a message -async fn start_lsp_proxy( - rt: &Runtime, - config_path: Option, - log_path: Option, - log_file_name_prefix: Option, -) -> Result<(), CliDiagnostic> { - ensure_daemon(true, config_path, log_path, log_file_name_prefix).await?; - - match open_socket().await? { - Some((mut owned_read_half, mut owned_write_half)) => { - // forward stdin to socket - let mut stdin = io::stdin(); - let input_handle = rt.spawn(async move { - loop { - match io::copy(&mut stdin, &mut owned_write_half).await { - Ok(b) => { - if b == 0 { - return Ok(()); - } - } - Err(err) => return Err(err), - }; - } - }); - - // receive socket response to stdout - let mut stdout = io::stdout(); - let out_put_handle = rt.spawn(async move { - loop { - match io::copy(&mut owned_read_half, &mut stdout).await { - Ok(b) => { - if b == 0 { - return Ok(()); - } - } - Err(err) => return Err(err), - }; - } - }); - - let _ = input_handle.await; - let _ = out_put_handle.await; - Ok(()) - } - None => Ok(()), - } -} - -/// Set up the [tracing]-based logging system for the server -/// The events received by the subscriber are filtered at the `info` level, -/// then printed using the [HierarchicalLayer] layer, and the resulting text -/// is written to log files rotated on a hourly basis (in -/// `pgt-logs/server.log.yyyy-MM-dd-HH` files inside the system temporary -/// directory) -fn setup_tracing_subscriber( - log_path: Option, - log_file_name_prefix: Option, - log_level: Option, - log_kind: Option, -) { - let pgt_log_path = log_path.unwrap_or(pgt_fs::ensure_cache_dir().join("pgt-logs")); - - let appender_builder = tracing_appender::rolling::RollingFileAppender::builder(); - - let file_appender = appender_builder - .filename_prefix(log_file_name_prefix.unwrap_or(String::from("server.log"))) - .max_log_files(7) - .rotation(Rotation::HOURLY) - .build(pgt_log_path) - .expect("Failed to start the logger for the daemon."); - - let filter = PgtLoggingFilter::from(log_level); - - let log_kind = log_kind.unwrap_or("hierarchical".into()); - - match log_kind.as_str() { - "bunyan" => { - registry() - .with(JsonStorageLayer) - .with( 
-
-/// Set up the [tracing]-based logging system for the server.
-/// The events received by the subscriber are filtered at the `info` level,
-/// then printed using the [HierarchicalLayer] layer, and the resulting text
-/// is written to log files rotated on an hourly basis (in
-/// `pgt-logs/server.log.yyyy-MM-dd-HH` files inside the system temporary
-/// directory)
-fn setup_tracing_subscriber(
-    log_path: Option<PathBuf>,
-    log_file_name_prefix: Option<String>,
-    log_level: Option<String>,
-    log_kind: Option<String>,
-) {
-    let pgt_log_path = log_path.unwrap_or(pgt_fs::ensure_cache_dir().join("pgt-logs"));
-
-    let appender_builder = tracing_appender::rolling::RollingFileAppender::builder();
-
-    let file_appender = appender_builder
-        .filename_prefix(log_file_name_prefix.unwrap_or(String::from("server.log")))
-        .max_log_files(7)
-        .rotation(Rotation::HOURLY)
-        .build(pgt_log_path)
-        .expect("Failed to start the logger for the daemon.");
-
-    let filter = PgtLoggingFilter::from(log_level);
-
-    let log_kind = log_kind.unwrap_or("hierarchical".into());
-
-    match log_kind.as_str() {
-        "bunyan" => {
-            registry()
-                .with(JsonStorageLayer)
-                .with(
-                    BunyanFormattingLayer::new("pgt_logs".into(), file_appender)
-                        .with_filter(filter),
-                )
-                .init();
-        }
-
-        _ => registry()
-            .with(
-                HierarchicalLayer::default()
-                    .with_indent_lines(true)
-                    .with_indent_amount(2)
-                    .with_bracketed_fields(true)
-                    .with_targets(true)
-                    .with_ansi(false)
-                    .with_timer(UtcDateTime {
-                        higher_precision: false,
-                    })
-                    .with_writer(file_appender)
-                    .with_filter(filter),
-            )
-            .init(),
-    }
-}
-
-pub fn default_pgt_log_path() -> PathBuf {
-    match env::var_os("PGT_LOG_PATH") {
-        Some(directory) => PathBuf::from(directory),
-        None => pgt_fs::ensure_cache_dir().join("pgt-logs"),
-    }
-}
-
-/// Tracing filter with two rules:
-/// For all crates starting with `pgt`, use `PGT_LOG_LEVEL`, the CLI option, or "info" as the default.
-/// For all other crates, use "info".
-struct PgtLoggingFilter(LevelFilter);
-
-impl From<Option<String>> for PgtLoggingFilter {
-    fn from(value: Option<String>) -> Self {
-        Self(
-            value
-                .map(|lv_filter| match lv_filter.as_str() {
-                    "trace" => LevelFilter::TRACE,
-                    "debug" => LevelFilter::DEBUG,
-                    "info" => LevelFilter::INFO,
-                    "warn" => LevelFilter::WARN,
-                    "error" => LevelFilter::ERROR,
-                    "off" => LevelFilter::OFF,
-
-                    _ => LevelFilter::INFO,
-                })
-                .unwrap_or(LevelFilter::INFO),
-        )
-    }
-}
-
-impl PgtLoggingFilter {
-    fn is_enabled(&self, meta: &Metadata<'_>) -> bool {
-        let filter = if meta.target().starts_with("pgt") {
-            self.0
-        } else {
-            LevelFilter::INFO
-        };
-
-        meta.level() <= &filter
-    }
-}
-
-impl<S> Filter<S> for PgtLoggingFilter {
-    fn enabled(&self, meta: &Metadata<'_>, _cx: &Context<'_, S>) -> bool {
-        self.is_enabled(meta)
-    }
-
-    fn callsite_enabled(&self, meta: &'static Metadata<'static>) -> Interest {
-        if self.is_enabled(meta) {
-            Interest::always()
-        } else {
-            Interest::never()
-        }
-    }
-
-    fn max_level_hint(&self) -> Option<LevelFilter> {
-        Some(self.0)
-    }
-}
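The two-rule logic of `PgtLoggingFilter` is easy to state in isolation: targets under the tool's own crates get the configured verbosity, everything else is capped at info. A std-only sketch with a hypothetical `Level` enum mirroring tracing's ordering (more verbose levels compare greater):

```rust
// Declaration order gives Error < Warn < Info < Debug < Trace,
// matching tracing's "more verbose compares greater" convention.
#[derive(Clone, Copy, PartialEq, PartialOrd, Debug)]
enum Level {
    Error,
    Warn,
    Info,
    Debug,
    Trace,
}

fn is_enabled(target: &str, event_level: Level, configured: Level) -> bool {
    // Rule 1: pgt* crates use the configured level.
    // Rule 2: everything else is capped at Info.
    let max = if target.starts_with("pgt") { configured } else { Level::Info };
    event_level <= max
}

fn main() {
    assert!(is_enabled("pgt_cli::commands", Level::Debug, Level::Debug));
    assert!(!is_enabled("hyper::client", Level::Debug, Level::Debug)); // capped at info
    assert!(is_enabled("hyper::client", Level::Info, Level::Debug));
}
```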
-" - }); - Ok(()) -} diff --git a/crates/pgt_cli/src/commands/mod.rs b/crates/pgt_cli/src/commands/mod.rs deleted file mode 100644 index ebd16e3d..00000000 --- a/crates/pgt_cli/src/commands/mod.rs +++ /dev/null @@ -1,414 +0,0 @@ -use crate::changed::{get_changed_files, get_staged_files}; -use crate::cli_options::{CliOptions, CliReporter, ColorsArg, cli_options}; -use crate::execute::Stdin; -use crate::logging::LoggingKind; -use crate::{ - CliDiagnostic, CliSession, Execution, LoggingLevel, VERSION, execute_mode, setup_cli_subscriber, -}; -use bpaf::Bpaf; -use pgt_configuration::{PartialConfiguration, partial_configuration}; -use pgt_console::Console; -use pgt_fs::FileSystem; -use pgt_workspace::configuration::{LoadedConfiguration, load_configuration}; -use pgt_workspace::settings::PartialConfigurationExt; -use pgt_workspace::workspace::UpdateSettingsParams; -use pgt_workspace::{DynRef, Workspace, WorkspaceError}; -use std::ffi::OsString; -use std::path::PathBuf; -pub(crate) mod check; -pub(crate) mod clean; -pub(crate) mod daemon; -pub(crate) mod init; -pub(crate) mod version; - -#[derive(Debug, Clone, Bpaf)] -#[bpaf(options, version(VERSION))] -#[allow(clippy::large_enum_variant)] -/// Postgres Tools official CLI. Use it to check the health of your project or run it to check single files. -pub enum PgtCommand { - /// Shows the version information and quit. - #[bpaf(command)] - Version(#[bpaf(external(cli_options), hide_usage)] CliOptions), - - /// Runs everything to the requested files. - #[bpaf(command)] - Check { - #[bpaf(external(partial_configuration), hide_usage, optional)] - configuration: Option, - - #[bpaf(external, hide_usage)] - cli_options: CliOptions, - - /// Use this option when you want to format code piped from `stdin`, and print the output to `stdout`. - /// - /// The file doesn't need to exist on disk, what matters is the extension of the file. Based on the extension, we know how to check the code. - /// - /// Example: `echo 'let a;' | pgt_cli check --stdin-file-path=test.sql` - #[bpaf(long("stdin-file-path"), argument("PATH"), hide_usage)] - stdin_file_path: Option, - - /// When set to true, only the files that have been staged (the ones prepared to be committed) - /// will be linted. This option should be used when working locally. - #[bpaf(long("staged"), switch)] - staged: bool, - - /// When set to true, only the files that have been changed compared to your `defaultBranch` - /// configuration will be linted. This option should be used in CI environments. - #[bpaf(long("changed"), switch)] - changed: bool, - - /// Use this to specify the base branch to compare against when you're using the --changed - /// flag and the `defaultBranch` is not set in your `postgrestools.jsonc` - #[bpaf(long("since"), argument("REF"))] - since: Option, - - /// Single file, single path or list of paths - #[bpaf(positional("PATH"), many)] - paths: Vec, - }, - - /// Starts the daemon server process. - #[bpaf(command)] - Start { - /// Allows to change the prefix applied to the file name of the logs. - #[bpaf( - env("PGT_LOG_PREFIX_NAME"), - long("log-prefix-name"), - argument("STRING"), - hide_usage, - fallback(String::from("server.log")), - display_fallback - )] - log_prefix_name: String, - - /// Allows to change the folder where logs are stored. 
- #[bpaf( - env("PGT_LOG_PATH"), - long("log-path"), - argument("PATH"), - hide_usage, - fallback(pgt_fs::ensure_cache_dir().join("pgt-logs")), - )] - log_path: PathBuf, - /// Allows to set a custom file path to the configuration file, - /// or a custom directory path to find `postgrestools.jsonc` - #[bpaf(env("PGT_LOG_PREFIX_NAME"), long("config-path"), argument("PATH"))] - config_path: Option, - }, - - /// Stops the daemon server process. - #[bpaf(command)] - Stop, - - /// Bootstraps a new project. Creates a configuration file with some defaults. - #[bpaf(command)] - Init, - - /// Acts as a server for the Language Server Protocol over stdin/stdout. - #[bpaf(command("lsp-proxy"))] - LspProxy { - /// Allows to change the prefix applied to the file name of the logs. - #[bpaf( - env("PGT_LOG_PREFIX_NAME"), - long("log-prefix-name"), - argument("STRING"), - hide_usage, - fallback(String::from("server.log")), - display_fallback - )] - log_prefix_name: String, - /// Allows to change the folder where logs are stored. - #[bpaf( - env("PGT_LOG_PATH"), - long("log-path"), - argument("PATH"), - hide_usage, - fallback(pgt_fs::ensure_cache_dir().join("pgt-logs")), - )] - log_path: PathBuf, - /// Allows to set a custom file path to the configuration file, - /// or a custom directory path to find `postgrestools.jsonc` - #[bpaf(env("PGT_CONFIG_PATH"), long("config-path"), argument("PATH"))] - config_path: Option, - /// Bogus argument to make the command work with vscode-languageclient - #[bpaf(long("stdio"), hide, hide_usage, switch)] - stdio: bool, - }, - - #[bpaf(command)] - /// Cleans the logs emitted by the daemon. - Clean, - - #[bpaf(command("__run_server"), hide)] - RunServer { - /// Allows to change the prefix applied to the file name of the logs. - #[bpaf( - env("PGT_LOG_PREFIX_NAME"), - long("log-prefix-name"), - argument("STRING"), - hide_usage, - fallback(String::from("server.log")), - display_fallback - )] - log_prefix_name: String, - - /// Allows to change the folder where logs are stored. - #[bpaf( - env("PGT_LOG_PATH"), - long("log-path"), - argument("PATH"), - hide_usage, - fallback(pgt_fs::ensure_cache_dir().join("pgt-logs")), - )] - log_path: PathBuf, - - /// Allows to change the log level. Default is debug. This will only affect "pgt*" crates. All others are logged with info level. - #[bpaf( - env("PGT_LOG_LEVEL"), - long("log-level"), - argument("trace|debug|info|warn|error|none"), - fallback(String::from("debug")) - )] - log_level: String, - - /// Allows to change the logging format kind. Default is hierarchical. - #[bpaf( - env("PGT_LOG_KIND"), - long("log-kind"), - argument("hierarchical|bunyan"), - fallback(String::from("hierarchical")) - )] - log_kind: String, - - #[bpaf(long("stop-on-disconnect"), hide_usage)] - stop_on_disconnect: bool, - /// Allows to set a custom file path to the configuration file, - /// or a custom directory path to find `postgrestools.jsonc` - #[bpaf(env("PGT_CONFIG_PATH"), long("config-path"), argument("PATH"))] - config_path: Option, - }, - #[bpaf(command("__print_socket"), hide)] - PrintSocket, -} - -impl PgtCommand { - const fn cli_options(&self) -> Option<&CliOptions> { - match self { - PgtCommand::Version(cli_options) | PgtCommand::Check { cli_options, .. } => { - Some(cli_options) - } - PgtCommand::LspProxy { .. } - | PgtCommand::Start { .. } - | PgtCommand::Stop - | PgtCommand::Init - | PgtCommand::RunServer { .. 
} - | PgtCommand::Clean - | PgtCommand::PrintSocket => None, - } - } - - pub const fn get_color(&self) -> Option<&ColorsArg> { - match self.cli_options() { - Some(cli_options) => { - // To properly display GitHub annotations we need to disable colors - if matches!(cli_options.reporter, CliReporter::GitHub) { - return Some(&ColorsArg::Off); - } - // We want force colors in CI, to give e better UX experience - // Unless users explicitly set the colors flag - // if matches!(self, Postgres ToolsCommand::Ci { .. }) && cli_options.colors.is_none() { - // return Some(&ColorsArg::Force); - // } - // Normal behaviors - cli_options.colors.as_ref() - } - None => None, - } - } - - pub const fn should_use_server(&self) -> bool { - match self.cli_options() { - Some(cli_options) => cli_options.use_server, - None => false, - } - } - - pub const fn has_metrics(&self) -> bool { - false - } - - pub fn is_verbose(&self) -> bool { - self.cli_options() - .is_some_and(|cli_options| cli_options.verbose) - } - - pub fn log_level(&self) -> LoggingLevel { - self.cli_options() - .map_or(LoggingLevel::default(), |cli_options| cli_options.log_level) - } - - pub fn log_kind(&self) -> LoggingKind { - self.cli_options() - .map_or(LoggingKind::default(), |cli_options| cli_options.log_kind) - } -} - -/// Generic interface for executing commands. -/// -/// Consumers must implement the following methods: -/// -/// - [CommandRunner::merge_configuration] -/// - [CommandRunner::get_files_to_process] -/// - [CommandRunner::get_stdin_file_path] -/// - [CommandRunner::should_write] -/// - [CommandRunner::get_execution] -/// -/// Optional methods: -/// - [CommandRunner::check_incompatible_arguments] -pub(crate) trait CommandRunner: Sized { - const COMMAND_NAME: &'static str; - - /// The main command to use. - fn run(&mut self, session: CliSession, cli_options: &CliOptions) -> Result<(), CliDiagnostic> { - setup_cli_subscriber(cli_options.log_level, cli_options.log_kind); - let fs = &session.app.fs; - let console = &mut *session.app.console; - let workspace = &*session.app.workspace; - self.check_incompatible_arguments()?; - let (execution, paths) = self.configure_workspace(fs, console, workspace, cli_options)?; - execute_mode(execution, session, cli_options, paths) - } - - /// This function prepares the workspace with the following: - /// - Loading the configuration file. - /// - Configure the VCS integration - /// - Computes the paths to traverse/handle. This changes based on the VCS arguments that were passed. - /// - Register a project folder using the working directory. 
- /// - Updates the settings that belong to the project registered - fn configure_workspace( - &mut self, - fs: &DynRef<'_, dyn FileSystem>, - console: &mut dyn Console, - workspace: &dyn Workspace, - cli_options: &CliOptions, - ) -> Result<(Execution, Vec), CliDiagnostic> { - let loaded_configuration = - load_configuration(fs, cli_options.as_configuration_path_hint())?; - - let configuration_path = loaded_configuration.directory_path.clone(); - let configuration = self.merge_configuration(loaded_configuration, fs, console)?; - let vcs_base_path = configuration_path.or(fs.working_directory()); - let (vcs_base_path, gitignore_matches) = - configuration.retrieve_gitignore_matches(fs, vcs_base_path.as_deref())?; - let paths = self.get_files_to_process(fs, &configuration)?; - - workspace.update_settings(UpdateSettingsParams { - workspace_directory: fs.working_directory(), - configuration, - vcs_base_path, - gitignore_matches, - })?; - - let execution = self.get_execution(cli_options, console, workspace)?; - Ok((execution, paths)) - } - - /// Computes [Stdin] if the CLI has the necessary information. - /// - /// ## Errors - /// - If the user didn't provide anything via `stdin` but the option `--stdin-file-path` is passed. - fn get_stdin(&self, console: &mut dyn Console) -> Result, CliDiagnostic> { - let stdin = if let Some(stdin_file_path) = self.get_stdin_file_path() { - let input_code = console.read(); - if let Some(input_code) = input_code { - let path = PathBuf::from(stdin_file_path); - Some((path, input_code).into()) - } else { - // we provided the argument without a piped stdin, we bail - return Err(CliDiagnostic::missing_argument("stdin", Self::COMMAND_NAME)); - } - } else { - None - }; - - Ok(stdin) - } - - // Below, the methods that consumers must implement. - - /// Implements this method if you need to merge CLI arguments to the loaded configuration. - /// - /// The CLI arguments take precedence over the option configured in the configuration file. - fn merge_configuration( - &mut self, - loaded_configuration: LoadedConfiguration, - fs: &DynRef<'_, dyn FileSystem>, - console: &mut dyn Console, - ) -> Result; - - /// It returns the paths that need to be handled/traversed. - fn get_files_to_process( - &self, - fs: &DynRef<'_, dyn FileSystem>, - configuration: &PartialConfiguration, - ) -> Result, CliDiagnostic>; - - /// It returns the file path to use in `stdin` mode. - fn get_stdin_file_path(&self) -> Option<&str>; - - /// Returns the [Execution] mode. - fn get_execution( - &self, - cli_options: &CliOptions, - console: &mut dyn Console, - workspace: &dyn Workspace, - ) -> Result; - - // Below, methods that consumers can implement - - /// Optional method that can be implemented to check if some CLI arguments aren't compatible. - /// - /// The method is called before loading the configuration from disk. 
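The `CommandRunner` trait above follows the classic template-method shape: `run` drives a fixed pipeline (load configuration, compute paths, execute) while implementors supply only the steps that differ per command. A compact, std-only sketch of that structure, with hypothetical names standing in for the real types:

```rust
trait CommandRunner {
    const NAME: &'static str;

    // Shared pipeline: merge config, compute paths, then execute.
    fn run(&mut self) -> Result<(), String> {
        let config = self.merge_configuration("loaded-from-disk".to_string())?;
        let paths = self.files_to_process(&config)?;
        println!("{} will process {} path(s)", Self::NAME, paths.len());
        Ok(())
    }

    // The varying steps each command must implement.
    fn merge_configuration(&mut self, loaded: String) -> Result<String, String>;
    fn files_to_process(&self, config: &str) -> Result<Vec<String>, String>;
}

struct Check {
    paths: Vec<String>,
}

impl CommandRunner for Check {
    const NAME: &'static str = "check";

    fn merge_configuration(&mut self, loaded: String) -> Result<String, String> {
        Ok(loaded) // a real command would overlay CLI arguments here
    }

    fn files_to_process(&self, _config: &str) -> Result<Vec<String>, String> {
        Ok(self.paths.clone())
    }
}

fn main() {
    let mut cmd = Check { paths: vec!["queries.sql".into()] };
    cmd.run().unwrap();
}
```

The optional `check_incompatible_arguments` hook, shown next in the diff, slots into the same pipeline with a no-op default.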
- fn check_incompatible_arguments(&self) -> Result<(), CliDiagnostic> { - Ok(()) - } -} - -fn get_files_to_process_with_cli_options( - since: Option<&str>, - changed: bool, - staged: bool, - fs: &DynRef<'_, dyn FileSystem>, - configuration: &PartialConfiguration, -) -> Result>, CliDiagnostic> { - if since.is_some() { - if !changed { - return Err(CliDiagnostic::incompatible_arguments("since", "changed")); - } - if staged { - return Err(CliDiagnostic::incompatible_arguments("since", "staged")); - } - } - - if changed { - if staged { - return Err(CliDiagnostic::incompatible_arguments("changed", "staged")); - } - Ok(Some(get_changed_files(fs, configuration, since)?)) - } else if staged { - Ok(Some(get_staged_files(fs)?)) - } else { - Ok(None) - } -} - -#[cfg(test)] -mod tests { - use super::*; - - /// Tests that all CLI options adhere to the invariants expected by `bpaf`. - #[test] - fn check_options() { - pgt_command().check_invariants(false); - } -} diff --git a/crates/pgt_cli/src/commands/version.rs b/crates/pgt_cli/src/commands/version.rs deleted file mode 100644 index 685684a3..00000000 --- a/crates/pgt_cli/src/commands/version.rs +++ /dev/null @@ -1,42 +0,0 @@ -use pgt_console::fmt::Formatter; -use pgt_console::{ConsoleExt, fmt, markup}; -use pgt_workspace::workspace::ServerInfo; - -use crate::{CliDiagnostic, CliSession, VERSION}; - -/// Handle of the `version` command. Prints a more in detail version. -pub(crate) fn full_version(session: CliSession) -> Result<(), CliDiagnostic> { - session.app.console.log(markup! { - "CLI: "{VERSION} - }); - - match session.app.workspace.server_info() { - None => { - session.app.console.log(markup! { - "Server: ""not connected" - }); - } - Some(info) => { - session.app.console.log(markup! { -"Server: - Name: "{info.name}" - Version: "{DisplayServerVersion(info)} - }); - } - }; - - Ok(()) -} - -pub(super) struct DisplayServerVersion<'a>(pub &'a ServerInfo); - -impl fmt::Display for DisplayServerVersion<'_> { - fn fmt(&self, fmt: &mut Formatter) -> std::io::Result<()> { - match &self.0.version { - None => markup!("-").fmt(fmt), - Some(version) => { - write!(fmt, "{version}") - } - } - } -} diff --git a/crates/pgt_cli/src/diagnostics.rs b/crates/pgt_cli/src/diagnostics.rs deleted file mode 100644 index d24d02e9..00000000 --- a/crates/pgt_cli/src/diagnostics.rs +++ /dev/null @@ -1,462 +0,0 @@ -use pgt_console::markup; -use pgt_diagnostics::adapters::{BpafError, IoError, SerdeJsonError}; -use pgt_diagnostics::{ - Advices, Category, Diagnostic, Error, LogCategory, MessageAndDescription, Severity, Visit, -}; -use pgt_workspace::WorkspaceError; -use std::process::{ExitCode, Termination}; -use std::{env::current_exe, fmt::Debug}; - -fn command_name() -> String { - current_exe() - .ok() - .and_then(|path| Some(path.file_name()?.to_str()?.to_string())) - .unwrap_or_else(|| String::from("postgrestools")) -} - -/// A diagnostic that is emitted when running Postgres Tools via CLI. 
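The flag-compatibility rules in `get_files_to_process_with_cli_options` above reduce to a few checks: `--since` requires `--changed`, and `--staged` excludes both. A std-only sketch of that validation (error strings are illustrative, not the CLI's real diagnostics):

```rust
fn validate(since: Option<&str>, changed: bool, staged: bool) -> Result<(), String> {
    if since.is_some() {
        if !changed {
            return Err("--since requires --changed".into());
        }
        if staged {
            return Err("--since is incompatible with --staged".into());
        }
    }
    if changed && staged {
        return Err("--changed is incompatible with --staged".into());
    }
    Ok(())
}

fn main() {
    assert!(validate(Some("main"), true, false).is_ok());
    assert!(validate(Some("main"), false, false).is_err());
    assert!(validate(None, true, true).is_err());
}
```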
-/// -/// When displaying the diagnostic, -#[derive(Debug, Diagnostic)] -pub enum CliDiagnostic { - /// Returned when it is called with a subcommand it doesn't know - UnknownCommand(UnknownCommand), - /// Return by the help command when it is called with a subcommand it doesn't know - UnknownCommandHelp(UnknownCommandHelp), - /// Returned when the value of a command line argument could not be parsed - ParseError(ParseDiagnostic), - /// Returned when the CLI doesn't recognize a command line argument - UnexpectedArgument(UnexpectedArgument), - /// Returned when a required argument is not present in the command line - MissingArgument(MissingArgument), - /// Returned when a subcommand is called without any arguments - EmptyArguments(EmptyArguments), - /// Returned when a subcommand is called with an unsupported combination of arguments - IncompatibleArguments(IncompatibleArguments), - /// Returned by a traversal command when error diagnostics were emitted - CheckError(CheckError), - /// Emitted when a file is fixed, but it still contains diagnostics. - /// - /// This happens when these diagnostics come from rules that don't have a code action. - FileCheck(FileCheck), - /// When an argument is higher than the expected maximum - OverflowNumberArgument(OverflowNumberArgument), - /// Wrapper for an underlying pglt-service error - WorkspaceError(WorkspaceError), - /// Wrapper for an underlying `std::io` error - IoError(IoDiagnostic), - /// The daemon is not running - ServerNotRunning(ServerNotRunning), - /// The end configuration (`postgrestools.jsonc` + other options) is incompatible with the command - IncompatibleEndConfiguration(IncompatibleEndConfiguration), - /// No files processed during the file system traversal - NoFilesWereProcessed(NoFilesWereProcessed), - /// Emitted during the reporting phase - Report(ReportDiagnostic), - /// Emitted when there's an error emitted when using stdin mode - Stdin(StdinDiagnostic), -} - -#[derive(Debug, Diagnostic)] -#[diagnostic( - category = "flags/invalid", - severity = Error, - message( - description = "Unknown command {command_name}", - message("Unknown command "{self.command_name}) - ), -)] -pub struct UnknownCommand { - command_name: String, -} - -#[derive(Debug, Diagnostic)] -#[diagnostic( -category = "flags/invalid", - severity = Error, - message( - description = "Cannot print help for unknown command {command_name}", - message("Cannot print help for unknown command "{self.command_name}) - ), -)] -pub struct UnknownCommandHelp { - command_name: String, -} - -#[derive(Debug, Diagnostic)] -#[diagnostic( - category = "flags/invalid", - severity = Error, -)] -pub struct ParseDiagnostic { - #[message] - #[description] - message: MessageAndDescription, - #[source] - source: Option, -} - -#[derive(Debug, Diagnostic)] -#[diagnostic( - category = "flags/invalid", - severity = Error, - message( - description = "Unrecognized option {argument}", - message("Unrecognized option "{self.argument}".") - ), -)] -pub struct UnexpectedArgument { - argument: String, - #[advice] - help: CliAdvice, -} - -#[derive(Debug, Diagnostic)] -#[diagnostic( - category = "flags/invalid", - severity = Error, - message( - description = "Unrecognized option {argument}", - message("Missing argument "{self.argument}) - ), -)] -pub struct MissingArgument { - argument: String, - #[advice] - advice: CliAdvice, -} - -#[derive(Debug, Diagnostic)] -#[diagnostic( - category = "flags/invalid", - severity = Error, - message = "Empty arguments" -)] -pub struct EmptyArguments; - -#[derive(Debug, 
Diagnostic)] -#[diagnostic( - category = "flags/invalid", - severity = Error, - message( - description = "Incompatible arguments {first_argument} and {second_argument}", - message("Incompatible arguments "{self.first_argument}" and "{self.second_argument}) - ) -)] -pub struct IncompatibleArguments { - first_argument: String, - second_argument: String, -} - -#[derive(Debug, Diagnostic)] -#[diagnostic( - severity = Error, -)] -pub struct CheckError { - #[category] - category: &'static Category, - - #[message] - message: MessageAndDescription, -} - -#[derive(Debug, Diagnostic)] -#[diagnostic( - severity = Error, -)] -pub struct FileCheck { - #[message] - #[description] - pub message: MessageAndDescription, - - #[location(resource)] - pub file_path: String, - - #[category] - pub category: &'static Category, -} - -#[derive(Debug, Diagnostic)] -#[diagnostic( - category = "flags/invalid", - severity = Error, - message( - description = "The value of the argument {argument} is too high, maximum accepted {maximum}", - message("The value of the argument "{self.argument}" is too high, maximum accepted "{{self.maximum}}) - ) -)] -pub struct OverflowNumberArgument { - argument: String, - maximum: u16, -} - -#[derive(Debug, Diagnostic)] -#[diagnostic( - category = "internalError/io", - severity = Error, - message = "Errors occurred while executing I/O operations." -)] -pub struct IoDiagnostic { - #[source] - source: Option, -} - -#[derive(Debug, Diagnostic)] -#[diagnostic( - category = "internalError/io", - severity = Error, - message = "No running instance of the daemon server was found." -)] -pub struct ServerNotRunning; - -#[derive(Debug, Diagnostic)] -#[diagnostic( - category = "internalError/io", - severity = Error, - message( - description = "The combination of configuration and arguments is invalid: \n{reason}", - message("The combination of configuration and arguments is invalid: \n"{{&self.reason}}) - ) -)] -pub struct IncompatibleEndConfiguration { - reason: String, -} - -#[derive(Debug, Diagnostic)] -#[diagnostic( - category = "internalError/io", - severity = Error, - message = "No files were processed in the specified paths." -)] -pub struct NoFilesWereProcessed; - -#[derive(Debug, Diagnostic)] -pub enum ReportDiagnostic { - /// Emitted when trying to serialise the report - Serialization(SerdeJsonError), -} - -/// Advices for the [CliDiagnostic] -#[derive(Debug, Default)] -struct CliAdvice { - /// Used to print the help command - sub_command: String, -} - -impl CliAdvice { - fn new_with_help(sub_command: impl Into) -> Self { - Self { - sub_command: sub_command.into(), - } - } -} - -impl Advices for CliAdvice { - fn record(&self, visitor: &mut dyn Visit) -> std::io::Result<()> { - let command_name = command_name(); - let help_sub_command = format!("{} {} --help", command_name, &self.sub_command); - visitor.record_log( - LogCategory::Info, - &markup! 
{ "Type the following command for more information" }, - )?; - visitor.record_command(&help_sub_command)?; - - Ok(()) - } -} - -impl CliDiagnostic { - /// Returned when a subcommand is called with an unsupported combination of arguments - pub fn incompatible_arguments( - first_argument: impl Into, - second_argument: impl Into, - ) -> Self { - Self::IncompatibleArguments(IncompatibleArguments { - first_argument: first_argument.into(), - second_argument: second_argument.into(), - }) - } - - /// To throw when there's been an error while parsing an argument - pub fn parse_error_bpaf(source: bpaf::ParseFailure) -> Self { - Self::ParseError(ParseDiagnostic { - source: Some(Error::from(BpafError::from(source))), - message: MessageAndDescription::from("Failed to parse CLI arguments.".to_string()), - }) - } - - /// Returned when it is called with a subcommand it doesn't know - pub fn unknown_command(command: impl Into) -> Self { - Self::UnknownCommand(UnknownCommand { - command_name: command.into(), - }) - } - - /// Returned when a subcommand is called without any arguments - pub fn empty_arguments() -> Self { - Self::EmptyArguments(EmptyArguments) - } - - /// Returned when a required argument is not present in the command line - pub fn missing_argument(argument: impl Into, subcommand: impl Into) -> Self { - Self::MissingArgument(MissingArgument { - argument: argument.into(), - advice: CliAdvice::new_with_help(subcommand), - }) - } - - /// When no files were processed while traversing the file system - pub fn no_files_processed() -> Self { - Self::NoFilesWereProcessed(NoFilesWereProcessed) - } - - /// Returned when the CLI doesn't recognize a command line argument - pub fn unexpected_argument(argument: impl Into, subcommand: impl Into) -> Self { - Self::UnexpectedArgument(UnexpectedArgument { - argument: argument.into(), - help: CliAdvice::new_with_help(subcommand), - }) - } - - /// When there's been error inside the workspace - pub fn workspace_error(error: WorkspaceError) -> Self { - Self::WorkspaceError(error) - } - - /// An I/O error - pub fn io_error(error: std::io::Error) -> Self { - Self::IoError(IoDiagnostic { - source: Some(Error::from(IoError::from(error))), - }) - } - - /// Emitted when errors were emitted while running `check` command - pub fn check_error(category: &'static Category) -> Self { - Self::CheckError(CheckError { - category, - message: MessageAndDescription::from( - markup! { - "Some ""errors"" were emitted while ""running checks""." - } - .to_owned(), - ), - }) - } - - /// Emitted when warnings were emitted while running `check` command - pub fn check_warnings(category: &'static Category) -> Self { - Self::CheckError(CheckError { - category, - message: MessageAndDescription::from( - markup! { - "Some ""warnings"" were emitted while ""running checks""." - } - .to_owned(), - ), - }) - } - - /// Emitted when errors were emitted while apply code fixes - pub fn apply_error(category: &'static Category) -> Self { - Self::CheckError(CheckError { - category, - message: MessageAndDescription::from( - markup! { - "Some ""errors"" were emitted while ""applying fixes""." - } - .to_owned(), - ), - }) - } - /// Emitted when warnings were emitted while apply code fixes - pub fn apply_warnings(category: &'static Category) -> Self { - Self::CheckError(CheckError { - category, - message: MessageAndDescription::from( - markup! { - "Some ""warnings"" were emitted while ""running checks""." 
- } - .to_owned(), - ), - }) - } - - pub fn stdin() -> Self { - Self::Stdin(StdinDiagnostic::default()) - } - - /// Emitted when the server is not running - pub fn server_not_running() -> Self { - Self::ServerNotRunning(ServerNotRunning) - } - - /// Emitted when the end configuration (`postgrestools.jsonc` file + CLI arguments + LSP configuration) - /// results in a combination of options that doesn't allow to run the command correctly. - /// - /// A reason needs to be provided - pub fn incompatible_end_configuration(reason: impl Into) -> Self { - Self::IncompatibleEndConfiguration(IncompatibleEndConfiguration { - reason: reason.into(), - }) - } - - /// Emitted when an argument value is greater than the allowed value - pub fn overflown_argument(argument: impl Into, maximum: u16) -> Self { - Self::OverflowNumberArgument(OverflowNumberArgument { - argument: argument.into(), - maximum, - }) - } - - /// Return by the help command when it is called with a subcommand it doesn't know - pub fn new_unknown_help(command: impl Into) -> Self { - Self::UnknownCommandHelp(UnknownCommandHelp { - command_name: command.into(), - }) - } -} - -impl From for CliDiagnostic { - fn from(error: WorkspaceError) -> Self { - CliDiagnostic::workspace_error(error) - } -} - -impl From for CliDiagnostic { - fn from(error: std::io::Error) -> Self { - CliDiagnostic::io_error(error) - } -} - -impl Termination for CliDiagnostic { - fn report(self) -> ExitCode { - let severity = self.severity(); - if severity >= Severity::Error { - ExitCode::FAILURE - } else { - ExitCode::SUCCESS - } - } -} - -#[derive(Debug, Default, Diagnostic)] -#[diagnostic( - severity = Error, - category = "stdin", - message = "The contents aren't fixed. Use the `--fix` flag to fix them." -)] -pub struct StdinDiagnostic {} - -#[cfg(test)] -mod test { - use crate::CliDiagnostic; - - #[test] - fn termination_diagnostic_size() { - assert_eq!( - std::mem::size_of::(), - 80, - "you successfully decreased the size of the diagnostic!" 
-        )
-    }
-}
diff --git a/crates/pgt_cli/src/execute/diagnostics.rs b/crates/pgt_cli/src/execute/diagnostics.rs
deleted file mode 100644
index a355abec..00000000
--- a/crates/pgt_cli/src/execute/diagnostics.rs
+++ /dev/null
@@ -1,73 +0,0 @@
-use pgt_diagnostics::adapters::{IoError, StdError};
-use pgt_diagnostics::{Category, Diagnostic, DiagnosticExt, DiagnosticTags, Error};
-use std::io;
-
-#[derive(Debug, Diagnostic)]
-#[diagnostic(category = "internalError/panic", tags(INTERNAL))]
-pub(crate) struct PanicDiagnostic {
-    #[description]
-    #[message]
-    pub(crate) message: String,
-}
-
-/// Extension trait for turning [Display]-able error types into [TraversalError]
-pub(crate) trait ResultExt {
-    type Result;
-    fn with_file_path_and_code(
-        self,
-        file_path: String,
-        code: &'static Category,
-    ) -> Result<Self::Result, Error>;
-
-    #[allow(unused)]
-    fn with_file_path_and_code_and_tags(
-        self,
-        file_path: String,
-        code: &'static Category,
-        tags: DiagnosticTags,
-    ) -> Result<Self::Result, Error>;
-}
-
-impl<T, E> ResultExt for Result<T, E>
-where
-    E: std::error::Error + Send + Sync + 'static,
-{
-    type Result = T;
-
-    fn with_file_path_and_code_and_tags(
-        self,
-        file_path: String,
-        code: &'static Category,
-        diagnostic_tags: DiagnosticTags,
-    ) -> Result<Self::Result, Error> {
-        self.map_err(move |err| {
-            StdError::from(err)
-                .with_category(code)
-                .with_file_path(file_path)
-                .with_tags(diagnostic_tags)
-        })
-    }
-
-    fn with_file_path_and_code(
-        self,
-        file_path: String,
-        code: &'static Category,
-    ) -> Result<Self::Result, Error> {
-        self.map_err(move |err| {
-            StdError::from(err)
-                .with_category(code)
-                .with_file_path(file_path)
-        })
-    }
-}
-
-/// Extension trait for turning [io::Error] into [Error]
-pub(crate) trait ResultIoExt: ResultExt {
-    fn with_file_path(self, file_path: String) -> Result<Self::Result, Error>;
-}
-
-impl<T> ResultIoExt for io::Result<T> {
-    fn with_file_path(self, file_path: String) -> Result<Self::Result, Error> {
-        self.map_err(|error| IoError::from(error).with_file_path(file_path))
-    }
-}
diff --git a/crates/pgt_cli/src/execute/mod.rs b/crates/pgt_cli/src/execute/mod.rs
deleted file mode 100644
index 6cb01ca7..00000000
--- a/crates/pgt_cli/src/execute/mod.rs
+++ /dev/null
@@ -1,308 +0,0 @@
-mod diagnostics;
-mod process_file;
-mod std_in;
-pub(crate) mod traverse;
-
-use crate::cli_options::{CliOptions, CliReporter};
-use crate::execute::traverse::{TraverseResult, traverse};
-use crate::reporter::github::{GithubReporter, GithubReporterVisitor};
-use crate::reporter::gitlab::{GitLabReporter, GitLabReporterVisitor};
-use crate::reporter::junit::{JunitReporter, JunitReporterVisitor};
-use crate::reporter::terminal::{ConsoleReporter, ConsoleReporterVisitor};
-use crate::{CliDiagnostic, CliSession, DiagnosticsPayload, Reporter};
-use pgt_diagnostics::{Category, category};
-use std::borrow::Borrow;
-use std::ffi::OsString;
-use std::fmt::{Display, Formatter};
-use std::path::PathBuf;
-use tracing::info;
-
-/// Useful information during the traversal of files and virtual content
-#[derive(Debug, Clone)]
-pub struct Execution {
-    /// How the information should be collected and reported
-    report_mode: ReportMode,
-
-    /// The modality of execution of the traversal
-    traversal_mode: TraversalMode,
-
-    /// The maximum number of diagnostics that can be printed in console
-    max_diagnostics: u32,
-}
-
-impl Execution {
-    pub fn report_mode(&self) -> &ReportMode {
-        &self.report_mode
-    }
-}
-
-/// A type that holds the information to execute the CLI via `stdin`
-#[derive(Debug, Clone)]
-pub struct Stdin(
-    #[allow(unused)]
-    /// The virtual path to the file
-    PathBuf,
-    /// The content of the file
-
String, -); - -impl Stdin { - fn as_content(&self) -> &str { - self.1.as_str() - } -} - -impl From<(PathBuf, String)> for Stdin { - fn from((path, content): (PathBuf, String)) -> Self { - Self(path, content) - } -} - -#[derive(Debug, Clone)] -pub struct VcsTargeted { - pub staged: bool, - pub changed: bool, -} - -impl From<(bool, bool)> for VcsTargeted { - fn from((staged, changed): (bool, bool)) -> Self { - Self { staged, changed } - } -} - -#[derive(Debug, Clone)] -pub enum TraversalMode { - /// A dummy mode to be used when the CLI is not running any command - Dummy, - /// This mode is enabled when running the command `check` - Check { - // The type of fixes that should be applied when analyzing a file. - // - // It's [None] if the `check` command is called without `--apply` or `--apply-suggested` - // arguments. - // fix_file_mode: Option, - /// An optional tuple. - /// 1. The virtual path to the file - /// 2. The content of the file - stdin: Option, - /// A flag to know vcs integrated options such as `--staged` or `--changed` are enabled - vcs_targeted: VcsTargeted, - }, -} - -impl Display for TraversalMode { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - match self { - TraversalMode::Dummy => write!(f, "dummy"), - TraversalMode::Check { .. } => write!(f, "check"), - } - } -} - -/// Tells to the execution of the traversal how the information should be reported -#[derive(Copy, Clone, Debug)] -pub enum ReportMode { - /// Reports information straight to the console, it's the default mode - Terminal, - /// Reports information for GitHub - GitHub, - /// JUnit output - /// Ref: https://github.com/testmoapp/junitxml?tab=readme-ov-file#basic-junit-xml-structure - Junit, - /// Reports information in the [GitLab Code Quality](https://docs.gitlab.com/ee/ci/testing/code_quality.html#implement-a-custom-tool) format. - GitLab, -} - -impl Default for ReportMode { - fn default() -> Self { - Self::Terminal {} - } -} - -impl From for ReportMode { - fn from(value: CliReporter) -> Self { - match value { - CliReporter::Default => Self::Terminal, - CliReporter::GitHub => Self::GitHub, - CliReporter::Junit => Self::Junit, - CliReporter::GitLab => Self::GitLab {}, - } - } -} - -impl Execution { - pub(crate) fn new(mode: TraversalMode) -> Self { - Self { - report_mode: ReportMode::default(), - traversal_mode: mode, - max_diagnostics: 20, - } - } - - /// It sets the reporting mode by reading the [CliOptions] - pub(crate) fn set_report(mut self, cli_options: &CliOptions) -> Self { - self.report_mode = cli_options.reporter.clone().into(); - self - } - - pub(crate) fn traversal_mode(&self) -> &TraversalMode { - &self.traversal_mode - } - - pub(crate) fn get_max_diagnostics(&self) -> u32 { - self.max_diagnostics - } - - pub(crate) fn as_diagnostic_category(&self) -> &'static Category { - match self.traversal_mode { - TraversalMode::Dummy => category!("dummy"), - TraversalMode::Check { .. } => category!("check"), - } - } - - /// Whether the traversal mode requires write access to files - pub(crate) const fn requires_write_access(&self) -> bool { - match self.traversal_mode { - TraversalMode::Dummy => false, - TraversalMode::Check { .. } => false, - } - } - - pub(crate) fn as_stdin_file(&self) -> Option<&Stdin> { - match &self.traversal_mode { - TraversalMode::Dummy => None, - TraversalMode::Check { stdin, .. } => stdin.as_ref(), - } - } - - pub(crate) fn is_vcs_targeted(&self) -> bool { - match &self.traversal_mode { - TraversalMode::Dummy => false, - TraversalMode::Check { vcs_targeted, .. 
} => { - vcs_targeted.staged || vcs_targeted.changed - } - } - } - - pub(crate) const fn is_check_apply(&self) -> bool { - false - } - - #[allow(unused)] - /// Returns [true] if the user used the `--write`/`--fix` option - pub(crate) fn is_write(&self) -> bool { - match self.traversal_mode { - TraversalMode::Dummy => false, - TraversalMode::Check { .. } => false, - } - } -} - -/// Based on the [mode](TraversalMode), the function might launch a traversal of the file system -/// or handles the stdin file. -pub fn execute_mode( - mut execution: Execution, - mut session: CliSession, - cli_options: &CliOptions, - paths: Vec, -) -> Result<(), CliDiagnostic> { - // If a custom reporter was provided, let's lift the limit so users can see all of them - execution.max_diagnostics = if cli_options.reporter.is_default() { - cli_options.max_diagnostics.into() - } else { - info!( - "Removing the limit of --max-diagnostics, because of a reporter different from the default one: {}", - cli_options.reporter - ); - u32::MAX - }; - - // don't do any traversal if there's some content coming from stdin - if let Some(stdin) = execution.as_stdin_file() { - std_in::run(session, stdin.as_content()) - } else { - let TraverseResult { - summary, - evaluated_paths, - diagnostics, - } = traverse(&execution, &mut session, cli_options, paths)?; - let console = session.app.console; - let errors = summary.errors; - let skipped = summary.skipped; - let processed = summary.changed + summary.unchanged; - let should_exit_on_warnings = summary.warnings > 0 && cli_options.error_on_warnings; - - match execution.report_mode { - ReportMode::Terminal => { - let reporter = ConsoleReporter { - summary, - diagnostics_payload: DiagnosticsPayload { - verbose: cli_options.verbose, - diagnostic_level: cli_options.diagnostic_level, - diagnostics, - }, - execution: execution.clone(), - evaluated_paths, - }; - reporter.write(&mut ConsoleReporterVisitor(console))?; - } - ReportMode::GitHub => { - let reporter = GithubReporter { - diagnostics_payload: DiagnosticsPayload { - verbose: cli_options.verbose, - diagnostic_level: cli_options.diagnostic_level, - diagnostics, - }, - execution: execution.clone(), - }; - reporter.write(&mut GithubReporterVisitor(console))?; - } - ReportMode::GitLab => { - let reporter = GitLabReporter { - diagnostics: DiagnosticsPayload { - verbose: cli_options.verbose, - diagnostic_level: cli_options.diagnostic_level, - diagnostics, - }, - execution: execution.clone(), - }; - reporter.write(&mut GitLabReporterVisitor::new( - console, - session.app.fs.borrow().working_directory(), - ))?; - } - ReportMode::Junit => { - let reporter = JunitReporter { - summary, - diagnostics_payload: DiagnosticsPayload { - verbose: cli_options.verbose, - diagnostic_level: cli_options.diagnostic_level, - diagnostics, - }, - execution: execution.clone(), - }; - reporter.write(&mut JunitReporterVisitor::new(console))?; - } - } - - // Processing emitted error diagnostics, exit with a non-zero code - if processed.saturating_sub(skipped) == 0 && !cli_options.no_errors_on_unmatched { - Err(CliDiagnostic::no_files_processed()) - } else if errors > 0 || should_exit_on_warnings { - let category = execution.as_diagnostic_category(); - if should_exit_on_warnings { - if execution.is_check_apply() { - Err(CliDiagnostic::apply_warnings(category)) - } else { - Err(CliDiagnostic::check_warnings(category)) - } - } else if execution.is_check_apply() { - Err(CliDiagnostic::apply_error(category)) - } else { - Err(CliDiagnostic::check_error(category)) - } - } else 
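The tail of `execute_mode` above maps traversal results onto an exit policy: fail when nothing was processed, when errors were emitted, or when warnings are promoted by `--error-on-warnings`. A std-only sketch of that decision, with plain integers standing in for `CliDiagnostic` variants:

```rust
// Return codes are illustrative; the real CLI returns typed diagnostics.
fn exit_code(processed: usize, skipped: usize, errors: u32, warnings: u32, error_on_warnings: bool) -> i32 {
    if processed.saturating_sub(skipped) == 0 {
        return 2; // nothing matched the given paths
    }
    if errors > 0 || (warnings > 0 && error_on_warnings) {
        return 1; // checks failed, or warnings are treated as errors
    }
    0
}

fn main() {
    assert_eq!(exit_code(0, 0, 0, 0, false), 2);
    assert_eq!(exit_code(3, 0, 1, 0, false), 1);
    assert_eq!(exit_code(3, 0, 0, 2, true), 1);
    assert_eq!(exit_code(3, 0, 0, 2, false), 0);
}
```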
{ - Ok(()) - } - } -} diff --git a/crates/pgt_cli/src/execute/process_file.rs b/crates/pgt_cli/src/execute/process_file.rs deleted file mode 100644 index 421f9bb3..00000000 --- a/crates/pgt_cli/src/execute/process_file.rs +++ /dev/null @@ -1,117 +0,0 @@ -mod check; -pub(crate) mod workspace_file; - -use crate::execute::TraversalMode; -use crate::execute::traverse::TraversalOptions; -use check::check_file; -use pgt_diagnostics::Error; -use pgt_fs::PgTPath; -use std::marker::PhantomData; -use std::ops::Deref; - -#[derive(Debug)] -pub(crate) enum FileStatus { - /// File changed and it was a success - Changed, - /// File unchanged, and it was a success - Unchanged, - - /// While handling the file, something happened - #[allow(unused)] - Message(Message), - - /// A match was found while searching a file - #[allow(unused)] - SearchResult(usize, Message), - - /// File ignored, it should not be count as "handled" - #[allow(unused)] - Ignored, - - /// Files that belong to other tools and shouldn't be touched - #[allow(unused)] - Protected(String), -} - -/// Wrapper type for messages that can be printed during the traversal process -#[derive(Debug)] -pub(crate) enum Message { - #[allow(unused)] - SkippedFixes { - /// Suggested fixes skipped during the lint traversal - skipped_suggested_fixes: u32, - }, - - #[allow(unused)] - Failure, - Error(Error), - Diagnostics { - name: String, - content: String, - diagnostics: Vec, - skipped_diagnostics: u32, - }, -} - -impl From for Message -where - Error: From, - D: std::fmt::Debug, -{ - fn from(err: D) -> Self { - Self::Error(Error::from(err)) - } -} - -/// The return type for [process_file], with the following semantics: -/// - `Ok(Success)` means the operation was successful (the file is added to -/// the `processed` counter) -/// - `Ok(Message(_))` means the operation was successful but a message still -/// needs to be printed (eg. 
the diff when not in CI or write mode) -/// - `Ok(Ignored)` means the file was ignored (the file is not added to the -/// `processed` or `skipped` counters) -/// - `Err(_)` means the operation failed and the file should be added to the -/// `skipped` counter -pub(crate) type FileResult = Result; - -/// Data structure that allows to pass [TraversalOptions] to multiple consumers, bypassing the -/// compiler constraints set by the lifetimes of the [TraversalOptions] -pub(crate) struct SharedTraversalOptions<'ctx, 'app> { - inner: &'app TraversalOptions<'ctx, 'app>, - _p: PhantomData<&'app ()>, -} - -impl<'ctx, 'app> SharedTraversalOptions<'ctx, 'app> { - fn new(t: &'app TraversalOptions<'ctx, 'app>) -> Self { - Self { - _p: PhantomData, - inner: t, - } - } -} - -impl<'ctx, 'app> Deref for SharedTraversalOptions<'ctx, 'app> { - type Target = TraversalOptions<'ctx, 'app>; - - fn deref(&self) -> &Self::Target { - self.inner - } -} - -/// This function performs the actual processing: it reads the file from disk -/// and parse it; analyze and / or format it; then it either fails if error -/// diagnostics were emitted, or compare the formatted code with the original -/// content of the file and emit a diff or write the new content to the disk if -/// write mode is enabled -pub(crate) fn process_file(ctx: &TraversalOptions, pgt_path: &PgTPath) -> FileResult { - tracing::trace_span!("process_file", path = ?pgt_path).in_scope(move || { - let shared_context = &SharedTraversalOptions::new(ctx); - - match ctx.execution.traversal_mode { - TraversalMode::Dummy => { - unreachable!("The dummy mode should not be called for this file") - } - TraversalMode::Check { .. } => check_file(shared_context, pgt_path), - } - }) -} diff --git a/crates/pgt_cli/src/execute/process_file/check.rs b/crates/pgt_cli/src/execute/process_file/check.rs deleted file mode 100644 index c48d0c4c..00000000 --- a/crates/pgt_cli/src/execute/process_file/check.rs +++ /dev/null @@ -1,68 +0,0 @@ -use pgt_analyse::RuleCategoriesBuilder; -use pgt_diagnostics::{Error, category}; - -use crate::execute::diagnostics::ResultExt; -use crate::execute::process_file::workspace_file::WorkspaceFile; -use crate::execute::process_file::{FileResult, FileStatus, Message, SharedTraversalOptions}; -use std::path::Path; -use std::sync::atomic::Ordering; - -/// Lints a single file and returns a [FileResult] -pub(crate) fn check_file<'ctx>( - ctx: &'ctx SharedTraversalOptions<'ctx, '_>, - path: &Path, -) -> FileResult { - let mut workspace_file = WorkspaceFile::new(ctx, path)?; - check_with_guard(ctx, &mut workspace_file) -} - -pub(crate) fn check_with_guard<'ctx>( - ctx: &'ctx SharedTraversalOptions<'ctx, '_>, - workspace_file: &mut WorkspaceFile, -) -> FileResult { - tracing::info_span!("Processes check", path =? 
workspace_file.path.display()).in_scope( - move || { - let input = workspace_file.input()?; - let changed = false; - - let (only, skip) = (Vec::new(), Vec::new()); - - let max_diagnostics = ctx.remaining_diagnostics.load(Ordering::Relaxed); - - let pull_diagnostics_result = workspace_file - .guard() - .pull_diagnostics( - RuleCategoriesBuilder::default().all().build(), - max_diagnostics, - only, - skip, - ) - .with_file_path_and_code( - workspace_file.path.display().to_string(), - category!("check"), - )?; - - let no_diagnostics = pull_diagnostics_result.diagnostics.is_empty() - && pull_diagnostics_result.skipped_diagnostics == 0; - - if !no_diagnostics { - ctx.push_message(Message::Diagnostics { - name: workspace_file.path.display().to_string(), - content: input, - diagnostics: pull_diagnostics_result - .diagnostics - .into_iter() - .map(Error::from) - .collect(), - skipped_diagnostics: pull_diagnostics_result.skipped_diagnostics as u32, - }); - } - - if changed { - Ok(FileStatus::Changed) - } else { - Ok(FileStatus::Unchanged) - } - }, - ) -} diff --git a/crates/pgt_cli/src/execute/process_file/workspace_file.rs b/crates/pgt_cli/src/execute/process_file/workspace_file.rs deleted file mode 100644 index 790176b9..00000000 --- a/crates/pgt_cli/src/execute/process_file/workspace_file.rs +++ /dev/null @@ -1,75 +0,0 @@ -use crate::execute::diagnostics::{ResultExt, ResultIoExt}; -use crate::execute::process_file::SharedTraversalOptions; -use pgt_diagnostics::{Error, category}; -use pgt_fs::{File, OpenOptions, PgTPath}; -use pgt_workspace::workspace::{ChangeParams, FileGuard, OpenFileParams}; -use pgt_workspace::{Workspace, WorkspaceError}; -use std::path::{Path, PathBuf}; - -/// Small wrapper that holds information and operations around the current processed file -pub(crate) struct WorkspaceFile<'ctx, 'app> { - guard: FileGuard<'app, dyn Workspace + 'ctx>, - file: Box, - pub(crate) path: PathBuf, -} - -impl<'ctx, 'app> WorkspaceFile<'ctx, 'app> { - /// It attempts to read the file from disk, creating a [FileGuard] and - /// saving these information internally - pub(crate) fn new( - ctx: &SharedTraversalOptions<'ctx, 'app>, - path: &Path, - ) -> Result { - let pgt_path = PgTPath::new(path); - let open_options = OpenOptions::default() - .read(true) - .write(ctx.execution.requires_write_access()); - let mut file = ctx - .fs - .open_with_options(path, open_options) - .with_file_path(path.display().to_string())?; - - let mut input = String::new(); - file.read_to_string(&mut input) - .with_file_path(path.display().to_string())?; - - let guard = FileGuard::open( - ctx.workspace, - OpenFileParams { - path: pgt_path, - version: 0, - content: input.clone(), - }, - ) - .with_file_path_and_code(path.display().to_string(), category!("internalError/fs"))?; - - Ok(Self { - file, - guard, - path: PathBuf::from(path), - }) - } - - pub(crate) fn guard(&self) -> &FileGuard<'app, dyn Workspace + 'ctx> { - &self.guard - } - - pub(crate) fn input(&self) -> Result { - self.guard().get_file_content() - } - - /// It updates the workspace file with `new_content` - #[allow(dead_code)] - pub(crate) fn update_file(&mut self, new_content: impl Into) -> Result<(), Error> { - let new_content = new_content.into(); - - self.file - .set_content(new_content.as_bytes()) - .with_file_path(self.path.display().to_string())?; - self.guard.change_file( - self.file.file_version(), - vec![ChangeParams::overwrite(new_content)], - )?; - Ok(()) - } -} diff --git a/crates/pgt_cli/src/execute/std_in.rs b/crates/pgt_cli/src/execute/std_in.rs 
deleted file mode 100644 index f9346f6a..00000000 --- a/crates/pgt_cli/src/execute/std_in.rs +++ /dev/null @@ -1,11 +0,0 @@ -//! In here, there are the operations that run via standard input -//! -use crate::{CliDiagnostic, CliSession}; -use pgt_console::{ConsoleExt, markup}; - -pub(crate) fn run(session: CliSession, content: &str) -> Result<(), CliDiagnostic> { - let console = &mut *session.app.console; - - console.append(markup! {{content}}); - Ok(()) -} diff --git a/crates/pgt_cli/src/execute/traverse.rs b/crates/pgt_cli/src/execute/traverse.rs deleted file mode 100644 index 5673810c..00000000 --- a/crates/pgt_cli/src/execute/traverse.rs +++ /dev/null @@ -1,548 +0,0 @@ -use super::process_file::{FileStatus, Message, process_file}; -use super::{Execution, TraversalMode}; -use crate::cli_options::CliOptions; -use crate::execute::diagnostics::PanicDiagnostic; -use crate::reporter::TraversalSummary; -use crate::{CliDiagnostic, CliSession}; -use crossbeam::channel::{Receiver, Sender, unbounded}; -use pgt_diagnostics::DiagnosticTags; -use pgt_diagnostics::{DiagnosticExt, Error, Resource, Severity}; -use pgt_fs::{FileSystem, PathInterner, PgTPath}; -use pgt_fs::{TraversalContext, TraversalScope}; -use pgt_workspace::dome::Dome; -use pgt_workspace::workspace::IsPathIgnoredParams; -use pgt_workspace::{Workspace, WorkspaceError}; -use rustc_hash::FxHashSet; -use std::collections::BTreeSet; -use std::sync::RwLock; -use std::sync::atomic::AtomicU32; -use std::{ - env::current_dir, - ffi::OsString, - panic::catch_unwind, - path::PathBuf, - sync::{ - Once, - atomic::{AtomicUsize, Ordering}, - }, - thread, - time::{Duration, Instant}, -}; - -pub(crate) struct TraverseResult { - pub(crate) summary: TraversalSummary, - pub(crate) evaluated_paths: BTreeSet, - pub(crate) diagnostics: Vec, -} - -pub(crate) fn traverse( - execution: &Execution, - session: &mut CliSession, - cli_options: &CliOptions, - mut inputs: Vec, -) -> Result { - init_thread_pool(); - - if inputs.is_empty() { - match &execution.traversal_mode { - TraversalMode::Dummy => { - // If `--staged` or `--changed` is specified, it's acceptable for them to be empty, so ignore it. 
- if !execution.is_vcs_targeted() { - match current_dir() { - Ok(current_dir) => inputs.push(current_dir.into_os_string()), - Err(err) => return Err(CliDiagnostic::io_error(err)), - } - } - } - _ => { - if execution.as_stdin_file().is_none() && !cli_options.no_errors_on_unmatched { - return Err(CliDiagnostic::missing_argument( - "", - format!("{}", execution.traversal_mode), - )); - } - } - } - } - - let (interner, recv_files) = PathInterner::new(); - let (sender, receiver) = unbounded(); - - let changed = AtomicUsize::new(0); - let unchanged = AtomicUsize::new(0); - let matches = AtomicUsize::new(0); - let skipped = AtomicUsize::new(0); - - let fs = &*session.app.fs; - let workspace = &*session.app.workspace; - - let max_diagnostics = execution.get_max_diagnostics(); - let remaining_diagnostics = AtomicU32::new(max_diagnostics); - - let printer = DiagnosticsPrinter::new(execution) - .with_verbose(cli_options.verbose) - .with_diagnostic_level(cli_options.diagnostic_level) - .with_max_diagnostics(max_diagnostics); - - let (duration, evaluated_paths, diagnostics) = thread::scope(|s| { - let handler = thread::Builder::new() - .name(String::from("pgt::console")) - .spawn_scoped(s, || printer.run(receiver, recv_files)) - .expect("failed to spawn console thread"); - - // The traversal context is scoped to ensure all the channels it - // contains are properly closed once the traversal finishes - let (elapsed, evaluated_paths) = traverse_inputs( - fs, - inputs, - &TraversalOptions { - fs, - workspace, - execution, - interner, - matches: &matches, - changed: &changed, - unchanged: &unchanged, - skipped: &skipped, - messages: sender, - remaining_diagnostics: &remaining_diagnostics, - evaluated_paths: RwLock::default(), - }, - ); - // wait for the main thread to finish - let diagnostics = handler.join().unwrap(); - - (elapsed, evaluated_paths, diagnostics) - }); - - let errors = printer.errors(); - let warnings = printer.warnings(); - let changed = changed.load(Ordering::Relaxed); - let unchanged = unchanged.load(Ordering::Relaxed); - let matches = matches.load(Ordering::Relaxed); - let skipped = skipped.load(Ordering::Relaxed); - let suggested_fixes_skipped = printer.skipped_fixes(); - let diagnostics_not_printed = printer.not_printed_diagnostics(); - - Ok(TraverseResult { - summary: TraversalSummary { - changed, - unchanged, - duration, - errors, - matches, - warnings, - skipped, - suggested_fixes_skipped, - diagnostics_not_printed, - }, - evaluated_paths, - diagnostics, - }) -} - -/// This function will setup the global Rayon thread pool the first time it's called -/// -/// This is currently only used to assign friendly debug names to the threads of the pool -fn init_thread_pool() { - static INIT_ONCE: Once = Once::new(); - INIT_ONCE.call_once(|| { - rayon::ThreadPoolBuilder::new() - .thread_name(|index| format!("pgt::worker_{index}")) - .build_global() - .expect("failed to initialize the global thread pool"); - }); -} - -/// Initiate the filesystem traversal tasks with the provided input paths and -/// run it to completion, returning the duration of the process and the evaluated paths -fn traverse_inputs( - fs: &dyn FileSystem, - inputs: Vec, - ctx: &TraversalOptions, -) -> (Duration, BTreeSet) { - let start = Instant::now(); - fs.traversal(Box::new(move |scope: &dyn TraversalScope| { - for input in inputs { - scope.evaluate(ctx, PathBuf::from(input)); - } - })); - - let paths = ctx.evaluated_paths(); - let dome = Dome::new(paths); - let mut iter = dome.iter(); - fs.traversal(Box::new(|scope: 
&dyn TraversalScope| { - while let Some(path) = iter.next_config() { - scope.handle(ctx, path.to_path_buf()); - } - - for path in iter { - scope.handle(ctx, path.to_path_buf()); - } - })); - - (start.elapsed(), ctx.evaluated_paths()) -} - -// struct DiagnosticsReporter<'ctx> {} - -struct DiagnosticsPrinter<'ctx> { - /// Execution of the traversal - #[allow(dead_code)] - execution: &'ctx Execution, - /// The maximum number of diagnostics the console thread is allowed to print - max_diagnostics: u32, - /// The approximate number of diagnostics the console will print before - /// folding the rest into the "skipped diagnostics" counter - remaining_diagnostics: AtomicU32, - /// Mutable reference to a boolean flag tracking whether the console thread - /// printed any error-level message - errors: AtomicU32, - /// Mutable reference to a boolean flag tracking whether the console thread - /// printed any warnings-level message - warnings: AtomicU32, - /// Whether the console thread should print diagnostics in verbose mode - verbose: bool, - /// The diagnostic level the console thread should print - diagnostic_level: Severity, - - not_printed_diagnostics: AtomicU32, - printed_diagnostics: AtomicU32, - total_skipped_suggested_fixes: AtomicU32, -} - -impl<'ctx> DiagnosticsPrinter<'ctx> { - fn new(execution: &'ctx Execution) -> Self { - Self { - errors: AtomicU32::new(0), - warnings: AtomicU32::new(0), - remaining_diagnostics: AtomicU32::new(0), - execution, - diagnostic_level: Severity::Hint, - verbose: false, - max_diagnostics: 20, - not_printed_diagnostics: AtomicU32::new(0), - printed_diagnostics: AtomicU32::new(0), - total_skipped_suggested_fixes: AtomicU32::new(0), - } - } - - fn with_verbose(mut self, verbose: bool) -> Self { - self.verbose = verbose; - self - } - - fn with_max_diagnostics(mut self, value: u32) -> Self { - self.max_diagnostics = value; - self - } - - fn with_diagnostic_level(mut self, value: Severity) -> Self { - self.diagnostic_level = value; - self - } - - fn errors(&self) -> u32 { - self.errors.load(Ordering::Relaxed) - } - - fn warnings(&self) -> u32 { - self.warnings.load(Ordering::Relaxed) - } - - fn not_printed_diagnostics(&self) -> u32 { - self.not_printed_diagnostics.load(Ordering::Relaxed) - } - - fn skipped_fixes(&self) -> u32 { - self.total_skipped_suggested_fixes.load(Ordering::Relaxed) - } - - /// Checks if the diagnostic we received from the thread should be considered or not. Logic: - /// - it should not be considered if its severity level is lower than the one provided via CLI; - /// - it should not be considered if it's a verbose diagnostic and the CLI **didn't** request a `--verbose` option. 
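A compact model of the skip rule documented above, with simplified stand-ins for the severity and tag types (the real ones come from pgt_diagnostics):

```rust
// A diagnostic is dropped when its severity is below the CLI threshold, or
// when it is tagged verbose and --verbose was not passed.
#[derive(PartialEq, PartialOrd)]
enum Severity { Hint, Information, Warning, Error }

struct Diagnostic { severity: Severity, verbose: bool }

fn should_skip(d: &Diagnostic, level: &Severity, cli_verbose: bool) -> bool {
    if d.severity < *level {
        return true;
    }
    d.verbose && !cli_verbose
}

fn main() {
    let d = Diagnostic { severity: Severity::Warning, verbose: true };
    assert!(should_skip(&d, &Severity::Hint, false)); // verbose-only, no --verbose
    assert!(!should_skip(&d, &Severity::Hint, true)); // printed with --verbose
    assert!(should_skip(&d, &Severity::Error, true)); // below the threshold
}
```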
- fn should_skip_diagnostic(&self, severity: Severity, diagnostic_tags: DiagnosticTags) -> bool { - if severity < self.diagnostic_level { - return true; - } - - if diagnostic_tags.is_verbose() && !self.verbose { - return true; - } - - false - } - - /// Count the diagnostic, and then returns a boolean that tells if it should be printed - fn should_print(&self) -> bool { - let printed_diagnostics = self.printed_diagnostics.load(Ordering::Relaxed); - let should_print = printed_diagnostics < self.max_diagnostics; - if should_print { - self.printed_diagnostics.fetch_add(1, Ordering::Relaxed); - self.remaining_diagnostics.store( - self.max_diagnostics.saturating_sub(printed_diagnostics), - Ordering::Relaxed, - ); - } else { - self.not_printed_diagnostics.fetch_add(1, Ordering::Relaxed); - } - - should_print - } - - fn run(&self, receiver: Receiver, interner: Receiver) -> Vec { - let mut paths: FxHashSet = FxHashSet::default(); - - let mut diagnostics_to_print = vec![]; - - while let Ok(msg) = receiver.recv() { - match msg { - Message::SkippedFixes { - skipped_suggested_fixes, - } => { - self.total_skipped_suggested_fixes - .fetch_add(skipped_suggested_fixes, Ordering::Relaxed); - } - - Message::Failure => { - self.errors.fetch_add(1, Ordering::Relaxed); - } - - Message::Error(mut err) => { - let location = err.location(); - if self.should_skip_diagnostic(err.severity(), err.tags()) { - continue; - } - if err.severity() == Severity::Warning { - // *warnings += 1; - self.warnings.fetch_add(1, Ordering::Relaxed); - // self.warnings.set(self.warnings.get() + 1) - } - if let Some(Resource::File(file_path)) = location.resource.as_ref() { - // Retrieves the file name from the file ID cache, if it's a miss - // flush entries from the interner channel until it's found - let file_name = match paths.get(*file_path) { - Some(path) => Some(path), - None => loop { - match interner.recv() { - Ok(path) => { - paths.insert(path.display().to_string()); - if path.display().to_string() == *file_path { - break paths.get(&path.display().to_string()); - } - } - // In case the channel disconnected without sending - // the path we need, print the error without a file - // name (normally this should never happen) - Err(_) => break None, - } - }, - }; - - if let Some(path) = file_name { - err = err.with_file_path(path.as_str()); - } - } - - let should_print = self.should_print(); - - if should_print { - diagnostics_to_print.push(err); - } - } - - Message::Diagnostics { - name, - content, - diagnostics, - skipped_diagnostics, - } => { - self.not_printed_diagnostics - .fetch_add(skipped_diagnostics, Ordering::Relaxed); - - // is CI mode we want to print all the diagnostics - for diag in diagnostics { - let severity = diag.severity(); - if self.should_skip_diagnostic(severity, diag.tags()) { - continue; - } - if severity == Severity::Error { - self.errors.fetch_add(1, Ordering::Relaxed); - } - if severity == Severity::Warning { - self.warnings.fetch_add(1, Ordering::Relaxed); - } - - let should_print = self.should_print(); - - if should_print { - let diag = diag.with_file_path(&name).with_file_source_code(&content); - diagnostics_to_print.push(diag) - } - } - } - } - } - - diagnostics_to_print - } -} - -/// Context object shared between directory traversal tasks -pub(crate) struct TraversalOptions<'ctx, 'app> { - /// Shared instance of [FileSystem] - pub(crate) fs: &'app dyn FileSystem, - /// Instance of [Workspace] used by this instance of the CLI - pub(crate) workspace: &'ctx dyn Workspace, - /// Determines how the files 
should be processed - pub(crate) execution: &'ctx Execution, - /// File paths interner cache used by the filesystem traversal - interner: PathInterner, - /// Shared atomic counter storing the number of changed files - changed: &'ctx AtomicUsize, - /// Shared atomic counter storing the number of unchanged files - unchanged: &'ctx AtomicUsize, - /// Shared atomic counter storing the number of unchanged files - matches: &'ctx AtomicUsize, - /// Shared atomic counter storing the number of skipped files - skipped: &'ctx AtomicUsize, - /// Channel sending messages to the display thread - pub(crate) messages: Sender, - /// The approximate number of diagnostics the console will print before - /// folding the rest into the "skipped diagnostics" counter - pub(crate) remaining_diagnostics: &'ctx AtomicU32, - - /// List of paths that should be processed - pub(crate) evaluated_paths: RwLock>, -} - -impl TraversalOptions<'_, '_> { - pub(crate) fn increment_changed(&self, path: &PgTPath) { - self.changed.fetch_add(1, Ordering::Relaxed); - self.evaluated_paths - .write() - .unwrap() - .replace(path.to_written()); - } - pub(crate) fn increment_unchanged(&self) { - self.unchanged.fetch_add(1, Ordering::Relaxed); - } - - pub(crate) fn increment_matches(&self, num_matches: usize) { - self.matches.fetch_add(num_matches, Ordering::Relaxed); - } - - /// Send a message to the display thread - pub(crate) fn push_message(&self, msg: impl Into) { - self.messages.send(msg.into()).ok(); - } -} - -impl TraversalContext for TraversalOptions<'_, '_> { - fn interner(&self) -> &PathInterner { - &self.interner - } - - fn evaluated_paths(&self) -> BTreeSet { - self.evaluated_paths.read().unwrap().clone() - } - - fn push_diagnostic(&self, error: Error) { - self.push_message(error); - } - - fn can_handle(&self, pgt_path: &PgTPath) -> bool { - let path = pgt_path.as_path(); - - let is_valid_file = self.fs.path_is_file(path) - && path - .extension() - .is_some_and(|ext| ext == "sql" || ext == "pg"); - - if self.fs.path_is_dir(path) || self.fs.path_is_symlink(path) || is_valid_file { - // handle: - // - directories - // - symlinks - // - unresolved symlinks - // e.g `symlink/subdir` where symlink points to a directory that includes `subdir`. - // Note that `symlink/subdir` is not an existing file. - let can_handle = !self - .workspace - .is_path_ignored(IsPathIgnoredParams { - pgt_path: pgt_path.clone(), - }) - .unwrap_or_else(|err| { - self.push_diagnostic(err.into()); - false - }); - return can_handle; - } - - // bail on fifo and socket files - if !is_valid_file { - return false; - } - - match self.execution.traversal_mode() { - TraversalMode::Dummy => true, - TraversalMode::Check { .. 
} => true, - } - } - - fn handle_path(&self, path: PgTPath) { - handle_file(self, &path) - } - - fn store_path(&self, path: PgTPath) { - self.evaluated_paths - .write() - .unwrap() - .insert(PgTPath::new(path.as_path())); - } -} - -/// This function wraps the [process_file] function implementing the traversal -/// in a [catch_unwind] block and emit diagnostics in case of error (either the -/// traversal function returns Err or panics) -fn handle_file(ctx: &TraversalOptions, path: &PgTPath) { - match catch_unwind(move || process_file(ctx, path)) { - Ok(Ok(FileStatus::Changed)) => { - ctx.increment_changed(path); - } - Ok(Ok(FileStatus::Unchanged)) => { - ctx.increment_unchanged(); - } - Ok(Ok(FileStatus::SearchResult(num_matches, msg))) => { - ctx.increment_unchanged(); - ctx.increment_matches(num_matches); - ctx.push_message(msg); - } - Ok(Ok(FileStatus::Message(msg))) => { - ctx.increment_unchanged(); - ctx.push_message(msg); - } - Ok(Ok(FileStatus::Protected(file_path))) => { - ctx.increment_unchanged(); - ctx.push_diagnostic(WorkspaceError::protected_file(file_path).into()); - } - Ok(Ok(FileStatus::Ignored)) => {} - Ok(Err(err)) => { - ctx.increment_unchanged(); - ctx.skipped.fetch_add(1, Ordering::Relaxed); - ctx.push_message(err); - } - Err(err) => { - let message = match err.downcast::() { - Ok(msg) => format!("processing panicked: {msg}"), - Err(err) => match err.downcast::<&'static str>() { - Ok(msg) => format!("processing panicked: {msg}"), - Err(_) => String::from("processing panicked"), - }, - }; - - ctx.push_message( - PanicDiagnostic { message }.with_file_path(path.display().to_string()), - ); - } - } -} diff --git a/crates/pgt_cli/src/lib.rs b/crates/pgt_cli/src/lib.rs deleted file mode 100644 index f8a04244..00000000 --- a/crates/pgt_cli/src/lib.rs +++ /dev/null @@ -1,144 +0,0 @@ -//! # Module -//! -//! This is where the main CLI session starts. The module is responsible -//! to parse commands and arguments, redirect the execution of the commands and -//! execute the traversal of directory and files, based on the command that was passed. 
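The `handle_file` function above wraps per-file processing in `catch_unwind`, so a panic in one file becomes a diagnostic instead of aborting the whole traversal. A self-contained sketch of that isolation pattern with a toy `process_file`; the payload downcasts mirror the `String` / `&'static str` cases handled above:

```rust
use std::panic::catch_unwind;

fn process_file(path: &str) -> Result<(), String> {
    if path.ends_with("boom.sql") {
        panic!("simulated parser bug in {path}");
    }
    Ok(())
}

fn handle_file(path: &str) {
    match catch_unwind(|| process_file(path)) {
        Ok(Ok(())) => println!("{path}: ok"),
        Ok(Err(err)) => println!("{path}: skipped ({err})"),
        Err(payload) => {
            // A panic payload is usually a String or a &'static str.
            let message = match payload.downcast::<String>() {
                Ok(msg) => format!("processing panicked: {msg}"),
                Err(payload) => match payload.downcast::<&'static str>() {
                    Ok(msg) => format!("processing panicked: {msg}"),
                    Err(_) => String::from("processing panicked"),
                },
            };
            println!("{path}: {message}");
        }
    }
}

fn main() {
    // Silence the default panic hook so only our messages are printed.
    std::panic::set_hook(Box::new(|_| {}));
    handle_file("fine.sql");
    handle_file("boom.sql");
}
```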
- -use cli_options::CliOptions; -use commands::CommandRunner; -use commands::check::CheckCommandPayload; -use pgt_console::{ColorMode, Console}; -use pgt_fs::OsFileSystem; -use pgt_workspace::{App, DynRef, Workspace, WorkspaceRef}; -use std::env; - -mod changed; -mod cli_options; -mod commands; -mod diagnostics; -mod execute; -mod logging; -mod metrics; -mod panic; -mod reporter; -mod service; - -use crate::cli_options::ColorsArg; -pub use crate::commands::{PgtCommand, pgt_command}; -pub use crate::logging::{LoggingLevel, setup_cli_subscriber}; -pub use diagnostics::CliDiagnostic; -pub use execute::{Execution, TraversalMode, VcsTargeted, execute_mode}; -pub use panic::setup_panic_handler; -pub use reporter::{DiagnosticsPayload, Reporter, ReporterVisitor, TraversalSummary}; -pub use service::{SocketTransport, open_transport}; - -pub(crate) const VERSION: &str = match option_env!("PGT_VERSION") { - Some(version) => version, - None => env!("CARGO_PKG_VERSION"), -}; - -/// Global context for an execution of the CLI -pub struct CliSession<'app> { - /// Instance of [App] used by this run of the CLI - pub app: App<'app>, -} - -impl<'app> CliSession<'app> { - pub fn new( - workspace: &'app dyn Workspace, - console: &'app mut dyn Console, - ) -> Result { - Ok(Self { - app: App::new( - DynRef::Owned(Box::::default()), - console, - WorkspaceRef::Borrowed(workspace), - ), - }) - } - - /// Main function to run the CLI - pub fn run(self, command: PgtCommand) -> Result<(), CliDiagnostic> { - let has_metrics = command.has_metrics(); - if has_metrics { - crate::metrics::init_metrics(); - } - - let result = match command { - PgtCommand::Version(_) => commands::version::full_version(self), - PgtCommand::Check { - cli_options, - configuration, - paths, - stdin_file_path, - staged, - changed, - since, - } => run_command( - self, - &cli_options, - CheckCommandPayload { - configuration, - paths, - stdin_file_path, - staged, - changed, - since, - }, - ), - PgtCommand::Clean => commands::clean::clean(self), - PgtCommand::Start { - config_path, - log_path, - log_prefix_name, - } => commands::daemon::start(self, config_path, Some(log_path), Some(log_prefix_name)), - PgtCommand::Stop => commands::daemon::stop(self), - PgtCommand::Init => commands::init::init(self), - PgtCommand::LspProxy { - config_path, - log_path, - log_prefix_name, - .. 
- } => commands::daemon::lsp_proxy(config_path, Some(log_path), Some(log_prefix_name)), - PgtCommand::RunServer { - stop_on_disconnect, - config_path, - log_path, - log_prefix_name, - log_level, - log_kind, - } => commands::daemon::run_server( - stop_on_disconnect, - config_path, - Some(log_path), - Some(log_prefix_name), - Some(log_level), - Some(log_kind), - ), - PgtCommand::PrintSocket => commands::daemon::print_socket(), - }; - - if has_metrics { - metrics::print_metrics(); - } - - result - } -} - -pub fn to_color_mode(color: Option<&ColorsArg>) -> ColorMode { - match color { - Some(ColorsArg::Off) => ColorMode::Disabled, - Some(ColorsArg::Force) => ColorMode::Enabled, - None => ColorMode::Auto, - } -} - -pub(crate) fn run_command( - session: CliSession, - cli_options: &CliOptions, - mut command: impl CommandRunner, -) -> Result<(), CliDiagnostic> { - let command = &mut command; - command.run(session, cli_options) -} diff --git a/crates/pgt_cli/src/logging.rs b/crates/pgt_cli/src/logging.rs deleted file mode 100644 index 35911927..00000000 --- a/crates/pgt_cli/src/logging.rs +++ /dev/null @@ -1,174 +0,0 @@ -use std::fmt::{Display, Formatter}; -use std::str::FromStr; -use tracing::Metadata; -use tracing::subscriber::Interest; -use tracing_subscriber::filter::LevelFilter; -use tracing_subscriber::layer::{Context, Filter, SubscriberExt}; -use tracing_subscriber::util::SubscriberInitExt; -use tracing_subscriber::{Layer, registry}; - -pub fn setup_cli_subscriber(level: LoggingLevel, kind: LoggingKind) { - if level == LoggingLevel::None { - return; - } - - let format = tracing_subscriber::fmt::layer() - .with_level(true) - .with_target(false) - .with_thread_names(true) - .with_file(true) - .with_ansi(true); - match kind { - LoggingKind::Pretty => { - let format = format.pretty(); - registry() - .with(format.with_filter(LoggingFilter { level })) - .init() - } - LoggingKind::Compact => { - let format = format.compact(); - registry() - .with(format.with_filter(LoggingFilter { level })) - .init() - } - LoggingKind::Json => { - let format = format.json().flatten_event(true); - - registry() - .with(format.with_filter(LoggingFilter { level })) - .init() - } - }; -} - -#[derive(Copy, Debug, Default, Clone, Ord, PartialOrd, Eq, PartialEq)] -pub enum LoggingLevel { - /// No logs should be shown - #[default] - None, - Debug, - Info, - Warn, - Error, -} - -impl LoggingLevel { - fn to_filter_level(self) -> Option { - match self { - LoggingLevel::None => None, - LoggingLevel::Info => Some(LevelFilter::INFO), - LoggingLevel::Warn => Some(LevelFilter::WARN), - LoggingLevel::Error => Some(LevelFilter::ERROR), - LoggingLevel::Debug => Some(LevelFilter::DEBUG), - } - } -} - -impl FromStr for LoggingLevel { - type Err = String; - fn from_str(s: &str) -> Result { - match s { - "none" => Ok(Self::None), - "info" => Ok(Self::Info), - "warn" => Ok(Self::Warn), - "error" => Ok(Self::Error), - "debug" => Ok(Self::Debug), - _ => Err("Unexpected value".to_string()), - } - } -} - -impl Display for LoggingLevel { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - match self { - LoggingLevel::None => write!(f, "none"), - LoggingLevel::Debug => write!(f, "debug"), - LoggingLevel::Info => write!(f, "info"), - LoggingLevel::Warn => write!(f, "warn"), - LoggingLevel::Error => write!(f, "error"), - } - } -} - -/// Tracing filter enabling: -/// - All spans and events at level info or higher -/// - All spans and events at level debug in crates whose name starts with `pgt` -struct LoggingFilter { - level: 
LoggingLevel, -} - -/// Tracing filter used for spans emitted by `pgt*` crates -const SELF_FILTER: LevelFilter = if cfg!(debug_assertions) { - LevelFilter::TRACE -} else { - LevelFilter::DEBUG -}; - -impl LoggingFilter { - fn is_enabled(&self, meta: &Metadata<'_>) -> bool { - let filter = if meta.target().starts_with("pgt") { - if let Some(level) = self.level.to_filter_level() { - level - } else { - return false; - } - } else { - LevelFilter::INFO - }; - - meta.level() <= &filter - } -} - -impl Filter for LoggingFilter { - fn enabled(&self, meta: &Metadata<'_>, _cx: &Context<'_, S>) -> bool { - self.is_enabled(meta) - } - - fn callsite_enabled(&self, meta: &'static Metadata<'static>) -> Interest { - if self.is_enabled(meta) { - Interest::always() - } else { - Interest::never() - } - } - - fn max_level_hint(&self) -> Option { - Some(SELF_FILTER) - } -} - -/// The kind of logging -#[derive(Copy, Debug, Default, Clone, Eq, PartialEq)] -pub enum LoggingKind { - /// A pretty log on multiple lines with nice colours - #[default] - Pretty, - /// A more cluttered logging - Compact, - /// Logs are emitted in JSON format - Json, -} - -impl Display for LoggingKind { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - match self { - LoggingKind::Pretty => write!(f, "pretty"), - LoggingKind::Compact => write!(f, "compact"), - LoggingKind::Json => write!(f, "json"), - } - } -} - -impl FromStr for LoggingKind { - type Err = String; - - fn from_str(s: &str) -> Result { - match s { - "compact" => Ok(Self::Compact), - "pretty" => Ok(Self::Pretty), - "json" => Ok(Self::Json), - _ => Err("This log kind doesn't exist".to_string()), - } - } -} diff --git a/crates/pgt_cli/src/main.rs b/crates/pgt_cli/src/main.rs deleted file mode 100644 index 704b43bb..00000000 --- a/crates/pgt_cli/src/main.rs +++ /dev/null @@ -1,68 +0,0 @@ -//! This is the main binary - -use pgt_cli::{ - CliDiagnostic, CliSession, PgtCommand, open_transport, pgt_command, setup_panic_handler, - to_color_mode, -}; -use pgt_console::{ConsoleExt, EnvConsole, markup}; -use pgt_diagnostics::{Diagnostic, PrintDiagnostic, set_bottom_frame}; -use pgt_workspace::workspace; -use std::process::{ExitCode, Termination}; -use tokio::runtime::Runtime; - -#[cfg(target_os = "windows")] -#[global_allocator] -static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc; - -#[cfg(all( - any(target_os = "macos", target_os = "linux"), - not(target_env = "musl") -))] -#[global_allocator] -static GLOBAL: tikv_jemallocator::Jemalloc = tikv_jemallocator::Jemalloc; - -// Jemallocator does not work on aarch64 with musl, so we'll use the system allocator instead -#[cfg(all(target_env = "musl", target_os = "linux", target_arch = "aarch64"))] -#[global_allocator] -static GLOBAL: std::alloc::System = std::alloc::System; - -fn main() -> ExitCode { - setup_panic_handler(); - set_bottom_frame(main as usize); - - let mut console = EnvConsole::default(); - let command = pgt_command().fallback_to_usage().run(); - - console.set_color(to_color_mode(command.get_color())); - - let is_verbose = command.is_verbose(); - let result = run_workspace(&mut console, command); - match result { - Err(termination) => { - if termination.tags().is_verbose() && is_verbose { - console.error(markup! {{PrintDiagnostic::verbose(&termination)}}) - } else { - console.error(markup! 
{{PrintDiagnostic::simple(&termination)}}) - } - termination.report() - } - Ok(_) => ExitCode::SUCCESS, - } -} - -fn run_workspace(console: &mut EnvConsole, command: PgtCommand) -> Result<(), CliDiagnostic> { - // If the `--use-server` CLI flag is set, try to open a connection to an - // existing server socket - let workspace = if command.should_use_server() { - let runtime = Runtime::new()?; - match open_transport(runtime)? { - Some(transport) => workspace::client(transport)?, - None => return Err(CliDiagnostic::server_not_running()), - } - } else { - workspace::server() - }; - - let session = CliSession::new(&*workspace, console)?; - session.run(command) -} diff --git a/crates/pgt_cli/src/metrics.rs b/crates/pgt_cli/src/metrics.rs deleted file mode 100644 index 8d6012a0..00000000 --- a/crates/pgt_cli/src/metrics.rs +++ /dev/null @@ -1,410 +0,0 @@ -use std::{ - borrow::Cow, - hash::Hash, - ops::Sub, - ptr, - time::{Duration, Instant}, -}; - -use hdrhistogram::Histogram; -use rustc_hash::FxHashMap; -use std::sync::{LazyLock, Mutex, RwLock}; -use tracing::{Level, Metadata, Subscriber, span, subscriber::Interest}; -use tracing_subscriber::{ - Layer, - layer::Context, - prelude::*, - registry::{LookupSpan, SpanRef}, -}; - -/// Implementation of a tracing [Layer] that collects timing information for spans into [Histogram]s -struct MetricsLayer; - -static METRICS: LazyLock>>> = - LazyLock::new(RwLock::default); - -/// Static pointer to the metadata of a callsite, used as a unique identifier -/// for collecting spans created from there in the global metrics map -struct CallsiteKey(&'static Metadata<'static>); - -impl PartialEq for CallsiteKey { - fn eq(&self, other: &Self) -> bool { - ptr::eq(self.0, other.0) - } -} - -impl Eq for CallsiteKey {} - -impl Hash for CallsiteKey { - fn hash(&self, state: &mut H) { - ptr::hash(self.0, state); - } -} - -/// Single entry in the global callsite storage, containing handles to the -/// histograms associated with this callsite -enum CallsiteEntry { - /// Spans with the debug level only count their total duration - Debug { total: Histogram }, - /// Spans with the trace level count their total duration as well as - /// individual busy and idle times - Trace { - total: Histogram, - busy: Histogram, - idle: Histogram, - }, -} - -impl CallsiteEntry { - fn from_level(level: &Level) -> Self { - /// Number of significant figures retained by the histogram - const SIGNIFICANT_FIGURES: u8 = 3; - - match level { - &Level::TRACE => Self::Trace { - // SAFETY: Histogram::new only returns an error if the value of - // SIGNIFICANT_FIGURES is invalid, 3 is statically known to work - total: Histogram::new(SIGNIFICANT_FIGURES).unwrap(), - busy: Histogram::new(SIGNIFICANT_FIGURES).unwrap(), - idle: Histogram::new(SIGNIFICANT_FIGURES).unwrap(), - }, - _ => Self::Debug { - total: Histogram::new(SIGNIFICANT_FIGURES).unwrap(), - }, - } - } - - fn into_histograms(self, name: &str) -> Vec<(Cow, Histogram)> { - match self { - CallsiteEntry::Debug { total } => vec![(Cow::Borrowed(name), total)], - CallsiteEntry::Trace { total, busy, idle } => vec![ - (Cow::Borrowed(name), total), - (Cow::Owned(format!("{name}.busy")), busy), - (Cow::Owned(format!("{name}.idle")), idle), - ], - } - } -} - -/// Extension data attached to tracing spans to keep track of their idle and busy time -/// -/// Most of the associated code is based on the similar logic found in `tracing-subscriber` -/// for printing span timings to the console: -/// 
https://github.com/tokio-rs/tracing/blob/6f23c128fced6409008838a3223d76d7332d79e9/tracing-subscriber/src/fmt/fmt_subscriber.rs#L973 -struct Timings { - idle: u64, - busy: u64, - last: I, -} - -trait Timepoint: Sub + Copy + Sized { - fn now() -> Self; -} - -impl Timepoint for Instant { - fn now() -> Self { - Instant::now() - } -} - -impl Timings { - fn new() -> Self { - Self { - idle: 0, - busy: 0, - last: I::now(), - } - } - - /// Count the time between the last update and now as idle - fn enter(&mut self, now: I) { - self.idle += (now - self.last).as_nanos() as u64; - self.last = now; - } - - /// Count the time between the last update and now as busy - fn exit(&mut self, now: I) { - self.busy += (now - self.last).as_nanos() as u64; - self.last = now; - } - - /// Exit the timing for this span, and record it into a callsite entry - fn record(mut self, now: I, entry: &mut CallsiteEntry) { - self.exit(now); - - match entry { - CallsiteEntry::Debug { total } => { - total.record(self.busy + self.idle).unwrap(); - } - CallsiteEntry::Trace { total, busy, idle } => { - busy.record(self.busy).unwrap(); - idle.record(self.idle).unwrap(); - total.record(self.busy + self.idle).unwrap(); - } - } - } -} - -fn read_span<'ctx, S>(ctx: &'ctx Context<'_, S>, id: &span::Id) -> SpanRef<'ctx, S> -where - S: Subscriber + for<'a> LookupSpan<'a>, -{ - ctx.span(id) - .expect("Span not found, it should have been stored in the registry") -} - -impl Layer for MetricsLayer -where - S: Subscriber + for<'a> LookupSpan<'a>, -{ - /// Only express interest in span callsites, disabling collection of events, - /// and create new histogram for the spans created by this callsite - fn register_callsite(&self, metadata: &'static Metadata<'static>) -> Interest { - if !metadata.is_span() { - return Interest::never(); - } - - let entry = CallsiteEntry::from_level(metadata.level()); - - METRICS - .write() - .unwrap() - .insert(CallsiteKey(metadata), Mutex::new(entry)); - - Interest::always() - } - - /// When a new span is created, attach the timing data extension to it - fn on_new_span(&self, _attrs: &span::Attributes<'_>, id: &span::Id, ctx: Context<'_, S>) { - let span = read_span(&ctx, id); - let mut extensions = span.extensions_mut(); - - if extensions.get_mut::().is_none() { - extensions.insert(Timings::::new()); - } - } - - /// When a span is entered, start counting idle time for the parent span if - /// it exists and busy time for the entered span itself - fn on_enter(&self, id: &span::Id, ctx: Context<'_, S>) { - let span = read_span(&ctx, id); - - let now = Instant::now(); - if let Some(parent) = span.parent() { - let mut extensions = parent.extensions_mut(); - if let Some(timings) = extensions.get_mut::() { - // The parent span was busy until now - timings.exit(now); - } - } - - let mut extensions = span.extensions_mut(); - if let Some(timings) = extensions.get_mut::() { - // The child span was idle until now - timings.enter(now); - } - } - - /// When a span is exited, stop it from counting busy time and start - /// counting the parent as busy instead - fn on_exit(&self, id: &span::Id, ctx: Context<'_, S>) { - let span = read_span(&ctx, id); - - let now = Instant::now(); - let mut extensions = span.extensions_mut(); - if let Some(timings) = extensions.get_mut::() { - // Child span was busy until now - timings.exit(now); - } - - // Re-enter parent - if let Some(parent) = span.parent() { - let mut extensions = parent.extensions_mut(); - if let Some(timings) = extensions.get_mut::() { - // Parent span was idle until now - 
timings.enter(now); - } - } - } - - /// When a span is closed, extract its timing information and write it to - /// the associated histograms - fn on_close(&self, id: span::Id, ctx: Context<'_, S>) { - let span = read_span(&ctx, &id); - let mut extensions = span.extensions_mut(); - if let Some(timing) = extensions.remove::() { - let now = Instant::now(); - - // Acquire a read lock on the metrics storage, access the metrics entry - // associated with this call site and acquire a write lock on it - let metrics = METRICS.read().unwrap(); - let entry = metrics - .get(&CallsiteKey(span.metadata())) - .expect("callsite not found, it should have been registered in register_callsite"); - - let mut entry = entry.lock().unwrap(); - timing.record(now, &mut entry); - } - } -} - -/// Initializes metrics recording -pub fn init_metrics() { - // Create and injects the metrics recording layer with the tracing library - tracing_subscriber::registry().with(MetricsLayer).init(); -} - -/// Flush and print the recorded metrics to the console -pub fn print_metrics() { - let mut write_guard = METRICS.write().unwrap(); - let mut histograms: Vec<_> = write_guard - .drain() - .flat_map(|(key, entry)| entry.into_inner().unwrap().into_histograms(key.0.name())) - .collect(); - - histograms.sort_unstable_by(|(a, _), (b, _)| a.cmp(b)); - - for (key, histogram) in histograms { - // Print the header line for the histogram with its name, mean sample - // duration and standard deviation - println!( - "{}: mean = {:.1?}, stdev = {:.1?}", - key, - Duration::from_nanos(histogram.mean().round() as u64), - Duration::from_nanos(histogram.stdev().round() as u64), - ); - - // For each quantile bucket in the histogram print out the associated - // duration, a bar corresponding to the percentage of the total number - // of samples falling within this bucket and the percentile - // corresponding to this bucket - let total = histogram.len() as f64; - for v in histogram.iter_quantiles(1) { - let duration = Duration::from_nanos(v.value_iterated_to()); - - let count = v.count_since_last_iteration() as f64; - let bar_length = (count * 40.0 / total).ceil() as usize; - - println!( - "{: >7.1?} | {:40} | {:5.1}%", - duration, - "*".repeat(bar_length), - v.quantile_iterated_to() * 100.0, - ); - } - - // Print an empty line after each histogram - println!(); - } -} - -#[cfg(test)] -mod tests { - use std::{ops::Sub, thread, time::Duration}; - - use tracing::Level; - use tracing_subscriber::prelude::*; - - use super::{CallsiteEntry, CallsiteKey, METRICS, MetricsLayer, Timepoint, Timings}; - - #[derive(Clone, Copy)] - struct TestTime(u64); - - impl Sub for TestTime { - type Output = Duration; - - fn sub(self, rhs: Self) -> Self::Output { - Duration::from_nanos(self.0 - rhs.0) - } - } - - impl Timepoint for TestTime { - fn now() -> Self { - Self(0) - } - } - - #[test] - fn test_timing() { - let mut entry = CallsiteEntry::from_level(&Level::TRACE); - - for i in 1..=5 { - let mut timing = Timings::::new(); - - timing.enter(TestTime(i)); - - timing.record(TestTime(i * 2), &mut entry); - } - - let histograms = entry.into_histograms("test"); - for (name, histogram) in histograms { - let scale = match name.as_ref() { - "test" => 2.0, - "test.idle" | "test.busy" => 1.0, - _ => unreachable!(), - }; - - let sample_count = 5; - assert_eq!(histogram.len(), sample_count); - - let mean = 3.0 * scale; - assert_eq!(histogram.mean(), mean); - - let sum = (1..=5).fold(0.0, |sum, i| { - let sample = i as f64 * scale; - sum + (sample - mean).powi(2) - }); - - let 
stddev = (sum / sample_count as f64).sqrt(); - assert_eq!(histogram.stdev(), stddev); - - let s = scale as u64 - 1; - let expected_buckets = [ - (0, s, 0.0), - (1, 2 * s + 1, 0.2), - (1, 3 * s + 2, 0.4), - (1, 4 * s + 3, 0.6), - (1, 5 * s + 4, 0.8), - (1, 6 * s + 5, 1.0), - ]; - - for (bucket, expected) in histogram.iter_linear(scale as u64).zip(&expected_buckets) { - let (count, value, quantile) = *expected; - - assert_eq!(bucket.count_since_last_iteration(), count); - assert_eq!(bucket.value_iterated_to(), value); - assert_eq!(bucket.quantile_iterated_to(), quantile); - } - } - } - - #[test] - fn test_layer() { - let _guard = tracing_subscriber::registry() - .with(MetricsLayer) - .set_default(); - - let key = { - let span = tracing::trace_span!("test_layer"); - span.in_scope(|| { - thread::sleep(Duration::from_millis(1)); - }); - - span.metadata().expect("span is disabled") - }; - - let entry = { - let mut metrics = METRICS.write().unwrap(); - metrics.remove(&CallsiteKey(key)) - }; - - let entry = entry.expect("callsite does not exist in metrics storage"); - - let entry = entry.into_inner().unwrap(); - let histograms = entry.into_histograms(key.name()); - - for (_, histogram) in histograms { - assert_eq!(histogram.len(), 1); - } - } -} diff --git a/crates/pgt_cli/src/panic.rs b/crates/pgt_cli/src/panic.rs deleted file mode 100644 index 5b8cb16d..00000000 --- a/crates/pgt_cli/src/panic.rs +++ /dev/null @@ -1,47 +0,0 @@ -use std::{ - fmt::Write, - panic::{PanicHookInfo, set_hook}, - thread, -}; - -/// Installs a global panic handler to show a user-friendly error message -/// in case the CLI panics -pub fn setup_panic_handler() { - set_hook(Box::new(panic_handler)) -} - -fn panic_handler(info: &PanicHookInfo) { - // Buffer the error message to a string before printing it at once - // to prevent it from getting mixed with other errors if multiple threads - // panic at the same time - let mut error = String::new(); - - writeln!(error, "Encountered an unexpected error").unwrap(); - writeln!(error).unwrap(); - - writeln!(error, "This is a bug in Postgres Tools, not an error in your code, and we would appreciate it if you could report it along with the following information to help us fixing the issue:").unwrap(); - writeln!(error).unwrap(); - - if let Some(location) = info.location() { - writeln!(error, "Source Location: {location}").unwrap(); - } - - if let Some(thread) = thread::current().name() { - writeln!(error, "Thread Name: {thread}").unwrap(); - } - - let payload = info.payload(); - if let Some(msg) = payload.downcast_ref::<&'static str>() { - writeln!(error, "Message: {msg}").unwrap(); - } else if let Some(msg) = payload.downcast_ref::() { - writeln!(error, "Message: {msg}").unwrap(); - } - - // Write the panic to stderr - eprintln!("{error}"); - - // Write the panic to the log file, this is done last since the `tracing` - // infrastructure could panic a second time and abort the process, so we - // want to ensure the error has at least been logged to stderr beforehand - tracing::error!("{error}"); -} diff --git a/crates/pgt_cli/src/reporter/github.rs b/crates/pgt_cli/src/reporter/github.rs deleted file mode 100644 index 1faa9741..00000000 --- a/crates/pgt_cli/src/reporter/github.rs +++ /dev/null @@ -1,45 +0,0 @@ -use crate::{DiagnosticsPayload, Execution, Reporter, ReporterVisitor, TraversalSummary}; -use pgt_console::{Console, ConsoleExt, markup}; -use pgt_diagnostics::PrintGitHubDiagnostic; -use std::io; - -pub(crate) struct GithubReporter { - pub(crate) diagnostics_payload: 
DiagnosticsPayload, - pub(crate) execution: Execution, -} - -impl Reporter for GithubReporter { - fn write(self, visitor: &mut dyn ReporterVisitor) -> io::Result<()> { - visitor.report_diagnostics(&self.execution, self.diagnostics_payload)?; - Ok(()) - } -} -pub(crate) struct GithubReporterVisitor<'a>(pub(crate) &'a mut dyn Console); - -impl ReporterVisitor for GithubReporterVisitor<'_> { - fn report_summary( - &mut self, - _execution: &Execution, - _summary: TraversalSummary, - ) -> io::Result<()> { - Ok(()) - } - - fn report_diagnostics( - &mut self, - _execution: &Execution, - diagnostics_payload: DiagnosticsPayload, - ) -> io::Result<()> { - for diagnostic in &diagnostics_payload.diagnostics { - if diagnostic.severity() >= diagnostics_payload.diagnostic_level { - if diagnostic.tags().is_verbose() && diagnostics_payload.verbose { - self.0.log(markup! {{PrintGitHubDiagnostic(diagnostic)}}); - } else if !diagnostics_payload.verbose { - self.0.log(markup! {{PrintGitHubDiagnostic(diagnostic)}}); - } - } - } - - Ok(()) - } -} diff --git a/crates/pgt_cli/src/reporter/gitlab.rs b/crates/pgt_cli/src/reporter/gitlab.rs deleted file mode 100644 index fc35a8e0..00000000 --- a/crates/pgt_cli/src/reporter/gitlab.rs +++ /dev/null @@ -1,241 +0,0 @@ -use crate::{DiagnosticsPayload, Execution, Reporter, ReporterVisitor, TraversalSummary}; -use path_absolutize::Absolutize; -use pgt_console::fmt::{Display, Formatter}; -use pgt_console::{Console, ConsoleExt, markup}; -use pgt_diagnostics::display::SourceFile; -use pgt_diagnostics::{Error, PrintDescription, Resource, Severity}; -use serde::Serialize; -use std::sync::RwLock; -use std::{ - collections::HashSet, - hash::{DefaultHasher, Hash, Hasher}, - path::{Path, PathBuf}, -}; - -pub struct GitLabReporter { - pub(crate) execution: Execution, - pub(crate) diagnostics: DiagnosticsPayload, -} - -impl Reporter for GitLabReporter { - fn write(self, visitor: &mut dyn ReporterVisitor) -> std::io::Result<()> { - visitor.report_diagnostics(&self.execution, self.diagnostics)?; - Ok(()) - } -} - -pub(crate) struct GitLabReporterVisitor<'a> { - console: &'a mut dyn Console, - repository_root: Option, -} - -#[derive(Default)] -struct GitLabHasher(HashSet); - -impl GitLabHasher { - /// Enforces uniqueness of generated fingerprints in the context of a - /// single report. 
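The GitLab fingerprinting scheme below hashes the check name, path, and offending source snippet for stability, then rehashes until the value is unique within the report so that duplicate findings do not collide. A runnable sketch of that loop; the rule name and inputs are made up for illustration:

```rust
use std::collections::HashSet;
use std::hash::{DefaultHasher, Hash, Hasher};

fn calculate_hash<T: Hash>(t: &T) -> u64 {
    let mut s = DefaultHasher::new();
    t.hash(&mut s);
    s.finish()
}

// Keep feeding the fingerprint back through the hasher until it is unique
// within this report.
fn rehash_until_unique(seen: &mut HashSet<u64>, fingerprint: u64) -> u64 {
    let mut current = fingerprint;
    while seen.contains(&current) {
        let mut hasher = DefaultHasher::new();
        current.hash(&mut hasher);
        current = hasher.finish();
    }
    seen.insert(current);
    current
}

fn main() {
    let mut seen = HashSet::new();
    let fp = calculate_hash(&("safety/banDropColumn", "migrations/01.sql", "DROP COLUMN x"));
    let first = rehash_until_unique(&mut seen, fp);
    let second = rehash_until_unique(&mut seen, fp); // same finding reported twice
    assert_ne!(first, second);
}
```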
- fn rehash_until_unique(&mut self, fingerprint: u64) -> u64 { - let mut current = fingerprint; - while self.0.contains(¤t) { - let mut hasher = DefaultHasher::new(); - current.hash(&mut hasher); - current = hasher.finish(); - } - - self.0.insert(current); - current - } -} - -impl<'a> GitLabReporterVisitor<'a> { - pub fn new(console: &'a mut dyn Console, repository_root: Option) -> Self { - Self { - console, - repository_root, - } - } -} - -impl ReporterVisitor for GitLabReporterVisitor<'_> { - fn report_summary(&mut self, _: &Execution, _: TraversalSummary) -> std::io::Result<()> { - Ok(()) - } - - fn report_diagnostics( - &mut self, - _execution: &Execution, - payload: DiagnosticsPayload, - ) -> std::io::Result<()> { - let hasher = RwLock::default(); - let diagnostics = GitLabDiagnostics(payload, &hasher, self.repository_root.as_deref()); - self.console.log(markup!({ diagnostics })); - Ok(()) - } -} - -struct GitLabDiagnostics<'a>( - DiagnosticsPayload, - &'a RwLock, - Option<&'a Path>, -); - -impl GitLabDiagnostics<'_> { - fn attempt_to_relativize(&self, subject: &str) -> Option { - let Ok(resolved) = Path::new(subject).absolutize() else { - return None; - }; - - let Ok(relativized) = resolved.strip_prefix(self.2?) else { - return None; - }; - - Some(relativized.to_path_buf()) - } - - fn compute_initial_fingerprint(&self, diagnostic: &Error, path: &str) -> u64 { - let location = diagnostic.location(); - let code = match location.span { - Some(span) => match location.source_code { - Some(source_code) => &source_code.text[span], - None => "", - }, - None => "", - }; - - let check_name = diagnostic - .category() - .map(|category| category.name()) - .unwrap_or_default(); - - calculate_hash(&Fingerprint { - check_name, - path, - code, - }) - } -} - -impl Display for GitLabDiagnostics<'_> { - fn fmt(&self, fmt: &mut Formatter) -> std::io::Result<()> { - let mut hasher = self.1.write().unwrap(); - let gitlab_diagnostics: Vec<_> = self - .0 - .diagnostics - .iter() - .filter(|d| d.severity() >= self.0.diagnostic_level) - .filter(|d| { - if self.0.verbose { - d.tags().is_verbose() - } else { - true - } - }) - .filter_map(|pgt_diagnostic| { - let absolute_path = match pgt_diagnostic.location().resource { - Some(Resource::File(file)) => Some(file), - _ => None, - } - .unwrap_or_default(); - let path_buf = self.attempt_to_relativize(absolute_path); - let path = match path_buf { - Some(buf) => buf.to_str().unwrap_or(absolute_path).to_owned(), - None => absolute_path.to_owned(), - }; - - let initial_fingerprint = self.compute_initial_fingerprint(pgt_diagnostic, &path); - let fingerprint = hasher.rehash_until_unique(initial_fingerprint); - - GitLabDiagnostic::try_from_diagnostic(pgt_diagnostic, path.to_string(), fingerprint) - }) - .collect(); - let serialized = serde_json::to_string_pretty(&gitlab_diagnostics)?; - fmt.write_str(serialized.as_str())?; - Ok(()) - } -} - -/// An entry in the GitLab Code Quality report. -/// See https://docs.gitlab.com/ee/ci/testing/code_quality.html#implement-a-custom-tool -#[derive(Serialize)] -pub struct GitLabDiagnostic<'a> { - /// A description of the code quality violation. - description: String, - /// A unique name representing the static analysis check that emitted this issue. - check_name: &'a str, - /// A unique fingerprint to identify the code quality violation. For example, an MD5 hash. - fingerprint: String, - /// A severity string (can be info, minor, major, critical, or blocker). 
- severity: &'a str, - /// The location where the code quality violation occurred. - location: Location, -} - -impl<'a> GitLabDiagnostic<'a> { - pub fn try_from_diagnostic( - diagnostic: &'a Error, - path: String, - fingerprint: u64, - ) -> Option { - let location = diagnostic.location(); - let span = location.span?; - let source_code = location.source_code?; - let description = PrintDescription(diagnostic).to_string(); - let begin = match SourceFile::new(source_code).location(span.start()) { - Ok(start) => start.line_number.get(), - Err(_) => return None, - }; - let check_name = diagnostic - .category() - .map(|category| category.name()) - .unwrap_or_default(); - - Some(GitLabDiagnostic { - severity: match diagnostic.severity() { - Severity::Hint => "info", - Severity::Information => "minor", - Severity::Warning => "major", - Severity::Error => "critical", - Severity::Fatal => "blocker", - }, - description, - check_name, - // A u64 does not fit into a JSON number, so we serialize this as a - // string - fingerprint: fingerprint.to_string(), - location: Location { - path, - lines: Lines { begin }, - }, - }) - } -} - -#[derive(Serialize)] -struct Location { - /// The relative path to the file containing the code quality violation. - path: String, - lines: Lines, -} - -#[derive(Serialize)] -struct Lines { - /// The line on which the code quality violation occurred. - begin: usize, -} - -#[derive(Hash)] -struct Fingerprint<'a> { - // Including the source code in our hash leads to more stable - // fingerprints. If you instead rely on e.g. the line number and change - // the first line of a file, all of its fingerprint would change. - code: &'a str, - check_name: &'a str, - path: &'a str, -} - -fn calculate_hash(t: &T) -> u64 { - let mut s = DefaultHasher::new(); - t.hash(&mut s); - s.finish() -} diff --git a/crates/pgt_cli/src/reporter/junit.rs b/crates/pgt_cli/src/reporter/junit.rs deleted file mode 100644 index 670bf8d4..00000000 --- a/crates/pgt_cli/src/reporter/junit.rs +++ /dev/null @@ -1,119 +0,0 @@ -use crate::{DiagnosticsPayload, Execution, Reporter, ReporterVisitor, TraversalSummary}; -use pgt_console::{Console, ConsoleExt, markup}; -use pgt_diagnostics::display::SourceFile; -use pgt_diagnostics::{Error, Resource}; -use quick_junit::{NonSuccessKind, Report, TestCase, TestCaseStatus, TestSuite}; -use std::fmt::{Display, Formatter}; -use std::io; - -pub(crate) struct JunitReporter { - pub(crate) diagnostics_payload: DiagnosticsPayload, - pub(crate) execution: Execution, - pub(crate) summary: TraversalSummary, -} - -impl Reporter for JunitReporter { - fn write(self, visitor: &mut dyn ReporterVisitor) -> io::Result<()> { - visitor.report_summary(&self.execution, self.summary)?; - visitor.report_diagnostics(&self.execution, self.diagnostics_payload)?; - Ok(()) - } -} - -struct JunitDiagnostic<'a> { - diagnostic: &'a Error, -} - -impl Display for JunitDiagnostic<'_> { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - self.diagnostic.description(f) - } -} - -pub(crate) struct JunitReporterVisitor<'a>(pub(crate) Report, pub(crate) &'a mut dyn Console); - -impl<'a> JunitReporterVisitor<'a> { - pub(crate) fn new(console: &'a mut dyn Console) -> Self { - let report = Report::new("PostgresTools"); - Self(report, console) - } -} - -impl ReporterVisitor for JunitReporterVisitor<'_> { - fn report_summary( - &mut self, - _execution: &Execution, - summary: TraversalSummary, - ) -> io::Result<()> { - self.0.time = Some(summary.duration); - self.0.errors = summary.errors as usize; - - 
Ok(()) - } - - fn report_diagnostics( - &mut self, - _execution: &Execution, - payload: DiagnosticsPayload, - ) -> io::Result<()> { - let diagnostics = payload.diagnostics.iter().filter(|diagnostic| { - if diagnostic.tags().is_verbose() { - payload.verbose - } else { - true - } - }); - - for diagnostic in diagnostics { - let mut status = TestCaseStatus::non_success(NonSuccessKind::Failure); - let message = format!("{}", JunitDiagnostic { diagnostic }); - status.set_message(message.clone()); - - let location = diagnostic.location(); - - if let (Some(span), Some(source_code), Some(resource)) = - (location.span, location.source_code, location.resource) - { - let source = SourceFile::new(source_code); - let start = source.location(span.start())?; - - status.set_description(format!( - "line {row:?}, col {col:?}, {body}", - row = start.line_number.to_zero_indexed(), - col = start.column_number.to_zero_indexed(), - body = message - )); - let mut case = TestCase::new( - format!( - "org.pgt.{}", - diagnostic - .category() - .map(|c| c.name()) - .unwrap_or_default() - .replace('/', ".") - ), - status, - ); - - if let Resource::File(path) = resource { - let mut test_suite = TestSuite::new(path); - case.extra - .insert("line".into(), start.line_number.get().to_string().into()); - case.extra.insert( - "column".into(), - start.column_number.get().to_string().into(), - ); - test_suite.extra.insert("package".into(), "org.pgt".into()); - test_suite.add_test_case(case); - self.0.add_test_suite(test_suite); - } - } - } - - self.1.log(markup! { - {self.0.to_string().unwrap()} - }); - - Ok(()) - } -} diff --git a/crates/pgt_cli/src/reporter/mod.rs b/crates/pgt_cli/src/reporter/mod.rs deleted file mode 100644 index ed265f9b..00000000 --- a/crates/pgt_cli/src/reporter/mod.rs +++ /dev/null @@ -1,63 +0,0 @@ -pub(crate) mod github; -pub(crate) mod gitlab; -pub(crate) mod junit; -pub(crate) mod terminal; - -use crate::execute::Execution; -use pgt_diagnostics::{Error, Severity}; -use pgt_fs::PgTPath; -use serde::Serialize; -use std::collections::BTreeSet; -use std::io; -use std::time::Duration; - -pub struct DiagnosticsPayload { - pub diagnostics: Vec<Error>, - pub verbose: bool, - pub diagnostic_level: Severity, -} - -/// A type that holds the result of the traversal -#[derive(Debug, Default, Serialize, Copy, Clone)] -pub struct TraversalSummary { - pub changed: usize, - pub unchanged: usize, - pub matches: usize, - // We skip it during testing because the time isn't predictable - #[cfg_attr(debug_assertions, serde(skip))] - pub duration: Duration, - pub errors: u32, - pub warnings: u32, - pub skipped: usize, - pub suggested_fixes_skipped: u32, - pub diagnostics_not_printed: u32, -} - -/// When using this trait, the type that implements it is the one that holds the read-only information to pass around -pub trait Reporter: Sized { - /// Writes the summary using the underlying visitor - fn write(self, visitor: &mut dyn ReporterVisitor) -> io::Result<()>; -} - -/// When using this trait, the type that implements it is the one that will **write** the data, ideally inside a buffer -pub trait ReporterVisitor { - /// Writes the summary to the underlying writer - fn report_summary( - &mut self, - execution: &Execution, - summary: TraversalSummary, - ) -> io::Result<()>; - - /// Writes the paths that were handled during a run.
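The `Reporter` / `ReporterVisitor` split defined here is a small double dispatch: the reporter owns the read-only payload and decides what gets reported, while the visitor owns the output and decides how it is written. A minimal sketch with simplified stand-in types:

```rust
use std::io;

struct Summary { errors: u32 }

trait ReporterVisitor {
    fn report_summary(&mut self, summary: &Summary) -> io::Result<()>;
}

trait Reporter: Sized {
    fn write(self, visitor: &mut dyn ReporterVisitor) -> io::Result<()>;
}

// One visitor per output target; a JUnit or GitLab visitor would implement
// the same trait with a different serialization.
struct ConsoleVisitor;

impl ReporterVisitor for ConsoleVisitor {
    fn report_summary(&mut self, summary: &Summary) -> io::Result<()> {
        println!("Found {} error(s).", summary.errors);
        Ok(())
    }
}

struct CheckReporter { summary: Summary }

impl Reporter for CheckReporter {
    fn write(self, visitor: &mut dyn ReporterVisitor) -> io::Result<()> {
        visitor.report_summary(&self.summary)
    }
}

fn main() -> io::Result<()> {
    CheckReporter { summary: Summary { errors: 2 } }.write(&mut ConsoleVisitor)
}
```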
- fn report_handled_paths(&mut self, evaluated_paths: BTreeSet) -> io::Result<()> { - let _ = evaluated_paths; - Ok(()) - } - - /// Writes a diagnostics - fn report_diagnostics( - &mut self, - execution: &Execution, - payload: DiagnosticsPayload, - ) -> io::Result<()>; -} diff --git a/crates/pgt_cli/src/reporter/terminal.rs b/crates/pgt_cli/src/reporter/terminal.rs deleted file mode 100644 index 6e10efc8..00000000 --- a/crates/pgt_cli/src/reporter/terminal.rs +++ /dev/null @@ -1,190 +0,0 @@ -use crate::Reporter; -use crate::execute::{Execution, TraversalMode}; -use crate::reporter::{DiagnosticsPayload, ReporterVisitor, TraversalSummary}; -use pgt_console::fmt::Formatter; -use pgt_console::{Console, ConsoleExt, fmt, markup}; -use pgt_diagnostics::advice::ListAdvice; -use pgt_diagnostics::{Diagnostic, PrintDiagnostic}; -use pgt_fs::PgTPath; -use std::collections::BTreeSet; -use std::io; -use std::time::Duration; - -pub(crate) struct ConsoleReporter { - pub(crate) summary: TraversalSummary, - pub(crate) diagnostics_payload: DiagnosticsPayload, - pub(crate) execution: Execution, - pub(crate) evaluated_paths: BTreeSet, -} - -impl Reporter for ConsoleReporter { - fn write(self, visitor: &mut dyn ReporterVisitor) -> io::Result<()> { - let verbose = self.diagnostics_payload.verbose; - visitor.report_diagnostics(&self.execution, self.diagnostics_payload)?; - visitor.report_summary(&self.execution, self.summary)?; - if verbose { - visitor.report_handled_paths(self.evaluated_paths)?; - } - Ok(()) - } -} - -#[derive(Debug, Diagnostic)] -#[diagnostic( - tags(VERBOSE), - severity = Information, - message = "Files processed:" -)] -struct EvaluatedPathsDiagnostic { - #[advice] - advice: ListAdvice, -} - -#[derive(Debug, Diagnostic)] -#[diagnostic( - tags(VERBOSE), - severity = Information, - message = "Files fixed:" -)] -struct FixedPathsDiagnostic { - #[advice] - advice: ListAdvice, -} - -pub(crate) struct ConsoleReporterVisitor<'a>(pub(crate) &'a mut dyn Console); - -impl ReporterVisitor for ConsoleReporterVisitor<'_> { - fn report_summary( - &mut self, - execution: &Execution, - summary: TraversalSummary, - ) -> io::Result<()> { - self.0.log(markup! { - {ConsoleTraversalSummary(execution.traversal_mode(), &summary)} - }); - - Ok(()) - } - - fn report_handled_paths(&mut self, evaluated_paths: BTreeSet) -> io::Result<()> { - let evaluated_paths_diagnostic = EvaluatedPathsDiagnostic { - advice: ListAdvice { - list: evaluated_paths - .iter() - .map(|p| p.display().to_string()) - .collect(), - }, - }; - - let fixed_paths_diagnostic = FixedPathsDiagnostic { - advice: ListAdvice { - list: evaluated_paths - .iter() - .filter(|p| p.was_written()) - .map(|p| p.display().to_string()) - .collect(), - }, - }; - - self.0.log(markup! { - {PrintDiagnostic::verbose(&evaluated_paths_diagnostic)} - }); - self.0.log(markup! { - {PrintDiagnostic::verbose(&fixed_paths_diagnostic)} - }); - - Ok(()) - } - - fn report_diagnostics( - &mut self, - _execution: &Execution, - diagnostics_payload: DiagnosticsPayload, - ) -> io::Result<()> { - for diagnostic in &diagnostics_payload.diagnostics { - if diagnostic.severity() >= diagnostics_payload.diagnostic_level { - if diagnostic.tags().is_verbose() && diagnostics_payload.verbose { - self.0 - .error(markup! {{PrintDiagnostic::verbose(diagnostic)}}); - } else { - self.0 - .error(markup! 
{{PrintDiagnostic::simple(diagnostic)}}); - } - } - } - - Ok(()) - } -} - -struct Files(usize); - -impl fmt::Display for Files { - fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> { - fmt.write_markup(markup!({self.0} " "))?; - if self.0 == 1 { - fmt.write_str("file") - } else { - fmt.write_str("files") - } - } -} - -struct SummaryDetail(usize); - -impl fmt::Display for SummaryDetail { - fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> { - if self.0 > 0 { - fmt.write_markup(markup! { - " Fixed "{Files(self.0)}"." - }) - } else { - fmt.write_markup(markup! { - " No fixes applied." - }) - } - } -} -struct SummaryTotal<'a>(&'a TraversalMode, usize, &'a Duration); - -impl fmt::Display for SummaryTotal<'_> { - fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> { - let files = Files(self.1); - match self.0 { - TraversalMode::Dummy => fmt.write_markup(markup! { - "Dummy "{files}" in "{self.2}"." - }), - TraversalMode::Check { .. } => fmt.write_markup(markup! { - "Checked "{files}" in "{self.2}"." - }), - } - } -} - -pub(crate) struct ConsoleTraversalSummary<'a>( - pub(crate) &'a TraversalMode, - pub(crate) &'a TraversalSummary, -); -impl fmt::Display for ConsoleTraversalSummary<'_> { - fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> { - let summary = SummaryTotal(self.0, self.1.changed + self.1.unchanged, &self.1.duration); - let detail = SummaryDetail(self.1.changed); - fmt.write_markup(markup!({summary}{detail}))?; - - if self.1.errors > 0 { - if self.1.errors == 1 { - fmt.write_markup(markup!("\n""Found "{self.1.errors}" error."))?; - } else { - fmt.write_markup(markup!("\n""Found "{self.1.errors}" errors."))?; - } - } - if self.1.warnings > 0 { - if self.1.warnings == 1 { - fmt.write_markup(markup!("\n""Found "{self.1.warnings}" warning."))?; - } else { - fmt.write_markup(markup!("\n""Found "{self.1.warnings}" warnings."))?; - } - } - Ok(()) - } -} diff --git a/crates/pgt_cli/src/service/mod.rs b/crates/pgt_cli/src/service/mod.rs deleted file mode 100644 index b05f72de..00000000 --- a/crates/pgt_cli/src/service/mod.rs +++ /dev/null @@ -1,474 +0,0 @@ -//! Implements the OS dependent transport layer for the server protocol. This -//! uses a domain socket created in the global temporary directory on Unix -//! systems, and a named pipe on Windows. The protocol used for message frames -//! is based on the [Language Server Protocol](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#baseProtocol), -//! 
a simplified derivative of the HTTP protocol - -use std::{ - any::type_name, - borrow::Cow, - io, - ops::Deref, - panic::RefUnwindSafe, - str::{FromStr, from_utf8}, - sync::Arc, - time::Duration, -}; - -use anyhow::{Context, Error, bail, ensure}; -use dashmap::DashMap; -use pgt_workspace::{ - TransportError, - workspace::{TransportRequest, WorkspaceTransport}, -}; -use serde::{Deserialize, Serialize, de::DeserializeOwned}; -use serde_json::{ - Value, from_slice, from_str, to_vec, - value::{RawValue, to_raw_value}, -}; -use tokio::{ - io::{ - AsyncBufRead, AsyncBufReadExt, AsyncRead, AsyncReadExt, AsyncWrite, AsyncWriteExt, - BufReader, BufWriter, - }, - runtime::Runtime, - sync::{ - Notify, - mpsc::{Receiver, Sender, channel}, - oneshot, - }, - time::sleep, -}; - -#[cfg(windows)] -mod windows; -#[cfg(windows)] -pub(crate) use self::windows::{ensure_daemon, open_socket, print_socket, run_daemon}; - -#[cfg(unix)] -mod unix; -#[cfg(unix)] -pub(crate) use self::unix::{ensure_daemon, open_socket, print_socket, run_daemon}; - -/// Tries to open a connection to a running daemon instance, returning a -/// [WorkspaceTransport] instance if the socket is currently active -pub fn open_transport(runtime: Runtime) -> io::Result> { - match runtime.block_on(open_socket()) { - Ok(Some((read, write))) => Ok(Some(SocketTransport::open(runtime, read, write))), - Ok(None) => Ok(None), - Err(err) => Err(err), - } -} - -type JsonRpcResult = Result, TransportError>; - -/// Implementation of [WorkspaceTransport] for types implementing [AsyncRead] -/// and [AsyncWrite] -/// -/// This structs holds an instance of the `tokio` runtime, as well as the -/// following fields: -/// - `write_send` is a sender handle to the "write channel", an MPSC channel -/// that's used to queue up requests to be sent to the server (for simplicity -/// the requests are pushed to the channel as serialized byte buffers) -/// - `pending_requests` is handle to a shared hashmap where the keys are `u64` -/// corresponding to request IDs, and the values are sender handles to oneshot -/// channel instances that can be consumed to fulfill the associated request -/// -/// Creating a new `SocketTransport` instance requires providing a `tokio` -/// runtime instance as well as the "read half" and "write half" of the socket -/// object to be used by this transport instance. These two objects implement -/// [AsyncRead] and [AsyncWrite] respectively, and should generally map to the -/// same underlying I/O object but are represented as separate so they can be -/// used concurrently -/// -/// This concurrent handling of I/O is implemented using two "background tasks": -/// - the `write_task` pulls outgoing messages from the "write channel" and -/// writes them to the "write half" of the socket -/// - the `read_task` reads incoming messages from the "read half" of the -/// socket, then looks up a request with an ID corresponding to the received -/// message in the "pending requests" map. If a pending request is found, it's -/// fulfilled with the content of the message that was just received -/// -/// In addition to these, a new "foreground task" is created for each request. -/// Each foreground task creates a oneshot channel and stores it in the pending -/// requests map using the request ID as a key, then serialize the content of -/// the request and send it over the write channel. 
Finally, the task blocks -/// the current thread until a response is received over the oneshot channel -/// from the read task, or the request times out -pub struct SocketTransport { - runtime: Runtime, - write_send: Sender<(Vec, bool)>, - pending_requests: PendingRequests, -} - -/// Stores a handle to the map of pending requests, and clears the map -/// automatically when the handle is dropped -#[derive(Clone, Default)] -struct PendingRequests { - inner: Arc>>, -} - -impl Deref for PendingRequests { - type Target = DashMap>; - - fn deref(&self) -> &Self::Target { - self.inner.as_ref() - } -} - -/// There are two live handles to the pending requests map: one is in the -/// `SocketTransport` and the other in the `read_task`. The `SocketTransport` -/// instance can only be dropped if it's empty (since the `request` method -/// blocks until the request is resolved, `&self` will always outlive any -/// pending request), but the `read_task` may abort if it encounters an error -/// or receives a shutdown broadcast while there are still pending requests. In -/// this case the `Drop` implementation will ensure that all pending requests -/// are cancelled immediately instead of timing out. -impl Drop for PendingRequests { - fn drop(&mut self) { - self.inner.clear(); - } -} - -impl SocketTransport { - pub fn open(runtime: Runtime, socket_read: R, socket_write: W) -> Self - where - R: AsyncRead + Unpin + Send + 'static, - W: AsyncWrite + Unpin + Send + 'static, - { - /// Capacity of the "write channel", once this many requests have been - /// queued up, calls to `write_send.send` will block the sending task - /// until enough capacity is available again - /// - /// Note that this does not limit how many requests can be in flight at - /// a given time, it only serves as a loose rate-limit on how many new - /// requests can be sent to the server within a given time frame - const WRITE_CHANNEL_CAPACITY: usize = 16; - - let (write_send, write_recv) = channel(WRITE_CHANNEL_CAPACITY); - - let pending_requests = PendingRequests::default(); - let pending_requests_2 = pending_requests.clone(); - - let socket_read = BufReader::new(socket_read); - let socket_write = BufWriter::new(socket_write); - - let broadcast_shutdown = Arc::new(Notify::new()); - - runtime.spawn(write_task( - broadcast_shutdown.clone(), - write_recv, - socket_write, - )); - - runtime.spawn(async move { - tokio::select! { - _ = read_task(socket_read, &pending_requests) => {} - _ = broadcast_shutdown.notified() => {} - } - }); - - Self { - runtime, - write_send, - pending_requests: pending_requests_2, - } - } -} - -// Allow the socket to be recovered across panic boundaries -impl RefUnwindSafe for SocketTransport {} - -impl WorkspaceTransport for SocketTransport { - fn request(&self, request: TransportRequest
<P>
) -> Result<R, TransportError> - where - P: Serialize, - R: DeserializeOwned, - { - let (send, recv) = oneshot::channel(); - - self.pending_requests.insert(request.id, send); - - let is_shutdown = request.method == "pgt/shutdown"; - - let request = JsonRpcRequest { - jsonrpc: Cow::Borrowed("2.0"), - id: request.id, - method: Cow::Borrowed(request.method), - params: request.params, - }; - - let request = to_vec(&request).map_err(|err| { - TransportError::SerdeError(format!( - "failed to serialize {} into byte buffer: {err}", - type_name::
<P>
() - )) - })?; - - let response = self.runtime.block_on(async move { - self.write_send - .send((request, is_shutdown)) - .await - .map_err(|_| TransportError::ChannelClosed)?; - - tokio::select! { - result = recv => { - match result { - Ok(Ok(response)) => Ok(response), - Ok(Err(error)) => Err(error), - Err(_) => Err(TransportError::ChannelClosed), - } - } - _ = sleep(Duration::from_secs(15)) => { - Err(TransportError::Timeout) - } - } - })?; - - let response = response.get(); - let result = from_str(response).map_err(|err| { - TransportError::SerdeError(format!( - "failed to deserialize {} from {response:?}: {err}", - type_name::() - )) - })?; - - Ok(result) - } -} - -async fn read_task(mut socket_read: BufReader, pending_requests: &PendingRequests) -where - R: AsyncRead + Unpin, -{ - loop { - let message = read_message(&mut socket_read).await; - let message = match message { - Ok(message) => { - let response = from_slice(&message).with_context(|| { - if let Ok(message) = from_utf8(&message) { - format!("failed to deserialize JSON-RPC response from {message:?}") - } else { - format!("failed to deserialize JSON-RPC response from {message:?}") - } - }); - - response.map(|response| (message, response)) - } - Err(err) => Err(err), - }; - - let (message, response): (_, JsonRpcResponse) = match message { - Ok(message) => message, - Err(err) => { - eprintln!( - "{:?}", - err.context("remote connection read task exited with an error") - ); - break; - } - }; - - if let Some((_, channel)) = pending_requests.remove(&response.id) { - let response = match (response.result, response.error) { - (Some(result), None) => Ok(result), - (None, Some(err)) => Err(TransportError::RPCError(err.message)), - - // Both result and error will be None if the request - // returns a null-ish result, in this case create a - // "null" RawValue as the result - // - // SAFETY: Calling `to_raw_value` with a static "null" - // JSON Value will always succeed - (None, None) => Ok(to_raw_value(&Value::Null).unwrap()), - - _ => { - let message = if let Ok(message) = from_utf8(&message) { - format!("invalid response {message:?}") - } else { - format!("invalid response {message:?}") - }; - - Err(TransportError::SerdeError(message)) - } - }; - - channel.send(response).ok(); - } - } -} - -async fn read_message(mut socket_read: R) -> Result, Error> -where - R: AsyncBufRead + Unpin, -{ - let mut length = None; - let mut line = String::new(); - - loop { - match socket_read - .read_line(&mut line) - .await - .context("failed to read header line from the socket")? 
- { - // A read of 0 bytes means the connection was closed - 0 => { - bail!("the connection to the remote workspace was unexpectedly closed"); - } - // A read of two bytes corresponds to the "\r\n" sequence - // that indicates the end of the header section - 2 => { - if line != "\r\n" { - bail!( - "unexpected byte sequence received from the remote workspace, got {line:?} expected \"\\r\\n\"" - ); - } - - break; - } - _ => { - let header: TransportHeader = line - .parse() - .context("failed to parse header from the remote workspace")?; - - match header { - TransportHeader::ContentLength(value) => { - length = Some(value); - } - TransportHeader::ContentType => {} - TransportHeader::Unknown(name) => { - eprintln!("ignoring unknown header {name:?}"); - } - } - - line.clear(); - } - } - } - - let length = length.context( - "incoming response from the remote workspace is missing the Content-Length header", - )?; - - let mut result = vec![0u8; length]; - socket_read - .read_exact(&mut result) - .await - .with_context(|| format!("failed to read message of {length} bytes from the socket"))?; - - Ok(result) -} - -async fn write_task( - broadcast_shutdown: Arc, - mut write_recv: Receiver<(Vec, bool)>, - mut socket_write: BufWriter, -) where - W: AsyncWrite + Unpin, -{ - while let Some((message, is_shutdown)) = write_recv.recv().await { - if is_shutdown { - broadcast_shutdown.notify_waiters(); - } - - if let Err(err) = write_message(&mut socket_write, message).await { - eprintln!( - "{:?}", - err.context("remote connection write task exited with an error") - ); - break; - } - - if is_shutdown { - break; - } - } -} - -async fn write_message(mut socket_write: W, message: Vec) -> Result<(), Error> -where - W: AsyncWrite + Unpin, -{ - socket_write.write_all(b"Content-Length: ").await?; - - let length = message.len().to_string(); - socket_write.write_all(length.as_bytes()).await?; - socket_write.write_all(b"\r\n").await?; - - socket_write - .write_all(b"Content-Type: application/vscode-jsonrpc; charset=utf-8\r\n") - .await?; - - socket_write.write_all(b"\r\n").await?; - - socket_write.write_all(&message).await?; - - socket_write.flush().await?; - - Ok(()) -} - -#[derive(Debug, Serialize)] -struct JsonRpcRequest

{ - /// The diagnostic is related to the content of the command line arguments. - Argv, - /// The diagnostic is related to the content of a memory buffer. - Memory, - /// The diagnostic is related to a file on the filesystem. - File(P), -} - -impl
<P>
Resource
<P>
{ - /// Returns a `FilePath<&P::Target>` if `self` points to a `Path`, or - /// `None` otherwise. - pub fn as_file(&self) -> Option<&
<P as Deref>
::Target> - where - P: Deref, - { - if let Resource::File(file) = self { - Some(file) - } else { - None - } - } - - /// Converts a `Path
<P>
` to `Path<&P::Target>`. - pub fn as_deref(&self) -> Resource<&
<P as Deref>
::Target> - where - P: Deref, - { - match self { - Resource::Argv => Resource::Argv, - Resource::Memory => Resource::Memory, - Resource::File(file) => Resource::File(file), - } - } -} - -impl Resource<&'_ str> { - /// Converts a `Path<&str>` to `Path`. - pub fn to_owned(self) -> Resource { - match self { - Resource::Argv => Resource::Argv, - Resource::Memory => Resource::Memory, - Resource::File(file) => Resource::File(file.to_owned()), - } - } -} - -type OwnedSourceCode = SourceCode; -pub(crate) type BorrowedSourceCode<'a> = SourceCode<&'a str, &'a LineIndex>; - -/// Represents the source code of a file. -#[derive(Debug, Clone, Copy)] -pub struct SourceCode { - /// The text content of the file. - pub text: T, - /// An optional "line index" for the file, a list of byte offsets for the - /// start of each line in the file. - pub line_starts: Option, -} - -impl SourceCode { - /// Converts a `SourceCode` to `SourceCode<&T::Target, &L::Target>`. - pub(crate) fn as_deref(&self) -> SourceCode<&::Target, &::Target> - where - T: Deref, - L: Deref, - { - SourceCode { - text: &self.text, - line_starts: self.line_starts.as_deref(), - } - } -} - -impl BorrowedSourceCode<'_> { - /// Converts a `SourceCode<&str, &LineIndex>` to `SourceCode`. - pub(crate) fn to_owned(self) -> OwnedSourceCode { - SourceCode { - text: self.text.to_owned(), - line_starts: self.line_starts.map(ToOwned::to_owned), - } - } -} - -#[derive(Debug)] -pub struct LineIndex([TextSize]); - -impl LineIndex { - pub fn new(slice: &'_ [TextSize]) -> &'_ Self { - // SAFETY: Transmuting `&[TextSize]` to `&LineIndex` is safe since - // `LineIndex` is a `repr(transparent)` struct containing a `[TextSize]` - // and thus has the same memory layout - unsafe { std::mem::transmute(slice) } - } -} - -impl Deref for LineIndex { - type Target = [TextSize]; - - fn deref(&self) -> &Self::Target { - &self.0 - } -} - -impl ToOwned for LineIndex { - type Owned = LineIndexBuf; - - fn to_owned(&self) -> Self::Owned { - LineIndexBuf(self.0.to_owned()) - } -} - -#[derive(Debug, Clone)] -pub struct LineIndexBuf(Vec); - -impl LineIndexBuf { - pub fn from_source_text(source: &str) -> Self { - Self( - std::iter::once(0) - .chain(source.match_indices(&['\n', '\r']).filter_map(|(i, _)| { - let bytes = source.as_bytes(); - - match bytes[i] { - // Filter out the `\r` in `\r\n` to avoid counting the line break twice - b'\r' if i + 1 < bytes.len() && bytes[i + 1] == b'\n' => None, - _ => Some(i + 1), - } - })) - .map(|i| TextSize::try_from(i).expect("integer overflow")) - .collect(), - ) - } -} - -impl Deref for LineIndexBuf { - type Target = LineIndex; - - fn deref(&self) -> &Self::Target { - LineIndex::new(self.0.as_slice()) - } -} - -impl Borrow for LineIndexBuf { - fn borrow(&self) -> &LineIndex { - self - } -} - -/// Builder type for the [Location] struct -pub struct LocationBuilder<'a> { - resource: Option>, - span: Option, - source_code: Option>, -} - -impl<'a> LocationBuilder<'a> { - pub fn resource(mut self, resource: &'a P) -> Self { - self.resource = resource.as_resource(); - self - } - - pub fn span(mut self, span: &'a S) -> Self { - self.span = span.as_span(); - self - } - - pub fn source_code(mut self, source_code: &'a S) -> Self { - self.source_code = source_code.as_source_code(); - self - } - - pub fn build(self) -> Location<'a> { - Location { - resource: self.resource, - span: self.span, - source_code: self.source_code, - } - } -} - -/// Utility trait for types that can be converted to a [Resource] -pub trait AsResource { - fn as_resource(&self) -> 
Option>; -} - -impl AsResource for Option { - fn as_resource(&self) -> Option> { - self.as_ref().and_then(T::as_resource) - } -} - -impl AsResource for &'_ T { - fn as_resource(&self) -> Option> { - T::as_resource(*self) - } -} - -impl> AsResource for Resource { - fn as_resource(&self) -> Option> { - Some(self.as_deref()) - } -} - -impl AsResource for String { - fn as_resource(&self) -> Option> { - Some(Resource::File(self)) - } -} - -impl AsResource for str { - fn as_resource(&self) -> Option> { - Some(Resource::File(self)) - } -} - -/// Utility trait for types that can be converted into `Option` -pub trait AsSpan { - fn as_span(&self) -> Option; -} - -impl AsSpan for Option { - fn as_span(&self) -> Option { - self.as_ref().and_then(T::as_span) - } -} - -impl AsSpan for &'_ T { - fn as_span(&self) -> Option { - T::as_span(*self) - } -} - -impl AsSpan for TextRange { - fn as_span(&self) -> Option { - Some(*self) - } -} - -impl AsSpan for Range -where - TextSize: TryFrom, - >::Error: Debug, -{ - fn as_span(&self) -> Option { - Some(TextRange::new( - TextSize::try_from(self.start).expect("integer overflow"), - TextSize::try_from(self.end).expect("integer overflow"), - )) - } -} - -/// Utility trait for types that can be converted into [SourceCode] -pub trait AsSourceCode { - fn as_source_code(&self) -> Option>; -} - -impl AsSourceCode for Option { - fn as_source_code(&self) -> Option> { - self.as_ref().and_then(T::as_source_code) - } -} - -impl AsSourceCode for &'_ T { - fn as_source_code(&self) -> Option> { - T::as_source_code(*self) - } -} - -impl AsSourceCode for BorrowedSourceCode<'_> { - fn as_source_code(&self) -> Option> { - Some(*self) - } -} - -impl AsSourceCode for OwnedSourceCode { - fn as_source_code(&self) -> Option> { - Some(SourceCode { - text: self.text.as_str(), - line_starts: self.line_starts.as_deref(), - }) - } -} - -impl AsSourceCode for str { - fn as_source_code(&self) -> Option> { - Some(SourceCode { - text: self, - line_starts: None, - }) - } -} - -impl AsSourceCode for String { - fn as_source_code(&self) -> Option> { - Some(SourceCode { - text: self, - line_starts: None, - }) - } -} - -#[cfg(test)] -mod tests { - use pgt_text_size::TextSize; - - use super::LineIndexBuf; - - #[test] - fn line_starts_with_carriage_return_line_feed() { - let input = "a\r\nb\r\nc"; - let LineIndexBuf(starts) = LineIndexBuf::from_source_text(input); - - assert_eq!( - vec![ - TextSize::from(0u32), - TextSize::from(3u32), - TextSize::from(6u32) - ], - starts - ); - } - - #[test] - fn line_starts_with_carriage_return() { - let input = "a\rb\rc"; - let LineIndexBuf(starts) = LineIndexBuf::from_source_text(input); - - assert_eq!( - vec![ - TextSize::from(0u32), - TextSize::from(2u32), - TextSize::from(4u32) - ], - starts - ); - } - - #[test] - fn line_starts_with_line_feed() { - let input = "a\nb\nc"; - let LineIndexBuf(starts) = LineIndexBuf::from_source_text(input); - - assert_eq!( - vec![ - TextSize::from(0u32), - TextSize::from(2u32), - TextSize::from(4u32) - ], - starts - ); - } -} diff --git a/crates/pgt_diagnostics/src/panic.rs b/crates/pgt_diagnostics/src/panic.rs deleted file mode 100644 index b41e8616..00000000 --- a/crates/pgt_diagnostics/src/panic.rs +++ /dev/null @@ -1,49 +0,0 @@ -use std::panic::UnwindSafe; - -#[derive(Default, Debug)] -pub struct PanicError { - pub info: String, - pub backtrace: Option, -} - -thread_local! 
{ - static LAST_PANIC: std::cell::Cell> = const { std::cell::Cell::new(None) }; -} - -impl std::fmt::Display for PanicError { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - let r = f.write_fmt(format_args!("{}\n", self.info)); - match &self.backtrace { - Some(backtrace) => f.write_fmt(format_args!("Backtrace: {backtrace}")), - _ => r, - } - } -} - -/// Take and set a specific panic hook before calling `f` inside a `catch_unwind`, then -/// return the old set_hook. -/// -/// If `f` panicks am `Error` with the panic message plus backtrace will be returned. -pub fn catch_unwind(f: F) -> Result -where - F: FnOnce() -> R + UnwindSafe, -{ - let prev = std::panic::take_hook(); - std::panic::set_hook(Box::new(|info| { - let info = info.to_string(); - let backtrace = std::backtrace::Backtrace::capture(); - LAST_PANIC.with(|cell| { - cell.set(Some(PanicError { - info, - backtrace: Some(backtrace), - })) - }) - })); - - let result = std::panic::catch_unwind(f) - .map_err(|_| LAST_PANIC.with(|cell| cell.take()).unwrap_or_default()); - - std::panic::set_hook(prev); - - result -} diff --git a/crates/pgt_diagnostics/src/serde.rs b/crates/pgt_diagnostics/src/serde.rs deleted file mode 100644 index 334bd4e9..00000000 --- a/crates/pgt_diagnostics/src/serde.rs +++ /dev/null @@ -1,498 +0,0 @@ -use std::io; - -use pgt_console::{MarkupBuf, fmt, markup}; -use pgt_text_edit::TextEdit; -use pgt_text_size::{TextRange, TextSize}; -use serde::{ - Deserialize, Deserializer, Serialize, Serializer, - de::{self, SeqAccess}, -}; - -use crate::{ - Advices as _, Backtrace, Category, DiagnosticTags, LogCategory, Resource, Severity, SourceCode, - Visit, diagnostic::DiagnosticTag, diagnostic::internal::AsDiagnostic, -}; - -/// Serializable representation for a [Diagnostic](super::Diagnostic). 
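///
/// A minimal round-trip sketch (assuming a `serde_json` consumer and some
/// value `diag` implementing the `Diagnostic` trait):
///
/// ```text
/// let serializable = Diagnostic::new(diag);
/// let json = serde_json::to_string(&serializable)?;
/// let decoded: Diagnostic = serde_json::from_str(&json)?;
/// ```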
-#[derive(Clone, Debug, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] -#[cfg_attr(test, derive(Eq, PartialEq))] -pub struct Diagnostic { - category: Option<&'static Category>, - severity: Severity, - description: String, - message: MarkupBuf, - advices: Advices, - verbose_advices: Advices, - location: Location, - tags: DiagnosticTags, - source: Option>, -} - -impl Diagnostic { - pub fn new(diag: D) -> Self { - Self::new_impl(diag.as_diagnostic()) - } - - fn new_impl(diag: &D) -> Self { - let category = diag.category(); - - let severity = diag.severity(); - - let description = PrintDescription(diag).to_string(); - - let mut message = MarkupBuf::default(); - let mut fmt = fmt::Formatter::new(&mut message); - // SAFETY: Writing to a MarkupBuf should never fail - diag.message(&mut fmt).unwrap(); - - let mut advices = Advices::new(); - // SAFETY: The Advices visitor never returns an error - diag.advices(&mut advices).unwrap(); - - let mut verbose_advices = Advices::new(); - // SAFETY: The Advices visitor never returns an error - diag.verbose_advices(&mut verbose_advices).unwrap(); - - let location = diag.location().into(); - - let tags = diag.tags(); - - let source = diag.source().map(Self::new_impl).map(Box::new); - - Self { - category, - severity, - description, - message, - advices, - verbose_advices, - location, - tags, - source, - } - } - - pub fn with_offset(mut self, offset: TextSize) -> Self { - self.location.span = self - .location - .span - .map(|span| TextRange::new(span.start() + offset, span.end() + offset)); - self - } -} - -impl super::Diagnostic for Diagnostic { - fn category(&self) -> Option<&'static Category> { - self.category - } - - fn severity(&self) -> Severity { - self.severity - } - - fn description(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - fmt.write_str(&self.description) - } - - fn message(&self, fmt: &mut fmt::Formatter<'_>) -> io::Result<()> { - fmt.write_markup(markup! { {self.message} }) - } - - fn advices(&self, visitor: &mut dyn Visit) -> io::Result<()> { - self.advices.record(visitor) - } - - fn verbose_advices(&self, visitor: &mut dyn Visit) -> io::Result<()> { - self.verbose_advices.record(visitor) - } - - fn location(&self) -> super::Location<'_> { - super::Location::builder() - .resource(&self.location.path) - .span(&self.location.span) - .source_code(&self.location.source_code) - .build() - } - - fn tags(&self) -> DiagnosticTags { - self.tags - } - - fn source(&self) -> Option<&dyn super::Diagnostic> { - self.source - .as_deref() - .map(|source| source as &dyn super::Diagnostic) - } -} - -/// Wrapper type implementing [std::fmt::Display] for types implementing [Diagnostic](super::Diagnostic), -/// prints the description of the diagnostic as a string. 
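/// For example, `new_impl` above fills the serialized `description` field via
/// `PrintDescription(diag).to_string()`.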
-struct PrintDescription<'fmt, D: ?Sized>(pub &'fmt D); - -impl std::fmt::Display for PrintDescription<'_, D> { - fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - self.0.description(fmt).map_err(|_| std::fmt::Error) - } -} - -#[derive(Clone, Debug, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] -#[cfg_attr(test, derive(Eq, PartialEq))] -struct Location { - path: Option>, - span: Option, - source_code: Option, -} - -impl From> for Location { - fn from(loc: super::Location<'_>) -> Self { - Self { - path: loc.resource.map(super::Resource::to_owned), - span: loc.span, - source_code: loc - .source_code - .map(|source_code| source_code.text.to_string()), - } - } -} - -/// Implementation of [Visitor] collecting serializable [Advice] into a vector. -#[derive(Clone, Debug, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] -#[cfg_attr(test, derive(Eq, PartialEq))] -struct Advices { - advices: Vec, -} - -impl Advices { - fn new() -> Self { - Self { - advices: Vec::new(), - } - } -} - -impl Visit for Advices { - fn record_log(&mut self, category: LogCategory, text: &dyn fmt::Display) -> io::Result<()> { - self.advices - .push(Advice::Log(category, markup!({ text }).to_owned())); - Ok(()) - } - - fn record_list(&mut self, list: &[&dyn fmt::Display]) -> io::Result<()> { - self.advices.push(Advice::List( - list.iter() - .map(|item| markup!({ item }).to_owned()) - .collect(), - )); - Ok(()) - } - - fn record_frame(&mut self, location: super::Location<'_>) -> io::Result<()> { - self.advices.push(Advice::Frame(location.into())); - Ok(()) - } - - fn record_diff(&mut self, diff: &TextEdit) -> io::Result<()> { - self.advices.push(Advice::Diff(diff.clone())); - Ok(()) - } - - fn record_backtrace( - &mut self, - title: &dyn fmt::Display, - backtrace: &Backtrace, - ) -> io::Result<()> { - self.advices.push(Advice::Backtrace( - markup!({ title }).to_owned(), - backtrace.clone(), - )); - Ok(()) - } - - fn record_command(&mut self, command: &str) -> io::Result<()> { - self.advices.push(Advice::Command(command.into())); - Ok(()) - } - - fn record_group( - &mut self, - title: &dyn fmt::Display, - advice: &dyn super::Advices, - ) -> io::Result<()> { - let mut advices = Advices::new(); - advice.record(&mut advices)?; - - self.advices - .push(Advice::Group(markup!({ title }).to_owned(), advices)); - Ok(()) - } -} - -impl super::Advices for Advices { - fn record(&self, visitor: &mut dyn Visit) -> io::Result<()> { - for advice in &self.advices { - advice.record(visitor)?; - } - - Ok(()) - } -} - -/// Serializable representation of a [Diagnostic](super::Diagnostic) advice -/// -/// See the [Visitor] trait for additional documentation on all the supported -/// advice types. 
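///
/// For instance, a visitor call like the one in the `TestAdvices` fixture at
/// the bottom of this file is captured as an `Advice::Log` entry:
///
/// ```text
/// visitor.record_log(LogCategory::Warn, &"log")?;
/// ```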
-#[derive(Clone, Debug, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] -#[cfg_attr(test, derive(Eq, PartialEq))] -enum Advice { - Log(LogCategory, MarkupBuf), - List(Vec), - Frame(Location), - Diff(TextEdit), - Backtrace(MarkupBuf, Backtrace), - Command(String), - Group(MarkupBuf, Advices), -} - -impl super::Advices for Advice { - fn record(&self, visitor: &mut dyn Visit) -> io::Result<()> { - match self { - Advice::Log(category, text) => visitor.record_log(*category, text), - Advice::List(list) => { - let as_display: Vec<&dyn fmt::Display> = - list.iter().map(|item| item as &dyn fmt::Display).collect(); - visitor.record_list(&as_display) - } - Advice::Frame(location) => visitor.record_frame(super::Location { - resource: location.path.as_ref().map(super::Resource::as_deref), - span: location.span, - source_code: location.source_code.as_deref().map(|text| SourceCode { - text, - line_starts: None, - }), - }), - Advice::Diff(diff) => visitor.record_diff(diff), - Advice::Backtrace(title, backtrace) => visitor.record_backtrace(title, backtrace), - Advice::Command(command) => visitor.record_command(command), - Advice::Group(title, advice) => visitor.record_group(title, advice), - } - } -} - -impl From for DiagnosticTags { - fn from(tag: DiagnosticTag) -> Self { - match tag { - DiagnosticTag::Fixable => DiagnosticTags::FIXABLE, - DiagnosticTag::Internal => DiagnosticTags::INTERNAL, - DiagnosticTag::UnnecessaryCode => DiagnosticTags::UNNECESSARY_CODE, - DiagnosticTag::DeprecatedCode => DiagnosticTags::DEPRECATED_CODE, - DiagnosticTag::Verbose => DiagnosticTags::VERBOSE, - } - } -} - -// Custom `serde` implementation for `DiagnosticTags` as a list of `DiagnosticTag` enum -impl Serialize for DiagnosticTags { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - let mut flags = Vec::new(); - - if self.contains(Self::FIXABLE) { - flags.push(DiagnosticTag::Fixable); - } - - if self.contains(Self::INTERNAL) { - flags.push(DiagnosticTag::Internal); - } - - if self.contains(Self::UNNECESSARY_CODE) { - flags.push(DiagnosticTag::UnnecessaryCode); - } - - if self.contains(Self::DEPRECATED_CODE) { - flags.push(DiagnosticTag::DeprecatedCode); - } - - serializer.collect_seq(flags) - } -} - -impl<'de> Deserialize<'de> for DiagnosticTags { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - struct Visitor; - - impl<'de> de::Visitor<'de> for Visitor { - type Value = DiagnosticTags; - - fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(formatter, "DiagnosticTags") - } - - fn visit_seq(self, mut seq: A) -> Result - where - A: SeqAccess<'de>, - { - let mut result = DiagnosticTags::empty(); - - while let Some(item) = seq.next_element::()? 
{ - result |= DiagnosticTags::from(item); - } - - Ok(result) - } - } - - deserializer.deserialize_seq(Visitor) - } -} - -#[cfg(feature = "schema")] -impl schemars::JsonSchema for DiagnosticTags { - fn schema_name() -> String { - String::from("DiagnosticTags") - } - - fn json_schema(r#gen: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema { - >::json_schema(r#gen) - } -} - -#[cfg(test)] -mod tests { - use std::io; - - use pgt_text_size::{TextRange, TextSize}; - - use crate::{ - self as pgt_diagnostics, {Advices, LogCategory, Visit}, - }; - use pgt_diagnostics_macros::Diagnostic; - - #[derive(Debug, Diagnostic)] - #[diagnostic( - severity = Warning, - category = "internalError/io", - message( - description = "text description", - message("markup message"), - ), - tags(INTERNAL) - )] - struct TestDiagnostic { - #[location(resource)] - path: String, - #[location(span)] - span: TextRange, - #[location(source_code)] - source_code: String, - #[advice] - advices: TestAdvices, - #[verbose_advice] - verbose_advices: TestAdvices, - } - - impl Default for TestDiagnostic { - fn default() -> Self { - TestDiagnostic { - path: String::from("path"), - span: TextRange::new(TextSize::from(0), TextSize::from(6)), - source_code: String::from("source_code"), - advices: TestAdvices, - verbose_advices: TestAdvices, - } - } - } - - #[derive(Debug)] - struct TestAdvices; - - impl Advices for TestAdvices { - fn record(&self, visitor: &mut dyn Visit) -> io::Result<()> { - visitor.record_log(LogCategory::Warn, &"log")?; - Ok(()) - } - } - - // fn serialized() -> Value { - // let advices = json!([ - // { - // "log": [ - // "warn", - // [ - // { - // "elements": [], - // "content": "log" - // } - // ] - // ] - // } - // ]); - // - // json!({ - // "category": "internalError/io", - // "severity": "warning", - // "description": "text description", - // "message": [ - // { - // "elements": [ - // "Emphasis" - // ], - // "content": "markup message" - // } - // ], - // "advices": { - // "advices": advices - // }, - // "verbose_advices": { - // "advices": advices - // }, - // "location": { - // "path": { - // "file": "path" - // }, - // "sourceCode": "source_code", - // "span": [ - // 0, - // 6 - // ] - // }, - // "tags": [ - // "internal" - // ], - // "source": null - // }) - // } - - // #[test] - // fn test_serialize() { - // let diag = TestDiagnostic::default(); - // let diag = super::Diagnostic::new(diag); - // let json = to_value(&diag).unwrap(); - // - // let expected = serialized(); - // assert_eq!(json, expected); - // } - // - // #[test] - // fn test_deserialize() { - // let json = serialized(); - // let diag: super::Diagnostic = from_value(json).unwrap(); - // - // let expected = TestDiagnostic::default(); - // let expected = super::Diagnostic::new(expected); - // - // assert_eq!(diag, expected); - // } -} diff --git a/crates/pgt_diagnostics/src/suggestion.rs b/crates/pgt_diagnostics/src/suggestion.rs deleted file mode 100644 index 6368d305..00000000 --- a/crates/pgt_diagnostics/src/suggestion.rs +++ /dev/null @@ -1,27 +0,0 @@ -use ::serde::{Deserialize, Serialize}; -use pgt_console::MarkupBuf; -use pgt_text_edit::TextEdit; -use pgt_text_size::TextRange; - -/// Indicates how a tool should manage this suggestion. -#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash, Serialize, Deserialize)] -pub enum Applicability { - /// The suggestion is definitely what the user intended. - /// This suggestion should be automatically applied. 
- Always, - /// The suggestion may be what the user intended, but it is uncertain. - /// The suggestion should result in valid JavaScript/TypeScript code if it is applied. - MaybeIncorrect, -} - -/// A Suggestion that is provided by the linter, and -/// can be reported to the user, and can be automatically -/// applied if it has the right [`Applicability`]. -#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)] -pub struct CodeSuggestion { - pub span: TextRange, - pub applicability: Applicability, - pub msg: MarkupBuf, - pub suggestion: TextEdit, - pub labels: Vec, -} diff --git a/crates/pgt_diagnostics_categories/Cargo.toml b/crates/pgt_diagnostics_categories/Cargo.toml deleted file mode 100644 index 5f86d456..00000000 --- a/crates/pgt_diagnostics_categories/Cargo.toml +++ /dev/null @@ -1,23 +0,0 @@ -[package] -authors.workspace = true -categories.workspace = true -description = "" -edition.workspace = true -homepage.workspace = true -keywords.workspace = true -license.workspace = true -name = "pgt_diagnostics_categories" -repository.workspace = true -version = "0.0.0" - - -[dependencies] -schemars = { workspace = true, optional = true } -serde = { workspace = true, optional = true } - -[features] -schema = ["dep:schemars"] -serde = ["dep:serde"] - -[build-dependencies] -quote = "1.0.14" diff --git a/crates/pgt_diagnostics_categories/build.rs b/crates/pgt_diagnostics_categories/build.rs deleted file mode 100644 index 59e5cda7..00000000 --- a/crates/pgt_diagnostics_categories/build.rs +++ /dev/null @@ -1,135 +0,0 @@ -use quote::{format_ident, quote}; -use std::{env, fs, io, path::PathBuf}; - -macro_rules! define_categories { - ( $( $name_link:literal : $link:literal, )* ; $( $name:literal , )* ) => { - const CATEGORIES: &[(&str, Option<&str>)] = &[ - $( ($name_link, Some($link)), )* - $( ($name, None), )* - ]; - }; -} - -include!("src/categories.rs"); - -pub fn main() -> io::Result<()> { - let mut metadata = Vec::with_capacity(CATEGORIES.len()); - let mut macro_arms = Vec::with_capacity(CATEGORIES.len()); - let mut parse_arms = Vec::with_capacity(CATEGORIES.len()); - let mut enum_variants = Vec::with_capacity(CATEGORIES.len()); - let mut concat_macro_arms = Vec::with_capacity(CATEGORIES.len()); - - for (name, link) in CATEGORIES { - let meta_name = name.replace('/', "_").to_uppercase(); - let meta_ident = format_ident!("{meta_name}"); - - let link = if let Some(link) = link { - quote! { Some(#link) } - } else { - quote! { None } - }; - - metadata.push(quote! { - pub static #meta_ident: crate::Category = crate::Category { - name: #name, - link: #link, - }; - }); - - macro_arms.push(quote! { - (#name) => { &$crate::registry::#meta_ident }; - }); - - parse_arms.push(quote! { - #name => Ok(&crate::registry::#meta_ident), - }); - - enum_variants.push(*name); - - let parts = name.split('/'); - concat_macro_arms.push(quote! { - ( #( #parts ),* ) => { &$crate::registry::#meta_ident }; - }); - } - - let tokens = quote! 
{ - impl FromStr for &'static Category { - type Err = (); - - fn from_str(name: &str) -> Result { - match name { - #( #parse_arms )* - _ => Err(()), - } - } - } - - #[cfg(feature = "schema")] - impl schemars::JsonSchema for &'static Category { - fn schema_name() -> String { - String::from("Category") - } - - fn json_schema(_gen: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema { - schemars::schema::Schema::Object(schemars::schema::SchemaObject { - instance_type: Some(schemars::schema::InstanceType::String.into()), - enum_values: Some(vec![#( #enum_variants.into() ),*]), - ..Default::default() - }) - } - } - - /// The `category!` macro can be used to statically lookup a category - /// by name from the registry - /// - /// # Example - /// - /// ``` - /// # use pgt_diagnostics_categories::{Category, category}; - /// let category: &'static Category = category!("internalError/io"); - /// assert_eq!(category.name(), "internalError/io"); - /// assert_eq!(category.link(), None); - /// ``` - #[macro_export] - macro_rules! category { - #( #macro_arms )* - - ( $name:literal ) => { - compile_error!(concat!("Unregistered diagnostic category \"", $name, "\", please add it to \"crates/pgt_diagnostics_categories/src/categories.rs\"")) - }; - ( $( $parts:tt )* ) => { - compile_error!(concat!("Invalid diagnostic category `", stringify!($( $parts )*), "`, expected a single string literal")) - }; - } - - /// The `category_concat!` macro is a variant of `category!` using a - /// slightly different syntax, for use in the `declare_group` and - /// `declare_rule` macros in the analyser - #[macro_export] - macro_rules! category_concat { - #( #concat_macro_arms )* - - ( @compile_error $( $parts:tt )* ) => { - compile_error!(concat!("Unregistered diagnostic category \"", $( $parts, )* "\", please add it to \"crates/pgt_diagnostics_categories/src/categories.rs\"")) - }; - ( $( $parts:tt ),* ) => { - $crate::category_concat!( @compile_error $( $parts )"/"* ) - }; - ( $( $parts:tt )* ) => { - compile_error!(concat!("Invalid diagnostic category `", stringify!($( $parts )*), "`, expected a comma-separated list of string literals")) - }; - } - - pub mod registry { - #( #metadata )* - } - }; - - let out_dir = env::var("OUT_DIR").unwrap(); - fs::write( - PathBuf::from(out_dir).join("categories.rs"), - tokens.to_string(), - )?; - - Ok(()) -} diff --git a/crates/pgt_diagnostics_categories/src/categories.rs b/crates/pgt_diagnostics_categories/src/categories.rs deleted file mode 100644 index 8a91cfb5..00000000 --- a/crates/pgt_diagnostics_categories/src/categories.rs +++ /dev/null @@ -1,42 +0,0 @@ -// This file contains the list of all diagnostic categories for the pg -// toolchain -// -// The `define_categories` macro is preprocessed in the build script for the -// crate in order to generate the static registry. The body of the macro -// consists of a list of key-value pairs defining the categories that have an -// associated hyperlink, then a list of string literals defining the remaining -// categories without a link. - -// PLEASE, DON'T EDIT THIS FILE BY HAND. -// Use `just new-lintrule` to create a new rule. -// lint rules are lexicographically sorted and -// must be between `define_categories! {\n` and `\n ;\n`. - -define_categories! 
{ - "lint/safety/addingRequiredField": "https://pglt.dev/linter/rules/adding-required-field", - "lint/safety/banDropColumn": "https://pglt.dev/linter/rules/ban-drop-column", - "lint/safety/banDropNotNull": "https://pglt.dev/linter/rules/ban-drop-not-null", - "lint/safety/banDropTable": "https://pglt.dev/linter/rules/ban-drop-table", - // end lint rules - ; - // General categories - "stdin", - "check", - "configuration", - "database/connection", - "internalError/io", - "internalError/runtime", - "internalError/fs", - "flags/invalid", - "project", - "typecheck", - "internalError/panic", - "syntax", - "dummy", - - // Lint groups start - "lint", - "lint/performance", - "lint/safety", - // Lint groups end -} diff --git a/crates/pgt_diagnostics_categories/src/lib.rs b/crates/pgt_diagnostics_categories/src/lib.rs deleted file mode 100644 index 5cf21734..00000000 --- a/crates/pgt_diagnostics_categories/src/lib.rs +++ /dev/null @@ -1,107 +0,0 @@ -use std::{ - hash::{Hash, Hasher}, - str::FromStr, -}; - -/// Metadata for a diagnostic category -/// -/// This type cannot be instantiated outside of the `pgt_diagnostics_categories` -/// crate, which serves as a registry for all known diagnostic categories -/// (currently this registry is fully static and generated at compile time) -#[derive(Debug)] -pub struct Category { - name: &'static str, - link: Option<&'static str>, -} - -impl Category { - /// Return the name of this category - pub fn name(&self) -> &'static str { - self.name - } - - /// Return the hyperlink associated with this category if it has one - /// - /// This will generally be a link to the documentation page for diagnostics - /// with this category - pub fn link(&self) -> Option<&'static str> { - self.link - } -} - -impl Eq for Category {} - -impl PartialEq for Category { - fn eq(&self, other: &Self) -> bool { - self.name == other.name - } -} - -impl Hash for Category { - fn hash(&self, state: &mut H) { - self.name.hash(state); - } -} - -#[cfg(feature = "serde")] -impl serde::Serialize for &'static Category { - fn serialize(&self, serializer: S) -> Result - where - S: serde::Serializer, - { - self.name().serialize(serializer) - } -} - -#[cfg(feature = "serde")] -struct CategoryVisitor; - -#[cfg(feature = "serde")] -fn deserialize_parse(code: &str) -> Result<&'static Category, E> { - code.parse().map_err(|()| { - serde::de::Error::custom(format_args!("failed to deserialize category from {code}")) - }) -} - -#[cfg(feature = "serde")] -impl<'de> serde::de::Visitor<'de> for CategoryVisitor { - type Value = &'static Category; - - fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { - formatter.write_str("a borrowed string") - } - - fn visit_str(self, v: &str) -> Result - where - E: serde::de::Error, - { - deserialize_parse(v) - } - - fn visit_borrowed_str(self, v: &'de str) -> Result - where - E: serde::de::Error, - { - deserialize_parse(v) - } - - fn visit_string(self, v: String) -> Result - where - E: serde::de::Error, - { - deserialize_parse(&v) - } -} - -#[cfg(feature = "serde")] -impl<'de> serde::Deserialize<'de> for &'static Category { - fn deserialize(deserializer: D) -> Result - where - D: serde::Deserializer<'de>, - { - deserializer.deserialize_str(CategoryVisitor) - } -} - -// Import the code generated by the build script from the content of `src/categories.rs` -include!(concat!(env!("OUT_DIR"), "/categories.rs")); diff --git a/crates/pgt_diagnostics_macros/Cargo.toml b/crates/pgt_diagnostics_macros/Cargo.toml deleted file mode 100644 index 
70e1555f..00000000 --- a/crates/pgt_diagnostics_macros/Cargo.toml +++ /dev/null @@ -1,23 +0,0 @@ -[package] -authors.workspace = true -categories.workspace = true -description = "" -edition.workspace = true -homepage.workspace = true -keywords.workspace = true -license.workspace = true -name = "pgt_diagnostics_macros" -repository.workspace = true -version = "0.0.0" - - -[lib] -proc-macro = true - -[dependencies] -proc-macro-error = { version = "1.0.4", default-features = false } -proc-macro2 = { workspace = true } -quote = { workspace = true } -syn = { workspace = true } - -[dev-dependencies] diff --git a/crates/pgt_diagnostics_macros/src/generate.rs b/crates/pgt_diagnostics_macros/src/generate.rs deleted file mode 100644 index 4ee12948..00000000 --- a/crates/pgt_diagnostics_macros/src/generate.rs +++ /dev/null @@ -1,339 +0,0 @@ -use proc_macro_error::*; -use proc_macro2::{Ident, Span, TokenStream}; -use quote::quote; - -use crate::parse::{ - DeriveEnumInput, DeriveInput, DeriveStructInput, StaticOrDynamic, StringOrMarkup, -}; - -pub(crate) fn generate_diagnostic(input: DeriveInput) -> TokenStream { - match input { - DeriveInput::DeriveStructInput(input) => generate_struct_diagnostic(input), - DeriveInput::DeriveEnumInput(input) => generate_enum_diagnostic(input), - } -} - -fn generate_struct_diagnostic(input: DeriveStructInput) -> TokenStream { - let category = generate_category(&input); - let severity = generate_severity(&input); - let description = generate_description(&input); - let message = generate_message(&input); - let advices = generate_advices(&input); - let verbose_advices = generate_verbose_advices(&input); - let location = generate_location(&input); - let tags = generate_tags(&input); - let source = generate_source(&input); - - let generic_params = if !input.generics.params.is_empty() { - let lt_token = &input.generics.lt_token; - let params = &input.generics.params; - let gt_token = &input.generics.gt_token; - quote! { #lt_token #params #gt_token } - } else { - quote!() - }; - - let ident = input.ident; - let generics = input.generics; - - quote! { - impl #generic_params pgt_diagnostics::Diagnostic for #ident #generics { - #category - #severity - #description - #message - #advices - #verbose_advices - #location - #tags - #source - } - } -} - -fn generate_category(input: &DeriveStructInput) -> TokenStream { - let category = match &input.category { - Some(StaticOrDynamic::Static(value)) => quote! { - pgt_diagnostics::category!(#value) - }, - Some(StaticOrDynamic::Dynamic(value)) => quote! { - self.#value - }, - None => return quote!(), - }; - - quote! { - fn category(&self) -> Option<&'static pgt_diagnostics::Category> { - Some(#category) - } - } -} - -fn generate_severity(input: &DeriveStructInput) -> TokenStream { - let severity = match &input.severity { - Some(StaticOrDynamic::Static(value)) => quote! { - pgt_diagnostics::Severity::#value - }, - Some(StaticOrDynamic::Dynamic(value)) => quote! { - self.#value - }, - None => return quote!(), - }; - - quote! 
{ - fn severity(&self) -> pgt_diagnostics::Severity { - #severity - } - } -} - -fn generate_description(input: &DeriveStructInput) -> TokenStream { - let description = match &input.description { - Some(StaticOrDynamic::Static(StringOrMarkup::String(value))) => { - let mut format_string = String::new(); - let mut format_params = Vec::new(); - - let input = value.value(); - let mut input = input.as_str(); - - while let Some(idx) = input.find('{') { - let (before, after) = input.split_at(idx); - format_string.push_str(before); - - let after = &after[1..]; - format_string.push('{'); - - if let Some(after) = after.strip_prefix('{') { - input = after; - continue; - } - - let end = match after.find([':', '}']) { - Some(end) => end, - None => abort!(value.span(), "failed to parse format string"), - }; - - let (ident, after) = after.split_at(end); - let ident = Ident::new(ident, Span::call_site()); - format_params.push(quote! { self.#ident }); - - input = after; - } - - if !input.is_empty() { - format_string.push_str(input); - } - - if format_params.is_empty() { - quote! { - fmt.write_str(#format_string) - } - } else { - quote! { - fmt.write_fmt(::std::format_args!(#format_string, #( #format_params ),*)) - } - } - } - Some(StaticOrDynamic::Static(StringOrMarkup::Markup(markup))) => quote! { - let mut buffer = Vec::new(); - - let write = pgt_diagnostics::termcolor::NoColor::new(&mut buffer); - let mut write = pgt_diagnostics::console::fmt::Termcolor(write); - let mut write = pgt_diagnostics::console::fmt::Formatter::new(&mut write); - - use pgt_diagnostics::console as pgt_console; - write.write_markup(&pgt_diagnostics::console::markup!{ #markup }) - .map_err(|_| ::std::fmt::Error)?; - - fmt.write_str(::std::str::from_utf8(&buffer).map_err(|_| ::std::fmt::Error)?) - }, - Some(StaticOrDynamic::Dynamic(value)) => quote! { - fmt.write_fmt(::std::format_args!("{}", self.#value)) - }, - None => return quote!(), - }; - - quote! { - fn description(&self, fmt: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result { - #description - } - } -} - -fn generate_message(input: &DeriveStructInput) -> TokenStream { - let message = match &input.message { - Some(StaticOrDynamic::Static(StringOrMarkup::String(value))) => quote! { - fmt.write_str(#value) - }, - Some(StaticOrDynamic::Static(StringOrMarkup::Markup(markup))) => quote! { - use pgt_diagnostics::console as pgt_console; - fmt.write_markup(pgt_diagnostics::console::markup!{ #markup }) - }, - Some(StaticOrDynamic::Dynamic(value)) => quote! { - pgt_diagnostics::console::fmt::Display::fmt(&self.#value, fmt) - }, - None => return quote!(), - }; - - quote! { - fn message(&self, fmt: &mut pgt_diagnostics::console::fmt::Formatter<'_>) -> ::std::io::Result<()> { - #message - } - } -} - -fn generate_advices(input: &DeriveStructInput) -> TokenStream { - if input.advices.is_empty() { - return quote!(); - } - - let advices = input.advices.iter(); - - quote! { - fn advices(&self, visitor: &mut dyn pgt_diagnostics::Visit) -> ::std::io::Result<()> { - #( pgt_diagnostics::Advices::record(&self.#advices, visitor)?; )* - Ok(()) - } - } -} - -fn generate_verbose_advices(input: &DeriveStructInput) -> TokenStream { - if input.verbose_advices.is_empty() { - return quote!(); - } - - let verbose_advices = input.verbose_advices.iter(); - - quote! 
{ - fn verbose_advices(&self, visitor: &mut dyn pgt_diagnostics::Visit) -> ::std::io::Result<()> { - #( pgt_diagnostics::Advices::record(&self.#verbose_advices, visitor)?; )* - Ok(()) - } - } -} - -fn generate_location(input: &DeriveStructInput) -> TokenStream { - if input.location.is_empty() { - return quote!(); - } - - let field = input.location.iter().map(|(field, _)| field); - let method = input.location.iter().map(|(_, method)| method); - - quote! { - fn location(&self) -> pgt_diagnostics::Location<'_> { - pgt_diagnostics::Location::builder() - #( .#method(&self.#field) )* - .build() - } - } -} - -fn generate_tags(input: &DeriveStructInput) -> TokenStream { - let tags = match &input.tags { - Some(StaticOrDynamic::Static(value)) => { - let values = value.iter(); - quote! { - #( pgt_diagnostics::DiagnosticTags::#values )|* - } - } - Some(StaticOrDynamic::Dynamic(value)) => quote! { - self.#value - }, - None => return quote!(), - }; - - quote! { - fn tags(&self) -> pgt_diagnostics::DiagnosticTags { - #tags - } - } -} - -fn generate_source(input: &DeriveStructInput) -> TokenStream { - match &input.source { - Some(value) => quote! { - fn source(&self) -> Option<&dyn pgt_diagnostics::Diagnostic> { - self.#value.as_deref() - } - }, - None => quote!(), - } -} - -fn generate_enum_diagnostic(input: DeriveEnumInput) -> TokenStream { - let generic_params = if !input.generics.params.is_empty() { - let lt_token = &input.generics.lt_token; - let params = &input.generics.params; - let gt_token = &input.generics.gt_token; - quote! { #lt_token #params #gt_token } - } else { - quote!() - }; - - let ident = input.ident; - let generics = input.generics; - let variants: Vec<_> = input - .variants - .iter() - .map(|variant| &variant.ident) - .collect(); - - quote! { - impl #generic_params pgt_diagnostics::Diagnostic for #ident #generics { - fn category(&self) -> Option<&'static pgt_diagnostics::Category> { - match self { - #(Self::#variants(error) => pgt_diagnostics::Diagnostic::category(error),)* - } - } - - fn description(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - #(Self::#variants(error) => pgt_diagnostics::Diagnostic::description(error, fmt),)* - } - } - - fn message(&self, fmt: &mut pgt_console::fmt::Formatter<'_>) -> std::io::Result<()> { - match self { - #(Self::#variants(error) => pgt_diagnostics::Diagnostic::message(error, fmt),)* - } - } - - fn severity(&self) -> pgt_diagnostics::Severity { - match self { - #(Self::#variants(error) => pgt_diagnostics::Diagnostic::severity(error),)* - } - } - - fn tags(&self) -> pgt_diagnostics::DiagnosticTags { - match self { - #(Self::#variants(error) => pgt_diagnostics::Diagnostic::tags(error),)* - } - } - - fn location(&self) -> pgt_diagnostics::Location<'_> { - match self { - #(Self::#variants(error) => pgt_diagnostics::Diagnostic::location(error),)* - } - } - - fn source(&self) -> Option<&dyn pgt_diagnostics::Diagnostic> { - match self { - #(Self::#variants(error) => pgt_diagnostics::Diagnostic::source(error),)* - } - } - - fn advices(&self, visitor: &mut dyn pgt_diagnostics::Visit) -> std::io::Result<()> { - match self { - #(Self::#variants(error) => pgt_diagnostics::Diagnostic::advices(error, visitor),)* - } - } - - fn verbose_advices(&self, visitor: &mut dyn pgt_diagnostics::Visit) -> std::io::Result<()> { - match self { - #(Self::#variants(error) => pgt_diagnostics::Diagnostic::verbose_advices(error, visitor),)* - } - } - } - } -} diff --git a/crates/pgt_diagnostics_macros/src/lib.rs 
b/crates/pgt_diagnostics_macros/src/lib.rs deleted file mode 100644 index ea069db6..00000000 --- a/crates/pgt_diagnostics_macros/src/lib.rs +++ /dev/null @@ -1,36 +0,0 @@ -use proc_macro::TokenStream; -use proc_macro_error::*; -use syn::{DeriveInput, parse_macro_input}; - -mod generate; -mod parse; - -#[proc_macro_derive( - Diagnostic, - attributes( - diagnostic, - severity, - category, - description, - message, - advice, - verbose_advice, - location, - tags, - source - ) -)] -#[proc_macro_error] -pub fn derive_diagnostic(input: TokenStream) -> TokenStream { - let input = parse_macro_input!(input as DeriveInput); - - let input = parse::DeriveInput::parse(input); - - let tokens = generate::generate_diagnostic(input); - - if false { - panic!("{tokens}"); - } - - TokenStream::from(tokens) -} diff --git a/crates/pgt_diagnostics_macros/src/parse.rs b/crates/pgt_diagnostics_macros/src/parse.rs deleted file mode 100644 index 0e6e1b91..00000000 --- a/crates/pgt_diagnostics_macros/src/parse.rs +++ /dev/null @@ -1,472 +0,0 @@ -use proc_macro_error::*; -use proc_macro2::{Ident, TokenStream}; -use quote::{ToTokens, quote}; -use syn::{ - Attribute, DataEnum, DataStruct, Generics, Token, Variant, - parse::{Error, Parse, ParseStream, Parser, Result, discouraged::Speculative}, - punctuated::Punctuated, - spanned::Spanned, - token::Paren, -}; - -#[allow(clippy::large_enum_variant)] -pub(crate) enum DeriveInput { - DeriveStructInput(DeriveStructInput), - DeriveEnumInput(DeriveEnumInput), -} - -pub(crate) struct DeriveStructInput { - pub(crate) ident: Ident, - pub(crate) generics: Generics, - - pub(crate) severity: Option>, - pub(crate) category: Option>, - pub(crate) description: Option>, - pub(crate) message: Option>, - pub(crate) advices: Vec, - pub(crate) verbose_advices: Vec, - pub(crate) location: Vec<(TokenStream, LocationField)>, - pub(crate) tags: Option>>, - pub(crate) source: Option, -} - -pub(crate) struct DeriveEnumInput { - pub(crate) ident: Ident, - pub(crate) generics: Generics, - - pub(crate) variants: Vec, -} - -impl DeriveInput { - pub(crate) fn parse(input: syn::DeriveInput) -> Self { - match input.data { - syn::Data::Struct(data) => Self::DeriveStructInput(DeriveStructInput::parse( - input.ident, - input.generics, - input.attrs, - data, - )), - syn::Data::Enum(data) => Self::DeriveEnumInput(DeriveEnumInput::parse( - input.ident, - input.generics, - input.attrs, - data, - )), - syn::Data::Union(data) => abort!( - data.union_token.span(), - "unions are not supported by the Diagnostic derive macro" - ), - } - } -} - -impl DeriveStructInput { - pub(crate) fn parse( - ident: Ident, - generics: Generics, - attrs: Vec, - data: DataStruct, - ) -> Self { - let mut result = Self { - ident, - generics, - - severity: None, - category: None, - description: None, - message: None, - advices: Vec::new(), - verbose_advices: Vec::new(), - location: Vec::new(), - tags: None, - source: None, - }; - - for attr in attrs { - if attr.path.is_ident("diagnostic") { - let tokens = attr.tokens.into(); - let attrs = match DiagnosticAttrs::parse.parse(tokens) { - Ok(attrs) => attrs, - Err(err) => abort!( - err.span(), - "failed to parse \"diagnostic\" attribute: {}", - err - ), - }; - - for item in attrs.attrs { - match item { - DiagnosticAttr::Severity(attr) => { - result.severity = Some(StaticOrDynamic::Static(attr.value)); - } - DiagnosticAttr::Category(attr) => { - result.category = Some(StaticOrDynamic::Static(attr.value)); - } - DiagnosticAttr::Message(MessageAttr::SingleString { value, .. 
}) => { - let value = StringOrMarkup::from(value); - result.description = Some(StaticOrDynamic::Static(value.clone())); - result.message = Some(StaticOrDynamic::Static(value)); - } - DiagnosticAttr::Message(MessageAttr::SingleMarkup { markup, .. }) => { - let value = StringOrMarkup::from(markup); - result.description = Some(StaticOrDynamic::Static(value.clone())); - result.message = Some(StaticOrDynamic::Static(value)); - } - DiagnosticAttr::Message(MessageAttr::Split(attr)) => { - for item in attr.attrs { - match item { - SplitMessageAttr::Description { value, .. } => { - result.description = - Some(StaticOrDynamic::Static(value.into())); - } - SplitMessageAttr::Message { markup, .. } => { - result.message = - Some(StaticOrDynamic::Static(markup.into())); - } - } - } - } - DiagnosticAttr::Tags(attr) => { - result.tags = Some(StaticOrDynamic::Static(attr.tags)); - } - } - } - - continue; - } - } - - for (index, field) in data.fields.into_iter().enumerate() { - let ident = match field.ident { - Some(ident) => quote! { #ident }, - None => quote! { #index }, - }; - - for attr in field.attrs { - if attr.path.is_ident("category") { - result.category = Some(StaticOrDynamic::Dynamic(ident.clone())); - continue; - } - - if attr.path.is_ident("severity") { - result.severity = Some(StaticOrDynamic::Dynamic(ident.clone())); - continue; - } - - if attr.path.is_ident("description") { - result.description = Some(StaticOrDynamic::Dynamic(ident.clone())); - continue; - } - - if attr.path.is_ident("message") { - result.message = Some(StaticOrDynamic::Dynamic(ident.clone())); - continue; - } - - if attr.path.is_ident("advice") { - result.advices.push(ident.clone()); - continue; - } - - if attr.path.is_ident("verbose_advice") { - result.verbose_advices.push(ident.clone()); - continue; - } - - if attr.path.is_ident("location") { - let tokens = attr.tokens.into(); - let attr = match LocationAttr::parse.parse(tokens) { - Ok(attr) => attr, - Err(err) => abort!( - err.span(), - "failed to parse \"location\" attribute: {}", - err - ), - }; - - result.location.push((ident.clone(), attr.field)); - continue; - } - - if attr.path.is_ident("tags") { - result.tags = Some(StaticOrDynamic::Dynamic(ident.clone())); - continue; - } - - if attr.path.is_ident("source") { - result.source = Some(ident.clone()); - continue; - } - } - } - - result - } -} - -impl DeriveEnumInput { - pub(crate) fn parse( - ident: Ident, - generics: Generics, - attrs: Vec, - data: DataEnum, - ) -> Self { - for attr in attrs { - if attr.path.is_ident("diagnostic") { - abort!( - attr.span(), - "\"diagnostic\" attributes are not supported on enums" - ); - } - } - - Self { - ident, - generics, - - variants: data.variants.into_iter().collect(), - } - } -} - -pub(crate) enum StaticOrDynamic { - Static(S), - Dynamic(TokenStream), -} - -#[derive(Clone)] -pub(crate) enum StringOrMarkup { - String(syn::LitStr), - Markup(TokenStream), -} - -impl From for StringOrMarkup { - fn from(value: syn::LitStr) -> Self { - Self::String(value) - } -} - -impl From for StringOrMarkup { - fn from(value: TokenStream) -> Self { - Self::Markup(value) - } -} - -struct DiagnosticAttrs { - _paren_token: Paren, - attrs: Punctuated, -} - -impl Parse for DiagnosticAttrs { - fn parse(input: ParseStream) -> Result { - let content; - Ok(Self { - _paren_token: syn::parenthesized!(content in input), - attrs: content.parse_terminated(DiagnosticAttr::parse)?, - }) - } -} - -enum DiagnosticAttr { - Severity(SeverityAttr), - Category(CategoryAttr), - Message(MessageAttr), - Tags(TagsAttr), -} 
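// For orientation, these parser types together accept the attribute surface
// exercised by the `TestDiagnostic` fixture in `pgt_diagnostics/src/serde.rs`:
//
//     #[diagnostic(
//         severity = Warning,
//         category = "internalError/io",
//         message(
//             description = "text description",
//             message("markup message"),
//         ),
//         tags(INTERNAL)
//     )]
//
// `DiagnosticAttr` models each top-level entry; `MessageAttr` covers the
// single-string, single-markup, and split description/message forms.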
- -impl Parse for DiagnosticAttr { - fn parse(input: ParseStream) -> Result { - let name: Ident = input.parse()?; - - if name == "severity" { - return Ok(Self::Severity(input.parse()?)); - } - - if name == "category" { - return Ok(Self::Category(input.parse()?)); - } - - if name == "message" { - return Ok(Self::Message(input.parse()?)); - } - - if name == "tags" { - return Ok(Self::Tags(input.parse()?)); - } - - Err(Error::new_spanned(name, "unknown attribute")) - } -} - -struct SeverityAttr { - _eq_token: Token![=], - value: Ident, -} - -impl Parse for SeverityAttr { - fn parse(input: ParseStream) -> Result { - Ok(Self { - _eq_token: input.parse()?, - value: input.parse()?, - }) - } -} - -struct CategoryAttr { - _eq_token: Token![=], - value: syn::LitStr, -} - -impl Parse for CategoryAttr { - fn parse(input: ParseStream) -> Result { - Ok(Self { - _eq_token: input.parse()?, - value: input.parse()?, - }) - } -} - -enum MessageAttr { - SingleString { - _eq_token: Token![=], - value: syn::LitStr, - }, - SingleMarkup { - _paren_token: Paren, - markup: TokenStream, - }, - Split(SplitMessageAttrs), -} - -impl Parse for MessageAttr { - fn parse(input: ParseStream) -> Result { - let lookahead = input.lookahead1(); - - if lookahead.peek(Token![=]) { - return Ok(Self::SingleString { - _eq_token: input.parse()?, - value: input.parse()?, - }); - } - - let fork = input.fork(); - if let Ok(attr) = fork.parse() { - input.advance_to(&fork); - return Ok(Self::Split(attr)); - } - - let content; - Ok(Self::SingleMarkup { - _paren_token: syn::parenthesized!(content in input), - markup: content.parse()?, - }) - } -} - -struct SplitMessageAttrs { - _paren_token: Paren, - attrs: Punctuated, -} - -impl Parse for SplitMessageAttrs { - fn parse(input: ParseStream) -> Result { - let content; - Ok(Self { - _paren_token: syn::parenthesized!(content in input), - attrs: content.parse_terminated(SplitMessageAttr::parse)?, - }) - } -} - -enum SplitMessageAttr { - Description { - _eq_token: Token![=], - value: syn::LitStr, - }, - Message { - _paren_token: Paren, - markup: TokenStream, - }, -} - -impl Parse for SplitMessageAttr { - fn parse(input: ParseStream) -> Result { - let name: Ident = input.parse()?; - - if name == "description" { - return Ok(Self::Description { - _eq_token: input.parse()?, - value: input.parse()?, - }); - } - - if name == "message" { - let content; - return Ok(Self::Message { - _paren_token: syn::parenthesized!(content in input), - markup: content.parse()?, - }); - } - - Err(Error::new_spanned(name, "unknown attribute")) - } -} - -struct TagsAttr { - _paren_token: Paren, - tags: Punctuated, -} - -impl Parse for TagsAttr { - fn parse(input: ParseStream) -> Result { - let content; - Ok(Self { - _paren_token: syn::parenthesized!(content in input), - tags: content.parse_terminated(Ident::parse)?, - }) - } -} - -struct LocationAttr { - _paren_token: Paren, - field: LocationField, -} - -pub(crate) enum LocationField { - Resource(Ident), - Span(Ident), - SourceCode(Ident), -} - -impl Parse for LocationAttr { - fn parse(input: ParseStream) -> Result { - let content; - let _paren_token = syn::parenthesized!(content in input); - let ident: Ident = content.parse()?; - - let field = if ident == "resource" { - LocationField::Resource(ident) - } else if ident == "span" { - LocationField::Span(ident) - } else if ident == "source_code" { - LocationField::SourceCode(ident) - } else { - return Err(Error::new_spanned(ident, "unknown location field")); - }; - - Ok(Self { - _paren_token, - field, - }) - } -} - -impl 
ToTokens for LocationField { - fn to_tokens(&self, tokens: &mut TokenStream) { - match self { - LocationField::Resource(ident) => ident.to_tokens(tokens), - LocationField::Span(ident) => ident.to_tokens(tokens), - LocationField::SourceCode(ident) => ident.to_tokens(tokens), - } - } -} diff --git a/crates/pgt_flags/Cargo.toml b/crates/pgt_flags/Cargo.toml deleted file mode 100644 index 6a96719c..00000000 --- a/crates/pgt_flags/Cargo.toml +++ /dev/null @@ -1,17 +0,0 @@ -[package] -authors.workspace = true -categories.workspace = true -description = "" -edition.workspace = true -homepage.workspace = true -keywords.workspace = true -license.workspace = true -name = "pgt_flags" -repository.workspace = true -version = "0.0.0" - - -[dependencies] -pgt_console = { workspace = true } - -[dev-dependencies] diff --git a/crates/pgt_flags/src/lib.rs b/crates/pgt_flags/src/lib.rs deleted file mode 100644 index 133df777..00000000 --- a/crates/pgt_flags/src/lib.rs +++ /dev/null @@ -1,108 +0,0 @@ -//! A simple implementation of feature flags. - -use pgt_console::fmt::{Display, Formatter}; -use pgt_console::{DebugDisplay, KeyValuePair, markup}; -use std::env; -use std::ops::Deref; -use std::sync::{LazyLock, OnceLock}; - -/// Returns `true` if this is an unstable build of Postgres Tools -pub fn is_unstable() -> bool { - PGT_VERSION.deref().is_none() -} - -/// The internal version of Postgres Tools. This is usually supplied during the CI build -pub static PGT_VERSION: LazyLock> = LazyLock::new(|| option_env!("PGT_VERSION")); - -pub struct PgTEnv { - pub pgt_log_path: PgTEnvVariable, - pub pgt_log_prefix: PgTEnvVariable, - pub pgt_config_path: PgTEnvVariable, -} - -pub static PGT_ENV: OnceLock = OnceLock::new(); - -impl PgTEnv { - fn new() -> Self { - Self { - pgt_log_path: PgTEnvVariable::new( - "PGT_LOG_PATH", - "The directory where the Daemon logs will be saved.", - ), - pgt_log_prefix: PgTEnvVariable::new( - "PGT_LOG_PREFIX_NAME", - "A prefix that's added to the name of the log. Default: `server.log.`", - ), - pgt_config_path: PgTEnvVariable::new( - "PGT_CONFIG_PATH", - "A path to the configuration file", - ), - } - } -} - -pub struct PgTEnvVariable { - /// The name of the environment variable - name: &'static str, - /// The description of the variable. - // This field will be used in the website to automate its generation - description: &'static str, -} - -impl PgTEnvVariable { - fn new(name: &'static str, description: &'static str) -> Self { - Self { name, description } - } - - /// It attempts to read the value of the variable - pub fn value(&self) -> Option { - env::var(self.name).ok() - } - - /// It returns the description of the variable - pub fn description(&self) -> &'static str { - self.description - } - - /// It returns the name of the variable. - pub fn name(&self) -> &'static str { - self.name - } -} - -pub fn pgt_env() -> &'static PgTEnv { - PGT_ENV.get_or_init(PgTEnv::new) -} - -impl Display for PgTEnv { - fn fmt(&self, fmt: &mut Formatter) -> std::io::Result<()> { - match self.pgt_log_path.value() { - None => { - KeyValuePair(self.pgt_log_path.name, markup! { "unset" }).fmt(fmt)?; - } - Some(value) => { - KeyValuePair(self.pgt_log_path.name, markup! {{DebugDisplay(value)}}).fmt(fmt)?; - } - }; - match self.pgt_log_prefix.value() { - None => { - KeyValuePair(self.pgt_log_prefix.name, markup! { "unset" }).fmt(fmt)?; - } - Some(value) => { - KeyValuePair(self.pgt_log_prefix.name, markup! 
{{DebugDisplay(value)}}).fmt(fmt)?; - } - }; - - match self.pgt_config_path.value() { - None => { - KeyValuePair(self.pgt_config_path.name, markup! { "unset" }).fmt(fmt)?; - } - Some(value) => { - KeyValuePair(self.pgt_config_path.name, markup! {{DebugDisplay(value)}}) - .fmt(fmt)?; - } - }; - - Ok(()) - } -} diff --git a/crates/pgt_fs/Cargo.toml b/crates/pgt_fs/Cargo.toml deleted file mode 100644 index 1e4a7b4f..00000000 --- a/crates/pgt_fs/Cargo.toml +++ /dev/null @@ -1,32 +0,0 @@ -[package] -authors.workspace = true -categories.workspace = true -description = "" -edition.workspace = true -homepage.workspace = true -keywords.workspace = true -license.workspace = true -name = "pgt_fs" -repository.workspace = true -version = "0.0.0" - - -[dependencies] -crossbeam = { workspace = true } -directories = "5.0.1" -enumflags2 = { workspace = true } -parking_lot = { version = "0.12.3", features = ["arc_lock"] } -pgt_diagnostics = { workspace = true } -rayon = { workspace = true } -rustc-hash = { workspace = true } -schemars = { workspace = true, optional = true } -serde = { workspace = true, optional = true } -smallvec = { workspace = true } -tracing = { workspace = true } - -[features] -schema = ["dep:schemars", "pgt_diagnostics/schema"] -serde = ["dep:serde"] - -[lib] -doctest = false diff --git a/crates/pgt_fs/src/dir.rs b/crates/pgt_fs/src/dir.rs deleted file mode 100644 index 6ba559e4..00000000 --- a/crates/pgt_fs/src/dir.rs +++ /dev/null @@ -1,23 +0,0 @@ -use directories::ProjectDirs; -use std::{env, fs, path::PathBuf}; -use tracing::warn; - -pub fn ensure_cache_dir() -> PathBuf { - if let Some(proj_dirs) = ProjectDirs::from("dev", "supabase-community", "pgt") { - // Linux: /home/alice/.cache/pgt - // Win: C:\Users\Alice\AppData\Local\supabase-community\pgt\cache - // Mac: /Users/Alice/Library/Caches/dev.supabase-community.pgt - let cache_dir = proj_dirs.cache_dir().to_path_buf(); - if let Err(err) = fs::create_dir_all(&cache_dir) { - let temp_dir = env::temp_dir(); - warn!( - "Failed to create local cache directory {cache_dir:?} due to error: {err}, fallback to {temp_dir:?}" - ); - temp_dir - } else { - cache_dir - } - } else { - env::temp_dir() - } -} diff --git a/crates/pgt_fs/src/fs.rs b/crates/pgt_fs/src/fs.rs deleted file mode 100644 index b73aef6e..00000000 --- a/crates/pgt_fs/src/fs.rs +++ /dev/null @@ -1,439 +0,0 @@ -use crate::{PathInterner, PgTPath}; -pub use memory::{ErrorEntry, MemoryFileSystem}; -pub use os::OsFileSystem; -use pgt_diagnostics::{Advices, Diagnostic, LogCategory, Visit, console}; -use pgt_diagnostics::{Error, Severity}; -use serde::{Deserialize, Serialize}; -use std::collections::BTreeSet; -use std::fmt::{Debug, Display, Formatter}; -use std::panic::RefUnwindSafe; -use std::path::{Path, PathBuf}; -use std::sync::Arc; -use std::{fmt, io}; -use tracing::{error, info}; - -mod memory; -mod os; - -pub struct ConfigName; - -impl ConfigName { - const PGT_JSONC: [&'static str; 1] = ["postgrestools.jsonc"]; - - pub const fn pgt_jsonc() -> &'static str { - Self::PGT_JSONC[0] - } - - pub const fn file_names() -> [&'static str; 1] { - Self::PGT_JSONC - } -} - -type AutoSearchResultAlias = Result, FileSystemDiagnostic>; - -pub trait FileSystem: Send + Sync + RefUnwindSafe { - /// It opens a file with the given set of options - fn open_with_options(&self, path: &Path, options: OpenOptions) -> io::Result>; - - /// Initiate a traversal of the filesystem - /// - /// This method creates a new "traversal scope" that can be used to - /// efficiently batch many filesystem read 
operations
-    fn traversal<'scope>(&'scope self, func: BoxedTraversal<'_, 'scope>);
-
-    /// Return the path to the working directory
-    fn working_directory(&self) -> Option<PathBuf>;
-
-    /// Checks if the given path exists in the file system
-    fn path_exists(&self, path: &Path) -> bool;
-
-    /// Checks if the given path is a regular file
-    fn path_is_file(&self, path: &Path) -> bool;
-
-    /// Checks if the given path is a directory
-    fn path_is_dir(&self, path: &Path) -> bool;
-
-    /// Checks if the given path is a symlink
-    fn path_is_symlink(&self, path: &Path) -> bool;
-
-    /// This method accepts a directory path (`search_dir`) and a list of filenames (`file_names`).
-    /// It looks for the files in the specified directory in the order they appear in the list.
-    /// If a file is not found in the initial directory, the search may continue into the parent
-    /// directories based on the `should_error_if_file_not_found` flag.
-    ///
-    /// Behavior if files are not found in `search_dir`:
-    ///
-    /// - If `should_error_if_file_not_found` is set to `true`, the method will return an error.
-    /// - If `should_error_if_file_not_found` is set to `false`, the method will search for the files in the parent
-    ///   directories of `search_dir` recursively until:
-    ///   - It finds a file, reads it, and returns its contents along with its path.
-    ///   - It confirms that the file doesn't exist in any of the checked directories.
-    ///
-    /// ## Errors
-    ///
-    /// The method returns an error if `should_error_if_file_not_found` is `true`,
-    /// and the file is not found or cannot be opened or read.
-    ///
-    fn auto_search(
-        &self,
-        search_dir: &Path,
-        file_names: &[&str],
-        should_error_if_file_not_found: bool,
-    ) -> AutoSearchResultAlias {
-        let mut current_search_dir = search_dir.to_path_buf();
-        let mut is_searching_in_parent_dir = false;
-        loop {
-            let mut errors: Vec<FileSystemDiagnostic> = vec![];
-
-            // Iterate all possible file names
-            for file_name in file_names {
-                let file_path = current_search_dir.join(file_name);
-                match self.read_file_from_path(&file_path) {
-                    Ok(content) => {
-                        if is_searching_in_parent_dir {
-                            info!(
-                                "Auto discovered the file at the following path that isn't in the working directory:\n{:?}",
-                                current_search_dir.display()
-                            );
-                        }
-                        return Ok(Some(AutoSearchResult { content, file_path }));
-                    }
-                    Err(error) => {
-                        // We don't return the error immediately because
-                        // there are multiple valid file names to search for
-                        if !is_searching_in_parent_dir && should_error_if_file_not_found {
-                            errors.push(error);
-                        }
-                    }
-                }
-            }
-
-            if !is_searching_in_parent_dir && should_error_if_file_not_found {
-                if let Some(diagnostic) = errors.into_iter().next() {
-                    // We can only return one Err, so we return the first diagnostic.
-                    return Err(diagnostic);
-                }
-            }
-
-            if let Some(parent_search_dir) = current_search_dir.parent() {
-                current_search_dir = PathBuf::from(parent_search_dir);
-                is_searching_in_parent_dir = true;
-            } else {
-                break;
-            }
-        }
-
-        Ok(None)
-    }
-
-    /// Reads the content of a file specified by `file_path`.
-    ///
-    /// This method attempts to open and read the entire content of a file at the given path.
-    ///
-    /// ## Errors
-    /// This method logs an error message and returns a `FileSystemDiagnostic` error in two scenarios:
-    /// - If the file cannot be opened, possibly due to incorrect path or permission issues.
-    /// - If the file is opened but its content cannot be read, potentially due to the file being damaged.
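-    ///
-    /// A usage sketch (illustrative, not part of the original source), showing
-    /// how `auto_search` and `read_file_from_path` fit together:
-    ///
-    ///     use pgt_fs::{ConfigName, FileSystem, OsFileSystem};
-    ///     use std::path::{Path, PathBuf};
-    ///
-    ///     let fs = OsFileSystem::default();
-    ///     // Walks from `src` up through its parent directories looking for
-    ///     // `postgrestools.jsonc`, reading each candidate with
-    ///     // `read_file_from_path`; `false` means "return Ok(None) rather
-    ///     // than an error when nothing is found".
-    ///     if let Ok(Some(found)) = fs.auto_search(Path::new("src"), &ConfigName::file_names(), false) {
-    ///         println!("config at {:?}", found.file_path);
-    ///     }
-    ///     // Reading a single known file directly:
-    ///     let content = fs.read_file_from_path(&PathBuf::from("queries.sql"));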
- fn read_file_from_path(&self, file_path: &PathBuf) -> Result { - match self.open_with_options(file_path, OpenOptions::default().read(true)) { - Ok(mut file) => { - let mut content = String::new(); - match file.read_to_string(&mut content) { - Ok(_) => Ok(content), - Err(err) => { - error!("Couldn't read the file {:?}, reason:\n{:?}", file_path, err); - Err(FileSystemDiagnostic { - path: file_path.display().to_string(), - severity: Severity::Error, - error_kind: ErrorKind::CantReadFile(file_path.display().to_string()), - }) - } - } - } - Err(err) => { - error!("Couldn't open the file {:?}, reason:\n{:?}", file_path, err); - Err(FileSystemDiagnostic { - path: file_path.display().to_string(), - severity: Severity::Error, - error_kind: ErrorKind::CantReadFile(file_path.display().to_string()), - }) - } - } - } - - fn get_changed_files(&self, base: &str) -> io::Result>; - - fn get_staged_files(&self) -> io::Result>; -} - -/// Result of the auto search -#[derive(Debug)] -pub struct AutoSearchResult { - /// The content of the file - pub content: String, - /// The path of the file found - pub file_path: PathBuf, -} - -pub trait File { - /// Read the content of the file into `buffer` - fn read_to_string(&mut self, buffer: &mut String) -> io::Result<()>; - - /// Overwrite the content of the file with the provided bytes - /// - /// This will write to the associated memory buffer, as well as flush the - /// new content to the disk if this is a physical file - fn set_content(&mut self, content: &[u8]) -> io::Result<()>; - - /// Returns the version of the current file - fn file_version(&self) -> i32; -} - -/// This struct is a "mirror" of [std::fs::FileOptions]. -/// Refer to their documentation for more details -#[derive(Default, Debug)] -pub struct OpenOptions { - read: bool, - write: bool, - truncate: bool, - create: bool, - create_new: bool, -} - -impl OpenOptions { - pub fn read(mut self, read: bool) -> Self { - self.read = read; - self - } - pub fn write(mut self, write: bool) -> Self { - self.write = write; - self - } - pub fn truncate(mut self, truncate: bool) -> Self { - self.truncate = truncate; - self - } - pub fn create(mut self, create: bool) -> Self { - self.create = create; - self - } - pub fn create_new(mut self, create_new: bool) -> Self { - self.create_new = create_new; - self - } - - pub fn into_fs_options(self, options: &mut std::fs::OpenOptions) -> &mut std::fs::OpenOptions { - options - .read(self.read) - .write(self.write) - .truncate(self.truncate) - .create(self.create) - .create_new(self.create_new) - } -} - -/// Trait that contains additional methods to work with [FileSystem] -pub trait FileSystemExt: FileSystem { - /// Open a file with the `read` option - /// - /// Equivalent to [std::fs::File::open] - fn open(&self, path: &Path) -> io::Result> { - self.open_with_options(path, OpenOptions::default().read(true)) - } - - /// Open a file with the `write` and `create` options - /// - /// Equivalent to [std::fs::File::create] - fn create(&self, path: &Path) -> io::Result> { - self.open_with_options( - path, - OpenOptions::default() - .write(true) - .create(true) - .truncate(true), - ) - } - - /// Opens a file with the `read`, `write` and `create_new` options - /// - /// Equivalent to [std::fs::File::create_new] - fn create_new(&self, path: &Path) -> io::Result> { - self.open_with_options( - path, - OpenOptions::default() - .read(true) - .write(true) - .create_new(true), - ) - } -} - -impl FileSystemExt for T {} - -type BoxedTraversal<'fs, 'scope> = Box) + Send + 'fs>; - -pub 
trait TraversalScope<'scope> {
-    /// Spawn a new filesystem read task.
-    ///
-    /// If the provided path exists and is a file, then the [`handle_path`](TraversalContext::handle_path)
-    /// method of the provided [TraversalContext] will be called. If it's a
-    /// directory, it will be recursively traversed, and all the files for which
-    /// the [TraversalContext::can_handle] method of the context
-    /// returns true will be handled as well
-    fn evaluate(&self, context: &'scope dyn TraversalContext, path: PathBuf);
-
-    /// Spawn a new filesystem read task.
-    ///
-    /// It's assumed that the provided path already exists and was already evaluated via [TraversalContext::can_handle].
-    ///
-    /// This method will call [TraversalContext::handle_path].
-    fn handle(&self, context: &'scope dyn TraversalContext, path: PathBuf);
-}
-
-pub trait TraversalContext: Sync {
-    /// Provides the traversal scope with an instance of [PathInterner], used
-    /// to emit diagnostics for IO errors that may happen in the traversal process
-    fn interner(&self) -> &PathInterner;
-
-    /// Called by the traversal process to emit an error diagnostic associated
-    /// with a particular file ID when an IO error happens
-    fn push_diagnostic(&self, error: Error);
-
-    /// Checks if the traversal context can handle a particular path, used as
-    /// an optimization to bail out of scheduling a file handler if it wouldn't
-    /// be able to process the file anyway
-    fn can_handle(&self, path: &PgTPath) -> bool;
-
-    /// This method will be called by the traversal for each file it finds
-    /// where [TraversalContext::can_handle] returned true
-    fn handle_path(&self, path: PgTPath);
-
-    /// This method will be called by the traversal for each file it finds
-    /// where [TraversalContext::can_handle] returned true
-    fn store_path(&self, path: PgTPath);
-
-    /// Returns the paths that should be handled
-    fn evaluated_paths(&self) -> BTreeSet<PgTPath>;
-}
-
-impl<T> FileSystem for Arc<T>
-where
-    T: FileSystem + Send,
-{
-    fn open_with_options(&self, path: &Path, options: OpenOptions) -> io::Result<Box<dyn File>> {
-        T::open_with_options(self, path, options)
-    }
-
-    fn traversal<'scope>(&'scope self, func: BoxedTraversal<'_, 'scope>) {
-        T::traversal(self, func)
-    }
-
-    fn working_directory(&self) -> Option<PathBuf> {
-        T::working_directory(self)
-    }
-
-    fn path_exists(&self, path: &Path) -> bool {
-        T::path_exists(self, path)
-    }
-
-    fn path_is_file(&self, path: &Path) -> bool {
-        T::path_is_file(self, path)
-    }
-
-    fn path_is_dir(&self, path: &Path) -> bool {
-        T::path_is_dir(self, path)
-    }
-
-    fn path_is_symlink(&self, path: &Path) -> bool {
-        T::path_is_symlink(self, path)
-    }
-
-    fn get_changed_files(&self, base: &str) -> io::Result<Vec<String>> {
-        T::get_changed_files(self, base)
-    }
-
-    fn get_staged_files(&self) -> io::Result<Vec<String>> {
-        T::get_staged_files(self)
-    }
-}
-
-#[derive(Debug, Diagnostic, Deserialize, Serialize)]
-#[diagnostic(category = "internalError/fs")]
-pub struct FileSystemDiagnostic {
-    #[severity]
-    pub severity: Severity,
-    #[location(resource)]
-    pub path: String,
-    #[message]
-    #[description]
-    #[advice]
-    pub error_kind: ErrorKind,
-}
-
-impl Display for FileSystemDiagnostic {
-    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
-        Diagnostic::description(self, f)
-    }
-}
-
-#[derive(Clone, Debug, Deserialize, Serialize)]
-pub enum ErrorKind {
-    /// File not found
-    CantReadFile(String),
-    /// Unknown file type
-    UnknownFileType,
-    /// Dereferenced (broken) symbolic link
-    DereferencedSymlink(String),
-    /// Too deeply nested symbolic link expansion
-    
DeeplyNestedSymlinkExpansion(String), -} - -impl console::fmt::Display for ErrorKind { - fn fmt(&self, fmt: &mut console::fmt::Formatter) -> io::Result<()> { - match self { - ErrorKind::CantReadFile(_) => fmt.write_str("Cannot read file"), - ErrorKind::UnknownFileType => fmt.write_str("Unknown file type"), - ErrorKind::DereferencedSymlink(_) => fmt.write_str("Dereferenced symlink"), - ErrorKind::DeeplyNestedSymlinkExpansion(_) => { - fmt.write_str("Deeply nested symlink expansion") - } - } - } -} - -impl std::fmt::Display for ErrorKind { - fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - ErrorKind::CantReadFile(_) => fmt.write_str("Cannot read file"), - ErrorKind::UnknownFileType => write!(fmt, "Unknown file type"), - ErrorKind::DereferencedSymlink(_) => write!(fmt, "Dereferenced symlink"), - ErrorKind::DeeplyNestedSymlinkExpansion(_) => { - write!(fmt, "Deeply nested symlink expansion") - } - } - } -} - -impl Advices for ErrorKind { - fn record(&self, visitor: &mut dyn Visit) -> io::Result<()> { - match self { - ErrorKind::CantReadFile(path) => visitor.record_log( - LogCategory::Error, - &format!("Can't read the following file, maybe for permissions reasons or it doesn't exist: {path}") - ), - - ErrorKind::UnknownFileType => visitor.record_log( - LogCategory::Info, - &"Encountered a file system entry that's neither a file, directory or symbolic link", - ), - ErrorKind::DereferencedSymlink(path) => visitor.record_log( - LogCategory::Info, - &format!("Encountered a file system entry that is a broken symbolic link: {path}"), - ), - ErrorKind::DeeplyNestedSymlinkExpansion(path) => visitor.record_log( - LogCategory::Error, - &format!("Encountered a file system entry with too many nested symbolic links, possibly forming an infinite cycle: {path}"), - ), - } - } -} diff --git a/crates/pgt_fs/src/fs/memory.rs b/crates/pgt_fs/src/fs/memory.rs deleted file mode 100644 index baffe0ab..00000000 --- a/crates/pgt_fs/src/fs/memory.rs +++ /dev/null @@ -1,573 +0,0 @@ -use rustc_hash::FxHashMap; -use std::collections::hash_map::{Entry, IntoIter}; -use std::io; -use std::panic::{AssertUnwindSafe, RefUnwindSafe}; -use std::path::{Path, PathBuf}; -use std::str; -use std::sync::Arc; - -use parking_lot::{Mutex, RawMutex, RwLock, lock_api::ArcMutexGuard}; -use pgt_diagnostics::{Error, Severity}; - -use crate::fs::OpenOptions; -use crate::{FileSystem, PgTPath, TraversalContext, TraversalScope}; - -use super::{BoxedTraversal, ErrorKind, File, FileSystemDiagnostic}; - -type OnGetChangedFiles = Option< - Arc< - AssertUnwindSafe< - Mutex Vec + Send + 'static + RefUnwindSafe>>>, - >, - >, ->; - -/// Fully in-memory file system, stores the content of all known files in a hashmap -pub struct MemoryFileSystem { - files: AssertUnwindSafe>>, - errors: FxHashMap, - allow_write: bool, - on_get_staged_files: OnGetChangedFiles, - on_get_changed_files: OnGetChangedFiles, -} - -impl Default for MemoryFileSystem { - fn default() -> Self { - Self { - files: Default::default(), - errors: Default::default(), - allow_write: true, - on_get_staged_files: Some(Arc::new(AssertUnwindSafe(Mutex::new(Some(Box::new( - Vec::new, - )))))), - on_get_changed_files: Some(Arc::new(AssertUnwindSafe(Mutex::new(Some(Box::new( - Vec::new, - )))))), - } - } -} - -/// This is what's actually being stored for each file in the filesystem -/// -/// To break it down: -/// - `Vec` is the byte buffer holding the content of the file -/// - `Mutex` lets it safely be read an written concurrently from multiple -/// threads 
([FileSystem] is required to be [Sync]) -/// - `Arc` allows [MemoryFile] handles to outlive references to the filesystem -/// itself (since [FileSystem::open] returns an owned value) -/// - `AssertUnwindSafe` tells the type system this value can safely be -/// accessed again after being recovered from a panic (using `catch_unwind`), -/// which means the filesystem guarantees a file will never get into an -/// inconsistent state if a thread panics while having a handle open (a read -/// or write either happens or not, but will never panic halfway through) -type FileEntry = Arc>>; - -/// Error entries are special file system entries that cause an error to be -/// emitted when they are reached through a filesystem traversal. This is -/// mainly useful as a mechanism to test the handling of filesystem error in -/// client code. -#[derive(Clone, Debug)] -pub enum ErrorEntry { - UnknownFileType, - DereferencedSymlink(PathBuf), - DeeplyNestedSymlinkExpansion(PathBuf), -} - -impl MemoryFileSystem { - /// Create a read-only instance of [MemoryFileSystem] - /// - /// This instance will disallow any modification through the [FileSystem] - /// trait, but the content of the filesystem may still be modified using - /// the methods on [MemoryFileSystem] itself. - pub fn new_read_only() -> Self { - Self { - allow_write: false, - ..Self::default() - } - } - - /// Create or update a file in the filesystem - pub fn insert(&mut self, path: PathBuf, content: impl Into>) { - let files = self.files.0.get_mut(); - files.insert(path, Arc::new(Mutex::new(content.into()))); - } - - /// Create or update an error in the filesystem - pub fn insert_error(&mut self, path: PathBuf, kind: ErrorEntry) { - self.errors.insert(path, kind); - } - - /// Remove a file from the filesystem - pub fn remove(&mut self, path: &Path) { - self.files.0.write().remove(path); - } - - pub fn files(self) -> IntoIter { - let files = self.files.0.into_inner(); - files.into_iter() - } - - pub fn set_on_get_changed_files( - &mut self, - cfn: Box Vec + Send + RefUnwindSafe + 'static>, - ) { - self.on_get_changed_files = Some(Arc::new(AssertUnwindSafe(Mutex::new(Some(cfn))))); - } - - pub fn set_on_get_staged_files( - &mut self, - cfn: Box Vec + Send + RefUnwindSafe + 'static>, - ) { - self.on_get_staged_files = Some(Arc::new(AssertUnwindSafe(Mutex::new(Some(cfn))))); - } -} - -impl FileSystem for MemoryFileSystem { - fn open_with_options(&self, path: &Path, options: OpenOptions) -> io::Result> { - if !self.allow_write - && (options.create || options.create_new || options.truncate || options.write) - { - return Err(io::Error::new( - io::ErrorKind::PermissionDenied, - "cannot acquire write access to file in read-only filesystem", - )); - } - - let mut inner = if options.create || options.create_new { - // Acquire write access to the files map if the file may need to be created - let mut files = self.files.0.write(); - match files.entry(PathBuf::from(path)) { - Entry::Vacant(entry) => { - // we create an empty file - let file: FileEntry = Arc::new(Mutex::new(vec![])); - let entry = entry.insert(file); - entry.lock_arc() - } - Entry::Occupied(entry) => { - if options.create { - // If `create` is true, truncate the file - let entry = entry.into_mut(); - *entry = Arc::new(Mutex::new(vec![])); - entry.lock_arc() - } else { - // This branch can only be reached if `create_new` was true, - // we should return an error if the file already exists - return Err(io::Error::new( - io::ErrorKind::AlreadyExists, - format!("path {path:?} already exists in memory 
filesystem"), - )); - } - } - } - } else { - let files = self.files.0.read(); - let entry = files.get(path).ok_or_else(|| { - io::Error::new( - io::ErrorKind::NotFound, - format!("path {path:?} does not exists in memory filesystem"), - ) - })?; - - entry.lock_arc() - }; - - if options.truncate { - // Clear the buffer if the file was open with `truncate` - inner.clear(); - } - - Ok(Box::new(MemoryFile { - inner, - can_read: options.read, - can_write: options.write, - version: 0, - })) - } - fn traversal<'scope>(&'scope self, func: BoxedTraversal<'_, 'scope>) { - func(&MemoryTraversalScope { fs: self }) - } - - fn working_directory(&self) -> Option { - None - } - - fn path_exists(&self, path: &Path) -> bool { - self.path_is_file(path) - } - - fn path_is_file(&self, path: &Path) -> bool { - let files = self.files.0.read(); - files.get(path).is_some() - } - - fn path_is_dir(&self, path: &Path) -> bool { - !self.path_is_file(path) - } - - fn path_is_symlink(&self, _path: &Path) -> bool { - false - } - - fn get_changed_files(&self, _base: &str) -> io::Result> { - let cb_arc = self.on_get_changed_files.as_ref().unwrap().clone(); - - let mut cb_guard = cb_arc.lock(); - - let cb = cb_guard.take().unwrap(); - - Ok(cb()) - } - - fn get_staged_files(&self) -> io::Result> { - let cb_arc = self.on_get_staged_files.as_ref().unwrap().clone(); - - let mut cb_guard = cb_arc.lock(); - - let cb = cb_guard.take().unwrap(); - - Ok(cb()) - } -} - -struct MemoryFile { - inner: ArcMutexGuard>, - can_read: bool, - can_write: bool, - version: i32, -} - -impl File for MemoryFile { - fn read_to_string(&mut self, buffer: &mut String) -> io::Result<()> { - if !self.can_read { - return Err(io::Error::new( - io::ErrorKind::PermissionDenied, - "this file wasn't open with read access", - )); - } - - // Verify the stored byte content is valid UTF-8 - let content = str::from_utf8(&self.inner) - .map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err))?; - // Append the content of the file to the buffer - buffer.push_str(content); - Ok(()) - } - - fn set_content(&mut self, content: &[u8]) -> io::Result<()> { - if !self.can_write { - return Err(io::Error::new( - io::ErrorKind::PermissionDenied, - "this file wasn't open with write access", - )); - } - - // Resize the memory buffer to fit the new content - self.inner.resize(content.len(), 0); - // Copy the new content into the memory buffer - self.inner.copy_from_slice(content); - // we increase its version - self.version += 1; - Ok(()) - } - - fn file_version(&self) -> i32 { - self.version - } -} - -pub struct MemoryTraversalScope<'scope> { - fs: &'scope MemoryFileSystem, -} - -impl<'scope> TraversalScope<'scope> for MemoryTraversalScope<'scope> { - fn evaluate(&self, ctx: &'scope dyn TraversalContext, base: PathBuf) { - // Traversal is implemented by iterating on all keys, and matching on - // those that are prefixed with the provided `base` path - { - let files = &self.fs.files.0.read(); - for path in files.keys() { - let should_process_file = if base.starts_with(".") || base.starts_with("./") { - // we simulate absolute paths, so we can correctly strips out the base path from the path - let absolute_base = PathBuf::from("/").join(&base); - let absolute_path = Path::new("/").join(path); - absolute_path.strip_prefix(&absolute_base).is_ok() - } else { - path.strip_prefix(&base).is_ok() - }; - - if should_process_file { - let _ = ctx.interner().intern_path(path.into()); - let pgt_path = PgTPath::new(path); - if !ctx.can_handle(&pgt_path) { - continue; - } - 
ctx.store_path(pgt_path); - } - } - } - - for (path, entry) in &self.fs.errors { - if path.strip_prefix(&base).is_ok() { - ctx.push_diagnostic(Error::from(FileSystemDiagnostic { - path: path.to_string_lossy().to_string(), - error_kind: match entry { - ErrorEntry::UnknownFileType => ErrorKind::UnknownFileType, - ErrorEntry::DereferencedSymlink(path) => { - ErrorKind::DereferencedSymlink(path.to_string_lossy().to_string()) - } - ErrorEntry::DeeplyNestedSymlinkExpansion(path) => { - ErrorKind::DeeplyNestedSymlinkExpansion( - path.to_string_lossy().to_string(), - ) - } - }, - severity: Severity::Warning, - })); - } - } - } - - fn handle(&self, context: &'scope dyn TraversalContext, path: PathBuf) { - context.handle_path(PgTPath::new(path)); - } -} - -#[cfg(test)] -mod tests { - use std::collections::BTreeSet; - use std::{ - io, - mem::swap, - path::{Path, PathBuf}, - }; - - use parking_lot::Mutex; - use pgt_diagnostics::Error; - - use crate::{FileSystem, MemoryFileSystem, PathInterner, PgTPath, TraversalContext}; - use crate::{OpenOptions, fs::FileSystemExt}; - - #[test] - fn fs_read_only() { - let mut fs = MemoryFileSystem::new_read_only(); - - let path = Path::new("file.js"); - fs.insert(path.into(), *b"content"); - - assert!(fs.open(path).is_ok()); - - match fs.create(path) { - Ok(_) => panic!("fs.create() for a read-only filesystem should return an error"), - Err(error) => { - assert_eq!(error.kind(), io::ErrorKind::PermissionDenied); - } - } - - match fs.create_new(path) { - Ok(_) => panic!("fs.create() for a read-only filesystem should return an error"), - Err(error) => { - assert_eq!(error.kind(), io::ErrorKind::PermissionDenied); - } - } - - match fs.open_with_options(path, OpenOptions::default().read(true).write(true)) { - Ok(_) => panic!( - "fs.open_with_options(read + write) for a read-only filesystem should return an error" - ), - Err(error) => { - assert_eq!(error.kind(), io::ErrorKind::PermissionDenied); - } - } - } - - #[test] - fn file_read_write() { - let mut fs = MemoryFileSystem::default(); - - let path = Path::new("file.js"); - let content_1 = "content 1"; - let content_2 = "content 2"; - - fs.insert(path.into(), content_1.as_bytes()); - - let mut file = fs - .open_with_options(path, OpenOptions::default().read(true).write(true)) - .expect("the file should exist in the memory file system"); - - let mut buffer = String::new(); - file.read_to_string(&mut buffer) - .expect("the file should be read without error"); - - assert_eq!(buffer, content_1); - - file.set_content(content_2.as_bytes()) - .expect("the file should be written without error"); - - let mut buffer = String::new(); - file.read_to_string(&mut buffer) - .expect("the file should be read without error"); - - assert_eq!(buffer, content_2); - } - - #[test] - fn file_create() { - let fs = MemoryFileSystem::default(); - - let path = Path::new("file.js"); - let mut file = fs.create(path).expect("the file should not fail to open"); - - file.set_content(b"content".as_slice()) - .expect("the file should be written without error"); - } - - #[test] - fn file_create_truncate() { - let mut fs = MemoryFileSystem::default(); - - let path = Path::new("file.js"); - fs.insert(path.into(), b"content".as_slice()); - - let file = fs.create(path).expect("the file should not fail to create"); - - drop(file); - - let mut file = fs.open(path).expect("the file should not fail to open"); - - let mut buffer = String::new(); - file.read_to_string(&mut buffer) - .expect("the file should be read without error"); - - assert!( - buffer.is_empty(), 
- "fs.create() should truncate the file content" - ); - } - - #[test] - fn file_create_new() { - let fs = MemoryFileSystem::default(); - - let path = Path::new("file.js"); - let content = "content"; - - let mut file = fs - .create_new(path) - .expect("the file should not fail to create"); - - file.set_content(content.as_bytes()) - .expect("the file should be written without error"); - - drop(file); - - let mut file = fs.open(path).expect("the file should not fail to open"); - - let mut buffer = String::new(); - file.read_to_string(&mut buffer) - .expect("the file should be read without error"); - - assert_eq!(buffer, content); - } - - #[test] - fn file_create_new_exists() { - let mut fs = MemoryFileSystem::default(); - - let path = Path::new("file.js"); - fs.insert(path.into(), b"content".as_slice()); - - let result = fs.create_new(path); - - match result { - Ok(_) => panic!("fs.create_new() for an existing file should return an error"), - Err(error) => { - assert_eq!(error.kind(), io::ErrorKind::AlreadyExists); - } - } - } - - #[test] - fn missing_file() { - let fs = MemoryFileSystem::default(); - - let result = fs.open(Path::new("non_existing")); - - match result { - Ok(_) => panic!("opening a non-existing file should return an error"), - Err(error) => { - assert_eq!(error.kind(), io::ErrorKind::NotFound); - } - } - } - - #[test] - fn traversal() { - let mut fs = MemoryFileSystem::default(); - - fs.insert(PathBuf::from("dir1/file1"), "dir1/file1".as_bytes()); - fs.insert(PathBuf::from("dir1/file2"), "dir1/file1".as_bytes()); - fs.insert(PathBuf::from("dir2/file1"), "dir2/file1".as_bytes()); - fs.insert(PathBuf::from("dir2/file2"), "dir2/file1".as_bytes()); - - struct TestContext { - interner: PathInterner, - visited: Mutex>, - } - - impl TraversalContext for TestContext { - fn interner(&self) -> &PathInterner { - &self.interner - } - - fn push_diagnostic(&self, err: Error) { - panic!("unexpected error {err:?}") - } - - fn can_handle(&self, _: &PgTPath) -> bool { - true - } - - fn handle_path(&self, path: PgTPath) { - self.visited.lock().insert(path.to_written()); - } - - fn store_path(&self, path: PgTPath) { - self.visited.lock().insert(path); - } - - fn evaluated_paths(&self) -> BTreeSet { - let lock = self.visited.lock(); - lock.clone() - } - } - - let (interner, _) = PathInterner::new(); - let mut ctx = TestContext { - interner, - visited: Mutex::default(), - }; - - // Traverse a directory - fs.traversal(Box::new(|scope| { - scope.evaluate(&ctx, PathBuf::from("dir1")); - })); - - let mut visited = BTreeSet::default(); - swap(&mut visited, ctx.visited.get_mut()); - - assert_eq!(visited.len(), 2); - assert!(visited.contains(&PgTPath::new("dir1/file1"))); - assert!(visited.contains(&PgTPath::new("dir1/file2"))); - - // Traverse a single file - fs.traversal(Box::new(|scope| { - scope.evaluate(&ctx, PathBuf::from("dir2/file2")); - })); - - let mut visited = BTreeSet::default(); - swap(&mut visited, ctx.visited.get_mut()); - - assert_eq!(visited.len(), 1); - assert!(visited.contains(&PgTPath::new("dir2/file2"))); - } -} diff --git a/crates/pgt_fs/src/fs/os.rs b/crates/pgt_fs/src/fs/os.rs deleted file mode 100644 index a2e40695..00000000 --- a/crates/pgt_fs/src/fs/os.rs +++ /dev/null @@ -1,429 +0,0 @@ -//! 
Implementation of the [FileSystem] and related traits for the underlying OS filesystem -use super::{BoxedTraversal, ErrorKind, File, FileSystemDiagnostic}; -use crate::fs::OpenOptions; -use crate::{ - FileSystem, PgTPath, - fs::{TraversalContext, TraversalScope}, -}; -use pgt_diagnostics::{DiagnosticExt, Error, Severity, adapters::IoError}; -use rayon::{Scope, scope}; -use std::fs::{DirEntry, FileType}; -use std::process::Command; -use std::{ - env, fs, - io::{self, ErrorKind as IoErrorKind, Read, Seek, Write}, - mem, - path::{Path, PathBuf}, -}; - -const MAX_SYMLINK_DEPTH: u8 = 3; - -/// Implementation of [FileSystem] that directly calls through to the underlying OS -pub struct OsFileSystem { - pub working_directory: Option, -} - -impl OsFileSystem { - pub fn new(working_directory: PathBuf) -> Self { - Self { - working_directory: Some(working_directory), - } - } -} - -impl Default for OsFileSystem { - fn default() -> Self { - Self { - working_directory: env::current_dir().ok(), - } - } -} - -impl FileSystem for OsFileSystem { - fn open_with_options(&self, path: &Path, options: OpenOptions) -> io::Result> { - tracing::debug_span!("OsFileSystem::open_with_options", path = ?path, options = ?options) - .in_scope(move || -> io::Result> { - let mut fs_options = fs::File::options(); - Ok(Box::new(OsFile { - inner: options.into_fs_options(&mut fs_options).open(path)?, - version: 0, - })) - }) - } - - fn traversal(&self, func: BoxedTraversal) { - OsTraversalScope::with(move |scope| { - func(scope); - }) - } - - fn working_directory(&self) -> Option { - self.working_directory.clone() - } - - fn path_exists(&self, path: &Path) -> bool { - path.exists() - } - - fn path_is_file(&self, path: &Path) -> bool { - path.is_file() - } - - fn path_is_dir(&self, path: &Path) -> bool { - path.is_dir() - } - - fn path_is_symlink(&self, path: &Path) -> bool { - path.is_symlink() - } - - fn get_changed_files(&self, base: &str) -> io::Result> { - let output = Command::new("git") - .arg("diff") - .arg("--name-only") - .arg("--relative") - // A: added - // C: copied - // M: modified - // R: renamed - // Source: https://git-scm.com/docs/git-diff#Documentation/git-diff.txt---diff-filterACDMRTUXB82308203 - .arg("--diff-filter=ACMR") - .arg(format!("{base}...HEAD")) - .output()?; - - Ok(String::from_utf8_lossy(&output.stdout) - .lines() - .map(|l| l.to_string()) - .collect()) - } - - fn get_staged_files(&self) -> io::Result> { - let output = Command::new("git") - .arg("diff") - .arg("--name-only") - .arg("--relative") - .arg("--staged") - // A: added - // C: copied - // M: modified - // R: renamed - // Source: https://git-scm.com/docs/git-diff#Documentation/git-diff.txt---diff-filterACDMRTUXB82308203 - .arg("--diff-filter=ACMR") - .output()?; - - Ok(String::from_utf8_lossy(&output.stdout) - .lines() - .map(|l| l.to_string()) - .collect()) - } -} - -struct OsFile { - inner: fs::File, - version: i32, -} - -impl File for OsFile { - fn read_to_string(&mut self, buffer: &mut String) -> io::Result<()> { - tracing::debug_span!("OsFile::read_to_string").in_scope(move || { - // Reset the cursor to the starting position - self.inner.rewind()?; - // Read the file content - self.inner.read_to_string(buffer)?; - Ok(()) - }) - } - - fn set_content(&mut self, content: &[u8]) -> io::Result<()> { - tracing::trace_span!("OsFile::set_content").in_scope(move || { - // Truncate the file - self.inner.set_len(0)?; - // Reset the cursor to the starting position - self.inner.rewind()?; - // Write the byte slice - self.inner.write_all(content)?; 
- // new version stored - self.version += 1; - Ok(()) - }) - } - - fn file_version(&self) -> i32 { - self.version - } -} - -#[repr(transparent)] -pub struct OsTraversalScope<'scope> { - scope: Scope<'scope>, -} - -impl<'scope> OsTraversalScope<'scope> { - pub(crate) fn with(func: F) - where - F: FnOnce(&Self) + Send, - { - scope(move |scope| func(Self::from_rayon(scope))) - } - - fn from_rayon<'a>(scope: &'a Scope<'scope>) -> &'a Self { - // SAFETY: transmuting from Scope to OsTraversalScope is safe since - // OsTraversalScope has the `repr(transparent)` attribute that - // guarantees its layout is the same as Scope - unsafe { mem::transmute(scope) } - } -} - -impl<'scope> TraversalScope<'scope> for OsTraversalScope<'scope> { - fn evaluate(&self, ctx: &'scope dyn TraversalContext, path: PathBuf) { - let file_type = match path.metadata() { - Ok(meta) => meta.file_type(), - Err(err) => { - ctx.push_diagnostic( - IoError::from(err).with_file_path(path.to_string_lossy().to_string()), - ); - return; - } - }; - handle_any_file(&self.scope, ctx, path, file_type, None); - } - - fn handle(&self, context: &'scope dyn TraversalContext, path: PathBuf) { - self.scope.spawn(move |_| { - context.handle_path(PgTPath::new(path)); - }); - } -} - -// TODO: remove in 2.0, and directly use `.gitignore` -/// Default list of ignored directories, in the future will be supplanted by -/// detecting and parsing .ignore files -const DEFAULT_IGNORE: &[&[u8]] = &[b".git", b".svn", b".hg", b".yarn", b"node_modules"]; - -/// Traverse a single directory -fn handle_dir<'scope>( - scope: &Scope<'scope>, - ctx: &'scope dyn TraversalContext, - path: &Path, - // The unresolved origin path in case the directory is behind a symbolic link - origin_path: Option, -) { - if let Some(file_name) = path.file_name() { - if DEFAULT_IGNORE.contains(&file_name.as_encoded_bytes()) { - return; - } - } - let iter = match fs::read_dir(path) { - Ok(iter) => iter, - Err(err) => { - ctx.push_diagnostic(IoError::from(err).with_file_path(path.display().to_string())); - return; - } - }; - - for entry in iter { - match entry { - Ok(entry) => handle_dir_entry(scope, ctx, entry, origin_path.clone()), - Err(err) => { - ctx.push_diagnostic(IoError::from(err).with_file_path(path.display().to_string())); - } - } - } -} - -/// Traverse a single directory entry, scheduling any file to execute the context -/// handler and sub-directories for subsequent traversal -fn handle_dir_entry<'scope>( - scope: &Scope<'scope>, - ctx: &'scope dyn TraversalContext, - entry: DirEntry, - // The unresolved origin path in case the directory is behind a symbolic link - origin_path: Option, -) { - let path = entry.path(); - let file_type = match entry.file_type() { - Ok(file_type) => file_type, - Err(err) => { - ctx.push_diagnostic( - IoError::from(err).with_file_path(path.to_string_lossy().to_string()), - ); - return; - } - }; - handle_any_file(scope, ctx, path, file_type, origin_path); -} - -fn handle_any_file<'scope>( - scope: &Scope<'scope>, - ctx: &'scope dyn TraversalContext, - mut path: PathBuf, - mut file_type: FileType, - // The unresolved origin path in case the directory is behind a symbolic link - mut origin_path: Option, -) { - if !ctx.interner().intern_path(path.clone()) { - // If the path was already inserted, it could have been pointed at by - // multiple symlinks. No need to traverse again. 
- return; - } - - if file_type.is_symlink() { - if !ctx.can_handle(&PgTPath::new(path.clone())) { - return; - } - let Ok((target_path, target_file_type)) = expand_symbolic_link(path.clone(), ctx) else { - return; - }; - - if !ctx.interner().intern_path(target_path.clone()) { - // If the path was already inserted, it could have been pointed at by - // multiple symlinks. No need to traverse again. - return; - } - - if target_file_type.is_dir() { - scope.spawn(move |scope| { - handle_dir(scope, ctx, &target_path, Some(path)); - }); - return; - } - - path = target_path; - file_type = target_file_type; - } - - // In case the file is inside a directory that is behind a symbolic link, - // the unresolved origin path is used to construct a new path. - // This is required to support ignore patterns to symbolic links. - let pgt_path = if let Some(old_origin_path) = &origin_path { - if let Some(file_name) = path.file_name() { - let new_origin_path = old_origin_path.join(file_name); - origin_path = Some(new_origin_path.clone()); - PgTPath::new(new_origin_path) - } else { - ctx.push_diagnostic(Error::from(FileSystemDiagnostic { - path: path.to_string_lossy().to_string(), - error_kind: ErrorKind::UnknownFileType, - severity: Severity::Warning, - })); - return; - } - } else { - PgTPath::new(&path) - }; - - // Performing this check here let's us skip unsupported - // files entirely, as well as silently ignore unsupported files when - // doing a directory traversal, but printing an error message if the - // user explicitly requests an unsupported file to be handled. - // This check also works for symbolic links. - if !ctx.can_handle(&pgt_path) { - return; - } - - if file_type.is_dir() { - scope.spawn(move |scope| { - handle_dir(scope, ctx, &path, origin_path); - }); - return; - } - - if file_type.is_file() { - scope.spawn(move |_| { - ctx.store_path(PgTPath::new(path)); - }); - return; - } - - ctx.push_diagnostic(Error::from(FileSystemDiagnostic { - path: path.to_string_lossy().to_string(), - error_kind: ErrorKind::from(file_type), - severity: Severity::Warning, - })); -} - -/// Indicates a symbolic link could not be expanded. -/// -/// Has no fields, since the diagnostics are already generated inside -/// [follow_symbolic_link()] and the caller doesn't need to do anything except -/// an early return. -struct SymlinkExpansionError; - -/// Expands symlinks by recursively following them up to [MAX_SYMLINK_DEPTH]. -/// -/// ## Returns -/// -/// Returns a tuple where the first argument is the target path being pointed to -/// and the second argument is the target file type. 
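-///
-/// Illustrative bound (not from the original source): with
-/// `MAX_SYMLINK_DEPTH` = 3, a chain `a -> b -> c -> target` still resolves,
-/// while `a -> b -> c -> d -> target` pushes a
-/// `DeeplyNestedSymlinkExpansion` diagnostic and aborts; a cycle such as
-/// `a -> b -> a` hits the same limit instead of looping forever.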
-fn expand_symbolic_link( - mut path: PathBuf, - ctx: &dyn TraversalContext, -) -> Result<(PathBuf, FileType), SymlinkExpansionError> { - let mut symlink_depth = 0; - loop { - symlink_depth += 1; - if symlink_depth > MAX_SYMLINK_DEPTH { - let path = path.to_string_lossy().to_string(); - ctx.push_diagnostic(Error::from(FileSystemDiagnostic { - path: path.clone(), - error_kind: ErrorKind::DeeplyNestedSymlinkExpansion(path), - severity: Severity::Warning, - })); - return Err(SymlinkExpansionError); - } - - let (target_path, target_file_type) = follow_symlink(&path, ctx)?; - - if target_file_type.is_symlink() { - path = target_path; - continue; - } - - return Ok((target_path, target_file_type)); - } -} - -fn follow_symlink( - path: &Path, - ctx: &dyn TraversalContext, -) -> Result<(PathBuf, FileType), SymlinkExpansionError> { - tracing::info!("Translating symlink: {path:?}"); - - let target_path = fs::read_link(path).map_err(|err| { - ctx.push_diagnostic(IoError::from(err).with_file_path(path.to_string_lossy().to_string())); - SymlinkExpansionError - })?; - - // Make sure relative symlinks are resolved: - let target_path = path - .parent() - .map(|parent_dir| parent_dir.join(&target_path)) - .unwrap_or(target_path); - - let target_file_type = match fs::symlink_metadata(&target_path) { - Ok(meta) => meta.file_type(), - Err(err) => { - if err.kind() == IoErrorKind::NotFound { - let path = path.to_string_lossy().to_string(); - ctx.push_diagnostic(Error::from(FileSystemDiagnostic { - path: path.clone(), - error_kind: ErrorKind::DereferencedSymlink(path), - severity: Severity::Warning, - })); - } else { - ctx.push_diagnostic( - IoError::from(err).with_file_path(path.to_string_lossy().to_string()), - ); - } - return Err(SymlinkExpansionError); - } - }; - - Ok((target_path, target_file_type)) -} - -impl From for ErrorKind { - fn from(_: FileType) -> Self { - Self::UnknownFileType - } -} diff --git a/crates/pgt_fs/src/interner.rs b/crates/pgt_fs/src/interner.rs deleted file mode 100644 index 89fb64ea..00000000 --- a/crates/pgt_fs/src/interner.rs +++ /dev/null @@ -1,34 +0,0 @@ -use crossbeam::channel::{Receiver, Sender, unbounded}; -use rustc_hash::FxHashSet; -use std::path::PathBuf; -use std::sync::RwLock; - -/// File paths interner cache -/// -/// The path interner stores an instance of [PathBuf] -pub struct PathInterner { - storage: RwLock>, - handler: Sender, -} - -impl PathInterner { - pub fn new() -> (Self, Receiver) { - let (send, recv) = unbounded(); - let interner = Self { - storage: RwLock::new(FxHashSet::default()), - handler: send, - }; - - (interner, recv) - } - - /// Insert the path. - /// Returns `true` if the path was not previously inserted. - pub fn intern_path(&self, path: PathBuf) -> bool { - let result = self.storage.write().unwrap().insert(path.clone()); - if result { - self.handler.send(path).ok(); - } - result - } -} diff --git a/crates/pgt_fs/src/lib.rs b/crates/pgt_fs/src/lib.rs deleted file mode 100644 index 0988e7b5..00000000 --- a/crates/pgt_fs/src/lib.rs +++ /dev/null @@ -1,15 +0,0 @@ -//! 
# pgt_fs
-
-mod dir;
-mod fs;
-mod interner;
-mod path;
-
-pub use dir::ensure_cache_dir;
-pub use interner::PathInterner;
-pub use path::PgTPath;
-
-pub use fs::{
-    AutoSearchResult, ConfigName, ErrorEntry, File, FileSystem, FileSystemDiagnostic,
-    FileSystemExt, MemoryFileSystem, OpenOptions, OsFileSystem, TraversalContext, TraversalScope,
-};
diff --git a/crates/pgt_fs/src/path.rs b/crates/pgt_fs/src/path.rs
deleted file mode 100644
index 811bebe5..00000000
--- a/crates/pgt_fs/src/path.rs
+++ /dev/null
@@ -1,208 +0,0 @@
-use enumflags2::{BitFlags, bitflags};
-use smallvec::SmallVec;
-use std::{
-    cmp::Ordering,
-    ffi::OsStr,
-    fs::File,
-    fs::read_to_string,
-    io,
-    io::Write,
-    ops::{Deref, DerefMut},
-    path::PathBuf,
-};
-
-use crate::ConfigName;
-
-/// The priority of the file
-#[derive(Debug, Clone, Copy, Default, PartialEq, Eq, Ord, PartialOrd, Hash)]
-#[repr(u8)]
-#[bitflags]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
-// NOTE: The order of the variants is important, the one on the top has the highest priority
-pub enum FileKind {
-    /// A configuration file has the highest priority. It's usually `postgrestools.jsonc`
-    ///
-    /// Other third-party configuration files might be added in the future
-    Config,
-    /// An ignore file, like `.gitignore`
-    Ignore,
-    /// Files that are required to be inspected before handling other files.
-    Inspectable,
-    /// A file to handle has the lowest priority. It's usually a traversed file, or a file opened by the LSP
-    #[default]
-    Handleable,
-}
-
-#[derive(Debug, Clone, Hash, Ord, PartialOrd, Eq, PartialEq, Default)]
-#[cfg_attr(
-    feature = "serde",
-    derive(serde::Serialize, serde::Deserialize),
-    serde(
-        from = "smallvec::SmallVec<[FileKind; 5]>",
-        into = "smallvec::SmallVec<[FileKind; 5]>"
-    )
-)]
-pub struct FileKinds(BitFlags<FileKind>);
-
-impl From<SmallVec<[FileKind; 5]>> for FileKinds {
-    fn from(value: SmallVec<[FileKind; 5]>) -> Self {
-        value
-            .into_iter()
-            .fold(FileKinds::default(), |mut acc, kind| {
-                acc.insert(kind);
-                acc
-            })
-    }
-}
-
-impl From<FileKinds> for SmallVec<[FileKind; 5]> {
-    fn from(value: FileKinds) -> Self {
-        value.iter().collect()
-    }
-}
-
-impl Deref for FileKinds {
-    type Target = BitFlags<FileKind>;
-
-    fn deref(&self) -> &Self::Target {
-        &self.0
-    }
-}
-
-impl DerefMut for FileKinds {
-    fn deref_mut(&mut self) -> &mut Self::Target {
-        &mut self.0
-    }
-}
-
-impl From<FileKind> for FileKinds {
-    fn from(flag: FileKind) -> Self {
-        Self(BitFlags::from(flag))
-    }
-}
-
-#[derive(Debug, Clone, Eq, PartialEq, Hash, Default)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
-pub struct PgTPath {
-    path: PathBuf,
-    /// Determines the kind of the file inside Postgres Tools. Some files are considered as configuration files, others as manifest files, and others as files to handle
-    kind: FileKinds,
-    /// Whether this path (usually a file) was fixed as a result of a format/lint/check command with the `--write` flag.
- was_written: bool, -} - -impl Deref for PgTPath { - type Target = PathBuf; - - fn deref(&self) -> &Self::Target { - &self.path - } -} - -impl PartialOrd for PgTPath { - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(other)) - } -} - -impl Ord for PgTPath { - fn cmp(&self, other: &Self) -> Ordering { - match self.kind.cmp(&other.kind) { - Ordering::Equal => self.path.cmp(&other.path), - ordering => ordering, - } - } -} - -impl PgTPath { - pub fn new(path_to_file: impl Into) -> Self { - let path = path_to_file.into(); - let kind = path.file_name().map(Self::priority).unwrap_or_default(); - Self { - path, - kind, - was_written: false, - } - } - - pub fn new_written(path_to_file: impl Into) -> Self { - let path = path_to_file.into(); - let kind = path.file_name().map(Self::priority).unwrap_or_default(); - Self { - path, - kind, - was_written: true, - } - } - - /// Creates a new [PgTPath], marked as fixed - pub fn to_written(&self) -> Self { - Self { - path: self.path.clone(), - kind: self.kind.clone(), - was_written: true, - } - } - - pub fn was_written(&self) -> bool { - self.was_written - } - - /// Accepts a file opened in read mode and saves into it - pub fn save(&mut self, content: &str) -> Result<(), std::io::Error> { - let mut file_to_write = File::create(&self.path).unwrap(); - // TODO: handle error with diagnostic - file_to_write.write_all(content.as_bytes()) - } - - /// Returns the contents of a file, if it exists - /// - /// ## Error - /// If Postgres Tools doesn't have permissions to read the file - pub fn get_buffer_from_file(&mut self) -> String { - // we assume we have permissions - read_to_string(&self.path).expect("cannot read the file to format") - } - - /// Small wrapper for [read_to_string] - pub fn read_to_string(&self) -> io::Result { - let path = self.path.as_path(); - read_to_string(path) - } - - /// The priority of the file. 
-    /// - `postgrestools.jsonc` has the highest priority
-    /// - Other files are considered as files to handle
-    fn priority(file_name: &OsStr) -> FileKinds {
-        if file_name == ConfigName::pgt_jsonc() {
-            FileKind::Config.into()
-        } else {
-            FileKind::Handleable.into()
-        }
-    }
-
-    pub fn is_config(&self) -> bool {
-        self.kind.contains(FileKind::Config)
-    }
-
-    pub fn is_ignore(&self) -> bool {
-        self.kind.contains(FileKind::Ignore)
-    }
-
-    pub fn is_to_inspect(&self) -> bool {
-        self.kind.contains(FileKind::Inspectable)
-    }
-}
-
-#[cfg(feature = "schema")]
-impl schemars::JsonSchema for FileKinds {
-    fn schema_name() -> String {
-        String::from("FileKind")
-    }
-
-    fn json_schema(r#gen: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema {
-        <Vec<FileKind>>::json_schema(r#gen)
-    }
-}
diff --git a/crates/pgt_lexer/Cargo.toml b/crates/pgt_lexer/Cargo.toml
deleted file mode 100644
index 4b218588..00000000
--- a/crates/pgt_lexer/Cargo.toml
+++ /dev/null
@@ -1,27 +0,0 @@
-[package]
-authors.workspace = true
-categories.workspace = true
-description = ""
-edition.workspace = true
-homepage.workspace = true
-keywords.workspace = true
-license.workspace = true
-name = "pgt_lexer"
-repository.workspace = true
-version = "0.0.0"
-
-
-[dependencies]
-regex = "1.9.1"
-
-pg_query.workspace = true
-pgt_diagnostics.workspace = true
-pgt_lexer_codegen.workspace = true
-
-pgt_text_size.workspace = true
-
-[dev-dependencies]
-insta.workspace = true
-
-[lib]
-doctest = false
diff --git a/crates/pgt_lexer/README.md b/crates/pgt_lexer/README.md
deleted file mode 100644
index ec61c7b2..00000000
--- a/crates/pgt_lexer/README.md
+++ /dev/null
@@ -1,8 +0,0 @@
-# pgt_lexer
-
-The `pgt_lexer` crate exposes the `lex` method, which turns an SQL query text into a `Vec<Token>`: the basis for the `pg_parser` and most of pgtools's operations.
-
-A token is always of a certain `SyntaxKind`. That `SyntaxKind` enum is derived from `libpg_query`'s protobuf file.
-
-The SQL query text is mostly lexed using the `pg_query::scan` method (`pg_query` is just a Rust wrapper around `libpg_query`).
-However, that method does not parse required whitespace tokens, so the `lex` method takes care of parsing those and merging them into the result.
diff --git a/crates/pgt_lexer/src/codegen.rs b/crates/pgt_lexer/src/codegen.rs
deleted file mode 100644
index 6c750590..00000000
--- a/crates/pgt_lexer/src/codegen.rs
+++ /dev/null
@@ -1,3 +0,0 @@
-use pgt_lexer_codegen::lexer_codegen;
-
-lexer_codegen!();
diff --git a/crates/pgt_lexer/src/diagnostics.rs b/crates/pgt_lexer/src/diagnostics.rs
deleted file mode 100644
index 9516387a..00000000
--- a/crates/pgt_lexer/src/diagnostics.rs
+++ /dev/null
@@ -1,67 +0,0 @@
-use pgt_diagnostics::{Diagnostic, MessageAndDescription};
-use pgt_text_size::TextRange;
-
-/// A specialized diagnostic for scan errors.
-///
-/// Scan diagnostics are always **fatal errors**.
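-///
-/// An illustrative example (not from the original source): scanning
-/// `select 1443ddwwd33djwdkjw13331333333333;` fails in `pg_query::scan`, and
-/// `from_pg_query_err` (below) maps the single libpg_query error back onto
-/// the input, emitting one `ScanError` with a `span` per occurrence of the
-/// quoted "at or near" text (see the `finds_all_occurrences` test).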
-#[derive(Clone, Debug, Diagnostic, PartialEq)]
-#[diagnostic(category = "syntax", severity = Fatal)]
-pub struct ScanError {
-    /// The location where the error occurred
-    #[location(span)]
-    span: Option<TextRange>,
-    #[message]
-    #[description]
-    pub message: MessageAndDescription,
-}
-
-impl ScanError {
-    pub fn from_pg_query_err(err: pg_query::Error, input: &str) -> Vec<ScanError> {
-        let err_msg = err.to_string();
-        let re = regex::Regex::new(r#"at or near "(.*?)""#).unwrap();
-        let mut diagnostics = Vec::new();
-
-        for captures in re.captures_iter(&err_msg) {
-            if let Some(matched) = captures.get(1) {
-                let search_term = matched.as_str();
-                for (idx, _) in input.match_indices(search_term) {
-                    let from = idx;
-                    let to = from + search_term.len();
-                    diagnostics.push(ScanError {
-                        span: Some(TextRange::new(
-                            from.try_into().unwrap(),
-                            to.try_into().unwrap(),
-                        )),
-                        message: MessageAndDescription::from(err_msg.clone()),
-                    });
-                }
-            }
-        }
-
-        if diagnostics.is_empty() {
-            diagnostics.push(ScanError {
-                span: None,
-                message: MessageAndDescription::from(err_msg),
-            });
-        }
-
-        diagnostics
-    }
-}
-
-#[cfg(test)]
-mod tests {
-    use crate::lex;
-
-    #[test]
-    fn finds_all_occurrences() {
-        let input =
-            "select 1443ddwwd33djwdkjw13331333333333; select 1443ddwwd33djwdkjw13331333333333;";
-        let diagnostics = lex(input).unwrap_err();
-        assert_eq!(diagnostics.len(), 2);
-        assert_eq!(diagnostics[0].span.unwrap().start(), 7.into());
-        assert_eq!(diagnostics[0].span.unwrap().end(), 39.into());
-        assert_eq!(diagnostics[1].span.unwrap().start(), 48.into());
-        assert_eq!(diagnostics[1].span.unwrap().end(), 80.into());
-    }
-}
diff --git a/crates/pgt_lexer/src/lib.rs b/crates/pgt_lexer/src/lib.rs
deleted file mode 100644
index 32bbdd42..00000000
--- a/crates/pgt_lexer/src/lib.rs
+++ /dev/null
@@ -1,293 +0,0 @@
-mod codegen;
-pub mod diagnostics;
-
-use diagnostics::ScanError;
-use pg_query::protobuf::{KeywordKind, ScanToken};
-use pgt_text_size::{TextLen, TextRange, TextSize};
-use regex::Regex;
-use std::{collections::VecDeque, sync::LazyLock};
-
-pub use crate::codegen::SyntaxKind;
-
-#[derive(Debug, Clone, PartialEq, Eq)]
-pub enum TokenType {
-    Whitespace,
-    NoKeyword,
-    UnreservedKeyword,
-    ColNameKeyword,
-    TypeFuncNameKeyword,
-    ReservedKeyword,
-}
-
-impl From<&ScanToken> for TokenType {
-    fn from(token: &ScanToken) -> TokenType {
-        match token.token {
-            // SqlComment | CComment
-            275 | 276 => TokenType::Whitespace,
-            _ => match token.keyword_kind() {
-                KeywordKind::NoKeyword => TokenType::NoKeyword,
-                KeywordKind::UnreservedKeyword => TokenType::UnreservedKeyword,
-                KeywordKind::ColNameKeyword => TokenType::ColNameKeyword,
-                KeywordKind::TypeFuncNameKeyword => TokenType::TypeFuncNameKeyword,
-                KeywordKind::ReservedKeyword => TokenType::ReservedKeyword,
-            },
-        }
-    }
-}
-
-#[derive(Debug, Clone, PartialEq, Eq)]
-pub struct Token {
-    pub kind: SyntaxKind,
-    pub text: String,
-    pub span: TextRange,
-    pub token_type: TokenType,
-}
-
-impl Token {
-    pub fn eof(pos: usize) -> Token {
-        Token {
-            kind: SyntaxKind::Eof,
-            text: "".to_string(),
-            span: TextRange::at(TextSize::try_from(pos).unwrap(), TextSize::from(0)),
-            token_type: TokenType::Whitespace,
-        }
-    }
-}
-
-pub static WHITESPACE_TOKENS: &[SyntaxKind] = &[
-    SyntaxKind::Whitespace,
-    SyntaxKind::Tab,
-    SyntaxKind::Newline,
-    SyntaxKind::SqlComment,
-    SyntaxKind::CComment,
-];
-
-static PATTERN_LEXER: LazyLock<Regex> = LazyLock::new(|| {
-    #[cfg(windows)]
-    {
-        // On Windows, treat \r\n as a single newline token
-        Regex::new(r"(?P<whitespace> +)|(?P<newline>(\r\n|\n)+)|(?P<tab>\t+)").unwrap()
-    
} - #[cfg(not(windows))] - { - // On other platforms, just check for \n - Regex::new(r"(?P +)|(?P\n+)|(?P\t+)").unwrap() - } -}); - -fn whitespace_tokens(input: &str) -> VecDeque { - let mut tokens = VecDeque::new(); - - for cap in PATTERN_LEXER.captures_iter(input) { - if let Some(whitespace) = cap.name("whitespace") { - tokens.push_back(Token { - token_type: TokenType::Whitespace, - kind: SyntaxKind::Whitespace, - text: whitespace.as_str().to_string(), - span: TextRange::new( - TextSize::from(u32::try_from(whitespace.start()).unwrap()), - TextSize::from(u32::try_from(whitespace.end()).unwrap()), - ), - }); - } else if let Some(newline) = cap.name("newline") { - tokens.push_back(Token { - token_type: TokenType::Whitespace, - kind: SyntaxKind::Newline, - text: newline.as_str().to_string(), - span: TextRange::new( - TextSize::from(u32::try_from(newline.start()).unwrap()), - TextSize::from(u32::try_from(newline.end()).unwrap()), - ), - }); - } else if let Some(tab) = cap.name("tab") { - tokens.push_back(Token { - token_type: TokenType::Whitespace, - kind: SyntaxKind::Tab, - text: tab.as_str().to_string(), - span: TextRange::new( - TextSize::from(u32::try_from(tab.start()).unwrap()), - TextSize::from(u32::try_from(tab.end()).unwrap()), - ), - }); - } else { - panic!("No match"); - }; - } - - tokens -} - -/// Turn a string of potentially valid sql code into a list of tokens, including their range in the source text. -/// -/// The implementation is primarily using libpg_querys `scan` method, and fills in the gaps with tokens that are not parsed by the library, e.g. whitespace. -pub fn lex(text: &str) -> Result, Vec> { - let mut whitespace_tokens = whitespace_tokens(text); - - // tokens from pg_query.rs - let mut pgt_query_tokens = match pg_query::scan(text) { - Ok(r) => r.tokens.into_iter().collect::>(), - Err(err) => return Err(ScanError::from_pg_query_err(err, text)), - }; - - // merge the two token lists - let mut tokens: Vec = Vec::new(); - let mut pos = TextSize::from(0); - - while pos < text.text_len() { - if !pgt_query_tokens.is_empty() - && TextSize::from(u32::try_from(pgt_query_tokens[0].start).unwrap()) == pos - { - let pgt_query_token = pgt_query_tokens.pop_front().unwrap(); - - // the lexer returns byte indices, so we need to slice - let token_text = &text[usize::try_from(pgt_query_token.start).unwrap() - ..usize::try_from(pgt_query_token.end).unwrap()]; - - let len = token_text.text_len(); - let has_whitespace = token_text.contains(" ") || token_text.contains("\n"); - tokens.push(Token { - token_type: TokenType::from(&pgt_query_token), - kind: SyntaxKind::from(&pgt_query_token), - text: token_text.to_string(), - span: TextRange::new( - TextSize::from(u32::try_from(pgt_query_token.start).unwrap()), - TextSize::from(u32::try_from(pgt_query_token.end).unwrap()), - ), - }); - pos += len; - - if has_whitespace { - while !whitespace_tokens.is_empty() - && whitespace_tokens[0].span.start() < TextSize::from(u32::from(pos)) - { - whitespace_tokens.pop_front(); - } - } - - continue; - } - - if !whitespace_tokens.is_empty() - && whitespace_tokens[0].span.start() == TextSize::from(u32::from(pos)) - { - let whitespace_token = whitespace_tokens.pop_front().unwrap(); - let len = whitespace_token.text.text_len(); - tokens.push(whitespace_token); - pos += len; - continue; - } - - let usize_pos = usize::from(pos); - panic!( - "No token found at position {:?}: '{:?}'", - pos, - text.get(usize_pos..usize_pos + 1) - ); - } - - Ok(tokens) -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn 
-    fn test_special_chars() {
-        let input = "insert into c (name, full_name) values ('Å', 1);";
-        let tokens = lex(input).unwrap();
-        assert!(!tokens.is_empty());
-    }
-
-    #[test]
-    fn test_tab_tokens() {
-        let input = "select\t1";
-        let tokens = lex(input).unwrap();
-        assert_eq!(tokens[1].kind, SyntaxKind::Tab);
-    }
-
-    #[test]
-    fn test_newline_tokens() {
-        let input = "select\n1";
-        let tokens = lex(input).unwrap();
-        assert_eq!(tokens[1].kind, SyntaxKind::Newline);
-    }
-
-    #[test]
-    fn test_consecutive_newlines() {
-        // Test with multiple consecutive newlines
-        #[cfg(windows)]
-        let input = "select\r\n\r\n1";
-        #[cfg(not(windows))]
-        let input = "select\n\n1";
-
-        let tokens = lex(input).unwrap();
-
-        // Check that we have exactly one newline token between "select" and "1"
-        assert_eq!(tokens[0].kind, SyntaxKind::Select);
-        assert_eq!(tokens[1].kind, SyntaxKind::Newline);
-        assert_eq!(tokens[2].kind, SyntaxKind::Iconst);
-    }
-
-    #[test]
-    fn test_whitespace_tokens() {
-        let input = "select 1";
-        let tokens = lex(input).unwrap();
-        assert_eq!(tokens[1].kind, SyntaxKind::Whitespace);
-    }
-
-    #[test]
-    fn test_lexer() {
-        let input = "select 1; \n -- some comment \n select 2\t";
-
-        let tokens = lex(input).unwrap();
-        let mut tokens_iter = tokens.iter();
-
-        let token = tokens_iter.next().unwrap();
-        assert_eq!(token.kind, SyntaxKind::Select);
-        assert_eq!(token.text, "select");
-
-        let token = tokens_iter.next().unwrap();
-        assert_eq!(token.kind, SyntaxKind::Whitespace);
-
-        let token = tokens_iter.next().unwrap();
-        assert_eq!(token.kind, SyntaxKind::Iconst);
-        assert_eq!(token.text, "1");
-
-        let token = tokens_iter.next().unwrap();
-        assert_eq!(token.kind, SyntaxKind::Ascii59);
-
-        let token = tokens_iter.next().unwrap();
-        assert_eq!(token.kind, SyntaxKind::Whitespace);
-
-        let token = tokens_iter.next().unwrap();
-        assert_eq!(token.kind, SyntaxKind::Newline);
-
-        let token = tokens_iter.next().unwrap();
-        assert_eq!(token.kind, SyntaxKind::Whitespace);
-
-        let token = tokens_iter.next().unwrap();
-        assert_eq!(token.kind, SyntaxKind::SqlComment);
-        assert_eq!(token.text, "-- some comment ");
-
-        let token = tokens_iter.next().unwrap();
-        assert_eq!(token.kind, SyntaxKind::Newline);
-
-        let token = tokens_iter.next().unwrap();
-        assert_eq!(token.kind, SyntaxKind::Whitespace);
-
-        let token = tokens_iter.next().unwrap();
-        assert_eq!(token.kind, SyntaxKind::Select);
-        assert_eq!(token.text, "select");
-
-        let token = tokens_iter.next().unwrap();
-        assert_eq!(token.kind, SyntaxKind::Whitespace);
-
-        let token = tokens_iter.next().unwrap();
-        assert_eq!(token.kind, SyntaxKind::Iconst);
-        assert_eq!(token.text, "2");
-
-        let token = tokens_iter.next().unwrap();
-        assert_eq!(token.kind, SyntaxKind::Tab);
-    }
-}
diff --git a/crates/pgt_lexer_codegen/Cargo.toml b/crates/pgt_lexer_codegen/Cargo.toml
deleted file mode 100644
index c5878646..00000000
--- a/crates/pgt_lexer_codegen/Cargo.toml
+++ /dev/null
@@ -1,21 +0,0 @@
-[package]
-authors.workspace = true
-categories.workspace = true
-description = ""
-edition.workspace = true
-homepage.workspace = true
-keywords.workspace = true
-license.workspace = true
-name = "pgt_lexer_codegen"
-repository.workspace = true
-version = "0.0.0"
-
-
-[dependencies]
-pgt_query_proto_parser.workspace = true
-proc-macro2.workspace = true
-quote = "1.0.33"
-
-[lib]
-doctest = false
-proc-macro = true
diff --git a/crates/pgt_lexer_codegen/README.md b/crates/pgt_lexer_codegen/README.md
deleted file mode 100644
index 843ac2f8..00000000
--- a/crates/pgt_lexer_codegen/README.md
+++ /dev/null
@@ -1,7 +0,0 @@
-# pgt_lexer_codegen
-
-This crate is responsible for reading `libpg_query`'s protobuf file and turning it into the Rust enum `SyntaxKind`.
-
-It does so by reading the file from the installed git submodule, parsing it with a protobuf parser, and using a procedural macro to generate the enum.
-
-Rust requires procedural macros to be defined in a different crate than where they're used, hence this \_codegen crate.
diff --git a/crates/pgt_lexer_codegen/src/lib.rs b/crates/pgt_lexer_codegen/src/lib.rs
deleted file mode 100644
index 8f492e4b..00000000
--- a/crates/pgt_lexer_codegen/src/lib.rs
+++ /dev/null
@@ -1,29 +0,0 @@
-mod syntax_kind;
-
-use pgt_query_proto_parser::ProtoParser;
-use quote::quote;
-use std::{env, path, path::Path};
-
-#[proc_macro]
-pub fn lexer_codegen(_item: proc_macro::TokenStream) -> proc_macro::TokenStream {
-    let parser = ProtoParser::new(&proto_file_path());
-    let proto_file = parser.parse();
-
-    let syntax_kind = syntax_kind::syntax_kind_mod(&proto_file);
-
-    quote! {
-        use pg_query::{protobuf, protobuf::ScanToken, protobuf::Token, NodeEnum, NodeRef};
-
-        #syntax_kind
-    }
-    .into()
-}
-
-fn proto_file_path() -> path::PathBuf {
-    Path::new(env!("CARGO_MANIFEST_DIR"))
-        .ancestors()
-        .nth(2)
-        .unwrap()
-        .join("libpg_query/protobuf/pg_query.proto")
-        .to_path_buf()
-}
diff --git a/crates/pgt_lexer_codegen/src/syntax_kind.rs b/crates/pgt_lexer_codegen/src/syntax_kind.rs
deleted file mode 100644
index 091b1e02..00000000
--- a/crates/pgt_lexer_codegen/src/syntax_kind.rs
+++ /dev/null
@@ -1,113 +0,0 @@
-use std::collections::HashSet;
-
-use pgt_query_proto_parser::{Node, ProtoFile, Token};
-use proc_macro2::{Ident, Literal};
-use quote::{format_ident, quote};
-
-pub fn syntax_kind_mod(proto_file: &ProtoFile) -> proc_macro2::TokenStream {
-    let custom_node_names = custom_node_names();
-    let custom_node_identifiers = custom_node_identifiers(&custom_node_names);
-
-    let node_identifiers = node_identifiers(&proto_file.nodes);
-
-    let token_identifiers = token_identifiers(&proto_file.tokens);
-    let token_value_literals = token_value_literals(&proto_file.tokens);
-
-    let syntax_kind_from_impl =
-        syntax_kind_from_impl(&node_identifiers, &token_identifiers, &token_value_literals);
-
-    let mut enum_variants = HashSet::new();
-    enum_variants.extend(&custom_node_identifiers);
-    enum_variants.extend(&node_identifiers);
-    enum_variants.extend(&token_identifiers);
-    let unique_enum_variants = enum_variants.into_iter().collect::<Vec<_>>();
-
-    quote! {
-        /// A u32 enum of all valid syntax elements (nodes and tokens) of the postgres
-        /// sql dialect, and a few custom ones that are not parsed by pg_query.rs, such
-        /// as `Whitespace`.
-        #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
-        #[repr(u32)]
-        pub enum SyntaxKind {
-            #(#unique_enum_variants),*,
-        }
-
-        #syntax_kind_from_impl
-    }
-}
-
-fn custom_node_names() -> Vec<&'static str> {
-    vec![
-        "SourceFile",
-        "Comment",
-        "Whitespace",
-        "Newline",
-        "Tab",
-        "Stmt",
-        "Eof",
-    ]
-}
-
-fn custom_node_identifiers(custom_node_names: &[&str]) -> Vec<Ident> {
-    custom_node_names
-        .iter()
-        .map(|&node_name| format_ident!("{}", node_name))
-        .collect()
-}
-
-fn node_identifiers(nodes: &[Node]) -> Vec<Ident> {
-    nodes
-        .iter()
-        .map(|node| format_ident!("{}", &node.name))
-        .collect()
-}
-
-fn token_identifiers(tokens: &[Token]) -> Vec<Ident> {
-    tokens
-        .iter()
-        .map(|token| format_ident!("{}", &token.name))
-        .collect()
-}
-
-fn token_value_literals(tokens: &[Token]) -> Vec<Literal> {
-    tokens
-        .iter()
-        .map(|token| Literal::i32_unsuffixed(token.value))
-        .collect()
-}
-
-fn syntax_kind_from_impl(
-    node_identifiers: &[Ident],
-    token_identifiers: &[Ident],
-    token_value_literals: &[Literal],
-) -> proc_macro2::TokenStream {
-    quote! {
-        /// Converts a `pg_query` node to a `SyntaxKind`
-        impl From<&NodeEnum> for SyntaxKind {
-            fn from(node: &NodeEnum) -> SyntaxKind {
-                match node {
-                    #(NodeEnum::#node_identifiers(_) => SyntaxKind::#node_identifiers),*
-                }
-            }
-
-        }
-
-        impl From<Token> for SyntaxKind {
-            fn from(token: Token) -> SyntaxKind {
-                match i32::from(token) {
-                    #(#token_value_literals => SyntaxKind::#token_identifiers),*,
-                    _ => panic!("Unknown token: {:?}", token),
-                }
-            }
-        }
-
-        impl From<&ScanToken> for SyntaxKind {
-            fn from(token: &ScanToken) -> SyntaxKind {
-                match token.token {
-                    #(#token_value_literals => SyntaxKind::#token_identifiers),*,
-                    _ => panic!("Unknown token: {:?}", token.token),
-                }
-            }
-        }
-    }
-}
diff --git a/crates/pgt_lsp/Cargo.toml b/crates/pgt_lsp/Cargo.toml
deleted file mode 100644
index f2aca70a..00000000
--- a/crates/pgt_lsp/Cargo.toml
+++ /dev/null
@@ -1,43 +0,0 @@
-[package]
-authors.workspace = true
-categories.workspace = true
-description = ""
-edition.workspace = true
-homepage.workspace = true
-keywords.workspace = true
-license.workspace = true
-name = "pgt_lsp"
-repository.workspace = true
-version = "0.0.0"
-
-
-[dependencies]
-anyhow = { workspace = true }
-biome_deserialize = { workspace = true }
-futures = "0.3.31"
-pgt_analyse = { workspace = true }
-pgt_completions = { workspace = true }
-pgt_configuration = { workspace = true }
-pgt_console = { workspace = true }
-pgt_diagnostics = { workspace = true }
-pgt_fs = { workspace = true }
-pgt_text_edit = { workspace = true }
-pgt_text_size.workspace = true
-pgt_workspace = { workspace = true }
-rustc-hash = { workspace = true }
-serde = { workspace = true, features = ["derive"] }
-serde_json = { workspace = true }
-strum = { workspace = true }
-tokio = { workspace = true, features = ["rt", "io-std"] }
-tower-lsp = { version = "0.20.0" }
-tracing = { workspace = true, features = ["attributes"] }
-
-[dev-dependencies]
-pgt_test_utils = { workspace = true }
-sqlx = { workspace = true }
-test-log = { workspace = true }
-tokio = { workspace = true, features = ["macros"] }
-tower = { version = "0.4.13", features = ["timeout"] }
-
-[lib]
-doctest = false
diff --git a/crates/pgt_lsp/src/adapters/from_lsp.rs b/crates/pgt_lsp/src/adapters/from_lsp.rs
deleted file mode 100644
index ce764a9e..00000000
--- a/crates/pgt_lsp/src/adapters/from_lsp.rs
+++ /dev/null
@@ -1,41 +0,0 @@
-use crate::adapters::line_index::LineIndex;
-use crate::adapters::{LineCol, PositionEncoding, WideLineCol};
-use anyhow::{Context, Result};
-use pgt_text_size::{TextRange, TextSize};
-use tower_lsp::lsp_types;
-
-/// The function is used to convert an LSP position to TextSize.
-pub fn offset(
-    line_index: &LineIndex,
-    position: lsp_types::Position,
-    position_encoding: PositionEncoding,
-) -> Result<TextSize> {
-    let line_col = match position_encoding {
-        PositionEncoding::Utf8 => LineCol {
-            line: position.line,
-            col: position.character,
-        },
-        PositionEncoding::Wide(enc) => {
-            let line_col = WideLineCol {
-                line: position.line,
-                col: position.character,
-            };
-            line_index.to_utf8(enc, line_col)
-        }
-    };
-
-    line_index
-        .offset(line_col)
-        .with_context(|| format!("position {position:?} is out of range"))
-}
-
-/// The function is used to convert an LSP range to TextRange.
-pub fn text_range(
-    line_index: &LineIndex,
-    range: lsp_types::Range,
-    position_encoding: PositionEncoding,
-) -> Result<TextRange> {
-    let start = offset(line_index, range.start, position_encoding)?;
-    let end = offset(line_index, range.end, position_encoding)?;
-    Ok(TextRange::new(start, end))
-}
diff --git a/crates/pgt_lsp/src/adapters/line_index.rs b/crates/pgt_lsp/src/adapters/line_index.rs
deleted file mode 100644
index 79a48b10..00000000
--- a/crates/pgt_lsp/src/adapters/line_index.rs
+++ /dev/null
@@ -1,134 +0,0 @@
-//! `LineIndex` maps flat `TextSize` offsets into `(Line, Column)`
-//! representation.
-
-use std::mem;
-
-use pgt_text_size::TextSize;
-use rustc_hash::FxHashMap;
-
-use crate::adapters::{LineCol, WideChar, WideEncoding, WideLineCol};
-
-#[derive(Clone, Debug, PartialEq, Eq)]
-pub struct LineIndex {
-    /// Offset of the beginning of each line, zero-based.
-    pub newlines: Vec<TextSize>,
-    /// List of non-ASCII characters on each line.
-    pub line_wide_chars: FxHashMap<u32, Vec<WideChar>>,
-}
-
-impl LineIndex {
-    pub fn new(text: &str) -> LineIndex {
-        let mut line_wide_chars = FxHashMap::default();
-        let mut wide_chars = Vec::new();
-
-        let mut newlines = vec![TextSize::from(0)];
-
-        let mut current_col = TextSize::from(0);
-
-        let mut line = 0;
-        for (offset, char) in text.char_indices() {
-            let char_size = TextSize::of(char);
-
-            if char == '\n' {
-                // SAFETY: the conversion from `usize` to `TextSize` can fail if `offset`
-                // is larger than 2^32. We don't support such large files.
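-                // (`TextSize` is a thin wrapper around a `u32`, which is why offsets
-                // above `u32::MAX` cannot be represented.)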
-                let char_offset = TextSize::try_from(offset).expect("TextSize overflow");
-                newlines.push(char_offset + char_size);
-
-                // Save any utf-16 characters seen in the previous line
-                if !wide_chars.is_empty() {
-                    line_wide_chars.insert(line, mem::take(&mut wide_chars));
-                }
-
-                // Prepare for processing the next line
-                current_col = TextSize::from(0);
-                line += 1;
-                continue;
-            }
-
-            if !char.is_ascii() {
-                wide_chars.push(WideChar {
-                    start: current_col,
-                    end: current_col + char_size,
-                });
-            }
-
-            current_col += char_size;
-        }
-
-        // Save any utf-16 characters seen in the last line
-        if !wide_chars.is_empty() {
-            line_wide_chars.insert(line, wide_chars);
-        }
-
-        LineIndex {
-            newlines,
-            line_wide_chars,
-        }
-    }
-
-    pub fn line_col(&self, offset: TextSize) -> Option<LineCol> {
-        let line = self.newlines.partition_point(|&it| it <= offset) - 1;
-        let line_start_offset = self.newlines.get(line)?;
-        let col = offset - line_start_offset;
-
-        Some(LineCol {
-            line: u32::try_from(line).ok()?,
-            col: col.into(),
-        })
-    }
-
-    pub fn offset(&self, line_col: LineCol) -> Option<TextSize> {
-        self.newlines
-            .get(line_col.line as usize)
-            .map(|offset| offset + TextSize::from(line_col.col))
-    }
-
-    pub fn to_wide(&self, enc: WideEncoding, line_col: LineCol) -> Option<WideLineCol> {
-        let col = self.utf8_to_wide_col(enc, line_col.line, line_col.col.into());
-        Some(WideLineCol {
-            line: line_col.line,
-            col: u32::try_from(col).ok()?,
-        })
-    }
-
-    pub fn to_utf8(&self, enc: WideEncoding, line_col: WideLineCol) -> LineCol {
-        let col = self.wide_to_utf8_col(enc, line_col.line, line_col.col);
-        LineCol {
-            line: line_col.line,
-            col: col.into(),
-        }
-    }
-
-    fn utf8_to_wide_col(&self, enc: WideEncoding, line: u32, col: TextSize) -> usize {
-        let mut res: usize = col.into();
-        if let Some(wide_chars) = self.line_wide_chars.get(&line) {
-            for c in wide_chars {
-                if c.end <= col {
-                    res -= usize::from(c.len()) - c.wide_len(enc);
-                } else {
-                    // From here on, all utf16 characters come *after* the character we are mapping,
-                    // so we don't need to take them into account
-                    break;
-                }
-            }
-        }
-        res
-    }
-
-    fn wide_to_utf8_col(&self, enc: WideEncoding, line: u32, mut col: u32) -> TextSize {
-        if let Some(wide_chars) = self.line_wide_chars.get(&line) {
-            for c in wide_chars {
-                if col > u32::from(c.start) {
-                    col += u32::from(c.len()) - c.wide_len(enc) as u32;
-                } else {
-                    // From here on, all utf16 characters come *after* the character we are mapping,
-                    // so we don't need to take them into account
-                    break;
-                }
-            }
-        }
-
-        col.into()
-    }
-}
diff --git a/crates/pgt_lsp/src/adapters/mod.rs b/crates/pgt_lsp/src/adapters/mod.rs
deleted file mode 100644
index a5375180..00000000
--- a/crates/pgt_lsp/src/adapters/mod.rs
+++ /dev/null
@@ -1,241 +0,0 @@
-//! The crate contains a set of converters to translate between `lsp-types` and `text_size` (and vice versa) types.
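-//!
-//! A minimal round-trip sketch (illustrative only; assumes a client that negotiated
-//! the UTF-8 position encoding):
-//!
-//!     let line_index = line_index::LineIndex::new("select 1;\nselect 2;");
-//!     let position = lsp_types::Position { line: 1, character: 0 };
-//!     // "select 1;\n" is 10 bytes, so line 1 starts at offset 10.
-//!     let offset = from_lsp::offset(&line_index, position, PositionEncoding::Utf8)?;
-//!     assert_eq!(offset, pgt_text_size::TextSize::from(10));
-//!     let back = to_lsp::position(&line_index, offset, PositionEncoding::Utf8)?;
-//!     assert_eq!(back, position);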
-
-use pgt_text_size::TextSize;
-use tower_lsp::lsp_types::{ClientCapabilities, Position, PositionEncodingKind, Url};
-
-use crate::session::Session;
-
-pub mod from_lsp;
-pub mod line_index;
-pub mod to_lsp;
-
-pub fn negotiated_encoding(capabilities: &ClientCapabilities) -> PositionEncoding {
-    let client_encodings = match &capabilities.general {
-        Some(general) => general.position_encodings.as_deref().unwrap_or_default(),
-        None => &[],
-    };
-
-    for enc in client_encodings {
-        if enc == &PositionEncodingKind::UTF8 {
-            return PositionEncoding::Utf8;
-        } else if enc == &PositionEncodingKind::UTF32 {
-            return PositionEncoding::Wide(WideEncoding::Utf32);
-        }
-        // NB: intentionally prefer just about anything else to utf-16.
-    }
-
-    PositionEncoding::Wide(WideEncoding::Utf16)
-}
-
-pub fn get_cursor_position(
-    session: &Session,
-    url: &Url,
-    position: Position,
-) -> anyhow::Result<TextSize> {
-    let client_capabilities = session
-        .client_capabilities()
-        .expect("Client capabilities not established for current session.");
-
-    let line_index = session
-        .document(url)
-        .map(|doc| doc.line_index)
-        .map_err(|_| anyhow::anyhow!("Document not found."))?;
-
-    let cursor_pos = from_lsp::offset(
-        &line_index,
-        position,
-        negotiated_encoding(client_capabilities),
-    )?;
-
-    Ok(cursor_pos)
-}
-
-#[derive(Clone, Copy, Debug)]
-pub enum PositionEncoding {
-    Utf8,
-    Wide(WideEncoding),
-}
-
-#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
-pub enum WideEncoding {
-    Utf16,
-    Utf32,
-}
-
-#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
-pub struct LineCol {
-    /// Zero-based
-    pub line: u32,
-    /// Zero-based utf8 offset
-    pub col: u32,
-}
-
-/// Deliberately not a generic type and different from `LineCol`.
-#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
-pub struct WideLineCol {
-    /// Zero-based
-    pub line: u32,
-    /// Zero-based
-    pub col: u32,
-}
-
-#[derive(Clone, Debug, Hash, PartialEq, Eq)]
-pub struct WideChar {
-    /// Start offset of a character inside a line, zero-based
-    pub start: TextSize,
-    /// End offset of a character inside a line, zero-based
-    pub end: TextSize,
-}
-
-impl WideChar {
-    /// Returns the length in 8-bit UTF-8 code units.
-    fn len(&self) -> TextSize {
-        self.end - self.start
-    }
-
-    /// Returns the length in UTF-16 or UTF-32 code units.
-    fn wide_len(&self, enc: WideEncoding) -> usize {
-        match enc {
-            WideEncoding::Utf16 => {
-                if self.len() == TextSize::from(4) {
-                    2
-                } else {
-                    1
-                }
-            }
-
-            WideEncoding::Utf32 => 1,
-        }
-    }
-}
-
-#[cfg(test)]
-mod tests {
-    use crate::adapters::WideEncoding::{Utf16, Utf32};
-    use crate::adapters::from_lsp::offset;
-    use crate::adapters::line_index::LineIndex;
-    use crate::adapters::to_lsp::position;
-    use crate::adapters::{LineCol, PositionEncoding, WideEncoding};
-    use pgt_text_size::TextSize;
-    use tower_lsp::lsp_types::Position;
-
-    macro_rules! check_conversion {
-        ($line_index:ident : $position:expr_2021 => $text_size:expr_2021 ) => {
-            let position_encoding = PositionEncoding::Wide(WideEncoding::Utf16);
-
-            let offset = offset(&$line_index, $position, position_encoding).ok();
-            assert_eq!(offset, Some($text_size));
-
-            let position = position(&$line_index, offset.unwrap(), position_encoding).ok();
-
-            assert_eq!(position, Some($position));
-        };
-    }
-
-    #[test]
-    fn empty_string() {
-        let line_index = LineIndex::new("");
-        check_conversion!(line_index: Position { line: 0, character: 0 } => TextSize::from(0));
-    }
-
-    #[test]
-    fn empty_line() {
-        let line_index = LineIndex::new("\n\n");
-        check_conversion!(line_index: Position { line: 1, character: 0 } => TextSize::from(1));
-    }
-
-    #[test]
-    fn line_end() {
-        let line_index = LineIndex::new("abc\ndef\nghi");
-        check_conversion!(line_index: Position { line: 1, character: 3 } => TextSize::from(7));
-    }
-
-    #[test]
-    fn out_of_bounds_line() {
-        let line_index = LineIndex::new("abcde\nfghij\n");
-
-        let offset = line_index.offset(LineCol { line: 5, col: 0 });
-        assert!(offset.is_none());
-    }
-
-    #[test]
-    fn with_tabs() {
-        let line_index = LineIndex::new(
-            r#"
-select
-    email,
-    id
-from auth.users u
-join public.client_identities c on u.id = c.user_id;
-"#
-            .trim(),
-        );
-
-        // on `i` of `id` in the select
-        // 22 because of:
-        // selectemail,i = 13
-        // 8 spaces, 2 newlines = 23 characters
-        // it's zero indexed => index 22
-        check_conversion!(line_index: Position { line: 2, character: 4 } => TextSize::from(22));
-    }
-
-    #[test]
-    fn unicode() {
-        let line_index = LineIndex::new("'Jan 1, 2018 – Jan 1, 2019'");
-
-        check_conversion!(line_index: Position { line: 0, character: 0 } => TextSize::from(0));
-        check_conversion!(line_index: Position { line: 0, character: 1 } => TextSize::from(1));
-        check_conversion!(line_index: Position { line: 0, character: 12 } => TextSize::from(12));
-        check_conversion!(line_index: Position { line: 0, character: 13 } => TextSize::from(15));
-        check_conversion!(line_index: Position { line: 0, character: 14 } => TextSize::from(18));
-        check_conversion!(line_index: Position { line: 0, character: 15 } => TextSize::from(21));
-        check_conversion!(line_index: Position { line: 0, character: 26 } => TextSize::from(32));
-        check_conversion!(line_index: Position { line: 0, character: 27 } => TextSize::from(33));
-    }
-
-    #[ignore]
-    #[test]
-    fn test_every_chars() {
-        let text: String = {
-            let mut chars: Vec<char> = ((0 as char)..char::MAX).collect();
-            chars.extend("\n".repeat(chars.len() / 16).chars());
-            chars.into_iter().collect()
-        };
-
-        let line_index = LineIndex::new(&text);
-
-        let mut lin_col = LineCol { line: 0, col: 0 };
-        let mut col_utf16 = 0;
-        let mut col_utf32 = 0;
-        for (offset, char) in text.char_indices() {
-            let got_offset = line_index.offset(lin_col).unwrap();
-            assert_eq!(usize::from(got_offset), offset);
-
-            let got_lin_col = line_index.line_col(got_offset).unwrap();
-            assert_eq!(got_lin_col, lin_col);
-
-            for enc in [Utf16, Utf32] {
-                let wide_lin_col = line_index.to_wide(enc, lin_col).unwrap();
-                let got_lin_col = line_index.to_utf8(enc, wide_lin_col);
-                assert_eq!(got_lin_col, lin_col);
-
-                let want_col = match enc {
-                    Utf16 => col_utf16,
-                    Utf32 => col_utf32,
-                };
-                assert_eq!(wide_lin_col.col, want_col)
-            }
-
-            if char == '\n' {
-                lin_col.line += 1;
-                lin_col.col = 0;
-                col_utf16 = 0;
-                col_utf32 = 0;
-            } else {
-                lin_col.col += char.len_utf8() as u32;
-                col_utf16 += char.len_utf16() as u32;
-                col_utf32 += 1;
-            }
-        }
-    }
-}
diff --git a/crates/pgt_lsp/src/adapters/to_lsp.rs b/crates/pgt_lsp/src/adapters/to_lsp.rs
deleted file mode 100644
index 71a6b3c4..00000000
--- a/crates/pgt_lsp/src/adapters/to_lsp.rs
+++ /dev/null
@@ -1,39 +0,0 @@
-use crate::adapters::PositionEncoding;
-use crate::adapters::line_index::LineIndex;
-use anyhow::{Context, Result};
-use pgt_text_size::{TextRange, TextSize};
-use tower_lsp::lsp_types;
-
-/// The function is used to convert TextSize to an LSP position.
-pub fn position(
-    line_index: &LineIndex,
-    offset: TextSize,
-    position_encoding: PositionEncoding,
-) -> Result<lsp_types::Position> {
-    let line_col = line_index
-        .line_col(offset)
-        .with_context(|| format!("could not convert offset {offset:?} into a line-column index"))?;
-
-    let position = match position_encoding {
-        PositionEncoding::Utf8 => lsp_types::Position::new(line_col.line, line_col.col),
-        PositionEncoding::Wide(enc) => {
-            let line_col = line_index
-                .to_wide(enc, line_col)
-                .with_context(|| format!("could not convert {line_col:?} into wide line column"))?;
-            lsp_types::Position::new(line_col.line, line_col.col)
-        }
-    };
-
-    Ok(position)
-}
-
-/// The function is used to convert TextRange to an LSP range.
-pub fn range(
-    line_index: &LineIndex,
-    range: TextRange,
-    position_encoding: PositionEncoding,
-) -> Result<lsp_types::Range> {
-    let start = position(line_index, range.start(), position_encoding)?;
-    let end = position(line_index, range.end(), position_encoding)?;
-    Ok(lsp_types::Range::new(start, end))
-}
diff --git a/crates/pgt_lsp/src/capabilities.rs b/crates/pgt_lsp/src/capabilities.rs
deleted file mode 100644
index acfc60ed..00000000
--- a/crates/pgt_lsp/src/capabilities.rs
+++ /dev/null
@@ -1,69 +0,0 @@
-use crate::adapters::{PositionEncoding, WideEncoding, negotiated_encoding};
-use pgt_workspace::features::code_actions::CommandActionCategory;
-use strum::IntoEnumIterator;
-use tower_lsp::lsp_types::{
-    ClientCapabilities, CompletionOptions, ExecuteCommandOptions, PositionEncodingKind,
-    SaveOptions, ServerCapabilities, TextDocumentSyncCapability, TextDocumentSyncKind,
-    TextDocumentSyncOptions, TextDocumentSyncSaveOptions, WorkDoneProgressOptions,
-};
-
-use crate::handlers::code_actions::command_id;
-
-/// The capabilities to send from server as part of [`InitializeResult`]
-///
-/// [`InitializeResult`]: lspower::lsp::InitializeResult
-pub(crate) fn server_capabilities(capabilities: &ClientCapabilities) -> ServerCapabilities {
-    ServerCapabilities {
-        position_encoding: Some(match negotiated_encoding(capabilities) {
-            PositionEncoding::Utf8 => PositionEncodingKind::UTF8,
-            PositionEncoding::Wide(wide) => match wide {
-                WideEncoding::Utf16 => PositionEncodingKind::UTF16,
-                WideEncoding::Utf32 => PositionEncodingKind::UTF32,
-            },
-        }),
-        text_document_sync: Some(TextDocumentSyncCapability::Options(
-            TextDocumentSyncOptions {
-                open_close: Some(true),
-                change: Some(TextDocumentSyncKind::INCREMENTAL),
-                will_save: None,
-                will_save_wait_until: None,
-                save: Some(TextDocumentSyncSaveOptions::SaveOptions(SaveOptions {
-                    include_text: Some(false),
-                })),
-            },
-        )),
-        completion_provider: Some(CompletionOptions {
-            // currently not supporting the completionItem/resolve request.
-            // The request is used to get more information about a simple CompletionItem.
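-            // (With resolve support, expensive fields such as documentation could be
-            // computed lazily instead of being sent with every item.)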
-            resolve_provider: None,
-
-            trigger_characters: Some(vec![".".to_owned(), " ".to_owned(), "(".to_owned()]),
-
-            // No character will lead to automatically inserting the selected completion-item
-            all_commit_characters: None,
-
-            // No special support for completionItem/resolve requests
-            completion_item: None,
-
-            // We do not report the progress of the completion process
-            work_done_progress_options: WorkDoneProgressOptions {
-                work_done_progress: None,
-            },
-        }),
-        execute_command_provider: Some(ExecuteCommandOptions {
-            commands: CommandActionCategory::iter()
-                .map(|c| command_id(&c))
-                .collect::<Vec<_>>(),
-
-            ..Default::default()
-        }),
-        document_formatting_provider: None,
-        document_range_formatting_provider: None,
-        document_on_type_formatting_provider: None,
-        code_action_provider: Some(tower_lsp::lsp_types::CodeActionProviderCapability::Simple(
-            true,
-        )),
-        rename_provider: None,
-        ..Default::default()
-    }
-}
diff --git a/crates/pgt_lsp/src/diagnostics.rs b/crates/pgt_lsp/src/diagnostics.rs
deleted file mode 100644
index 2c7acbba..00000000
--- a/crates/pgt_lsp/src/diagnostics.rs
+++ /dev/null
@@ -1,42 +0,0 @@
-use anyhow::Error;
-use pgt_workspace::WorkspaceError;
-use std::fmt::{Display, Formatter};
-
-#[derive(Debug)]
-pub enum LspError {
-    WorkspaceError(WorkspaceError),
-    Anyhow(anyhow::Error),
-    Error(pgt_diagnostics::Error),
-}
-
-impl From<WorkspaceError> for LspError {
-    fn from(value: WorkspaceError) -> Self {
-        Self::WorkspaceError(value)
-    }
-}
-
-impl From<pgt_diagnostics::Error> for LspError {
-    fn from(value: pgt_diagnostics::Error) -> Self {
-        Self::Error(value)
-    }
-}
-
-impl From<Error> for LspError {
-    fn from(value: Error) -> Self {
-        Self::Anyhow(value)
-    }
-}
-
-impl Display for LspError {
-    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
-        match self {
-            LspError::WorkspaceError(err) => {
-                write!(f, "{err}")
-            }
-            LspError::Anyhow(err) => {
-                write!(f, "{err}")
-            }
-            LspError::Error(err) => err.description(f),
-        }
-    }
-}
diff --git a/crates/pgt_lsp/src/documents.rs b/crates/pgt_lsp/src/documents.rs
deleted file mode 100644
index c0cf85f9..00000000
--- a/crates/pgt_lsp/src/documents.rs
+++ /dev/null
@@ -1,19 +0,0 @@
-use crate::adapters::line_index::LineIndex;
-
-/// Represents an open [`textDocument`]. Can be cheaply cloned.
-///
-/// [`textDocument`]: https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocumentItem
-#[derive(Clone)]
-pub(crate) struct Document {
-    pub(crate) version: i32,
-    pub(crate) line_index: LineIndex,
-}
-
-impl Document {
-    pub(crate) fn new(version: i32, text: &str) -> Self {
-        Self {
-            version,
-            line_index: LineIndex::new(text),
-        }
-    }
-}
diff --git a/crates/pgt_lsp/src/handlers.rs b/crates/pgt_lsp/src/handlers.rs
deleted file mode 100644
index 103bef2f..00000000
--- a/crates/pgt_lsp/src/handlers.rs
+++ /dev/null
@@ -1,3 +0,0 @@
-pub(crate) mod code_actions;
-pub(crate) mod completions;
-pub(crate) mod text_document;
diff --git a/crates/pgt_lsp/src/handlers/code_actions.rs b/crates/pgt_lsp/src/handlers/code_actions.rs
deleted file mode 100644
index a10bee03..00000000
--- a/crates/pgt_lsp/src/handlers/code_actions.rs
+++ /dev/null
@@ -1,111 +0,0 @@
-use crate::{adapters::get_cursor_position, session::Session};
-use anyhow::{Result, anyhow};
-use tower_lsp::lsp_types::{
-    self, CodeAction, CodeActionDisabled, CodeActionOrCommand, Command, ExecuteCommandParams,
-    MessageType,
-};
-
-use pgt_workspace::features::code_actions::{
-    CodeActionKind, CodeActionsParams, CommandActionCategory, ExecuteStatementParams,
-};
-
-#[tracing::instrument(level = "debug", skip(session), err)]
-pub fn get_actions(
-    session: &Session,
-    params: lsp_types::CodeActionParams,
-) -> Result<Vec<CodeActionOrCommand>> {
-    let url = params.text_document.uri;
-    let path = session.file_path(&url)?;
-
-    let cursor_position = get_cursor_position(session, &url, params.range.start)?;
-
-    let workspace_actions = session.workspace.pull_code_actions(CodeActionsParams {
-        path,
-        cursor_position,
-        only: vec![],
-        skip: vec![],
-    })?;
-
-    let actions: Vec<CodeAction> = workspace_actions
-        .actions
-        .into_iter()
-        .filter_map(|action| match action.kind {
-            CodeActionKind::Command(command) => {
-                let command_id: String = command_id(&command.category);
-                let title = action.title;
-
-                match command.category {
-                    CommandActionCategory::ExecuteStatement(stmt_id) => Some(CodeAction {
-                        title: title.clone(),
-                        kind: Some(lsp_types::CodeActionKind::EMPTY),
-                        command: Some({
-                            Command {
-                                title: title.clone(),
-                                command: command_id,
-                                arguments: Some(vec![
-                                    serde_json::to_value(&stmt_id).unwrap(),
-                                    serde_json::to_value(&url).unwrap(),
-                                ]),
-                            }
-                        }),
-                        disabled: action
-                            .disabled_reason
-                            .map(|reason| CodeActionDisabled { reason }),
-                        ..Default::default()
-                    }),
-                }
-            }
-
-            _ => todo!(),
-        })
-        .collect();
-
-    Ok(actions
-        .into_iter()
-        .map(CodeActionOrCommand::CodeAction)
-        .collect())
-}
-
-pub fn command_id(command: &CommandActionCategory) -> String {
-    match command {
-        CommandActionCategory::ExecuteStatement(_) => "pgt.executeStatement".into(),
-    }
-}
-
-#[tracing::instrument(level = "debug", skip(session), err)]
-pub async fn execute_command(
-    session: &Session,
-    params: ExecuteCommandParams,
-) -> anyhow::Result<Option<serde_json::Value>> {
-    let command = params.command;
-
-    match command.as_str() {
-        "pgt.executeStatement" => {
-            let statement_id = serde_json::from_value::<pgt_workspace::workspace::StatementId>(
-                params.arguments[0].clone(),
-            )?;
-            let doc_url: lsp_types::Url = serde_json::from_value(params.arguments[1].clone())?;
-
-            let path = session.file_path(&doc_url)?;
-
-            let result = session
-                .workspace
-                .execute_statement(ExecuteStatementParams { statement_id, path })?;
-
-            /*
-             * Updating all diagnostics: the changes caused by the statement execution
-             * might affect many files.
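-             * A full refresh is simpler here than tracking exactly which open
-             * documents the executed statement touched.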
-             */
-            session.update_all_diagnostics().await;
-
-            session
-                .client
-                .show_message(MessageType::INFO, result.message)
-                .await;
-
-            Ok(None)
-        }
-
-        any => Err(anyhow!(format!("Unknown command: {}", any))),
-    }
-}
diff --git a/crates/pgt_lsp/src/handlers/completions.rs b/crates/pgt_lsp/src/handlers/completions.rs
deleted file mode 100644
index 7e901c79..00000000
--- a/crates/pgt_lsp/src/handlers/completions.rs
+++ /dev/null
@@ -1,80 +0,0 @@
-use crate::{
-    adapters::{self, get_cursor_position},
-    diagnostics::LspError,
-    session::Session,
-};
-use anyhow::Result;
-use pgt_workspace::{WorkspaceError, features::completions::GetCompletionsParams};
-use tower_lsp::lsp_types::{
-    self, CompletionItem, CompletionItemLabelDetails, InsertTextFormat, TextEdit,
-};
-
-#[tracing::instrument(level = "debug", skip(session), err)]
-pub fn get_completions(
-    session: &Session,
-    params: lsp_types::CompletionParams,
-) -> Result<lsp_types::CompletionResponse, LspError> {
-    let url = params.text_document_position.text_document.uri;
-    let path = session.file_path(&url)?;
-
-    let doc = session.document(&url)?;
-    let encoding = adapters::negotiated_encoding(session.client_capabilities().unwrap());
-
-    let completion_result = match session.workspace.get_completions(GetCompletionsParams {
-        path,
-        position: get_cursor_position(session, &url, params.text_document_position.position)?,
-    }) {
-        Ok(result) => result,
-        Err(e) => match e {
-            WorkspaceError::DatabaseConnectionError(_) => {
-                return Ok(lsp_types::CompletionResponse::Array(vec![]));
-            }
-            _ => {
-                return Err(e.into());
-            }
-        },
-    };
-
-    let items: Vec<CompletionItem> = completion_result
-        .into_iter()
-        .map(|i| CompletionItem {
-            label: i.label,
-            label_details: Some(CompletionItemLabelDetails {
-                description: Some(i.description),
-                detail: i
-                    .detail
-                    .map(|s| format!(" {}", s))
-                    .or(Some(format!(" {}", i.kind))),
-            }),
-            preselect: Some(i.preselected),
-            sort_text: Some(i.sort_text),
-            insert_text_format: if i.completion_text.as_ref().is_some_and(|c| c.is_snippet) {
-                Some(InsertTextFormat::SNIPPET)
-            } else {
-                Some(InsertTextFormat::PLAIN_TEXT)
-            },
-            text_edit: i.completion_text.map(|c| {
-                lsp_types::CompletionTextEdit::Edit(TextEdit {
-                    new_text: c.text,
-                    range: adapters::to_lsp::range(&doc.line_index, c.range, encoding).unwrap(),
-                })
-            }),
-            kind: Some(to_lsp_types_completion_item_kind(i.kind)),
-            ..CompletionItem::default()
-        })
-        .collect();
-
-    Ok(lsp_types::CompletionResponse::Array(items))
-}
-
-fn to_lsp_types_completion_item_kind(
-    pg_comp_kind: pgt_completions::CompletionItemKind,
-) -> lsp_types::CompletionItemKind {
-    match pg_comp_kind {
-        pgt_completions::CompletionItemKind::Function => lsp_types::CompletionItemKind::FUNCTION,
-        pgt_completions::CompletionItemKind::Table => lsp_types::CompletionItemKind::CLASS,
-        pgt_completions::CompletionItemKind::Column => lsp_types::CompletionItemKind::FIELD,
-        pgt_completions::CompletionItemKind::Schema => lsp_types::CompletionItemKind::CLASS,
-        pgt_completions::CompletionItemKind::Policy => lsp_types::CompletionItemKind::CONSTANT,
-    }
-}
diff --git a/crates/pgt_lsp/src/handlers/text_document.rs b/crates/pgt_lsp/src/handlers/text_document.rs
deleted file mode 100644
index 63250ef5..00000000
--- a/crates/pgt_lsp/src/handlers/text_document.rs
+++ /dev/null
@@ -1,115 +0,0 @@
-use crate::adapters::from_lsp;
-use crate::{
-    diagnostics::LspError, documents::Document, session::Session, utils::apply_document_changes,
-};
-use anyhow::Result;
-use pgt_workspace::workspace::{
-    ChangeFileParams, ChangeParams, CloseFileParams, GetFileContentParams, OpenFileParams,
-};
-use tower_lsp::lsp_types;
-use tracing::error;
-
-/// Handler for `textDocument/didOpen` LSP notification
-#[tracing::instrument(level = "debug", skip(session), err)]
-pub(crate) async fn did_open(
-    session: &Session,
-    params: lsp_types::DidOpenTextDocumentParams,
-) -> Result<()> {
-    let url = params.text_document.uri;
-    let version = params.text_document.version;
-    let content = params.text_document.text;
-
-    let path = session.file_path(&url)?;
-    let doc = Document::new(version, &content);
-
-    session.workspace.open_file(OpenFileParams {
-        path,
-        version,
-        content,
-    })?;
-
-    session.insert_document(url.clone(), doc);
-
-    if let Err(err) = session.update_diagnostics(url).await {
-        error!("Failed to update diagnostics: {}", err);
-    }
-
-    Ok(())
-}
-
-/// Handler for `textDocument/didChange` LSP notification
-#[tracing::instrument(level = "debug", skip(session), err)]
-pub(crate) async fn did_change(
-    session: &Session,
-    params: lsp_types::DidChangeTextDocumentParams,
-) -> Result<(), LspError> {
-    let url = params.text_document.uri;
-    let version = params.text_document.version;
-
-    let pgt_path = session.file_path(&url)?;
-
-    let old_doc = session.document(&url)?;
-    let old_text = session.workspace.get_file_content(GetFileContentParams {
-        path: pgt_path.clone(),
-    })?;
-
-    let start = params
-        .content_changes
-        .iter()
-        .rev()
-        .position(|change| change.range.is_none())
-        .map_or(0, |idx| params.content_changes.len() - idx - 1);
-
-    let text = apply_document_changes(
-        session.position_encoding(),
-        old_text,
-        &params.content_changes[start..],
-    );
-
-    session.workspace.change_file(ChangeFileParams {
-        path: pgt_path,
-        version,
-        changes: params.content_changes[start..]
-            .iter()
-            .map(|c| ChangeParams {
-                range: c.range.and_then(|r| {
-                    from_lsp::text_range(&old_doc.line_index, r, session.position_encoding()).ok()
-                }),
-                text: c.text.clone(),
-            })
-            .collect(),
-    })?;
-
-    session.insert_document(url.clone(), Document::new(version, &text));
-
-    if let Err(err) = session.update_diagnostics(url).await {
-        error!("Failed to update diagnostics: {}", err);
-    }
-
-    Ok(())
-}
-
-/// Handler for `textDocument/didClose` LSP notification
-#[tracing::instrument(level = "debug", skip(session), err)]
-pub(crate) async fn did_close(
-    session: &Session,
-    params: lsp_types::DidCloseTextDocumentParams,
-) -> Result<()> {
-    let url = params.text_document.uri;
-    let pgt_path = session.file_path(&url)?;
-
-    session
-        .workspace
-        .close_file(CloseFileParams { path: pgt_path })?;
-
-    session.remove_document(&url);
-
-    let diagnostics = vec![];
-    let version = None;
-    session
-        .client
-        .publish_diagnostics(url, diagnostics, version)
-        .await;
-
-    Ok(())
-}
diff --git a/crates/pgt_lsp/src/lib.rs b/crates/pgt_lsp/src/lib.rs
deleted file mode 100644
index cc014313..00000000
--- a/crates/pgt_lsp/src/lib.rs
+++ /dev/null
@@ -1,10 +0,0 @@
-mod adapters;
-mod capabilities;
-mod diagnostics;
-mod documents;
-mod handlers;
-mod server;
-mod session;
-mod utils;
-
-pub use crate::server::{LSPServer, ServerConnection, ServerFactory};
diff --git a/crates/pgt_lsp/src/server.rs b/crates/pgt_lsp/src/server.rs
deleted file mode 100644
index 4c05c0e4..00000000
--- a/crates/pgt_lsp/src/server.rs
+++ /dev/null
@@ -1,435 +0,0 @@
-use crate::capabilities::server_capabilities;
-use crate::handlers;
-use crate::session::{CapabilitySet, CapabilityStatus, Session, SessionHandle, SessionKey};
-use crate::utils::{into_lsp_error, panic_to_lsp_error};
-use futures::FutureExt;
-use futures::future::ready;
-use pgt_fs::{ConfigName, FileSystem, OsFileSystem};
-use pgt_workspace::{DynRef, Workspace, workspace};
-use rustc_hash::FxHashMap;
-use serde_json::json;
-use std::panic::RefUnwindSafe;
-use std::path::PathBuf;
-use std::sync::atomic::{AtomicBool, AtomicU64, Ordering};
-use std::sync::{Arc, Mutex};
-use tokio::io::{AsyncRead, AsyncWrite};
-use tokio::sync::Notify;
-use tokio::task::spawn_blocking;
-use tower_lsp::jsonrpc::Result as LspResult;
-use tower_lsp::{ClientSocket, lsp_types::*};
-use tower_lsp::{LanguageServer, LspService, Server};
-use tracing::{error, info};
-
-pub struct LSPServer {
-    session: SessionHandle,
-    /// Map of all sessions connected to the same [ServerFactory] as this [LSPServer].
-    sessions: Sessions,
-    /// If this is true the server will broadcast a shutdown signal once the
-    /// last client disconnected
-    stop_on_disconnect: bool,
-    /// This shared flag is set to true once at least one session has been
-    /// initialized on this server instance
-    is_initialized: Arc<AtomicBool>,
-}
-
-impl RefUnwindSafe for LSPServer {}
-
-impl LSPServer {
-    fn new(
-        session: SessionHandle,
-        sessions: Sessions,
-        stop_on_disconnect: bool,
-        is_initialized: Arc<AtomicBool>,
-    ) -> Self {
-        Self {
-            session,
-            sessions,
-            stop_on_disconnect,
-            is_initialized,
-        }
-    }
-
-    async fn setup_capabilities(&self) {
-        let mut capabilities = CapabilitySet::default();
-
-        capabilities.add_capability(
-            "pgt_did_change_extension_settings",
-            "workspace/didChangeConfiguration",
-            if self.session.can_register_did_change_configuration() {
-                CapabilityStatus::Enable(None)
-            } else {
-                CapabilityStatus::Disable
-            },
-        );
-
-        capabilities.add_capability(
-            "pgt_did_change_workspace_settings",
-            "workspace/didChangeWatchedFiles",
-            match self.session.base_path() {
-                Some(base_path) => CapabilityStatus::Enable(Some(json!(
-                    DidChangeWatchedFilesRegistrationOptions {
-                        watchers: vec![FileSystemWatcher {
-                            glob_pattern: GlobPattern::String(format!(
-                                "{}/{}",
-                                base_path.display(),
-                                ConfigName::pgt_jsonc()
-                            )),
-                            kind: Some(WatchKind::all()),
-                        },],
-                    }
-                ))),
-                _ => CapabilityStatus::Disable,
-            },
-        );
-
-        self.session.register_capabilities(capabilities).await;
-    }
-}
-
-#[tower_lsp::async_trait]
-impl LanguageServer for LSPServer {
-    #[allow(deprecated)]
-    #[tracing::instrument(
-        level = "info",
-        skip_all,
-        fields(
-            root_uri = params.root_uri.as_ref().map(display),
-            capabilities = debug(&params.capabilities),
-            client_info = params.client_info.as_ref().map(debug),
-            workspace_folders = params.workspace_folders.as_ref().map(debug),
-        )
-    )]
-    async fn initialize(&self, params: InitializeParams) -> LspResult<InitializeResult> {
-        info!("Starting Language Server...");
-        self.is_initialized.store(true, Ordering::Relaxed);
-
-        let server_capabilities = server_capabilities(&params.capabilities);
-
-        self.session.initialize(
-            params.capabilities,
-            params.root_uri,
-            params.workspace_folders,
-        );
-
-        //
-        let init = InitializeResult {
-            capabilities: server_capabilities,
-            server_info: Some(ServerInfo {
-                name: String::from(env!("CARGO_PKG_NAME")),
-                version: Some(pgt_configuration::VERSION.to_string()),
-            }),
-        };
-
-        Ok(init)
-    }
-
-    #[tracing::instrument(level = "info", skip_all)]
-    async fn initialized(&self, params: InitializedParams) {
-        let _ = params;
-
-        info!(
-            "Attempting to load the configuration from '{}' file",
-            ConfigName::pgt_jsonc()
-        );
-
-        futures::join!(self.session.load_workspace_settings(None));
-
-        let msg = format!("Server initialized with PID: {}", std::process::id());
-        self.session
-            .client
-            .log_message(MessageType::INFO, msg)
-            .await;
-
-        self.setup_capabilities().await;
-
-        // Diagnostics are disabled by default, so update them after fetching workspace config
-        self.session.update_all_diagnostics().await;
-    }
-
-    #[tracing::instrument(level = "info", skip_all)]
-    async fn shutdown(&self) -> LspResult<()> {
-        Ok(())
-    }
-
-    #[tracing::instrument(level = "info", skip_all)]
-    async fn did_change_configuration(&self, params: DidChangeConfigurationParams) {
-        self.session
-            .load_workspace_settings(serde_json::from_value(params.settings).ok())
-            .await;
-        self.setup_capabilities().await;
-        self.session.update_all_diagnostics().await;
-    }
-
-    #[tracing::instrument(level = "trace", skip_all)]
-    async fn did_change_watched_files(&self, params: DidChangeWatchedFilesParams) {
-        let file_paths = params
-            .changes
-            .iter()
-            .map(|change| change.uri.to_file_path());
-        for file_path in file_paths {
-            match file_path {
-                Ok(file_path) => {
-                    let base_path = self.session.base_path();
-                    if let Some(base_path) = base_path {
-                        let possible_config_json = file_path.strip_prefix(&base_path);
-                        if let Ok(watched_file) = possible_config_json {
-                            if ConfigName::file_names()
-                                .contains(&&*watched_file.display().to_string())
-                            {
-                                self.session.load_workspace_settings(None).await;
-                                self.setup_capabilities().await;
-                                // self.session.update_all_diagnostics().await;
-                                // for now we are only interested in the configuration file,
-                                // so it's OK to exit the loop
-                                break;
-                            }
-                        }
-                    }
-                }
-                Err(_) => {
-                    error!(
-                        "The Workspace root URI {file_path:?} could not be parsed as a filesystem path"
-                    );
-                    continue;
-                }
-            }
-        }
-    }
-
-    #[tracing::instrument(level = "trace", skip_all)]
-    async fn did_open(&self, params: DidOpenTextDocumentParams) {
-        handlers::text_document::did_open(&self.session, params)
-            .await
-            .ok();
-    }
-
-    #[tracing::instrument(level = "trace", skip_all)]
-    async fn did_change(&self, params: DidChangeTextDocumentParams) {
-        if let Err(e) = handlers::text_document::did_change(&self.session, params).await {
-            error!("{}", e);
-        };
-    }
-
-    #[tracing::instrument(level = "trace", skip_all)]
-    async fn did_close(&self, params: DidCloseTextDocumentParams) {
-        handlers::text_document::did_close(&self.session, params)
-            .await
-            .ok();
-    }
-
-    #[tracing::instrument(level = "trace", skip_all)]
-    async fn completion(&self, params: CompletionParams) -> LspResult<Option<CompletionResponse>> {
-        match handlers::completions::get_completions(&self.session, params) {
-            Ok(result) => LspResult::Ok(Some(result)),
-            Err(e) => LspResult::Err(into_lsp_error(e)),
-        }
-    }
-
-    #[tracing::instrument(level = "trace", skip(self))]
-    async fn code_action(&self, params: CodeActionParams) -> LspResult<Option<CodeActionResponse>> {
-        match handlers::code_actions::get_actions(&self.session, params) {
-            Ok(result) => {
-                tracing::trace!("Got {} Code Action(s)", result.len());
-                return LspResult::Ok(Some(result));
-            }
-            Err(e) => LspResult::Err(into_lsp_error(e)),
-        }
-    }
-
-    #[tracing::instrument(level = "trace", skip(self))]
-    async fn execute_command(
-        &self,
-        params: ExecuteCommandParams,
-    ) -> LspResult<Option<serde_json::Value>> {
-        match handlers::code_actions::execute_command(&self.session, params).await {
-            // we'll inform the client within `code_actions::execute_command`
-            Ok(_) => LspResult::Ok(None),
-            Err(err) => LspResult::Err(into_lsp_error(err)),
-        }
-    }
-}
-
-impl Drop for LSPServer {
-    fn drop(&mut self) {
-        if let Ok(mut sessions) = self.sessions.lock() {
-            let _removed = sessions.remove(&self.session.key);
-            debug_assert!(_removed.is_some(), "Session did not exist.");
-            if self.stop_on_disconnect
-                && sessions.is_empty()
-                && self.is_initialized.load(Ordering::Relaxed)
-            {
-                self.session.cancellation.notify_one();
-            }
-        }
-    }
-}
-
-/// Map of active sessions connected to a [ServerFactory].
-type Sessions = Arc<Mutex<FxHashMap<SessionKey, SessionHandle>>>;
-
-/// Helper method for wrapping a [Workspace] method in a `custom_method` for
-/// the [LSPServer]
-macro_rules! workspace_method {
-    ( $builder:ident, $method:ident ) => {
-        $builder = $builder.custom_method(
-            concat!("pgt/", stringify!($method)),
-            |server: &LSPServer, params| {
-                let span = tracing::trace_span!(concat!("pgt/", stringify!($method)), params = ?params).or_current();
-                tracing::info!("Received request: {}", stringify!($method));
-
-                let workspace = server.session.workspace.clone();
-                let result = spawn_blocking(move || {
-                    let _guard = span.entered();
-                    workspace.$method(params)
-                });
-
-                result.map(move |result| {
-                    // The type of `result` is `Result<Result<T, WorkspaceError>, JoinError>`,
-                    // where the inner result is the return value of `$method` while the
-                    // outer one is added by `spawn_blocking` to catch panics or
-                    // cancellations of the task
-                    match result {
-                        Ok(Ok(result)) => Ok(result),
-                        Ok(Err(err)) => Err(into_lsp_error(err)),
-                        Err(err) => match err.try_into_panic() {
-                            Ok(err) => Err(panic_to_lsp_error(err)),
-                            Err(err) => Err(into_lsp_error(err)),
-                        },
-                    }
-                })
-            },
-        );
-    };
-}
-
-/// Factory data structure responsible for creating [ServerConnection] handles
-/// for each incoming connection accepted by the server
-#[derive(Default)]
-pub struct ServerFactory {
-    /// Synchronization primitive used to broadcast a shutdown signal to all
-    /// active connections
-    cancellation: Arc<Notify>,
-    /// Optional [Workspace] instance shared between all clients. Currently
-    /// this field is always [None] (meaning each connection will get its own
-    /// workspace) until we figure out how to handle concurrent access to the
-    /// same workspace from multiple clients
-    workspace: Option<Arc<dyn Workspace>>,
-
-    /// The sessions of the connected clients indexed by session key.
-    sessions: Sessions,
-
-    /// Session key generator. Stores the key of the next session.
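-    /// (Keys are handed out with `fetch_add`, so each connection receives a
-    /// unique, monotonically increasing key.)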
-    next_session_key: AtomicU64,
-
-    /// If this is true the server will broadcast a shutdown signal once the
-    /// last client disconnected
-    stop_on_disconnect: bool,
-    /// This shared flag is set to true once at least one session has been
-    /// initialized on this server instance
-    is_initialized: Arc<AtomicBool>,
-}
-
-impl ServerFactory {
-    pub fn new(stop_on_disconnect: bool) -> Self {
-        Self {
-            cancellation: Arc::default(),
-            workspace: None,
-            sessions: Sessions::default(),
-            next_session_key: AtomicU64::new(0),
-            stop_on_disconnect,
-            is_initialized: Arc::default(),
-        }
-    }
-
-    pub fn create(&self, config_path: Option<PathBuf>) -> ServerConnection {
-        self.create_with_fs(config_path, DynRef::Owned(Box::<OsFileSystem>::default()))
-    }
-
-    /// Create a new [ServerConnection] from this factory
-    pub fn create_with_fs(
-        &self,
-        config_path: Option<PathBuf>,
-        fs: DynRef<'static, dyn FileSystem>,
-    ) -> ServerConnection {
-        let workspace = self
-            .workspace
-            .clone()
-            .unwrap_or_else(workspace::server_sync);
-
-        let session_key = SessionKey(self.next_session_key.fetch_add(1, Ordering::Relaxed));
-
-        let mut builder = LspService::build(move |client| {
-            let mut session = Session::new(
-                session_key,
-                client,
-                workspace,
-                self.cancellation.clone(),
-                fs,
-            );
-            if let Some(path) = config_path {
-                session.set_config_path(path);
-            }
-            let handle = Arc::new(session);
-
-            let mut sessions = self.sessions.lock().unwrap();
-            sessions.insert(session_key, handle.clone());
-
-            LSPServer::new(
-                handle,
-                self.sessions.clone(),
-                self.stop_on_disconnect,
-                self.is_initialized.clone(),
-            )
-        });
-
-        // "shutdown" is not part of the Workspace API
-        builder = builder.custom_method("pgt/shutdown", |server: &LSPServer, (): ()| {
-            info!("Sending shutdown signal");
-            server.session.broadcast_shutdown();
-            ready(Ok(Some(())))
-        });
-
-        workspace_method!(builder, is_path_ignored);
-        workspace_method!(builder, update_settings);
-        workspace_method!(builder, get_file_content);
-        workspace_method!(builder, open_file);
-        workspace_method!(builder, change_file);
-        workspace_method!(builder, close_file);
-        workspace_method!(builder, pull_diagnostics);
-        workspace_method!(builder, get_completions);
-
-        let (service, socket) = builder.finish();
-        ServerConnection { socket, service }
-    }
-
-    /// Return a handle to the cancellation token for this server process
-    pub fn cancellation(&self) -> Arc<Notify> {
-        self.cancellation.clone()
-    }
-}
-
-/// Handle type created by the server for each incoming connection
-pub struct ServerConnection {
-    socket: ClientSocket,
-    service: LspService<LSPServer>,
-}
-
-impl ServerConnection {
-    /// Destructure a connection into its inner service instance and socket
-    pub fn into_inner(self) -> (LspService<LSPServer>, ClientSocket) {
-        (self.service, self.socket)
-    }
-
-    /// Accept an incoming connection and run the server async I/O loop to
-    /// completion
-    pub async fn accept<I, O>(self, stdin: I, stdout: O)
-    where
-        I: AsyncRead + Unpin,
-        O: AsyncWrite,
-    {
-        Server::new(stdin, stdout, self.socket)
-            .serve(self.service)
-            .await;
-    }
-}
diff --git a/crates/pgt_lsp/src/session.rs b/crates/pgt_lsp/src/session.rs
deleted file mode 100644
index 7ccf2bab..00000000
--- a/crates/pgt_lsp/src/session.rs
+++ /dev/null
@@ -1,524 +0,0 @@
-use crate::adapters::{PositionEncoding, WideEncoding, negotiated_encoding};
-use crate::diagnostics::LspError;
-use crate::documents::Document;
-use crate::utils;
-use anyhow::Result;
-use biome_deserialize::Merge;
-use futures::StreamExt;
-use futures::stream::FuturesUnordered;
-use pgt_analyse::RuleCategoriesBuilder;
-use pgt_configuration::{ConfigurationPathHint, PartialConfiguration};
-use pgt_diagnostics::{DiagnosticExt, Error};
-use pgt_fs::{FileSystem, PgTPath};
-use pgt_workspace::Workspace;
-use pgt_workspace::configuration::{LoadedConfiguration, load_configuration};
-use pgt_workspace::features;
-use pgt_workspace::settings::PartialConfigurationExt;
-use pgt_workspace::workspace::UpdateSettingsParams;
-use pgt_workspace::{DynRef, WorkspaceError};
-use rustc_hash::FxHashMap;
-use serde_json::Value;
-use std::path::PathBuf;
-use std::sync::Arc;
-use std::sync::RwLock;
-use std::sync::atomic::Ordering;
-use std::sync::atomic::{AtomicBool, AtomicU8};
-use tokio::sync::Notify;
-use tokio::sync::OnceCell;
-use tower_lsp::lsp_types::Url;
-use tower_lsp::lsp_types::{self, ClientCapabilities};
-use tower_lsp::lsp_types::{MessageType, Registration};
-use tower_lsp::lsp_types::{Unregistration, WorkspaceFolder};
-use tracing::{error, info};
-
-/// Key, uniquely identifying an LSP session.
-#[derive(Clone, Copy, Eq, PartialEq, Hash, Debug)]
-pub(crate) struct SessionKey(pub u64);
-
-/// Represents the state of an LSP server session.
-pub(crate) struct Session {
-    /// The unique key identifying this session.
-    pub(crate) key: SessionKey,
-
-    /// The LSP client for this session.
-    pub(crate) client: tower_lsp::Client,
-
-    /// The parameters provided by the client in the "initialize" request
-    initialize_params: OnceCell<InitializeParams>,
-
-    pub(crate) workspace: Arc<dyn Workspace>,
-
-    configuration_status: AtomicU8,
-
-    /// A flag to notify a message to the user when the configuration is broken, and the LSP attempts
-    /// to update the diagnostics
-    notified_broken_configuration: AtomicBool,
-
-    /// File system to read files inside the workspace
-    pub(crate) fs: DynRef<'static, dyn FileSystem>,
-
-    documents: RwLock<FxHashMap<lsp_types::Url, Document>>,
-
-    pub(crate) cancellation: Arc<Notify>,
-
-    pub(crate) config_path: Option<PathBuf>,
-}
-
-/// The parameters provided by the client in the "initialize" request
-struct InitializeParams {
-    /// The capabilities provided by the client as part of [`lsp_types::InitializeParams`]
-    client_capabilities: lsp_types::ClientCapabilities,
-    root_uri: Option<Url>,
-    #[allow(unused)]
-    workspace_folders: Option<Vec<WorkspaceFolder>>,
-}
-
-#[repr(u8)]
-pub(crate) enum ConfigurationStatus {
-    /// The configuration file was properly loaded
-    Loaded = 0,
-    /// The configuration file does not exist
-    Missing = 1,
-    /// The configuration file exists but could not be loaded
-    Error = 2,
-    /// Currently loading the configuration
-    Loading = 3,
-}
-
-impl ConfigurationStatus {
-    pub(crate) const fn is_error(&self) -> bool {
-        matches!(self, ConfigurationStatus::Error)
-    }
-}
-
-impl TryFrom<u8> for ConfigurationStatus {
-    type Error = ();
-
-    fn try_from(value: u8) -> Result<Self, Self::Error> {
-        match value {
-            0 => Ok(Self::Loaded),
-            1 => Ok(Self::Missing),
-            2 => Ok(Self::Error),
-            3 => Ok(Self::Loading),
-            _ => Err(()),
-        }
-    }
-}
-
-pub(crate) type SessionHandle = Arc<Session>;
-
-/// Holds the set of capabilities supported by the Language Server
-/// instance and whether they are enabled or not
-#[derive(Default)]
-pub(crate) struct CapabilitySet {
-    registry: FxHashMap<&'static str, (&'static str, CapabilityStatus)>,
-}
-
-/// Represents whether a capability is enabled or not, optionally holding the
-/// configuration associated with the capability
-pub(crate) enum CapabilityStatus {
-    Enable(Option<Value>),
-    Disable,
-}
-
-impl CapabilitySet {
-    /// Insert a capability in the set
-    pub(crate) fn add_capability(
-        &mut self,
-        id: &'static str,
-        method: &'static str,
-        status: CapabilityStatus,
-    ) {
self.registry.insert(id, (method, status)); - } -} - -impl Session { - pub(crate) fn new( - key: SessionKey, - client: tower_lsp::Client, - workspace: Arc, - cancellation: Arc, - fs: DynRef<'static, dyn FileSystem>, - ) -> Self { - let documents = Default::default(); - Self { - key, - client, - initialize_params: OnceCell::default(), - workspace, - configuration_status: AtomicU8::new(ConfigurationStatus::Missing as u8), - documents, - fs, - cancellation, - config_path: None, - notified_broken_configuration: AtomicBool::new(false), - } - } - - pub(crate) fn set_config_path(&mut self, path: PathBuf) { - self.config_path = Some(path); - } - - /// Initialize this session instance with the incoming initialization parameters from the client - pub(crate) fn initialize( - &self, - client_capabilities: lsp_types::ClientCapabilities, - root_uri: Option, - workspace_folders: Option>, - ) { - let result = self.initialize_params.set(InitializeParams { - client_capabilities, - root_uri, - workspace_folders, - }); - - if let Err(err) = result { - error!("Failed to initialize session: {err}"); - } - } - - /// Register a set of capabilities with the client - pub(crate) async fn register_capabilities(&self, capabilities: CapabilitySet) { - let mut registrations = Vec::new(); - let mut unregistrations = Vec::new(); - - let mut register_methods = String::new(); - let mut unregister_methods = String::new(); - - for (id, (method, status)) in capabilities.registry { - unregistrations.push(Unregistration { - id: id.to_string(), - method: method.to_string(), - }); - - if !unregister_methods.is_empty() { - unregister_methods.push_str(", "); - } - - unregister_methods.push_str(method); - - if let CapabilityStatus::Enable(register_options) = status { - registrations.push(Registration { - id: id.to_string(), - method: method.to_string(), - register_options, - }); - - if !register_methods.is_empty() { - register_methods.push_str(", "); - } - - register_methods.push_str(method); - } - } - - match self.client.unregister_capability(unregistrations).await { - Err(e) => { - error!( - "Error unregistering {unregister_methods:?} capabilities: {}", - e - ); - } - _ => { - info!("Unregister capabilities {unregister_methods:?}"); - } - } - - match self.client.register_capability(registrations).await { - Err(e) => { - error!("Error registering {register_methods:?} capabilities: {}", e); - } - _ => { - info!("Register capabilities {register_methods:?}"); - } - } - } - - /// Computes diagnostics for the file matching the provided url and publishes - /// them to the client. Called from [`handlers::text_document`] when a file's - /// contents changes. - #[tracing::instrument(level = "trace", skip_all, fields(url = display(&url), diagnostic_count), err)] - pub(crate) async fn update_diagnostics(&self, url: lsp_types::Url) -> Result<(), LspError> { - let pgt_path = self.file_path(&url)?; - let doc = self.document(&url)?; - if self.configuration_status().is_error() && !self.notified_broken_configuration() { - self.set_notified_broken_configuration(); - self.client - .show_message(MessageType::WARNING, "The configuration file has errors. 
PgLSP will report only parsing errors until the configuration is fixed.") - .await; - } - - let categories = RuleCategoriesBuilder::default().all(); - - let diagnostics: Vec = { - let result = - self.workspace - .pull_diagnostics(features::diagnostics::PullDiagnosticsParams { - path: pgt_path.clone(), - max_diagnostics: u64::MAX, - categories: categories.build(), - only: Vec::new(), - skip: Vec::new(), - })?; - - result - .diagnostics - .into_iter() - .filter_map(|d| { - match utils::diagnostic_to_lsp( - d, - &url, - &doc.line_index, - self.position_encoding(), - None, - ) { - Ok(diag) => Some(diag), - Err(err) => { - error!("failed to convert diagnostic to LSP: {err:?}"); - None - } - } - }) - .collect() - }; - - self.client - .publish_diagnostics(url, diagnostics, Some(doc.version)) - .await; - - Ok(()) - } - - /// Updates diagnostics for every [`Document`] in this [`Session`] - pub(crate) async fn update_all_diagnostics(&self) { - let mut futures: FuturesUnordered<_> = self - .documents - .read() - .unwrap() - .keys() - .map(|url| self.update_diagnostics(url.clone())) - .collect(); - - while let Some(result) = futures.next().await { - if let Err(e) = result { - error!("Error while updating diagnostics: {}", e); - } - } - } - - /// Get a [`Document`] matching the provided [`lsp_types::Url`] - /// - /// If document does not exist, result is [WorkspaceError::NotFound] - pub(crate) fn document(&self, url: &lsp_types::Url) -> Result { - self.documents - .read() - .unwrap() - .get(url) - .cloned() - .ok_or_else(|| WorkspaceError::not_found().with_file_path(url.to_string())) - } - - /// Set the [`Document`] for the provided [`lsp_types::Url`] - /// - /// Used by [`handlers::text_document] to synchronize documents with the client. - pub(crate) fn insert_document(&self, url: lsp_types::Url, document: Document) { - self.documents.write().unwrap().insert(url, document); - } - - /// Remove the [`Document`] matching the provided [`lsp_types::Url`] - pub(crate) fn remove_document(&self, url: &lsp_types::Url) { - self.documents.write().unwrap().remove(url); - } - - pub(crate) fn file_path(&self, url: &lsp_types::Url) -> Result { - let path_to_file = match url.to_file_path() { - Err(_) => { - // If we can't create a path, it's probably because the file doesn't exist. 
-                // It can be a newly created file that is not yet on disk
-                PathBuf::from(url.path())
-            }
-            Ok(path) => path,
-        };
-
-        Ok(PgTPath::new(path_to_file))
-    }
-
-    /// True if the client supports dynamic registration of "workspace/didChangeConfiguration" requests
-    pub(crate) fn can_register_did_change_configuration(&self) -> bool {
-        self.initialize_params
-            .get()
-            .and_then(|c| c.client_capabilities.workspace.as_ref())
-            .and_then(|c| c.did_change_configuration)
-            .and_then(|c| c.dynamic_registration)
-            == Some(true)
-    }
-
-    /// Get the current workspace folders
-    pub(crate) fn get_workspace_folders(&self) -> Option<&Vec<WorkspaceFolder>> {
-        self.initialize_params
-            .get()
-            .and_then(|c| c.workspace_folders.as_ref())
-    }
-
-    /// Returns the base path of the workspace on the filesystem if it has one
-    pub(crate) fn base_path(&self) -> Option<PathBuf> {
-        let initialize_params = self.initialize_params.get()?;
-
-        let root_uri = initialize_params.root_uri.as_ref()?;
-        match root_uri.to_file_path() {
-            Ok(base_path) => Some(base_path),
-            Err(()) => {
-                error!(
-                    "The Workspace root URI {root_uri:?} could not be parsed as a filesystem path"
-                );
-                None
-            }
-        }
-    }
-
-    /// Returns a reference to the client capabilities for this session
-    pub(crate) fn client_capabilities(&self) -> Option<&ClientCapabilities> {
-        self.initialize_params
-            .get()
-            .map(|params| &params.client_capabilities)
-    }
-
-    /// This function attempts to read the `postgrestools.jsonc` configuration file from
-    /// the root URI and update the workspace settings accordingly
-    #[tracing::instrument(level = "trace", skip(self))]
-    pub(crate) async fn load_workspace_settings(&self, extra_config: Option<PartialConfiguration>) {
-        // Providing a custom configuration path means workspace folders are not supported
-        if let Some(config_path) = &self.config_path {
-            let base_path = ConfigurationPathHint::FromUser(config_path.clone());
-            let status = self
-                .load_pgt_configuration_file(base_path, extra_config)
-                .await;
-            self.set_configuration_status(status);
-        } else if let Some(folders) = self.get_workspace_folders() {
-            info!("Detected workspace folder.");
-            self.set_configuration_status(ConfigurationStatus::Loading);
-            for folder in folders {
-                info!("Attempting to load the configuration file in {:?}", folder.uri);
-                let base_path = folder.uri.to_file_path();
-                match base_path {
-                    Ok(base_path) => {
-                        let status = self
-                            .load_pgt_configuration_file(
-                                ConfigurationPathHint::FromWorkspace(base_path),
-                                extra_config.clone(),
-                            )
-                            .await;
-                        self.set_configuration_status(status);
-                    }
-                    Err(_) => {
-                        error!(
-                            "The Workspace root URI {:?} could not be parsed as a filesystem path",
-                            folder.uri
-                        );
-                    }
-                }
-            }
-        } else {
-            let base_path = match self.base_path() {
-                None => ConfigurationPathHint::default(),
-                Some(path) => ConfigurationPathHint::FromLsp(path),
-            };
-            let status = self
-                .load_pgt_configuration_file(base_path, extra_config)
-                .await;
-            self.set_configuration_status(status);
-        }
-    }
-
-    async fn load_pgt_configuration_file(
-        &self,
-        base_path: ConfigurationPathHint,
-        extra_config: Option<PartialConfiguration>,
-    ) -> ConfigurationStatus {
-        match load_configuration(&self.fs, base_path.clone()) {
-            Ok(loaded_configuration) => {
-                let LoadedConfiguration {
-                    configuration: mut fs_configuration,
-                    directory_path: configuration_path,
-                    ..
- } = loaded_configuration; - info!("Configuration loaded successfully from disk."); - info!("Update workspace settings."); - - if let Some(ws_configuration) = extra_config { - fs_configuration.merge_with(ws_configuration); - } - - let result = fs_configuration - .retrieve_gitignore_matches(&self.fs, configuration_path.as_deref()); - - match result { - Ok((vcs_base_path, gitignore_matches)) => { - let result = self.workspace.update_settings(UpdateSettingsParams { - workspace_directory: self.fs.working_directory(), - configuration: fs_configuration, - vcs_base_path, - gitignore_matches, - }); - - if let Err(error) = result { - error!("Failed to set workspace settings: {}", error); - self.client.log_message(MessageType::ERROR, &error).await; - ConfigurationStatus::Error - } else { - ConfigurationStatus::Loaded - } - } - Err(err) => { - error!("Couldn't load the configuration file, reason:\n {}", err); - self.client.log_message(MessageType::ERROR, &err).await; - ConfigurationStatus::Error - } - } - } - Err(err) => { - error!("Couldn't load the configuration file, reason:\n {}", err); - self.client.log_message(MessageType::ERROR, &err).await; - ConfigurationStatus::Error - } - } - } - - /// Broadcast a shutdown signal to all active connections - pub(crate) fn broadcast_shutdown(&self) { - self.cancellation.notify_one(); - } - - /// Retrieves information regarding the configuration status - pub(crate) fn configuration_status(&self) -> ConfigurationStatus { - self.configuration_status - .load(Ordering::Relaxed) - .try_into() - .unwrap() - } - - /// Updates the status of the configuration - fn set_configuration_status(&self, status: ConfigurationStatus) { - self.notified_broken_configuration - .store(false, Ordering::Relaxed); - self.configuration_status - .store(status as u8, Ordering::Relaxed); - } - - fn notified_broken_configuration(&self) -> bool { - self.notified_broken_configuration.load(Ordering::Relaxed) - } - fn set_notified_broken_configuration(&self) { - self.notified_broken_configuration - .store(true, Ordering::Relaxed); - } - - pub fn position_encoding(&self) -> PositionEncoding { - self.initialize_params - .get() - .map_or(PositionEncoding::Wide(WideEncoding::Utf16), |params| { - negotiated_encoding(¶ms.client_capabilities) - }) - } -} diff --git a/crates/pgt_lsp/src/utils.rs b/crates/pgt_lsp/src/utils.rs deleted file mode 100644 index 92059b66..00000000 --- a/crates/pgt_lsp/src/utils.rs +++ /dev/null @@ -1,441 +0,0 @@ -use crate::adapters::line_index::LineIndex; -use crate::adapters::{PositionEncoding, from_lsp, to_lsp}; -use anyhow::{Context, Result, ensure}; -use pgt_console::MarkupBuf; -use pgt_console::fmt::Termcolor; -use pgt_console::fmt::{self, Formatter}; -use pgt_diagnostics::termcolor::NoColor; -use pgt_diagnostics::{Diagnostic, DiagnosticTags, Location, PrintDescription, Severity, Visit}; -use pgt_text_size::{TextRange, TextSize}; -use std::any::Any; -use std::borrow::Cow; -use std::fmt::{Debug, Display}; -use std::io; -use std::ops::{Add, Range}; -use tower_lsp::jsonrpc::Error as LspError; -use tower_lsp::lsp_types; -use tower_lsp::lsp_types::{self as lsp, CodeDescription, Url}; -use tracing::error; - -/// Convert an [pgt_diagnostics::Diagnostic] to a [lsp::Diagnostic], using the span -/// of the diagnostic's primary label as the diagnostic range. -/// Requires a [LineIndex] to convert a byte offset range to the line/col range -/// expected by LSP. 
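-/// An optional `offset` can be supplied; it is added to the span before the
-/// conversion, for diagnostics whose span is relative to a sub-slice of the document.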
-pub(crate) fn diagnostic_to_lsp<D: Diagnostic>(
-    diagnostic: D,
-    url: &lsp::Url,
-    line_index: &LineIndex,
-    position_encoding: PositionEncoding,
-    offset: Option<u32>,
-) -> Result<lsp::Diagnostic> {
-    let location = diagnostic.location();
-
-    let span = location.span.context("diagnostic location has no span")?;
-    let span = if let Some(offset) = offset {
-        TextRange::new(
-            span.start().add(TextSize::from(offset)),
-            span.end().add(TextSize::from(offset)),
-        )
-    } else {
-        span
-    };
-    let span = to_lsp::range(line_index, span, position_encoding)
-        .context("failed to convert diagnostic span to LSP range")?;
-
-    let severity = match diagnostic.severity() {
-        Severity::Fatal | Severity::Error => lsp::DiagnosticSeverity::ERROR,
-        Severity::Warning => lsp::DiagnosticSeverity::WARNING,
-        Severity::Information => lsp::DiagnosticSeverity::INFORMATION,
-        Severity::Hint => lsp::DiagnosticSeverity::HINT,
-    };
-
-    let code = diagnostic
-        .category()
-        .map(|category| lsp::NumberOrString::String(category.name().to_string()));
-
-    let code_description = diagnostic
-        .category()
-        .and_then(|category| category.link())
-        .and_then(|link| {
-            let href = Url::parse(link).ok()?;
-            Some(CodeDescription { href })
-        });
-
-    let message = PrintDescription(&diagnostic).to_string();
-    ensure!(!message.is_empty(), "diagnostic description is empty");
-
-    let mut related_information = None;
-    let mut visitor = RelatedInformationVisitor {
-        url,
-        line_index,
-        position_encoding,
-        related_information: &mut related_information,
-    };
-
-    diagnostic.advices(&mut visitor).unwrap();
-
-    let tags = diagnostic.tags();
-    let tags = {
-        let mut result = Vec::new();
-
-        if tags.contains(DiagnosticTags::UNNECESSARY_CODE) {
-            result.push(lsp::DiagnosticTag::UNNECESSARY);
-        }
-
-        if tags.contains(DiagnosticTags::DEPRECATED_CODE) {
-            result.push(lsp::DiagnosticTag::DEPRECATED);
-        }
-
-        if !result.is_empty() {
-            Some(result)
-        } else {
-            None
-        }
-    };
-
-    let mut diagnostic = lsp::Diagnostic::new(
-        span,
-        Some(severity),
-        code,
-        Some("pg".into()),
-        message,
-        related_information,
-        tags,
-    );
-    diagnostic.code_description = code_description;
-    Ok(diagnostic)
-}
-
-struct RelatedInformationVisitor<'a> {
-    url: &'a lsp::Url,
-    line_index: &'a LineIndex,
-    position_encoding: PositionEncoding,
-    related_information: &'a mut Option<Vec<lsp::DiagnosticRelatedInformation>>,
-}
-
-impl Visit for RelatedInformationVisitor<'_> {
-    fn record_frame(&mut self, location: Location<'_>) -> io::Result<()> {
-        let span = match location.span {
-            Some(span) => span,
-            None => return Ok(()),
-        };
-
-        let range = match to_lsp::range(self.line_index, span, self.position_encoding) {
-            Ok(range) => range,
-            Err(_) => return Ok(()),
-        };
-
-        let related_information = self.related_information.get_or_insert_with(Vec::new);
-
-        related_information.push(lsp::DiagnosticRelatedInformation {
-            location: lsp::Location {
-                uri: self.url.clone(),
-                range,
-            },
-            message: String::new(),
-        });
-
-        Ok(())
-    }
-}
-
-/// Convert a piece of markup into a String
-#[allow(unused)]
-fn print_markup(markup: &MarkupBuf) -> String {
-    let mut message = Termcolor(NoColor::new(Vec::new()));
-    fmt::Display::fmt(markup, &mut Formatter::new(&mut message))
-        // SAFETY: Writing to a memory buffer should never fail
-        .unwrap();
-
-    // SAFETY: Printing uncolored markup never generates non-UTF-8 byte sequences
-    String::from_utf8(message.0.into_inner()).unwrap()
-}
-
-/// Helper to create a [tower_lsp::jsonrpc::Error] from a message
-pub(crate) fn into_lsp_error(msg: impl Display + Debug) -> LspError {
-    let mut error = LspError::internal_error();
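-    // Log the original message, then move it into the JSON-RPC error payload
-    // (human-readable text in `message`, debug representation in `data`).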
-    error!("Error: {}", msg);
-    error.message = Cow::Owned(msg.to_string());
-    error.data = Some(format!("{msg:?}").into());
-    error
-}
-
-pub(crate) fn panic_to_lsp_error(err: Box<dyn Any + Send>) -> LspError {
-    let mut error = LspError::internal_error();
-
-    match err.downcast::<String>() {
-        Ok(msg) => {
-            error.message = Cow::Owned(msg.to_string());
-        }
-        Err(err) => match err.downcast::<&str>() {
-            Ok(msg) => {
-                error.message = Cow::Owned(msg.to_string());
-            }
-            Err(_) => {
-                error.message = Cow::Owned(String::from("Encountered an unknown error"));
-            }
-        },
-    }
-
-    error
-}
-
-pub(crate) fn apply_document_changes(
-    position_encoding: PositionEncoding,
-    current_content: String,
-    content_changes: &[lsp_types::TextDocumentContentChangeEvent],
-) -> String {
-    // Skip to the last full document change, as it invalidates all previous changes anyway.
-    let mut start = content_changes
-        .iter()
-        .rev()
-        .position(|change| change.range.is_none())
-        .map_or(0, |idx| content_changes.len() - idx - 1);
-
-    let mut text: String = match content_changes.get(start) {
-        // peek at the first content change as an optimization
-        Some(lsp_types::TextDocumentContentChangeEvent {
-            range: None, text, ..
-        }) => {
-            let text = text.clone();
-            start += 1;
-
-            // The only change is a full document update
-            if start == content_changes.len() {
-                return text;
-            }
-            text
-        }
-        Some(_) => current_content,
-        // we received no content changes
-        None => return current_content,
-    };
-
-    let mut line_index = LineIndex::new(&text);
-
-    // The changes we got must be applied sequentially, but can cross lines, so we
-    // have to keep our line index updated.
-    // Some clients (e.g. VS Code) sort the ranges in reverse. As an optimization, we
-    // remember the last valid line in the index and only rebuild it if needed.
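-    // `index_valid` is the first line number for which the current `line_index`
-    // may be stale. Example: after applying an edit starting at line 5,
-    // `index_valid` becomes 5; a later edit ending at line 7 (5 <= 7) forces a
-    // rebuild, while one ending at line 3 does not.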
- let mut index_valid = u32::MAX; - for change in content_changes { - // The None case can't happen as we have handled it above already - if let Some(range) = change.range { - if index_valid <= range.end.line { - line_index = LineIndex::new(&text); - } - index_valid = range.start.line; - if let Ok(range) = from_lsp::text_range(&line_index, range, position_encoding) { - text.replace_range(Range::::from(range), &change.text); - } - } - } - - text -} - -#[cfg(test)] -mod tests { - use crate::adapters::line_index::LineIndex; - use crate::adapters::{PositionEncoding, to_lsp}; - use anyhow::Result; - use pgt_text_edit::{CompressedOp, DiffOp, TextEdit}; - use pgt_text_size::TextSize; - use tower_lsp::lsp_types as lsp; - - fn text_edit( - line_index: &LineIndex, - diff: TextEdit, - position_encoding: PositionEncoding, - offset: Option, - ) -> Result> { - let mut result: Vec = Vec::new(); - let mut offset = if let Some(offset) = offset { - TextSize::from(offset) - } else { - TextSize::from(0) - }; - - for op in diff.iter() { - match op { - CompressedOp::DiffOp(DiffOp::Equal { range }) => { - offset += range.len(); - } - CompressedOp::DiffOp(DiffOp::Insert { range }) => { - let start = to_lsp::position(line_index, offset, position_encoding)?; - - // Merge with a previous delete operation if possible - let last_edit = result.last_mut().filter(|text_edit| { - text_edit.range.end == start && text_edit.new_text.is_empty() - }); - - if let Some(last_edit) = last_edit { - last_edit.new_text = diff.get_text(*range).to_string(); - } else { - result.push(lsp::TextEdit { - range: lsp::Range::new(start, start), - new_text: diff.get_text(*range).to_string(), - }); - } - } - CompressedOp::DiffOp(DiffOp::Delete { range }) => { - let start = to_lsp::position(line_index, offset, position_encoding)?; - offset += range.len(); - let end = to_lsp::position(line_index, offset, position_encoding)?; - - result.push(lsp::TextEdit { - range: lsp::Range::new(start, end), - new_text: String::new(), - }); - } - - CompressedOp::EqualLines { line_count } => { - let mut line_col = line_index - .line_col(offset) - .expect("diff length is overflowing the line count in the original file"); - - line_col.line += line_count.get() + 1; - line_col.col = 0; - - // SAFETY: This should only happen if `line_index` wasn't built - // from the same string as the old revision of `diff` - let new_offset = line_index - .offset(line_col) - .expect("diff length is overflowing the line count in the original file"); - - offset = new_offset; - } - } - } - - Ok(result) - } - - #[test] - fn test_diff_1() { - const OLD: &str = "line 1 old -line 2 -line 3 -line 4 -line 5 -line 6 -line 7 old"; - - const NEW: &str = "line 1 new -line 2 -line 3 -line 4 -line 5 -line 6 -line 7 new"; - - let line_index = LineIndex::new(OLD); - let diff = TextEdit::from_unicode_words(OLD, NEW); - - let text_edit = text_edit(&line_index, diff, PositionEncoding::Utf8, None).unwrap(); - - assert_eq!( - text_edit.as_slice(), - &[ - lsp::TextEdit { - range: lsp::Range { - start: lsp::Position { - line: 0, - character: 7, - }, - end: lsp::Position { - line: 0, - character: 10, - }, - }, - new_text: String::from("new"), - }, - lsp::TextEdit { - range: lsp::Range { - start: lsp::Position { - line: 6, - character: 7 - }, - end: lsp::Position { - line: 6, - character: 10 - } - }, - new_text: String::from("new"), - }, - ] - ); - } - - #[test] - fn test_diff_2() { - const OLD: &str = "console.log(\"Variable: \" + variable);"; - const NEW: &str = "console.log(`Variable: ${variable}`);"; - - 
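-        // Only the first and last lines differ, so the diff should produce
-        // exactly two word-level edits replacing "old" with "new".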
let line_index = LineIndex::new(OLD); - let diff = TextEdit::from_unicode_words(OLD, NEW); - - let text_edit = text_edit(&line_index, diff, PositionEncoding::Utf8, None).unwrap(); - - assert_eq!( - text_edit.as_slice(), - &[ - lsp::TextEdit { - range: lsp::Range { - start: lsp::Position { - line: 0, - character: 12, - }, - end: lsp::Position { - line: 0, - character: 13, - }, - }, - new_text: String::from("`"), - }, - lsp::TextEdit { - range: lsp::Range { - start: lsp::Position { - line: 0, - character: 23 - }, - end: lsp::Position { - line: 0, - character: 27 - } - }, - new_text: String::from("${"), - }, - lsp::TextEdit { - range: lsp::Range { - start: lsp::Position { - line: 0, - character: 35 - }, - end: lsp::Position { - line: 0, - character: 35 - } - }, - new_text: String::from("}`"), - }, - ] - ); - } - - // #[test] - // fn test_range_formatting() { - // let encoding = PositionEncoding::Wide(WideEncoding::Utf16); - // let input = "(\"Jan 1, 2018\u{2009}–\u{2009}Jan 1, 2019\");\n(\"Jan 1, 2018\u{2009}–\u{2009}Jan 1, 2019\");\nisSpreadAssignment;\n".to_string(); - // let change = TextDocumentContentChangeEvent { - // range: Some(Range::new(Position::new(0, 30), Position::new(1, 0))), - // range_length: Some(1), - // text: String::new(), - // }; - // - // let output = apply_document_changes(encoding, input, vec![change]); - // let expected = "(\"Jan 1, 2018\u{2009}–\u{2009}Jan 1, 2019\");(\"Jan 1, 2018\u{2009}–\u{2009}Jan 1, 2019\");\nisSpreadAssignment;\n"; - // - // assert_eq!(output, expected); - // } -} diff --git a/crates/pgt_lsp/tests/server.rs b/crates/pgt_lsp/tests/server.rs deleted file mode 100644 index 581ea1fe..00000000 --- a/crates/pgt_lsp/tests/server.rs +++ /dev/null @@ -1,1115 +0,0 @@ -use anyhow::Context; -use anyhow::Error; -use anyhow::Result; -use anyhow::bail; -use biome_deserialize::Merge; -use futures::Sink; -use futures::SinkExt; -use futures::Stream; -use futures::StreamExt; -use futures::channel::mpsc::{Sender, channel}; -use pgt_configuration::PartialConfiguration; -use pgt_configuration::database::PartialDatabaseConfiguration; -use pgt_fs::MemoryFileSystem; -use pgt_lsp::LSPServer; -use pgt_lsp::ServerFactory; -use pgt_test_utils::test_database::get_new_test_db; -use pgt_workspace::DynRef; -use serde::Serialize; -use serde::de::DeserializeOwned; -use serde_json::Value; -use serde_json::{from_value, to_value}; -use sqlx::Executor; -use std::any::type_name; -use std::fmt::Display; -use std::time::Duration; -use tower::timeout::Timeout; -use tower::{Service, ServiceExt}; -use tower_lsp::LspService; -use tower_lsp::jsonrpc; -use tower_lsp::jsonrpc::Response; -use tower_lsp::lsp_types as lsp; -use tower_lsp::lsp_types::CodeActionContext; -use tower_lsp::lsp_types::CodeActionParams; -use tower_lsp::lsp_types::CodeActionResponse; -use tower_lsp::lsp_types::CompletionParams; -use tower_lsp::lsp_types::CompletionResponse; -use tower_lsp::lsp_types::ExecuteCommandParams; -use tower_lsp::lsp_types::PartialResultParams; -use tower_lsp::lsp_types::Position; -use tower_lsp::lsp_types::Range; -use tower_lsp::lsp_types::TextDocumentPositionParams; -use tower_lsp::lsp_types::WorkDoneProgressParams; -use tower_lsp::lsp_types::{ - ClientCapabilities, DidChangeConfigurationParams, DidChangeTextDocumentParams, - DidCloseTextDocumentParams, DidOpenTextDocumentParams, InitializeResult, InitializedParams, - PublishDiagnosticsParams, TextDocumentContentChangeEvent, TextDocumentIdentifier, - TextDocumentItem, Url, VersionedTextDocumentIdentifier, -}; -use tower_lsp::{jsonrpc::Request, 
lsp_types::InitializeParams};
-
-/// Statically build an [Url] instance that points to the file at `$path`
-/// within the workspace. The filesystem path contained in the return URI is
-/// guaranteed to be a valid path for the underlying operating system, but
-/// doesn't have to refer to an existing file on the host machine.
-macro_rules! url {
-    ($path:literal) => {
-        if cfg!(windows) {
-            lsp::Url::parse(concat!("file:///z%3A/workspace/", $path)).unwrap()
-        } else {
-            lsp::Url::parse(concat!("file:///workspace/", $path)).unwrap()
-        }
-    };
-}
-
-struct Server {
-    service: Timeout<LspService<LSPServer>>,
-}
-
-impl Server {
-    fn new(service: LspService<LSPServer>) -> Self {
-        Self {
-            service: Timeout::new(service, Duration::from_secs(1)),
-        }
-    }
-
-    async fn notify<P>(&mut self, method: &'static str, params: P) -> Result<()>
-    where
-        P: Serialize,
-    {
-        self.service
-            .ready()
-            .await
-            .map_err(Error::msg)
-            .context("ready() returned an error")?
-            .call(
-                Request::build(method)
-                    .params(to_value(&params).context("failed to serialize params")?)
-                    .finish(),
-            )
-            .await
-            .map_err(Error::msg)
-            .context("call() returned an error")
-            .and_then(|res| match res {
-                Some(res) => {
-                    bail!("notification returned {:?}", res)
-                }
-                _ => Ok(()),
-            })
-    }
-
-    async fn request<P, R>(
-        &mut self,
-        method: &'static str,
-        id: &'static str,
-        params: P,
-    ) -> Result<Option<R>>
-    where
-        P: Serialize,
-        R: DeserializeOwned,
-    {
-        self.service
-            .ready()
-            .await
-            .map_err(Error::msg)
-            .context("ready() returned an error")?
-            .call(
-                Request::build(method)
-                    .id(id)
-                    .params(to_value(&params).context("failed to serialize params")?)
-                    .finish(),
-            )
-            .await
-            .map_err(Error::msg)
-            .context("call() returned an error")?
-            .map(|res| {
-                let (_, body) = res.into_parts();
-
-                let body =
-                    body.with_context(|| format!("response to {method:?} contained an error"))?;
-
-                from_value(body.clone()).with_context(|| {
-                    format!(
-                        "failed to deserialize type {} from response {body:?}",
-                        type_name::<R>()
-                    )
-                })
-            })
-            .transpose()
-    }
-
-    /// Basic implementation of the `initialize` request for tests
-    // The `root_path` field is deprecated, but we still need to specify it
-    #[allow(deprecated)]
-    async fn initialize(&mut self) -> Result<()> {
-        let _res: InitializeResult = self
-            .request(
-                "initialize",
-                "_init",
-                InitializeParams {
-                    process_id: None,
-                    root_path: None,
-                    root_uri: Some(url!("")),
-                    initialization_options: None,
-                    capabilities: ClientCapabilities::default(),
-                    trace: None,
-                    workspace_folders: None,
-                    client_info: None,
-                    locale: None,
-                },
-            )
-            .await?
-            .context("initialize returned None")?;
-
-        Ok(())
-    }
-
-    /// Basic implementation of the `initialized` notification for tests
-    async fn initialized(&mut self) -> Result<()> {
-        self.notify("initialized", InitializedParams {}).await
-    }
-
-    /// Basic implementation of the `shutdown` notification for tests
-    async fn shutdown(&mut self) -> Result<()> {
-        self.service
-            .ready()
-            .await
-            .map_err(Error::msg)
-            .context("ready() returned an error")?
-            .call(Request::build("shutdown").finish())
-            .await
-            .map_err(Error::msg)
-            .context("call() returned an error")
-            .and_then(|res| match res {
-                Some(res) => {
-                    bail!("shutdown returned {:?}", res)
-                }
-                _ => Ok(()),
-            })
-    }
-
-    async fn open_document(&mut self, text: impl Display) -> Result<()> {
-        self.notify(
-            "textDocument/didOpen",
-            DidOpenTextDocumentParams {
-                text_document: TextDocumentItem {
-                    uri: url!("document.sql"),
-                    language_id: String::from("sql"),
-                    version: 0,
-                    text: text.to_string(),
-                },
-            },
-        )
-        .await
-    }
-
-    /// Opens a document with the given contents and name. The name must
-    /// include the file extension.
-    async fn open_named_document(&mut self, text: impl Display, document_name: Url) -> Result<()> {
-        self.notify(
-            "textDocument/didOpen",
-            DidOpenTextDocumentParams {
-                text_document: TextDocumentItem {
-                    uri: document_name,
-                    language_id: String::from("sql"),
-                    version: 0,
-                    text: text.to_string(),
-                },
-            },
-        )
-        .await
-    }
-
-    /// When calling this function, remember to insert the file inside the memory file system
-    async fn load_configuration(&mut self) -> Result<()> {
-        self.notify(
-            "workspace/didChangeConfiguration",
-            DidChangeConfigurationParams {
-                settings: to_value(()).unwrap(),
-            },
-        )
-        .await
-    }
-
-    async fn change_document(
-        &mut self,
-        version: i32,
-        content_changes: Vec<TextDocumentContentChangeEvent>,
-    ) -> Result<()> {
-        self.notify(
-            "textDocument/didChange",
-            DidChangeTextDocumentParams {
-                text_document: VersionedTextDocumentIdentifier {
-                    uri: url!("document.sql"),
-                    version,
-                },
-                content_changes,
-            },
-        )
-        .await
-    }
-
-    #[allow(unused)]
-    async fn close_document(&mut self) -> Result<()> {
-        self.notify(
-            "textDocument/didClose",
-            DidCloseTextDocumentParams {
-                text_document: TextDocumentIdentifier {
-                    uri: url!("document.sql"),
-                },
-            },
-        )
-        .await
-    }
-
-    async fn get_completion(
-        &mut self,
-        params: tower_lsp::lsp_types::CompletionParams,
-    ) -> Result<Option<CompletionResponse>> {
-        self.request::<CompletionParams, CompletionResponse>(
-            "textDocument/completion",
-            "_get_completion",
-            params,
-        )
-        .await
-    }
-
-    /// Basic implementation of the `pgt/shutdown` request for tests
-    async fn pgt_shutdown(&mut self) -> Result<()> {
-        self.request::<_, ()>("pgt/shutdown", "_pgt_shutdown", ())
-            .await?
-            .context("pgt/shutdown returned None")?;
-        Ok(())
-    }
-}
-
-/// Number of notifications buffered by the server-to-client channel before it starts blocking the current task
-const CHANNEL_BUFFER_SIZE: usize = 8;
-
-#[derive(Debug, PartialEq, Eq)]
-enum ServerNotification {
-    PublishDiagnostics(PublishDiagnosticsParams),
-}
-
-/// Basic handler for requests and notifications coming from the server for tests
-async fn client_handler<I, O>(
-    mut stream: I,
-    mut sink: O,
-    mut notify: Sender<ServerNotification>,
-) -> Result<()>
-where
-    // This function has to be generic as `RequestStream` and `ResponseSink`
-    // are not exported from `tower_lsp` and cannot be named in the signature
-    I: Stream<Item = Request> + Unpin,
-    O: Sink<Response> + Unpin,
-{
-    while let Some(req) = stream.next().await {
-        if req.method() == "textDocument/publishDiagnostics" {
-            let params = req.params().expect("invalid request");
-            let diagnostics = from_value(params.clone()).expect("invalid params");
-            let notification = ServerNotification::PublishDiagnostics(diagnostics);
-            match notify.send(notification).await {
-                Ok(_) => continue,
-                Err(_) => break,
-            }
-        }
-
-        let id = match req.id() {
-            Some(id) => id,
-            None => continue,
-        };
-
-        let res = Response::from_error(id.clone(), jsonrpc::Error::method_not_found());
-
-        sink.send(res).await.ok();
-    }
-
-    Ok(())
-}
-
-#[tokio::test]
-async fn basic_lifecycle() -> Result<()> {
-    let factory = ServerFactory::default();
-    let (service, client) = factory.create(None).into_inner();
-    let (stream, sink) = client.split();
-    let mut server = Server::new(service);
-
-    let (sender, _) = channel(CHANNEL_BUFFER_SIZE);
-    let reader = tokio::spawn(client_handler(stream, sink, sender));
-
-    server.initialize().await?;
-    server.initialized().await?;
-
-    server.shutdown().await?;
-    reader.abort();
-
-    Ok(())
-}
-
-#[tokio::test]
-async fn test_database_connection() -> Result<()> {
-    let factory = ServerFactory::default();
-    let mut fs =
MemoryFileSystem::default(); - let test_db = get_new_test_db().await; - - let setup = r#" - create table public.users ( - id serial primary key, - name varchar(255) not null - ); - "#; - - test_db - .execute(setup) - .await - .expect("Failed to setup test database"); - - let mut conf = PartialConfiguration::init(); - conf.merge_with(PartialConfiguration { - db: Some(PartialDatabaseConfiguration { - database: Some( - test_db - .connect_options() - .get_database() - .unwrap() - .to_string(), - ), - ..Default::default() - }), - ..Default::default() - }); - fs.insert( - url!("postgrestools.jsonc").to_file_path().unwrap(), - serde_json::to_string_pretty(&conf).unwrap(), - ); - - let (service, client) = factory - .create_with_fs(None, DynRef::Owned(Box::new(fs))) - .into_inner(); - - let (stream, sink) = client.split(); - let mut server = Server::new(service); - - let (sender, mut receiver) = channel(CHANNEL_BUFFER_SIZE); - let reader = tokio::spawn(client_handler(stream, sink, sender)); - - server.initialize().await?; - server.initialized().await?; - - server.load_configuration().await?; - - server - .open_document("select unknown from public.users; ") - .await?; - - // in this test, we want to ensure a database connection is established and the schema cache is - // loaded. This is the case when the server sends typecheck diagnostics for the query above. - // so we wait for diagnostics to be sent. - let notification = tokio::time::timeout(Duration::from_secs(5), async { - loop { - match receiver.next().await { - Some(ServerNotification::PublishDiagnostics(msg)) => { - if msg - .diagnostics - .iter() - .any(|d| d.message.contains("column \"unknown\" does not exist")) - { - return true; - } - } - _ => continue, - } - } - }) - .await - .is_ok(); - - assert!(notification, "expected diagnostics for unknown column"); - - server.shutdown().await?; - reader.abort(); - - Ok(()) -} - -#[tokio::test] -async fn server_shutdown() -> Result<()> { - let factory = ServerFactory::default(); - let (service, client) = factory.create(None).into_inner(); - let (stream, sink) = client.split(); - let mut server = Server::new(service); - - let (sender, _) = channel(CHANNEL_BUFFER_SIZE); - let reader = tokio::spawn(client_handler(stream, sink, sender)); - - server.initialize().await?; - server.initialized().await?; - - let cancellation = factory.cancellation(); - let cancellation = cancellation.notified(); - - // this is called when `postgrestools stop` is run by the user - server.pgt_shutdown().await?; - - cancellation.await; - - reader.abort(); - - Ok(()) -} - -#[tokio::test] -async fn test_completions() -> Result<()> { - let factory = ServerFactory::default(); - let mut fs = MemoryFileSystem::default(); - let test_db = get_new_test_db().await; - - let setup = r#" - create table public.users ( - id serial primary key, - name varchar(255) not null - ); - "#; - - test_db - .execute(setup) - .await - .expect("Failed to setup test database"); - - let mut conf = PartialConfiguration::init(); - conf.merge_with(PartialConfiguration { - db: Some(PartialDatabaseConfiguration { - database: Some( - test_db - .connect_options() - .get_database() - .unwrap() - .to_string(), - ), - ..Default::default() - }), - ..Default::default() - }); - fs.insert( - url!("postgrestools.jsonc").to_file_path().unwrap(), - serde_json::to_string_pretty(&conf).unwrap(), - ); - - let (service, client) = factory - .create_with_fs(None, DynRef::Owned(Box::new(fs))) - .into_inner(); - - let (stream, sink) = client.split(); - let mut server = 
Server::new(service); - - let (sender, _) = channel(CHANNEL_BUFFER_SIZE); - let reader = tokio::spawn(client_handler(stream, sink, sender)); - - server.initialize().await?; - server.initialized().await?; - - server.load_configuration().await?; - - server - .open_document("alter table appointment alter column end_time drop not null;\n") - .await?; - - server - .change_document( - 3, - vec![TextDocumentContentChangeEvent { - range: Some(Range { - start: Position { - line: 0, - character: 24, - }, - end: Position { - line: 0, - character: 24, - }, - }), - range_length: Some(0), - text: " ".to_string(), - }], - ) - .await?; - - let res = server - .get_completion(CompletionParams { - work_done_progress_params: WorkDoneProgressParams::default(), - partial_result_params: PartialResultParams::default(), - context: None, - text_document_position: TextDocumentPositionParams { - text_document: TextDocumentIdentifier { - uri: url!("document.sql"), - }, - position: Position { - line: 0, - character: 25, - }, - }, - }) - .await?; - - assert!(res.is_some()); - - server.shutdown().await?; - reader.abort(); - - Ok(()) -} - -#[tokio::test] -async fn test_issue_271() -> Result<()> { - let factory = ServerFactory::default(); - let mut fs = MemoryFileSystem::default(); - let test_db = get_new_test_db().await; - - let setup = r#" - create table public.users ( - id serial primary key, - name varchar(255) not null - ); - "#; - - test_db - .execute(setup) - .await - .expect("Failed to setup test database"); - - let mut conf = PartialConfiguration::init(); - conf.merge_with(PartialConfiguration { - db: Some(PartialDatabaseConfiguration { - database: Some( - test_db - .connect_options() - .get_database() - .unwrap() - .to_string(), - ), - ..Default::default() - }), - ..Default::default() - }); - fs.insert( - url!("postgrestools.jsonc").to_file_path().unwrap(), - serde_json::to_string_pretty(&conf).unwrap(), - ); - - let (service, client) = factory - .create_with_fs(None, DynRef::Owned(Box::new(fs))) - .into_inner(); - - let (stream, sink) = client.split(); - let mut server = Server::new(service); - - let (sender, _) = channel(CHANNEL_BUFFER_SIZE); - let reader = tokio::spawn(client_handler(stream, sink, sender)); - - server.initialize().await?; - server.initialized().await?; - - server.load_configuration().await?; - - server - .open_document("CREATE COLLATION ignore_accent_case (provider = icu, deterministic = false, locale = 'und-u-ks-level1');\n\n-- CREATE OR REPLACE FUNCTION\n-- add_one(integer)\n-- RETURNS\n-- integer\n-- AS\n-- 'add_one.so', 'add_one'\n-- LANGUAGE\n-- C \n-- STRICT;\n\n\nSELECT pwhash, FROM users;") - .await?; - - server - .change_document( - 3, - vec![TextDocumentContentChangeEvent { - range: Some(Range { - start: Position { - line: 13, - character: 13, - }, - end: Position { - line: 13, - character: 14, - }, - }), - range_length: Some(0), - text: "".to_string(), - }], - ) - .await?; - - server - .change_document( - 1, - vec![TextDocumentContentChangeEvent { - range: Some(Range { - start: Position { - line: 13, - character: 13, - }, - end: Position { - line: 13, - character: 13, - }, - }), - range_length: Some(0), - text: ",".to_string(), - }], - ) - .await?; - - server - .change_document( - 2, - vec![TextDocumentContentChangeEvent { - range: Some(Range { - start: Position { - line: 13, - character: 14, - }, - end: Position { - line: 13, - character: 14, - }, - }), - range_length: Some(0), - text: " ".to_string(), - }], - ) - .await?; - - server - .change_document( - 3, - 
vec![TextDocumentContentChangeEvent { - range: Some(Range { - start: Position { - line: 13, - character: 15, - }, - end: Position { - line: 13, - character: 15, - }, - }), - range_length: Some(0), - text: "county_name".to_string(), - }], - ) - .await?; - - server - .change_document( - 4, - vec![TextDocumentContentChangeEvent { - range: Some(Range { - start: Position { - line: 13, - character: 13, - }, - end: Position { - line: 13, - character: 26, - }, - }), - range_length: Some(13), - text: "".to_string(), - }], - ) - .await?; - - server - .change_document( - 5, - vec![TextDocumentContentChangeEvent { - range: Some(Range { - start: Position { - line: 13, - character: 13, - }, - end: Position { - line: 13, - character: 13, - }, - }), - range_length: Some(0), - text: ",".to_string(), - }], - ) - .await?; - - // crashes with range end index 37 out of range for slice of length 26 - let res = server - .get_completion(CompletionParams { - work_done_progress_params: WorkDoneProgressParams::default(), - partial_result_params: PartialResultParams::default(), - context: None, - text_document_position: TextDocumentPositionParams { - text_document: TextDocumentIdentifier { - uri: url!("document.sql"), - }, - position: Position { - line: 13, - character: 14, - }, - }, - }) - .await?; - - assert!(res.is_some()); - - server.shutdown().await?; - reader.abort(); - - Ok(()) -} - -#[tokio::test] -async fn test_execute_statement() -> Result<()> { - let factory = ServerFactory::default(); - let mut fs = MemoryFileSystem::default(); - let test_db = get_new_test_db().await; - - let database = test_db - .connect_options() - .get_database() - .unwrap() - .to_string(); - let host = test_db.connect_options().get_host().to_string(); - - let mut conf = PartialConfiguration::init(); - conf.merge_with(PartialConfiguration { - db: Some(PartialDatabaseConfiguration { - database: Some(database), - host: Some(host), - ..Default::default() - }), - ..Default::default() - }); - - fs.insert( - url!("postgrestools.jsonc").to_file_path().unwrap(), - serde_json::to_string_pretty(&conf).unwrap(), - ); - - let (service, client) = factory - .create_with_fs(None, DynRef::Owned(Box::new(fs))) - .into_inner(); - - let (stream, sink) = client.split(); - let mut server = Server::new(service); - - let (sender, _) = channel(CHANNEL_BUFFER_SIZE); - let reader = tokio::spawn(client_handler(stream, sink, sender)); - - server.initialize().await?; - server.initialized().await?; - - server.load_configuration().await?; - - let users_tbl_exists = async || { - let result = sqlx::query!( - r#" - select exists ( - select 1 as exists - from pg_catalog.pg_tables - where tablename = 'users' - ); - "# - ) - .fetch_one(&test_db.clone()) - .await; - - result.unwrap().exists.unwrap() - }; - - assert!( - !(users_tbl_exists().await), - "The user table shouldn't exist at this point." - ); - - let doc_content = r#" - create table users ( - id serial primary key, - name text, - email text - ); - "#; - - let doc_url = url!("test.sql"); - - server - .open_named_document(doc_content.to_string(), doc_url.clone()) - .await?; - - let code_actions_response = server - .request::( - "textDocument/codeAction", - "_code_action", - CodeActionParams { - text_document: TextDocumentIdentifier { - uri: doc_url.clone(), - }, - range: Range { - start: Position::new(3, 7), - end: Position::new(3, 7), - }, // just somewhere within the statement. 
-                context: CodeActionContext::default(),
-                partial_result_params: PartialResultParams::default(),
-                work_done_progress_params: WorkDoneProgressParams::default(),
-            },
-        )
-        .await?
-        .unwrap();
-
-    let exec_statement_command: (String, Vec<Value>) = code_actions_response
-        .iter()
-        .find_map(|action_or_cmd| match action_or_cmd {
-            lsp::CodeActionOrCommand::CodeAction(code_action) => {
-                let command = code_action.command.as_ref();
-                if command.is_some_and(|cmd| &cmd.command == "pgt.executeStatement") {
-                    let command = command.unwrap();
-                    let arguments = command.arguments.as_ref().unwrap().clone();
-                    Some((command.command.clone(), arguments))
-                } else {
-                    None
-                }
-            }
-
-            _ => None,
-        })
-        .expect("Did not find executeStatement command!");
-
-    server
-        .request::<ExecuteCommandParams, Option<Value>>(
-            "workspace/executeCommand",
-            "_execStmt",
-            ExecuteCommandParams {
-                command: exec_statement_command.0,
-                arguments: exec_statement_command.1,
-                ..Default::default()
-            },
-        )
-        .await?;
-
-    assert!(
-        users_tbl_exists().await,
-        "Users table did not exist even though it should have been created by the `pgt.executeStatement` command."
-    );
-
-    server.shutdown().await?;
-    reader.abort();
-
-    Ok(())
-}
-
-#[tokio::test]
-async fn test_issue_281() -> Result<()> {
-    let factory = ServerFactory::default();
-    let mut fs = MemoryFileSystem::default();
-    let test_db = get_new_test_db().await;
-
-    let setup = r#"
-        create table public.users (
-            id serial primary key,
-            name varchar(255) not null
-        );
-    "#;
-
-    test_db
-        .execute(setup)
-        .await
-        .expect("Failed to setup test database");
-
-    let mut conf = PartialConfiguration::init();
-    conf.merge_with(PartialConfiguration {
-        db: Some(PartialDatabaseConfiguration {
-            database: Some(
-                test_db
-                    .connect_options()
-                    .get_database()
-                    .unwrap()
-                    .to_string(),
-            ),
-            ..Default::default()
-        }),
-        ..Default::default()
-    });
-    fs.insert(
-        url!("postgrestools.jsonc").to_file_path().unwrap(),
-        serde_json::to_string_pretty(&conf).unwrap(),
-    );
-
-    let (service, client) = factory
-        .create_with_fs(None, DynRef::Owned(Box::new(fs)))
-        .into_inner();
-
-    let (stream, sink) = client.split();
-    let mut server = Server::new(service);
-
-    let (sender, _) = channel(CHANNEL_BUFFER_SIZE);
-    let reader = tokio::spawn(client_handler(stream, sink, sender));
-
-    server.initialize().await?;
-    server.initialized().await?;
-
-    server.load_configuration().await?;
-
-    server.open_document("\n------------- Meta -------------\n\n-- name: GetValueFromMetaKVStore :one\nSELECT value FROM meta_kv\nWHERE key = $1;\n\n-- name: SetValueToMetaKVStore :exec\nINSERT INTO meta_kv (key, value)\nVALUES ($1, $2)\nON CONFLICT (key) DO UPDATE\nSET value = excluded.value;\n\n\nasdsadsad\n\nыывфыв khgk\nasdыdsf\ndsdsjdfnfmdsвтьвыаыdsfsmndf,m\nы\n").await?;
-
-    let chars = ["s", "n", ",", "d", "f", "j", "s", "d", "f", "в"];
-
-    for (i, c) in chars.iter().enumerate() {
-        server
-            .change_document(
-                i as i32 + 4,
-                vec![TextDocumentContentChangeEvent {
-                    range: Some(Range {
-                        start: Position {
-                            line: 20,
-                            character: i as u32,
-                        },
-                        end: Position {
-                            line: 20,
-                            character: i as u32,
-                        },
-                    }),
-                    range_length: Some(0),
-                    text: c.to_string(),
-                }],
-            )
-            .await?;
-    }
-
-    server.shutdown().await?;
-    reader.abort();
-
-    Ok(())
-}
-
-#[tokio::test]
-async fn test_issue_303() -> Result<()> {
-    let factory = ServerFactory::default();
-    let mut fs = MemoryFileSystem::default();
-    let test_db = get_new_test_db().await;
-
-    let setup = r#"
-        create table public.users (
-            id serial primary key,
-            name varchar(255) not null
-        );
-    "#;
-
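-    // Apply the schema setup before the server connects to the test database.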
test_db - .execute(setup) - .await - .expect("Failed to setup test database"); - - let mut conf = PartialConfiguration::init(); - conf.merge_with(PartialConfiguration { - db: Some(PartialDatabaseConfiguration { - database: Some( - test_db - .connect_options() - .get_database() - .unwrap() - .to_string(), - ), - ..Default::default() - }), - ..Default::default() - }); - fs.insert( - url!("postgrestools.jsonc").to_file_path().unwrap(), - serde_json::to_string_pretty(&conf).unwrap(), - ); - - let (service, client) = factory - .create_with_fs(None, DynRef::Owned(Box::new(fs))) - .into_inner(); - - let (stream, sink) = client.split(); - let mut server = Server::new(service); - - let (sender, _) = channel(CHANNEL_BUFFER_SIZE); - let reader = tokio::spawn(client_handler(stream, sink, sender)); - - server.initialize().await?; - server.initialized().await?; - - server.load_configuration().await?; - - server.open_document("").await?; - - let chars = [ - "c", "r", "e", "a", "t", "e", " ", "t", "a", "b", "l", "e", " ", "\"\"", "h", "e", "l", - "l", "o", - ]; - let mut version = 1; - - for (i, c) in chars.iter().enumerate() { - version += 1; - server - .change_document( - version, - vec![TextDocumentContentChangeEvent { - range: Some(Range { - start: Position { - line: 0, - character: i as u32, - }, - end: Position { - line: 0, - character: i as u32, - }, - }), - range_length: Some(0), - text: c.to_string(), - }], - ) - .await?; - } - - version += 1; - server - .change_document( - version, - vec![TextDocumentContentChangeEvent { - range: Some(Range { - start: Position { - line: 0, - character: 20, - }, - end: Position { - line: 0, - character: 20, - }, - }), - range_length: Some(0), - text: " ".to_string(), - }], - ) - .await?; - - version += 1; - server - .change_document( - version, - vec![TextDocumentContentChangeEvent { - range: Some(Range { - start: Position { - line: 0, - character: 20, - }, - end: Position { - line: 0, - character: 21, - }, - }), - range_length: Some(0), - text: "".to_string(), - }], - ) - .await?; - - server.shutdown().await?; - reader.abort(); - - Ok(()) -} diff --git a/crates/pgt_markup/Cargo.toml b/crates/pgt_markup/Cargo.toml deleted file mode 100644 index 348bccf0..00000000 --- a/crates/pgt_markup/Cargo.toml +++ /dev/null @@ -1,22 +0,0 @@ -[package] -authors.workspace = true -categories.workspace = true -description = "" -edition.workspace = true -homepage.workspace = true -keywords.workspace = true -license.workspace = true -name = "pgt_markup" -repository.workspace = true -version = "0.0.0" - - -[dependencies] -proc-macro-error = { version = "1.0.4", default-features = false } -proc-macro2 = { workspace = true } -quote = "1.0.14" - -[dev-dependencies] - -[lib] -proc-macro = true diff --git a/crates/pgt_markup/README.md b/crates/pgt_markup/README.md deleted file mode 100644 index 75c528cd..00000000 --- a/crates/pgt_markup/README.md +++ /dev/null @@ -1,10 +0,0 @@ -# `pgt_markup` - -The crate contains procedural macros to build `pgt_console` markup object with a JSX-like syntax - -The macro cannot be used alone as it generates code that requires supporting types declared in the -`pgt_console` crate, so it's re-exported from there and should be used as `pgt_console::markup` - -## Acknowledgement - -This crate was initially forked from [biome](https://github.com/biomejs/biome). 
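A minimal usage sketch of the macro described in the README above (annotation, not part of the original diff) — assuming pgt_console keeps biome_console's `ConsoleExt`/`EnvConsole` API and `MarkupElement` variants such as `Info` and `Emphasis`:

```rust
use pgt_console::{markup, ConsoleExt, EnvConsole};

fn main() {
    // EnvConsole writes to stdout/stderr; `log` accepts a `Markup` value.
    let mut console = EnvConsole::default();
    // The JSX-like syntax nests elements; string literals and `{expr}` groups
    // become markup nodes tagged with the enclosing element stack.
    console.log(markup! {
        <Info>"Loaded configuration from "<Emphasis>"postgrestools.jsonc"</Emphasis></Info>
    });
}
```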
diff --git a/crates/pgt_markup/src/lib.rs b/crates/pgt_markup/src/lib.rs deleted file mode 100644 index eb253078..00000000 --- a/crates/pgt_markup/src/lib.rs +++ /dev/null @@ -1,169 +0,0 @@ -use proc_macro_error::*; -use proc_macro2::{Delimiter, Group, Ident, TokenStream, TokenTree}; -use quote::{ToTokens, quote}; - -struct StackEntry { - name: Ident, - attributes: Vec<(Ident, TokenTree)>, -} - -impl ToTokens for StackEntry { - fn to_tokens(&self, tokens: &mut TokenStream) { - let name = &self.name; - tokens.extend(quote! { - pgt_console::MarkupElement::#name - }); - - if !self.attributes.is_empty() { - let attributes: Vec<_> = self - .attributes - .iter() - .map(|(key, value)| quote! { #key: (#value).into() }) - .collect(); - - tokens.extend(quote! { { #( #attributes ),* } }) - } - } -} - -#[proc_macro] -#[proc_macro_error] -pub fn markup(input: proc_macro::TokenStream) -> proc_macro::TokenStream { - let mut input = TokenStream::from(input).into_iter().peekable(); - let mut stack = Vec::new(); - let mut output = Vec::new(); - - while let Some(token) = input.next() { - match token { - TokenTree::Punct(punct) => match punct.as_char() { - '<' => { - let is_closing_element = match input.peek() { - Some(TokenTree::Punct(punct)) if punct.as_char() == '/' => { - // SAFETY: Guarded by above call to peek - input.next().unwrap(); - true - } - _ => false, - }; - - let name = match input.next() { - Some(TokenTree::Ident(ident)) => ident, - Some(token) => abort!(token.span(), "unexpected token"), - None => abort_call_site!("unexpected end of input"), - }; - - let mut attributes = Vec::new(); - while let Some(TokenTree::Ident(_)) = input.peek() { - // SAFETY: these panics are checked by the above call to peek - let attr = match input.next().unwrap() { - TokenTree::Ident(attr) => attr, - _ => unreachable!(), - }; - - match input.next() { - Some(TokenTree::Punct(punct)) => { - if punct.as_char() != '=' { - abort!(punct.span(), "unexpected token"); - } - } - Some(token) => abort!(token.span(), "unexpected token"), - None => abort_call_site!("unexpected end of input"), - } - - let value = match input.next() { - Some(TokenTree::Literal(value)) => TokenTree::Literal(value), - Some(TokenTree::Group(group)) => { - TokenTree::Group(Group::new(Delimiter::None, group.stream())) - } - Some(token) => abort!(token.span(), "unexpected token"), - None => abort_call_site!("unexpected end of input"), - }; - - attributes.push((attr, value)); - } - - let is_self_closing = match input.next() { - Some(TokenTree::Punct(punct)) => match punct.as_char() { - '>' => false, - '/' if !is_closing_element => { - match input.next() { - Some(TokenTree::Punct(punct)) if punct.as_char() == '>' => {} - Some(token) => abort!(token.span(), "unexpected token"), - None => abort_call_site!("unexpected end of input"), - } - true - } - _ => abort!(punct.span(), "unexpected token"), - }, - Some(token) => abort!(token.span(), "unexpected token"), - None => abort_call_site!("unexpected end of input"), - }; - - if !is_closing_element { - stack.push(StackEntry { - name: name.clone(), - attributes: attributes.clone(), - }); - } else if let Some(top) = stack.last() { - // Only verify the coherence of the top element on the - // stack with a closing element, skip over the check if - // the stack is empty as that error will be handled - // when the top element gets popped off the stack later - let name_str = name.to_string(); - let top_str = top.name.to_string(); - if name_str != top_str { - abort!( - name.span(), "closing element mismatch"; - close = 
"found closing element {}", name_str; - open = top.name.span() => "expected {}", top_str - ); - } - } - - if (is_closing_element || is_self_closing) && stack.pop().is_none() { - abort!(name.span(), "unexpected closing element"); - } - } - _ => { - abort!(punct.span(), "unexpected token"); - } - }, - TokenTree::Literal(literal) => { - let elements: Vec<_> = stack - .iter() - .map(|entry| { - quote! { #entry } - }) - .collect(); - - output.push(quote! { - pgt_console::MarkupNode { - elements: &[ #( #elements ),* ], - content: &(#literal), - } - }); - } - TokenTree::Group(group) => match group.delimiter() { - Delimiter::Brace => { - let elements: Vec<_> = stack.iter().map(|entry| quote! { #entry }).collect(); - - let body = group.stream(); - output.push(quote! { - pgt_console::MarkupNode { - elements: &[ #( #elements ),* ], - content: &(#body) as &dyn pgt_console::fmt::Display, - } - }); - } - _ => abort!(group.span(), "unexpected token"), - }, - TokenTree::Ident(_) => abort!(token.span(), "unexpected token"), - } - } - - if let Some(top) = stack.pop() { - abort!(top.name.span(), "unclosed element"); - } - - quote! { pgt_console::Markup(&[ #( #output ),* ]) }.into() -} diff --git a/crates/pgt_query_ext/Cargo.toml b/crates/pgt_query_ext/Cargo.toml deleted file mode 100644 index c6754b67..00000000 --- a/crates/pgt_query_ext/Cargo.toml +++ /dev/null @@ -1,24 +0,0 @@ -[package] -authors.workspace = true -categories.workspace = true -description = "" -edition.workspace = true -homepage.workspace = true -keywords.workspace = true -license.workspace = true -name = "pgt_query_ext" -repository.workspace = true -version = "0.0.0" - - -[dependencies] -petgraph = "0.6.4" - -pg_query.workspace = true -pgt_diagnostics.workspace = true -pgt_lexer.workspace = true -pgt_query_ext_codegen.workspace = true -pgt_text_size.workspace = true - -[lib] -doctest = false diff --git a/crates/pgt_query_ext/src/codegen.rs b/crates/pgt_query_ext/src/codegen.rs deleted file mode 100644 index 8278383b..00000000 --- a/crates/pgt_query_ext/src/codegen.rs +++ /dev/null @@ -1 +0,0 @@ -pgt_query_ext_codegen::codegen!(); diff --git a/crates/pgt_query_ext/src/diagnostics.rs b/crates/pgt_query_ext/src/diagnostics.rs deleted file mode 100644 index aa16db81..00000000 --- a/crates/pgt_query_ext/src/diagnostics.rs +++ /dev/null @@ -1,25 +0,0 @@ -use pgt_diagnostics::{Diagnostic, MessageAndDescription}; -use pgt_text_size::TextRange; - -/// A specialized diagnostic for the libpg_query parser. -/// -/// Parser diagnostics are always **errors**. -#[derive(Clone, Debug, Diagnostic)] -#[diagnostic(category = "syntax", severity = Error)] -pub struct SyntaxDiagnostic { - /// The location where the error is occurred - #[location(span)] - span: Option, - #[message] - #[description] - pub message: MessageAndDescription, -} - -impl From for SyntaxDiagnostic { - fn from(err: pg_query::Error) -> Self { - SyntaxDiagnostic { - span: None, - message: MessageAndDescription::from(err.to_string()), - } - } -} diff --git a/crates/pgt_query_ext/src/lib.rs b/crates/pgt_query_ext/src/lib.rs deleted file mode 100644 index c1f5fb49..00000000 --- a/crates/pgt_query_ext/src/lib.rs +++ /dev/null @@ -1,32 +0,0 @@ -//! Postgres Statement Parser -//! -//! Simple wrapper crate for `pg_query` to expose types and a function to get the root node for an -//! SQL statement. -//! -//! It also host any "extensions" to the `pg_query` crate that are not yet contributed upstream. -//! Extensions include -//! - `get_location` to get the location of a node -//! 
- `get_node_properties` to get the properties of a node
-//! - `get_nodes` to get all the nodes in the AST as a petgraph tree
-//! - `ChildrenIterator` to iterate over the children of a node
-mod codegen;
-pub mod diagnostics;
-
-pub use pg_query::protobuf;
-pub use pg_query::{Error, NodeEnum, Result};
-
-pub use codegen::{
-    ChildrenIterator, Node, TokenProperty, get_location, get_node_properties, get_nodes,
-};
-
-pub fn parse(sql: &str) -> Result<NodeEnum> {
-    pg_query::parse(sql).map(|parsed| {
-        parsed
-            .protobuf
-            .nodes()
-            .iter()
-            .find(|n| n.1 == 1)
-            .map(|n| n.0.to_enum())
-            .ok_or_else(|| Error::Parse("Unable to find root node".to_string()))
-    })?
-}
diff --git a/crates/pgt_query_ext_codegen/Cargo.toml b/crates/pgt_query_ext_codegen/Cargo.toml
deleted file mode 100644
index c3a0f20d..00000000
--- a/crates/pgt_query_ext_codegen/Cargo.toml
+++ /dev/null
@@ -1,22 +0,0 @@
-[package]
-authors.workspace = true
-categories.workspace = true
-description = ""
-edition.workspace = true
-homepage.workspace = true
-keywords.workspace = true
-license.workspace = true
-name = "pgt_query_ext_codegen"
-repository.workspace = true
-version = "0.0.0"
-
-
-[dependencies]
-proc-macro2.workspace = true
-quote.workspace = true
-
-pgt_query_proto_parser.workspace = true
-
-[lib]
-doctest = false
-proc-macro = true
diff --git a/crates/pgt_query_ext_codegen/src/get_location.rs b/crates/pgt_query_ext_codegen/src/get_location.rs
deleted file mode 100644
index fa6fa8b2..00000000
--- a/crates/pgt_query_ext_codegen/src/get_location.rs
+++ /dev/null
@@ -1,122 +0,0 @@
-use pgt_query_proto_parser::{FieldType, Node, ProtoFile};
-use proc_macro2::{Ident, TokenStream};
-use quote::{format_ident, quote};
-
-pub fn get_location_mod(proto_file: &ProtoFile) -> proc_macro2::TokenStream {
-    let manual_node_names = manual_node_names();
-
-    let node_identifiers = node_identifiers(&proto_file.nodes, &manual_node_names);
-    let location_idents = location_idents(&proto_file.nodes, &manual_node_names);
-
-    quote! {
-        /// Returns the location of a node
-        pub fn get_location(node: &NodeEnum) -> Option<usize> {
-            let loc = get_location_internal(node);
-            if loc.is_some() {
-                usize::try_from(loc.unwrap()).ok()
-            } else {
-                None
-            }
-        }
-
-        fn get_location_internal(node: &NodeEnum) -> Option<i32> {
-            let location = match node {
-                // for some nodes, the location of the node itself comes after the
-                // locations of its children. we implement the logic for those nodes manually.
-                // if you add one, make sure to add its name to `manual_node_names()`.
-                NodeEnum::BoolExpr(n) => {
-                    let a = n.args.iter().min_by(|a, b| {
-                        let loc_a = get_location_internal(&a.node.as_ref().unwrap());
-                        let loc_b = get_location_internal(&b.node.as_ref().unwrap());
-                        loc_a.cmp(&loc_b)
-                    });
-                    get_location_internal(&a.unwrap().node.as_ref().unwrap())
-                },
-                NodeEnum::AExpr(n) => get_location_internal(&n.lexpr.as_ref().unwrap().node.as_ref().unwrap()),
-                NodeEnum::WindowDef(n) => {
-                    if n.partition_clause.len() > 0 || n.order_clause.len() > 0 {
-                        // the location is not correct if it's the definition clause, e.g. for
for - // window w as (partition by a order by b) - // the location is the start of the `partition` token - None - } else { - Some(n.location) - } - }, - NodeEnum::CollateClause(n) => get_location_internal(&n.arg.as_ref().unwrap().node.as_ref().unwrap()), - NodeEnum::TypeCast(n) => get_location_internal(&n.arg.as_ref().unwrap().node.as_ref().unwrap()), - NodeEnum::ColumnDef(n) => if n.colname.len() > 0 { - Some(n.location) - } else { - None - }, - NodeEnum::NullTest(n) => if n.arg.is_some() { - get_location_internal(&n.arg.as_ref().unwrap().node.as_ref().unwrap()) - } else { - Some(n.location) - }, - NodeEnum::PublicationObjSpec(n) => { - match &n.pubtable { - Some(pubtable) => match &pubtable.relation { - Some(range_var) => Some(range_var.location), - None => Some(n.location), - }, - None => Some(n.location), - } - }, - NodeEnum::BooleanTest(n) => { - if n.arg.is_some() { - get_location_internal(&n.arg.as_ref().unwrap().node.as_ref().unwrap()) - } else { - Some(n.location) - } - }, - #(NodeEnum::#node_identifiers(n) => #location_idents),* - }; - if location.is_some() && location.unwrap() < 0 { - None - } else { - location - } - } - } -} - -fn manual_node_names() -> Vec<&'static str> { - vec![ - "BoolExpr", - "AExpr", - "WindowDef", - "CollateClause", - "TypeCast", - "ColumnDef", - "NullTest", - "PublicationObjSpec", - ] -} - -fn location_idents(nodes: &[Node], exclude_nodes: &[&str]) -> Vec { - nodes - .iter() - .filter(|n| !exclude_nodes.contains(&n.name.as_str())) - .map(|node| { - if node - .fields - .iter() - .any(|n| n.name == "location" && n.field_type == FieldType::Int32) - { - quote! { Some(n.location) } - } else { - quote! { None } - } - }) - .collect() -} - -fn node_identifiers(nodes: &[Node], exclude_nodes: &[&str]) -> Vec { - nodes - .iter() - .filter(|n| !exclude_nodes.contains(&n.name.as_str())) - .map(|node| format_ident!("{}", &node.name)) - .collect() -} diff --git a/crates/pgt_query_ext_codegen/src/get_node_properties.rs b/crates/pgt_query_ext_codegen/src/get_node_properties.rs deleted file mode 100644 index 9581304b..00000000 --- a/crates/pgt_query_ext_codegen/src/get_node_properties.rs +++ /dev/null @@ -1,1006 +0,0 @@ -use pgt_query_proto_parser::{FieldType, Node, ProtoFile}; -use proc_macro2::{Ident, TokenStream}; -use quote::{format_ident, quote}; - -pub fn get_node_properties_mod(proto_file: &ProtoFile) -> proc_macro2::TokenStream { - let node_identifiers = node_identifiers(&proto_file.nodes); - let node_handlers = node_handlers(&proto_file.nodes); - - quote! 
{ - #[derive(Debug, Clone, PartialEq)] - pub struct TokenProperty { - pub value: Option<String>, - pub kind: Option<SyntaxKind>, - } - - impl TokenProperty { - pub fn new(value: Option<String>, kind: Option<SyntaxKind>) -> TokenProperty { - if value.is_none() && kind.is_none() { - panic!("TokenProperty must have either value or kind"); - } - TokenProperty { value, kind } - } - } - - impl From<i32> for TokenProperty { - fn from(value: i32) -> TokenProperty { - TokenProperty { - value: Some(value.to_string()), - kind: None, - } - } - } - - impl From<u32> for TokenProperty { - fn from(value: u32) -> TokenProperty { - TokenProperty { - value: Some(value.to_string()), - kind: None, - } - } - } - - - impl From<i64> for TokenProperty { - fn from(value: i64) -> TokenProperty { - TokenProperty { - value: Some(value.to_string()), - kind: None, - } - } - } - - impl From<u64> for TokenProperty { - fn from(value: u64) -> TokenProperty { - TokenProperty { - value: Some(value.to_string()), - kind: None, - } - } - } - - impl From<f64> for TokenProperty { - fn from(value: f64) -> TokenProperty { - TokenProperty { - value: Some(value.to_string()), - kind: None, - } - } - } - - impl From<bool> for TokenProperty { - fn from(value: bool) -> TokenProperty { - TokenProperty { - value: Some(value.to_string()), - kind: None, - } - } - } - - impl From<String> for TokenProperty { - fn from(value: String) -> TokenProperty { - assert!(value.len() > 0, "String property value has length 0"); - TokenProperty { - value: Some(value.to_lowercase()), - kind: None, - } - } - } - - - impl From<&pg_query::protobuf::Integer> for TokenProperty { - fn from(node: &pg_query::protobuf::Integer) -> TokenProperty { - TokenProperty { - value: Some(node.ival.to_string()), - kind: Some(SyntaxKind::Iconst) - } - } - } - - impl From<&pg_query::protobuf::Boolean> for TokenProperty { - fn from(node: &pg_query::protobuf::Boolean) -> TokenProperty { - TokenProperty { - value: Some(node.boolval.to_string()), - kind: match node.boolval { - true => Some(SyntaxKind::TrueP), - false => Some(SyntaxKind::FalseP), - } - } - } - } - - impl From<SyntaxKind> for TokenProperty { - fn from(kind: SyntaxKind) -> TokenProperty { - TokenProperty { - value: None, - kind: Some(kind), - } - } - } - - impl From<Token> for TokenProperty { - fn from(token: Token) -> TokenProperty { - TokenProperty { - value: None, - kind: Some(SyntaxKind::from(token)), - } - } - } - - pub fn get_node_properties(node: &NodeEnum, parent: Option<&NodeEnum>) -> Vec<TokenProperty> { - let mut tokens: Vec<TokenProperty> = Vec::new(); - - match node { - #(NodeEnum::#node_identifiers(n) => {#node_handlers}),*, - }; - - tokens - } - - } -} - -fn node_identifiers(nodes: &[Node]) -> Vec<Ident> { - nodes - .iter() - .map(|node| format_ident!("{}", &node.name)) - .collect() -} - -fn node_handlers(nodes: &[Node]) -> Vec<TokenStream> { - nodes - .iter() - .map(|node| { - let string_property_handlers = string_property_handlers(node); - let custom_handlers = custom_handlers(node); - quote! { - #custom_handlers - #(#string_property_handlers)* - } - }) - .collect() -} - -fn custom_handlers(node: &Node) -> TokenStream { - match node.name.as_str() { - "SelectStmt" => quote!
{ - tokens.push(TokenProperty::from(Token::Select)); - if n.distinct_clause.len() > 0 { - tokens.push(TokenProperty::from(Token::Distinct)); - } - if n.values_lists.len() > 0 { - tokens.push(TokenProperty::from(Token::Values)); - } - if n.from_clause.len() > 0 { - tokens.push(TokenProperty::from(Token::From)); - } - if n.where_clause.is_some() { - tokens.push(TokenProperty::from(Token::Where)); - } - if n.group_clause.len() > 0 { - tokens.push(TokenProperty::from(Token::GroupP)); - tokens.push(TokenProperty::from(Token::By)); - } - match n.op() { - protobuf::SetOperation::Undefined => {}, - protobuf::SetOperation::SetopNone => {}, - protobuf::SetOperation::SetopUnion => tokens.push(TokenProperty::from(Token::Union)), - protobuf::SetOperation::SetopIntersect => tokens.push(TokenProperty::from(Token::Intersect)), - protobuf::SetOperation::SetopExcept => tokens.push(TokenProperty::from(Token::Except)), - _ => panic!("Unknown SelectStmt op {:#?}", n.op()), - } - if n.all { - tokens.push(TokenProperty::from(Token::All)); - } - }, - "BoolExpr" => quote! { - match n.boolop() { - protobuf::BoolExprType::AndExpr => tokens.push(TokenProperty::from(Token::And)), - protobuf::BoolExprType::OrExpr => tokens.push(TokenProperty::from(Token::Or)), - protobuf::BoolExprType::NotExpr => tokens.push(TokenProperty::from(Token::Not)), - _ => panic!("Unknown BoolExpr {:#?}", n.boolop()), - } - }, - "JoinExpr" => quote! { - tokens.push(TokenProperty::from(Token::Join)); - tokens.push(TokenProperty::from(Token::On)); - match n.jointype() { - protobuf::JoinType::JoinInner => tokens.push(TokenProperty::from(Token::InnerP)), - protobuf::JoinType::JoinLeft => tokens.push(TokenProperty::from(Token::Left)), - protobuf::JoinType::JoinFull => tokens.push(TokenProperty::from(Token::Full)), - protobuf::JoinType::JoinRight => tokens.push(TokenProperty::from(Token::Right)), - _ => panic!("Unknown JoinExpr jointype {:#?}", n.jointype()), - } - - }, - "ResTarget" => quote! { - if n.name.len() > 0 { - tokens.push(TokenProperty::from(Token::As)); - } - }, - "Integer" => quote! { - tokens.push(TokenProperty::from(n)); - }, - "DefElem" => quote! { - match n.defname.as_str() { - "location" => { - tokens.push(TokenProperty::from(Token::Default)); - }, - "connection_limit" => { - tokens.push(TokenProperty::from(Token::Limit)); - tokens.push(TokenProperty::from(Token::Iconst)); - }, - "owner" => { - tokens.push(TokenProperty::from(Token::Owner)); - } - _ => {} - } - match n.defaction() { - protobuf::DefElemAction::DefelemUnspec => tokens.push(TokenProperty::from(Token::Ascii61)), - _ => panic!("Unknown DefElem {:#?}", n.defaction()), - } - }, - "Alias" => quote! { - tokens.push(TokenProperty::from(Token::As)); - }, - "CollateClause" => quote! { - tokens.push(TokenProperty::from(Token::Collate)); - }, - "AExpr" => quote! { - match n.kind() { - protobuf::AExprKind::AexprOp => {}, // do nothing - protobuf::AExprKind::AexprOpAny => tokens.push(TokenProperty::from(Token::Any)), - protobuf::AExprKind::AexprIn => tokens.push(TokenProperty::from(Token::InP)), - _ => panic!("Unknown AExpr kind {:#?}", n.kind()), - } - }, - "WindowDef" => quote! { - if n.partition_clause.len() > 0 || n.order_clause.len() > 0 { - tokens.push(TokenProperty::from(Token::Window)); - tokens.push(TokenProperty::from(Token::As)); - } - if n.partition_clause.len() > 0 { - tokens.push(TokenProperty::from(Token::Partition)); - tokens.push(TokenProperty::from(Token::By)); - } - }, - "Boolean" => quote! { - tokens.push(TokenProperty::from(n)); - }, - "AStar" => quote! 
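A quick illustration of what the generated `get_node_properties` yields for the `SelectStmt` arm above; a sketch assuming the `pgt_query_ext` re-exports (`parse`, `get_node_properties`, `TokenProperty`) and the `Token` enum from `pg_query::protobuf`.

```rust
use pgt_query_ext::{get_node_properties, parse, protobuf::Token, TokenProperty};

fn main() -> pgt_query_ext::Result<()> {
    let root = parse("SELECT DISTINCT name FROM users")?;
    let tokens = get_node_properties(&root, None);
    // Select is pushed unconditionally; Distinct and From are pushed because
    // distinct_clause and from_clause are non-empty for this query.
    for expected in [Token::Select, Token::Distinct, Token::From] {
        assert!(tokens.contains(&TokenProperty::from(expected)));
    }
    Ok(())
}
```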
{ - tokens.push(TokenProperty::from(Token::Ascii42)); - }, - "FuncCall" => quote! { - if n.funcname.len() == 1 && n.args.len() == 0 { - // check if count(*) - if let Some(node) = &n.funcname[0].node { - if let NodeEnum::String(n) = node { - if n.sval == "count" { - tokens.push(TokenProperty::from(Token::Ascii42)); - } - } - } - } - if n.agg_filter.is_some() { - tokens.push(TokenProperty::from(Token::Filter)); - tokens.push(TokenProperty::from(Token::Where)); - } - if n.over.is_some() { - tokens.push(TokenProperty::from(Token::Over)); - } - }, - "SqlvalueFunction" => quote! { - match n.op() { - protobuf::SqlValueFunctionOp::SvfopCurrentRole => tokens.push(TokenProperty::from(Token::CurrentRole)), - protobuf::SqlValueFunctionOp::SvfopCurrentUser => tokens.push(TokenProperty::from(Token::CurrentUser)), - _ => panic!("Unknown SqlvalueFunction {:#?}", n.op()), - } - }, - "SortBy" => quote! { - tokens.push(TokenProperty::from(Token::Order)); - tokens.push(TokenProperty::from(Token::By)); - match n.sortby_dir() { - protobuf::SortByDir::SortbyAsc => tokens.push(TokenProperty::from(Token::Asc)), - protobuf::SortByDir::SortbyDesc => tokens.push(TokenProperty::from(Token::Desc)), - _ => {} - } - }, - "AConst" => quote! { - if n.isnull { - tokens.push(TokenProperty::from(Token::NullP)); - } - }, - "AlterTableStmt" => quote! { - tokens.push(TokenProperty::from(Token::Alter)); - tokens.push(TokenProperty::from(Token::Table)); - }, - "AlterTableCmd" => quote! { - match n.subtype() { - protobuf::AlterTableType::AtColumnDefault => { - tokens.push(TokenProperty::from(Token::Alter)); - tokens.push(TokenProperty::from(Token::Column)); - tokens.push(TokenProperty::from(Token::Set)); - tokens.push(TokenProperty::from(Token::Default)); - }, - protobuf::AlterTableType::AtAddConstraint => tokens.push(TokenProperty::from(Token::AddP)), - protobuf::AlterTableType::AtAlterColumnType => { - tokens.push(TokenProperty::from(Token::Alter)); - tokens.push(TokenProperty::from(Token::Column)); - tokens.push(TokenProperty::from(Token::TypeP)); - }, - protobuf::AlterTableType::AtDropColumn => { - tokens.push(TokenProperty::from(Token::Drop)); - tokens.push(TokenProperty::from(Token::Column)); - }, - _ => panic!("Unknown AlterTableCmd {:#?}", n.subtype()), - } - }, - "VariableSetStmt" => quote! { - tokens.push(TokenProperty::from(Token::Set)); - match n.kind() { - protobuf::VariableSetKind::VarSetValue => tokens.push(TokenProperty::from(Token::To)), - _ => panic!("Unknown VariableSetStmt {:#?}", n.kind()), - } - }, - "CreatePolicyStmt" => quote! { - tokens.push(TokenProperty::from(Token::Create)); - tokens.push(TokenProperty::from(Token::Policy)); - tokens.push(TokenProperty::from(Token::On)); - if n.roles.len() > 0 { - tokens.push(TokenProperty::from(Token::To)); - } - if n.qual.is_some() { - tokens.push(TokenProperty::from(Token::Using)); - } - if n.with_check.is_some() { - tokens.push(TokenProperty::from(Token::With)); - tokens.push(TokenProperty::from(Token::Check)); - } - }, - "CopyStmt" => quote! { - tokens.push(TokenProperty::from(Token::Copy)); - tokens.push(TokenProperty::from(Token::From)); - }, - "RenameStmt" => quote! { - tokens.push(TokenProperty::from(Token::Alter)); - tokens.push(TokenProperty::from(Token::Table)); - tokens.push(TokenProperty::from(Token::Rename)); - tokens.push(TokenProperty::from(Token::To)); - }, - "Constraint" => quote! 
{ - match n.contype() { - protobuf::ConstrType::ConstrNotnull => { - tokens.push(TokenProperty::from(Token::Not)); - tokens.push(TokenProperty::from(Token::NullP)); - }, - protobuf::ConstrType::ConstrDefault => tokens.push(TokenProperty::from(Token::Default)), - protobuf::ConstrType::ConstrCheck => tokens.push(TokenProperty::from(Token::Check)), - protobuf::ConstrType::ConstrPrimary => { - tokens.push(TokenProperty::from(Token::Primary)); - tokens.push(TokenProperty::from(Token::Key)); - }, - protobuf::ConstrType::ConstrForeign => tokens.push(TokenProperty::from(Token::References)), - protobuf::ConstrType::ConstrUnique => tokens.push(TokenProperty::from(Token::Unique)), - _ => panic!("Unknown Constraint {:#?}", n.contype()), - }; - if n.options.len() > 0 { - tokens.push(TokenProperty::from(Token::With)); - } - }, - "PartitionSpec" => quote! { - tokens.push(TokenProperty::from(Token::Partition)); - tokens.push(TokenProperty::from(Token::By)); - }, - "InsertStmt" => quote! { - tokens.push(TokenProperty::from(Token::Insert)); - tokens.push(TokenProperty::from(Token::Into)); - }, - "DeleteStmt" => quote! { - tokens.push(TokenProperty::from(Token::DeleteP)); - tokens.push(TokenProperty::from(Token::From)); - if n.where_clause.is_some() { - tokens.push(TokenProperty::from(Token::Where)); - } - if n.using_clause.len() > 0 { - tokens.push(TokenProperty::from(Token::Using)); - } - }, - "ViewStmt" => quote! { - tokens.push(TokenProperty::from(Token::Create)); - tokens.push(TokenProperty::from(Token::View)); - if n.query.is_some() { - tokens.push(TokenProperty::from(Token::As)); - // check if SelectStmt with WithClause with recursive set to true - if let Some(NodeEnum::SelectStmt(select_stmt)) = n.query.as_ref().and_then(|query| query.node.as_ref()) { - if select_stmt.with_clause.is_some() && select_stmt.with_clause.as_ref().unwrap().recursive { - tokens.push(TokenProperty::from(Token::Recursive)); - } - } - } - if n.replace { - tokens.push(TokenProperty::from(Token::Or)); - tokens.push(TokenProperty::from(Token::Replace)); - } - if let Some(n) = &n.view { - match n.relpersistence.as_str() { - // Temporary - "t" => tokens.push(TokenProperty::from(Token::Temporary)), - _ => {}, - } - } - match n.with_check_option() { - protobuf::ViewCheckOption::LocalCheckOption => { - tokens.push(TokenProperty::from(Token::With)); - tokens.push(TokenProperty::from(Token::Local)); - tokens.push(TokenProperty::from(Token::Check)); - tokens.push(TokenProperty::from(Token::Option)); - }, - protobuf::ViewCheckOption::CascadedCheckOption => { - tokens.push(TokenProperty::from(Token::With)); - tokens.push(TokenProperty::from(Token::Cascaded)); - tokens.push(TokenProperty::from(Token::Check)); - tokens.push(TokenProperty::from(Token::Option)); - }, - _ => {} - } - }, - "CreateStmt" => quote! 
{ - tokens.push(TokenProperty::from(Token::Create)); - tokens.push(TokenProperty::from(Token::Table)); - if n.tablespacename.len() > 0 { - tokens.push(TokenProperty::from(Token::Tablespace)); - } - if n.options.len() > 0 { - tokens.push(TokenProperty::from(Token::With)); - } - if n.if_not_exists { - tokens.push(TokenProperty::from(Token::IfP)); - tokens.push(TokenProperty::from(Token::Not)); - tokens.push(TokenProperty::from(Token::Exists)); - } - if n.partbound.is_some() { - tokens.push(TokenProperty::from(Token::Partition)); - tokens.push(TokenProperty::from(Token::Of)); - tokens.push(TokenProperty::from(Token::For)); - tokens.push(TokenProperty::from(Token::Values)); - } - if let Some(n) = &n.relation { - match n.relpersistence.as_str() { - // Unlogged - "u" => tokens.push(TokenProperty::from(Token::Unlogged)), - // Temporary - "t" => tokens.push(TokenProperty::from(Token::Temporary)), - _ => {}, - } - if n.inh { - tokens.push(TokenProperty::from(Token::Inherits)); - } - } - }, - "TableLikeClause" => quote! { - tokens.push(TokenProperty::from(Token::Like)); - // CREATE_TABLE_LIKE_ALL - if n.options == 0x7FFFFFFF { - tokens.push(TokenProperty::from(Token::Including)); - tokens.push(TokenProperty::from(Token::All)); - } else { - tokens.push(TokenProperty::from(Token::Excluding)); - tokens.push(TokenProperty::from(Token::All)); - } - }, - "TransactionStmt" => quote! { - match n.kind() { - protobuf::TransactionStmtKind::TransStmtBegin => tokens.push(TokenProperty::from(Token::BeginP)), - protobuf::TransactionStmtKind::TransStmtCommit => tokens.push(TokenProperty::from(Token::Commit)), - _ => panic!("Unknown TransactionStmt {:#?}", n.kind()) - } - }, - "PartitionBoundSpec" => quote! { - tokens.push(TokenProperty::from(Token::From)); - tokens.push(TokenProperty::from(Token::To)); - }, - "CaseExpr" => quote! { - tokens.push(TokenProperty::from(Token::Case)); - tokens.push(TokenProperty::from(Token::EndP)); - if n.defresult.is_some() { - tokens.push(TokenProperty::from(Token::Else)); - } - }, - "NullTest" => quote! { - match n.nulltesttype() { - protobuf::NullTestType::IsNull => tokens.push(TokenProperty::from(Token::Is)), - protobuf::NullTestType::IsNotNull => { - tokens.push(TokenProperty::from(Token::Is)); - tokens.push(TokenProperty::from(Token::Not)); - }, - _ => panic!("Unknown NullTest {:#?}", n.nulltesttype()), - } - tokens.push(TokenProperty::from(Token::NullP)); - }, - "CreateFunctionStmt" => quote! 
{ - tokens.push(TokenProperty::from(Token::Create)); - if n.is_procedure { - tokens.push(TokenProperty::from(Token::Procedure)); - } else { - tokens.push(TokenProperty::from(Token::Function)); - } - if n.replace { - tokens.push(TokenProperty::from(Token::Or)); - tokens.push(TokenProperty::from(Token::Replace)); - } - if let Some(return_type) = &n.return_type { - tokens.push(TokenProperty::from(Token::Returns)); - if return_type.setof { - tokens.push(TokenProperty::from(Token::Setof)); - } - } - for option in &n.options { - if let Some(NodeEnum::DefElem(node)) = &option.node { - if node.defname == "strict" { - if let Some(NodeEnum::Boolean(node)) = - node.arg.as_ref().and_then(|arg| arg.node.as_ref()) - { - if node.boolval { - tokens.push(TokenProperty::from(Token::NullP)); - tokens.push(TokenProperty::from(Token::On)); - tokens.push(TokenProperty::from(Token::NullP)); - tokens.push(TokenProperty::from(Token::InputP)); - } else { - tokens.push(TokenProperty::from(Token::On)); - tokens.push(TokenProperty::from(Token::NullP)); - tokens.push(TokenProperty::from(Token::InputP)); - } - } - } - } - } - }, - "FunctionParameter" => quote! { - match n.mode() { - protobuf::FunctionParameterMode::FuncParamIn => tokens.push(TokenProperty::from(Token::InP)), - protobuf::FunctionParameterMode::FuncParamOut => tokens.push(TokenProperty::from(Token::OutP)), - protobuf::FunctionParameterMode::FuncParamInout => tokens.push(TokenProperty::from(Token::Inout)), - protobuf::FunctionParameterMode::FuncParamVariadic => tokens.push(TokenProperty::from(Token::Variadic)), - // protobuf::FunctionParameterMode::FuncParamTable => tokens.push(TokenProperty::from(Token::Table)), - protobuf::FunctionParameterMode::FuncParamDefault => {}, // do nothing - _ => panic!("Unknown FunctionParameter {:#?}", n.mode()), - }; - if n.defexpr.is_some() { - tokens.push(TokenProperty::from(Token::Default)); - } - }, - "NamedArgExpr" => quote! { - // => - tokens.push(TokenProperty::from(Token::EqualsGreater)); - }, - "CaseWhen" => quote! { - tokens.push(TokenProperty::from(Token::When)); - tokens.push(TokenProperty::from(Token::Then)); - }, - "TypeCast" => quote! { - tokens.push(TokenProperty::from(Token::Typecast)); - }, - "CreateDomainStmt" => quote! { - tokens.push(TokenProperty::from(Token::Create)); - tokens.push(TokenProperty::from(Token::DomainP)); - if n.type_name.is_some() { - tokens.push(TokenProperty::from(Token::As)); - } - }, - "List" => quote! 
{ - if parent.is_some() { - // if parent is `DefineStmt`, we need to check whether an ORDER BY needs to be added - if let NodeEnum::DefineStmt(define_stmt) = parent.unwrap() { - // there *seems* to be an integer node in the last position of the DefineStmt args that - // defines whether the list contains an order by statement - let integer = define_stmt.args.last() - .and_then(|node| node.node.as_ref()) - .and_then(|node| if let NodeEnum::Integer(n) = node { Some(n.ival) } else { None }); - if integer.is_none() { - panic!("DefineStmt of type ObjectAggregate has no integer node in last position of args"); - } - // if the integer is 1, then there is an order by statement - // we add it to the `List` node because that seems to make most sense based off the grammar definition - // ref: https://github.com/postgres/postgres/blob/REL_15_STABLE/src/backend/parser/gram.y#L8355 - // ``` - // aggr_args: - // | '(' aggr_args_list ORDER BY aggr_args_list ')' - // ``` - if integer.unwrap() == 1 { - tokens.push(TokenProperty::from(Token::Order)); - tokens.push(TokenProperty::from(Token::By)); - } - } - } - }, - "DefineStmt" => quote! { - tokens.push(TokenProperty::from(Token::Create)); - if n.replace { - tokens.push(TokenProperty::from(Token::Or)); - tokens.push(TokenProperty::from(Token::Replace)); - } - match n.kind() { - protobuf::ObjectType::ObjectAggregate => { - tokens.push(TokenProperty::from(Token::Aggregate)); - - // n.args is always an array with two nodes - assert_eq!(n.args.len(), 2, "DefineStmt of type ObjectAggregate does not have exactly 2 args"); - // the first is either a List or a Node { node: None } - - if let Some(node) = &n.args.first() { - if node.node.is_none() { - // if first element is a Node { node: None }, then it's "*" - tokens.push(TokenProperty::from(Token::Ascii42)); - } } - // if its a list, we handle it in the handler for `List` - }, - protobuf::ObjectType::ObjectType => { - tokens.push(TokenProperty::from(Token::TypeP)); - }, - _ => panic!("Unknown DefineStmt {:#?}", n.kind()), - } - }, - "CreateSchemaStmt" => quote! { - tokens.push(TokenProperty::from(Token::Create)); - tokens.push(TokenProperty::from(Token::Schema)); - if n.if_not_exists { - tokens.push(TokenProperty::from(Token::IfP)); - tokens.push(TokenProperty::from(Token::Not)); - tokens.push(TokenProperty::from(Token::Exists)); - } - if n.authrole.is_some() { - tokens.push(TokenProperty::from(Token::Authorization)); - } - }, - "CreateEnumStmt" => quote! { - tokens.push(TokenProperty::from(Token::Create)); - tokens.push(TokenProperty::from(Token::TypeP)); - tokens.push(TokenProperty::from(Token::As)); - tokens.push(TokenProperty::from(Token::EnumP)); - }, - "CreateCastStmt" => quote! 
{ - tokens.push(TokenProperty::from(Token::Create)); - tokens.push(TokenProperty::from(Token::Cast)); - tokens.push(TokenProperty::from(Token::As)); - if n.inout { - tokens.push(TokenProperty::from(Token::With)); - tokens.push(TokenProperty::from(Token::Inout)); - } else if n.func.is_some() { - tokens.push(TokenProperty::from(Token::With)); - tokens.push(TokenProperty::from(Token::Function)); - } else { - tokens.push(TokenProperty::from(Token::Without)); - tokens.push(TokenProperty::from(Token::Function)); - } - match n.context() { - protobuf::CoercionContext::CoercionImplicit => { - tokens.push(TokenProperty::from(Token::As)); - tokens.push(TokenProperty::from(Token::ImplicitP)); - }, - protobuf::CoercionContext::CoercionAssignment => { - tokens.push(TokenProperty::from(Token::As)); - tokens.push(TokenProperty::from(Token::Assignment)); - }, - protobuf::CoercionContext::CoercionPlpgsql => {}, - protobuf::CoercionContext::CoercionExplicit => {}, - _ => panic!("Unknown CreateCastStmt {:#?}", n.context()) - } - }, - "CreateRangeStmt" => quote! { - tokens.push(TokenProperty::from(Token::Create)); - tokens.push(TokenProperty::from(Token::TypeP)); - tokens.push(TokenProperty::from(Token::As)); - tokens.push(TokenProperty::from(Token::Range)); - }, - "IndexStmt" => quote! { - tokens.push(TokenProperty::from(Token::Create)); - if n.unique { - tokens.push(TokenProperty::from(Token::Unique)); - } - tokens.push(TokenProperty::from(Token::Index)); - if n.concurrent { - tokens.push(TokenProperty::from(Token::Concurrently)); - } - if n.if_not_exists { - tokens.push(TokenProperty::from(Token::IfP)); - tokens.push(TokenProperty::from(Token::Not)); - tokens.push(TokenProperty::from(Token::Exists)); - } - tokens.push(TokenProperty::from(Token::On)); - // access_method is btree by default - if n.access_method.len() > 0 { - tokens.push(TokenProperty::from(Token::Using)); - } - if n.index_including_params.len() > 0 { - tokens.push(TokenProperty::from(Token::Include)); - } - if n.options.len() > 0 { - tokens.push(TokenProperty::from(Token::With)); - } - // table_space is an empty string by default - if n.table_space.len() > 0 { - tokens.push(TokenProperty::from(Token::Tablespace)); - } - }, - "IndexElem" => quote! { - if n.collation.len() > 0 { - tokens.push(TokenProperty::from(Token::Collate)); - } - match n.nulls_ordering() { - protobuf::SortByNulls::SortbyNullsDefault => {}, - protobuf::SortByNulls::SortbyNullsFirst => { - tokens.push(TokenProperty::from(Token::NullsP)); - tokens.push(TokenProperty::from(Token::FirstP)); - }, - protobuf::SortByNulls::SortbyNullsLast => { - tokens.push(TokenProperty::from(Token::NullsP)); - tokens.push(TokenProperty::from(Token::LastP)); - }, - _ => panic!("Unknown IndexElem {:#?}", n.nulls_ordering()), - } - }, - "CreateTableSpaceStmt" => quote! { - tokens.push(TokenProperty::from(Token::Create)); - tokens.push(TokenProperty::from(Token::Tablespace)); - tokens.push(TokenProperty::from(Token::Location)); - if n.owner.is_some() { - tokens.push(TokenProperty::from(Token::Owner)); - } - if n.options.len() > 0 { - tokens.push(TokenProperty::from(Token::With)); - } - }, - "CreatePublicationStmt" => quote! 
{ - tokens.push(TokenProperty::from(Token::Create)); - tokens.push(TokenProperty::from(Token::Publication)); - if n.for_all_tables { - tokens.push(TokenProperty::from(Token::For)); - tokens.push(TokenProperty::from(Token::All)); - tokens.push(TokenProperty::from(Token::Tables)); - } - if let Some(n) = n.options.first() { - tokens.push(TokenProperty::from(Token::With)); - } - if let Some(n) = n.pubobjects.first() { - tokens.push(TokenProperty::from(Token::For)); - if let Some(NodeEnum::PublicationObjSpec(n)) = &n.node { - match n.pubobjtype() { - protobuf::PublicationObjSpecType::PublicationobjTable => { - tokens.push(TokenProperty::from(Token::Table)); - }, - protobuf::PublicationObjSpecType::PublicationobjTablesInSchema => { - tokens.push(TokenProperty::from(Token::Tables)); - tokens.push(TokenProperty::from(Token::InP)); - tokens.push(TokenProperty::from(Token::Schema)); - }, - _ => panic!("Unknown CreatePublicationStmt {:#?}", n.pubobjtype()) - } - } - } - if let Some(n) = n.pubobjects.last() { - if let Some(NodeEnum::PublicationObjSpec(n)) = &n.node { - match n.pubobjtype() { - protobuf::PublicationObjSpecType::PublicationobjTablesInSchema => { - tokens.push(TokenProperty::from(Token::Tables)); - tokens.push(TokenProperty::from(Token::InP)); - tokens.push(TokenProperty::from(Token::Schema)); - }, - _ => {} - } - } - } - }, - "PublicationTable" => quote! { - if n.where_clause.is_some() { - tokens.push(TokenProperty::from(Token::Where)); - } - }, - "BooleanTest" => quote! { - match n.booltesttype() { - protobuf::BoolTestType::IsTrue => { - tokens.push(TokenProperty::from(Token::Is)); - tokens.push(TokenProperty::from(Token::TrueP)); - }, - protobuf::BoolTestType::IsNotTrue => { - tokens.push(TokenProperty::from(Token::Is)); - tokens.push(TokenProperty::from(Token::Not)); - tokens.push(TokenProperty::from(Token::TrueP)); - }, - protobuf::BoolTestType::IsFalse => { - tokens.push(TokenProperty::from(Token::Is)); - tokens.push(TokenProperty::from(Token::FalseP)); - }, - protobuf::BoolTestType::IsNotFalse => { - tokens.push(TokenProperty::from(Token::Is)); - tokens.push(TokenProperty::from(Token::Not)); - tokens.push(TokenProperty::from(Token::FalseP)); - }, - _ => panic!("Unknown BooleanTest {:#?}", n.booltesttype()), - } - }, - "CompositeTypeStmt" => quote! { - tokens.push(TokenProperty::from(Token::Create)); - tokens.push(TokenProperty::from(Token::TypeP)); - tokens.push(TokenProperty::from(Token::As)); - }, - "CreatedbStmt" => quote! { - tokens.push(TokenProperty::from(Token::Create)); - tokens.push(TokenProperty::from(Token::Database)); - }, - "CreateExtensionStmt" => quote! { - tokens.push(TokenProperty::from(Token::Create)); - tokens.push(TokenProperty::from(Token::Extension)); - if n.if_not_exists { - tokens.push(TokenProperty::from(Token::IfP)); - tokens.push(TokenProperty::from(Token::Not)); - tokens.push(TokenProperty::from(Token::Exists)); - } - }, - "CreateConversionStmt" => quote! { - tokens.push(TokenProperty::from(Token::Create)); - if n.def { - tokens.push(TokenProperty::from(Token::Default)); - } - tokens.push(TokenProperty::from(Token::ConversionP)); - if n.for_encoding_name.len() > 0 { - tokens.push(TokenProperty::from(Token::For)); - } - if n.to_encoding_name.len() > 0 { - tokens.push(TokenProperty::from(Token::To)); - } - if n.func_name.len() == 1 { - tokens.push(TokenProperty::from(Token::From)); - } else if n.func_name.len() > 1 { - panic!("Encountered multiple defined func_name elements in CreateConversionStmt"); - } - }, - "CreateTransformStmt" => quote! 
{ - tokens.push(TokenProperty::from(Token::Create)); - if n.replace { - tokens.push(TokenProperty::from(Token::Or)); - tokens.push(TokenProperty::from(Token::Replace)); - } - tokens.push(TokenProperty::from(Token::Transform)); - if n.type_name.is_some() { - tokens.push(TokenProperty::from(Token::For)); - } - tokens.push(TokenProperty::from(Token::Language)); - if n.fromsql.is_some() { - tokens.push(TokenProperty::from(Token::From)); - tokens.push(TokenProperty::from(Token::SqlP)); - tokens.push(TokenProperty::from(Token::With)); - tokens.push(TokenProperty::from(Token::Function)); - } - if n.tosql.is_some() { - tokens.push(TokenProperty::from(Token::To)); - tokens.push(TokenProperty::from(Token::SqlP)); - tokens.push(TokenProperty::from(Token::With)); - tokens.push(TokenProperty::from(Token::Function)); - } - }, - "TypeName" => quote! { - let names = n.names - .iter() - .filter_map(|n| if let Some(NodeEnum::String(s)) = &n.node { Some(s.sval.clone()) } else { None }) - .collect::>(); - - if names.len() == 2 && names[0] == "pg_catalog" { - match names[1].as_str() { - "float8" => { - tokens.push(TokenProperty::from(Token::DoubleP)); - tokens.push(TokenProperty::from(Token::Precision)); - }, - "interval" => { - // Adapted from https://github.com/postgres/postgres/blob/REL_15_STABLE/src/backend/utils/adt/timestamp.c#L1103 - const MONTH: i32 = 1; - const YEAR: i32 = 2; - const DAY: i32 = 3; - const HOUR: i32 = 10; - const MINUTE: i32 = 11; - const SECOND: i32 = 12; - - let fields = &n.typmods.first() - .and_then(|node| node.node.as_ref()) - .and_then(|node| if let NodeEnum::AConst(n) = node { n.val.clone() } else { None }) - .and_then(|node| if let protobuf::a_const::Val::Ival(n) = node { Some(n.ival) } else { None }); - - if let Some(fields) = fields { - match fields.clone() { - // YEAR TO MONTH - i if i == 1 << YEAR | 1 << MONTH => { - tokens.push(TokenProperty::from(Token::To)); - tokens.push(TokenProperty::from(Token::MonthP)); - }, - // DAY TO HOUR - i if i == 1 << DAY | 1 << HOUR => { - tokens.push(TokenProperty::from(Token::To)); - tokens.push(TokenProperty::from(Token::HourP)); - }, - // DAY TO MINUTE - i if i == 1 << DAY | 1 << HOUR | 1 << MINUTE => { - tokens.push(TokenProperty::from(Token::To)); - tokens.push(TokenProperty::from(Token::MinuteP)); - }, - // DAY TO SECOND - i if i == 1 << DAY | 1 << HOUR | 1 << MINUTE | 1 << SECOND => { - tokens.push(TokenProperty::from(Token::To)); - tokens.push(TokenProperty::from(Token::SecondP)); - }, - // HOUR TO MINUTE - i if i == 1 << HOUR | 1 << MINUTE => { - tokens.push(TokenProperty::from(Token::To)); - tokens.push(TokenProperty::from(Token::MinuteP)); - }, - // HOUR TO SECOND - i if i == 1 << HOUR | 1 << MINUTE | 1 << SECOND => { - tokens.push(TokenProperty::from(Token::To)); - tokens.push(TokenProperty::from(Token::SecondP)); - }, - // MINUTE TO SECOND - i if i == 1 << MINUTE | 1 << SECOND => { - tokens.push(TokenProperty::from(Token::To)); - tokens.push(TokenProperty::from(Token::SecondP)); - }, - _ => panic!("Unknown Interval fields {:#?}", fields), - } - } - }, - "timestamptz" => { - tokens.push(TokenProperty::from(Token::Timestamp)); - tokens.push(TokenProperty::from(Token::With)); - tokens.push(TokenProperty::from(Token::Time)); - tokens.push(TokenProperty::from(Token::Zone)); - } - "timetz" => { - tokens.push(TokenProperty::from(Token::Time)); - tokens.push(TokenProperty::from(Token::With)); - tokens.push(TokenProperty::from(Token::Time)); - tokens.push(TokenProperty::from(Token::Zone)); - } - _ => {} - } - } - }, - "TruncateStmt" => 
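The `interval` arm above packs the range of sub-fields into a single typmod integer, one bit per field; here is a standalone sketch of that arithmetic, with the constants copied from the generated code (originally from PostgreSQL's timestamp.c).

```rust
const DAY: i32 = 3;
const HOUR: i32 = 10;
const MINUTE: i32 = 11;
const SECOND: i32 = 12;

fn main() {
    // INTERVAL DAY TO SECOND sets one bit per field in the range:
    let fields = 1 << DAY | 1 << HOUR | 1 << MINUTE | 1 << SECOND;
    assert_eq!(fields, 8 | 1024 | 2048 | 4096); // = 7176
    // ...which is exactly the pattern the DAY TO SECOND arm matches.
}
```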
quote! { - tokens.push(TokenProperty::from(Token::Truncate)); - tokens.push(TokenProperty::from(Token::Table)); - if n.restart_seqs { - tokens.push(TokenProperty::from(Token::Restart)); - tokens.push(TokenProperty::from(Token::IdentityP)); - } else { - tokens.push(TokenProperty::from(Token::ContinueP)); - tokens.push(TokenProperty::from(Token::IdentityP)); - } - match n.behavior { - // DropRestrict - 1 => tokens.push(TokenProperty::from(Token::Restrict)), - // DropCascade - 2 => tokens.push(TokenProperty::from(Token::Cascade)), - _ => {} - } - }, - _ => quote! {}, - } -} - -fn string_property_handlers(node: &Node) -> Vec { - node.fields - .iter() - .filter_map(|field| { - if field.repeated { - return None; - } - let field_name = format_ident!("{}", field.name.as_str()); - match field.field_type { - // just handle string values for now - FieldType::String => Some(quote! { - // most string values are never None, but an empty string - if n.#field_name.len() > 0 { - tokens.push(TokenProperty::from(n.#field_name.to_owned())); - } - }), - _ => None, - } - }) - .collect() -} diff --git a/crates/pgt_query_ext_codegen/src/get_nodes.rs b/crates/pgt_query_ext_codegen/src/get_nodes.rs deleted file mode 100644 index e0381331..00000000 --- a/crates/pgt_query_ext_codegen/src/get_nodes.rs +++ /dev/null @@ -1,141 +0,0 @@ -use pgt_query_proto_parser::{FieldType, Node, ProtoFile}; -use proc_macro2::{Ident, TokenStream}; -use quote::{format_ident, quote}; - -pub fn get_nodes_mod(proto_file: &ProtoFile) -> proc_macro2::TokenStream { - let manual_node_names = manual_node_names(); - - let node_identifiers = node_identifiers(&proto_file.nodes, &manual_node_names); - let node_handlers = node_handlers(&proto_file.nodes, &manual_node_names); - - quote! { - #[derive(Debug, Clone)] - pub struct Node { - pub inner: NodeEnum, - pub depth: usize, - pub properties: Vec, - pub location: Option, - } - - /// Returns all children of the node, recursively - /// location is resolved manually - pub fn get_nodes(node: &NodeEnum) -> StableGraph { - let mut g = StableGraph::::new(); - - let root_node_idx = g.add_node(Node { - inner: node.to_owned(), - depth: 0, - properties: get_node_properties(node, None), - location: get_location(node), - }); - - // Parent node idx, Node, depth - let mut stack: VecDeque<(NodeIndex, NodeEnum, usize)> = - VecDeque::from(vec![(root_node_idx, node.to_owned(), 0)]); - while !stack.is_empty() { - let (parent_idx, node, depth) = stack.pop_front().unwrap(); - let current_depth = depth + 1; - let mut handle_child = |c: NodeEnum| { - if match &c { - // all "simple nodes" are not handled individually but merged with their parent - NodeEnum::String(n) => true, - NodeEnum::Integer(n) => true, - NodeEnum::Float(n) => true, - NodeEnum::Boolean(n) => true, - NodeEnum::BitString(n) => true, - _ => false - } { - g[parent_idx].properties.extend(get_node_properties(&c, Some(&node))); - } else { - let node_idx = g.add_node(Node { - depth: current_depth, - properties: get_node_properties(&c, Some(&node)), - location: get_location(&c), - inner: c.to_owned(), - }); - g.add_edge(parent_idx, node_idx, ()); - stack.push_back((node_idx, c.to_owned(), current_depth)); - } - }; - match &node { - // `AConst` is the only node with a `one of` property, so we handle it manually - // if you need to handle other nodes manually, add them to the `manual_node_names` function below - NodeEnum::AConst(n) => { - if n.val.is_some() { - handle_child(match n.val.to_owned().unwrap() { - pg_query::protobuf::a_const::Val::Ival(v) => 
NodeEnum::Integer(v), - pg_query::protobuf::a_const::Val::Fval(v) => NodeEnum::Float(v), - pg_query::protobuf::a_const::Val::Boolval(v) => NodeEnum::Boolean(v), - pg_query::protobuf::a_const::Val::Sval(v) => NodeEnum::String(v), - pg_query::protobuf::a_const::Val::Bsval(v) => NodeEnum::BitString(v), - }); - } - } - #(NodeEnum::#node_identifiers(n) => {#node_handlers}),*, - }; - } - g - } - } -} - -fn manual_node_names() -> Vec<&'static str> { - vec!["AConst"] -} - -fn node_identifiers(nodes: &[Node], exclude_nodes: &[&str]) -> Vec { - nodes - .iter() - .filter(|node| !exclude_nodes.contains(&node.name.as_str())) - .map(|node| format_ident!("{}", &node.name)) - .collect() -} - -fn node_handlers(nodes: &[Node], exclude_nodes: &[&str]) -> Vec { - nodes - .iter() - .filter(|node| !exclude_nodes.contains(&node.name.as_str())) - .map(|node| { - let property_handlers = property_handlers(node); - quote! { - #(#property_handlers)* - } - }) - .collect() -} - -fn property_handlers(node: &Node) -> Vec { - node.fields - .iter() - .filter_map(|field| { - let field_name = format_ident!("{}", field.name.as_str()); - if field.field_type == FieldType::Node && field.repeated { - Some(quote! { - n.#field_name - .iter() - .for_each(|x| if x.node.is_some() { - handle_child(x.node.as_ref().unwrap().to_owned()); - }); - }) - } else if field.field_type == FieldType::Node && !field.is_one_of { - if field.node_name == Some("Node".to_owned()) { - Some(quote! { - if n.#field_name.is_some() { - handle_child(n.#field_name.to_owned().unwrap().node.unwrap()); - } - }) - } else { - let enum_variant_name = - format_ident!("{}", field.enum_variant_name.as_ref().unwrap().as_str()); - Some(quote! { - if n.#field_name.is_some() { - handle_child(NodeEnum::#enum_variant_name(n.#field_name.to_owned().unwrap())); - } - }) - } - } else { - None - } - }) - .collect() -} diff --git a/crates/pgt_query_ext_codegen/src/lib.rs b/crates/pgt_query_ext_codegen/src/lib.rs deleted file mode 100644 index c4f39c0e..00000000 --- a/crates/pgt_query_ext_codegen/src/lib.rs +++ /dev/null @@ -1,48 +0,0 @@ -mod get_location; -mod get_node_properties; -mod get_nodes; -mod node_iterator; - -use get_location::get_location_mod; -use get_node_properties::get_node_properties_mod; -use get_nodes::get_nodes_mod; -use node_iterator::node_iterator_mod; -use pgt_query_proto_parser::ProtoParser; -use quote::quote; -use std::{env, path, path::Path}; - -#[proc_macro] -pub fn codegen(_input: proc_macro::TokenStream) -> proc_macro::TokenStream { - let parser = ProtoParser::new(&proto_file_path()); - let proto_file = parser.parse(); - - let get_location = get_location_mod(&proto_file); - let get_node_properties = get_node_properties_mod(&proto_file); - let get_nodes = get_nodes_mod(&proto_file); - let iterator = node_iterator_mod(&proto_file); - - quote! 
{ - use pgt_lexer::SyntaxKind; - use std::collections::VecDeque; - use pg_query::{protobuf, protobuf::ScanToken, protobuf::Token, NodeEnum, NodeRef}; - use std::cmp::{min, Ordering}; - use std::fmt::{Display, Formatter}; - use petgraph::stable_graph::{StableGraph}; - use petgraph::graph::{NodeIndex}; - - #get_location - #get_node_properties - #get_nodes - #iterator - } - .into() -} - -fn proto_file_path() -> path::PathBuf { - Path::new(env!("CARGO_MANIFEST_DIR")) - .ancestors() - .nth(2) - .unwrap() - .join("libpg_query/protobuf/pg_query.proto") - .to_path_buf() -} diff --git a/crates/pgt_query_ext_codegen/src/node_iterator.rs b/crates/pgt_query_ext_codegen/src/node_iterator.rs deleted file mode 100644 index 526966df..00000000 --- a/crates/pgt_query_ext_codegen/src/node_iterator.rs +++ /dev/null @@ -1,123 +0,0 @@ -use pgt_query_proto_parser::{FieldType, Node, ProtoFile}; -use proc_macro2::{Ident, TokenStream}; -use quote::{format_ident, quote}; - -pub fn node_iterator_mod(proto_file: &ProtoFile) -> proc_macro2::TokenStream { - let manual_node_names = manual_node_names(); - - let node_identifiers = node_identifiers(&proto_file.nodes, &manual_node_names); - let node_handlers = node_handlers(&proto_file.nodes, &manual_node_names); - - quote! { - #[derive(Debug, Clone)] - pub struct ChildrenIterator { - stack: VecDeque<(NodeEnum, usize)>, - nodes: Vec, - } - - impl ChildrenIterator { - pub fn new(root: NodeEnum) -> Self { - Self { - stack: VecDeque::from(vec![(root, 0)]), - nodes: Vec::new(), - } - } - } - - impl Iterator for ChildrenIterator { - type Item = NodeEnum; - - fn next(&mut self) -> Option { - if self.stack.is_empty() { - return None; - } - - let (node, depth) = self.stack.pop_front().unwrap(); - - let current_depth = depth + 1; - - match &node { - // `AConst` is the only node with a `one of` property, so we handle it manually - // if you need to handle other nodes manually, add them to the `manual_node_names` function below - NodeEnum::AConst(n) => { - // if n.val.is_some() { - // let new_node = match n.val.as_ref().unwrap() { - // pg_query::protobuf::a_const::Val::Ival(v) => Box::new(NodeEnum::Integer(v.clone())), - // pg_query::protobuf::a_const::Val::Fval(v) => Box::new(NodeEnum::Float(v.clone())), - // pg_query::protobuf::a_const::Val::Boolval(v) => Box::new(NodeEnum::Boolean(v.clone())), - // pg_query::protobuf::a_const::Val::Sval(v) => Box::new(NodeEnum::String(v.clone())), - // pg_query::protobuf::a_const::Val::Bsval(v) => Box::new(NodeEnum::BitString(v.clone())), - // }; - // self.stack.push_back((&new_node, current_depth)); - // self.boxed_nodes.push(new_node); - // } - } - #(NodeEnum::#node_identifiers(n) => {#node_handlers}),*, - }; - - Some(node) - } - } - } -} - -fn manual_node_names() -> Vec<&'static str> { - vec!["AConst"] -} - -fn node_identifiers(nodes: &[Node], exclude_nodes: &[&str]) -> Vec { - nodes - .iter() - .filter(|node| !exclude_nodes.contains(&node.name.as_str())) - .map(|node| format_ident!("{}", &node.name)) - .collect() -} - -fn node_handlers(nodes: &[Node], exclude_nodes: &[&str]) -> Vec { - nodes - .iter() - .filter(|node| !exclude_nodes.contains(&node.name.as_str())) - .map(|node| { - let property_handlers = property_handlers(node); - quote! { - #(#property_handlers)* - } - }) - .collect() -} - -fn property_handlers(node: &Node) -> Vec { - node.fields - .iter() - .filter_map(|field| { - let field_name = format_ident!("{}", field.name.as_str()); - if field.field_type == FieldType::Node && field.repeated { - Some(quote! 
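The generated `ChildrenIterator` above does a breadth-first walk by pushing child nodes onto a `VecDeque`; a minimal usage sketch, assuming the `pgt_query_ext` re-exports.

```rust
use pgt_query_ext::{parse, ChildrenIterator};

fn main() -> pgt_query_ext::Result<()> {
    let root = parse("SELECT a, b FROM t")?;
    // Yields the root first, then its descendants level by level.
    for node in ChildrenIterator::new(root) {
        println!("{:?}", node);
    }
    Ok(())
}
```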
{ - n.#field_name - .iter() - .for_each(|x| if x.node.is_some() { - self.stack.push_back((x.node.as_ref().unwrap().to_owned(), current_depth)); - }); - }) - } else if field.field_type == FieldType::Node && !field.is_one_of { - if field.node_name == Some("Node".to_owned()) { - Some(quote! { - if n.#field_name.is_some() { - self.stack.push_back((n.#field_name.to_owned().unwrap().node.unwrap(), current_depth)); - } - }) - } else { - let enum_variant_name = - format_ident!("{}", field.enum_variant_name.as_ref().unwrap().as_str()); - Some(quote! { - if n.#field_name.is_some() { - self.stack.push_back((NodeEnum::#enum_variant_name(n.#field_name.to_owned().unwrap()), current_depth)); - } - }) - } - } else { - None - } - }) - .collect() -} diff --git a/crates/pgt_query_proto_parser/Cargo.toml b/crates/pgt_query_proto_parser/Cargo.toml deleted file mode 100644 index 729c94b4..00000000 --- a/crates/pgt_query_proto_parser/Cargo.toml +++ /dev/null @@ -1,20 +0,0 @@ -[package] -authors.workspace = true -categories.workspace = true -description = "" -edition.workspace = true -homepage.workspace = true -keywords.workspace = true -license.workspace = true -name = "pgt_query_proto_parser" -repository.workspace = true -version = "0.0.0" - - -[dependencies] -convert_case = "0.6.0" -protobuf = "3.3.0" -protobuf-parse = "3.3.0" - -[lib] -doctest = false diff --git a/crates/pgt_query_proto_parser/src/lib.rs b/crates/pgt_query_proto_parser/src/lib.rs deleted file mode 100644 index 12f8cf9c..00000000 --- a/crates/pgt_query_proto_parser/src/lib.rs +++ /dev/null @@ -1,9 +0,0 @@ -//! A parser for the libpg_query proto file -//! -//! This crate provides a parser for the libpg_query proto file, and a struct to represent and interact with the parsed file. - -mod proto_file; -mod proto_parser; - -pub use crate::proto_file::{Field, FieldType, Node, ProtoFile, Token}; -pub use crate::proto_parser::ProtoParser; diff --git a/crates/pgt_query_proto_parser/src/proto_file.rs b/crates/pgt_query_proto_parser/src/proto_file.rs deleted file mode 100644 index 2cc32798..00000000 --- a/crates/pgt_query_proto_parser/src/proto_file.rs +++ /dev/null @@ -1,60 +0,0 @@ -/// The FieldTypes of a protobuf message -#[derive(Debug, Eq, PartialEq)] -pub enum FieldType { - Node, - Double, - Float, - Int64, - Uint64, - Int32, - Fixed64, - Fixed32, - Bool, - String, - Group, - Message, - Bytes, - Uint32, - Enum, - Sfixed32, - Sfixed64, - Sint32, - Sint64, -} - -/// A libpg_query token -#[derive(Debug)] -pub struct Token { - pub name: String, - pub value: i32, -} - -/// A libpg_query field -#[derive(Debug)] -pub struct Field { - pub name: String, - pub node_name: Option<String>, - pub enum_variant_name: Option<String>, - pub field_type: FieldType, - pub repeated: bool, - pub is_one_of: bool, -} - -/// A libpg_query node -#[derive(Debug)] -pub struct Node { - pub name: String, - pub fields: Vec<Field>, -} - -/// The libpg_query proto file -pub struct ProtoFile { - pub tokens: Vec<Token>, - pub nodes: Vec<Node>, -} - -impl ProtoFile { - pub fn node(&self, name: &str) -> Option<&Node> { - self.nodes.iter().find(|n| n.name == name) - } -} diff --git a/crates/pgt_query_proto_parser/src/proto_parser.rs b/crates/pgt_query_proto_parser/src/proto_parser.rs deleted file mode 100644 index 56f93c6e..00000000 --- a/crates/pgt_query_proto_parser/src/proto_parser.rs +++ /dev/null @@ -1,179 +0,0 @@ -use convert_case::{Case, Casing}; -use protobuf::descriptor::{FileDescriptorProto, field_descriptor_proto::Label}; -use protobuf_parse::Parser; -use std::{ffi::OsStr, path::Path}; - -use
crate::proto_file::{Field, FieldType, Node, ProtoFile, Token}; - -/// The parser for the libpg_query proto file -pub struct ProtoParser { - inner: FileDescriptorProto, -} - -impl ProtoParser { - pub fn new(file_path: &impl AsRef<OsStr>) -> Self { - let proto_file = Path::new(file_path); - let proto_dir = proto_file.parent().unwrap(); - - let result = Parser::new() - .pure() - .include(proto_dir) - .input(proto_file) - .parse_and_typecheck() - .unwrap(); - - ProtoParser { - inner: result.file_descriptors[0].clone(), - } - } - - pub fn parse(&self) -> ProtoFile { - ProtoFile { - tokens: self.tokens(), - nodes: self.nodes(), - } - } - - fn tokens(&self) -> Vec<Token> { - self.inner - .enum_type - .iter() - .find(|e| e.name == Some("Token".into())) - .unwrap() - .value - .iter() - .map(|e| Token { - // token names in proto are UPPERCASE_SNAKE_CASE - name: e.name.clone().unwrap().to_case(Case::UpperCamel), - value: e.number.unwrap(), - }) - .collect() - } - - fn get_enum_variant_name(&self, type_name: &str) -> Option<String> { - let variant = self - .inner - .message_type - .iter() - .find(|e| e.name == Some("Node".into())) - .unwrap() - .field - .iter() - .find(|e| e.type_name().split(".").last().unwrap() == type_name); - variant.map(|v| v.name.clone().unwrap().to_case(Case::UpperCamel)) - } - - fn nodes(&self) -> Vec<Node> { - self.inner - .message_type - .iter() - .find(|e| e.name == Some("Node".into())) - .unwrap() - .field - .iter() - .map(|e| { - let name: String = e.name.to_owned().unwrap().to_case(Case::UpperCamel); - let node = self - .inner - .message_type - .iter() - .find(|n| { - n.name.clone().unwrap().to_case(Case::UpperCamel) - == e.json_name.as_ref().unwrap().to_case(Case::UpperCamel) - }) - .unwrap(); - - let mut fields: Vec<Field> = Vec::new(); - // from node fields - fields.append(&mut - node - .field - .iter() - .filter_map(|e| { - // skip one of fields, they are handled separately - if e.has_oneof_index() { - return None; - } - // use label and type to get the field type - let type_name: FieldType = match e.type_name() { - "" => match e.type_() { - protobuf::descriptor::field_descriptor_proto::Type::TYPE_DOUBLE => FieldType::Double, - protobuf::descriptor::field_descriptor_proto::Type::TYPE_FLOAT => FieldType::Float, - protobuf::descriptor::field_descriptor_proto::Type::TYPE_INT64 => FieldType::Int64, - protobuf::descriptor::field_descriptor_proto::Type::TYPE_UINT64 => FieldType::Uint64, - protobuf::descriptor::field_descriptor_proto::Type::TYPE_INT32 => FieldType::Int32, - protobuf::descriptor::field_descriptor_proto::Type::TYPE_FIXED64 => FieldType::Fixed64, - protobuf::descriptor::field_descriptor_proto::Type::TYPE_FIXED32 => FieldType::Fixed32, - protobuf::descriptor::field_descriptor_proto::Type::TYPE_BOOL => FieldType::Bool, - protobuf::descriptor::field_descriptor_proto::Type::TYPE_STRING => FieldType::String, - protobuf::descriptor::field_descriptor_proto::Type::TYPE_GROUP => FieldType::Group, - protobuf::descriptor::field_descriptor_proto::Type::TYPE_MESSAGE => FieldType::Message, - protobuf::descriptor::field_descriptor_proto::Type::TYPE_BYTES => FieldType::Bytes, - protobuf::descriptor::field_descriptor_proto::Type::TYPE_UINT32 => FieldType::Uint32, - protobuf::descriptor::field_descriptor_proto::Type::TYPE_ENUM => FieldType::Enum, - protobuf::descriptor::field_descriptor_proto::Type::TYPE_SFIXED32 => FieldType::Sfixed32, - protobuf::descriptor::field_descriptor_proto::Type::TYPE_SFIXED64 => FieldType::Sfixed64, - protobuf::descriptor::field_descriptor_proto::Type::TYPE_SINT32 => FieldType::Sint32,
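The `tokens()` method above normalizes proto token names from UPPERCASE_SNAKE_CASE into UpperCamel identifiers via `convert_case`; a small sketch of that conversion (the token name is chosen for illustration, matching the `Token::GroupP` style used by the generated handlers).

```rust
use convert_case::{Case, Casing};

fn main() {
    // Keyword tokens in the proto end in `_P`, e.g. GROUP_P -> GroupP,
    // which is how identifiers like Token::GroupP come about.
    assert_eq!("GROUP_P".to_case(Case::UpperCamel), "GroupP");
}
```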
- protobuf::descriptor::field_descriptor_proto::Type::TYPE_SINT64 => FieldType::Sint64, - }, - _ => { - if !e.type_name().starts_with(".pg_query") { - panic!("Unknown type: {}", e.type_name()); - - } - if e.type_() == protobuf::descriptor::field_descriptor_proto::Type::TYPE_ENUM { - FieldType::Enum - } else { - FieldType::Node - } - }, - }; - let mut node_name = None; - let mut enum_variant_name = None; - if e.type_name().starts_with(".pg_query") { - let n = e.type_name().split(".").last().unwrap().to_string(); - node_name = Some(n.clone()); - if n != "Node" { - enum_variant_name = self.get_enum_variant_name(e.type_name().split(".").last().unwrap().to_string().as_str()); - } - } - // TODO: node name must be derived from the property name in the node - // enum - Some(Field { - name: e.name.clone().unwrap(), - node_name, - enum_variant_name, - field_type: type_name, - repeated: e.label() == Label::LABEL_REPEATED, - is_one_of: false, - }) - }) - .collect() - ); - - // one of declarations - fields.append(&mut - node - .oneof_decl - .iter() - .map(|e| { - Field { - name: e.name.clone().unwrap(), - node_name: Some("Node".to_string()), - enum_variant_name: None, - field_type: FieldType::Node, - repeated: false, - is_one_of: true, - } - }) - .collect() - ); - Node { - // token names in proto are UPPERCASE_SNAKE_CASE - name: name.clone(), - fields, - } - }) - .collect() - } -} diff --git a/crates/pgt_schema_cache/Cargo.toml b/crates/pgt_schema_cache/Cargo.toml deleted file mode 100644 index c5fadb3e..00000000 --- a/crates/pgt_schema_cache/Cargo.toml +++ /dev/null @@ -1,30 +0,0 @@ -[package] -authors.workspace = true -categories.workspace = true -description = "" -edition.workspace = true -homepage.workspace = true -keywords.workspace = true -license.workspace = true -name = "pgt_schema_cache" -repository.workspace = true -version = "0.0.0" - - -[dependencies] -anyhow.workspace = true -async-std = { version = "1.12.0" } -futures-util = "0.3.31" -pgt_console.workspace = true -pgt_diagnostics.workspace = true -serde.workspace = true -serde_json.workspace = true -sqlx.workspace = true -strum = { workspace = true } -tokio.workspace = true - -[dev-dependencies] -pgt_test_utils.workspace = true - -[lib] -doctest = false diff --git a/crates/pgt_schema_cache/src/columns.rs b/crates/pgt_schema_cache/src/columns.rs deleted file mode 100644 index 60d422fd..00000000 --- a/crates/pgt_schema_cache/src/columns.rs +++ /dev/null @@ -1,203 +0,0 @@ -use crate::schema_cache::SchemaCacheItem; - -#[derive(Debug, Clone, PartialEq, Eq)] -pub enum ColumnClassKind { - OrdinaryTable, - View, - MaterializedView, - ForeignTable, - PartitionedTable, -} - -impl From<&str> for ColumnClassKind { - fn from(value: &str) -> Self { - match value { - "r" => ColumnClassKind::OrdinaryTable, - "v" => ColumnClassKind::View, - "m" => ColumnClassKind::MaterializedView, - "f" => ColumnClassKind::ForeignTable, - "p" => ColumnClassKind::PartitionedTable, - _ => panic!( - "Columns belonging to a class with pg_class.relkind = '{}' should be filtered out in the query.", - value - ), - } - } -} - -impl From for ColumnClassKind { - fn from(value: String) -> Self { - ColumnClassKind::from(value.as_str()) - } -} - -impl From for ColumnClassKind { - fn from(value: char) -> Self { - ColumnClassKind::from(String::from(value)) - } -} - -#[derive(Debug, PartialEq, Eq)] -pub struct Column { - pub name: String, - - pub table_name: String, - pub table_oid: i64, - /// What type of class does this column belong to? 
- pub class_kind: ColumnClassKind, - - pub schema_name: String, - pub type_id: i64, - pub type_name: Option<String>, - pub is_nullable: bool, - - pub is_primary_key: bool, - pub is_unique: bool, - - /// The default "value" of the column. Might be a function call, hence "_expr". - pub default_expr: Option<String>, - - pub varchar_length: Option<i32>, - - /// Comment inserted via `COMMENT ON COLUMN my_table.my_column IS '...'`, if present. - pub comment: Option<String>, -} - -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct ForeignKeyReference { - pub schema: Option<String>, - pub table: String, - pub column: String, -} - -impl SchemaCacheItem for Column { - type Item = Column; - - async fn load(pool: &sqlx::PgPool) -> Result<Vec<Column>, sqlx::Error> { - sqlx::query_file_as!(Column, "src/queries/columns.sql") - .fetch_all(pool) - .await - } -} - -#[cfg(test)] -mod tests { - use pgt_test_utils::test_database::get_new_test_db; - use sqlx::Executor; - - use crate::{SchemaCache, columns::ColumnClassKind}; - - #[tokio::test] - async fn loads_columns() { - let test_db = get_new_test_db().await; - - let setup = r#" - create table public.users ( - id serial primary key, - name varchar(255) not null, - is_vegetarian bool default false, - middle_name varchar(255) - ); - - create schema real_estate; - - create table real_estate.addresses ( - user_id serial references users(id), - postal_code smallint not null, - street text, - city text - ); - - create table real_estate.properties ( - id serial primary key, - owner_id int references users(id), - square_meters smallint not null - ); - - comment on column real_estate.properties.owner_id is 'users might own many houses'; - "#; - - test_db - .execute(setup) - .await - .expect("Failed to setup test database"); - - let cache = SchemaCache::load(&test_db) - .await - .expect("Failed to load Schema Cache"); - - let public_schema_columns = cache - .columns - .iter() - .filter(|c| c.schema_name.as_str() == "public") - .count(); - - assert_eq!(public_schema_columns, 4); - - let real_estate_schema_columns = cache - .columns - .iter() - .filter(|c| c.schema_name.as_str() == "real_estate") - .count(); - - assert_eq!(real_estate_schema_columns, 7); - - let user_id_col = cache.find_col("id", "users", None).unwrap(); - assert_eq!(user_id_col.class_kind, ColumnClassKind::OrdinaryTable); - assert_eq!(user_id_col.comment, None); - assert_eq!( - user_id_col.default_expr, - Some("nextval('users_id_seq'::regclass)".into()) - ); - assert!(!user_id_col.is_nullable); - assert!(user_id_col.is_primary_key); - assert!(user_id_col.is_unique); - assert_eq!(user_id_col.varchar_length, None); - - let user_name_col = cache.find_col("name", "users", None).unwrap(); - assert_eq!(user_name_col.class_kind, ColumnClassKind::OrdinaryTable); - assert_eq!(user_name_col.comment, None); - assert_eq!(user_name_col.default_expr, None); - assert!(!user_name_col.is_nullable); - assert!(!user_name_col.is_primary_key); - assert!(!user_name_col.is_unique); - assert_eq!(user_name_col.varchar_length, Some(255)); - - let user_is_veg_col = cache.find_col("is_vegetarian", "users", None).unwrap(); - assert_eq!(user_is_veg_col.class_kind, ColumnClassKind::OrdinaryTable); - assert_eq!(user_is_veg_col.comment, None); - assert_eq!(user_is_veg_col.default_expr, Some("false".into())); - assert!(user_is_veg_col.is_nullable); - assert!(!user_is_veg_col.is_primary_key); - assert!(!user_is_veg_col.is_unique); - assert_eq!(user_is_veg_col.varchar_length, None); - - let user_middle_name_col = cache.find_col("middle_name", "users", None).unwrap(); - assert_eq!(
user_middle_name_col.class_kind,
-            ColumnClassKind::OrdinaryTable
-        );
-        assert_eq!(user_middle_name_col.comment, None);
-        assert_eq!(user_middle_name_col.default_expr, None);
-        assert!(user_middle_name_col.is_nullable);
-        assert!(!user_middle_name_col.is_primary_key);
-        assert!(!user_middle_name_col.is_unique);
-        assert_eq!(user_middle_name_col.varchar_length, Some(255));
-
-        let properties_owner_id_col = cache
-            .find_col("owner_id", "properties", Some("real_estate"))
-            .unwrap();
-        assert_eq!(
-            properties_owner_id_col.class_kind,
-            ColumnClassKind::OrdinaryTable
-        );
-        assert_eq!(
-            properties_owner_id_col.comment,
-            Some("users might own many houses".into())
-        );
-        assert!(properties_owner_id_col.is_nullable);
-        assert!(!properties_owner_id_col.is_primary_key);
-        assert!(!properties_owner_id_col.is_unique);
-        assert_eq!(properties_owner_id_col.varchar_length, None);
-    }
-}
diff --git a/crates/pgt_schema_cache/src/functions.rs b/crates/pgt_schema_cache/src/functions.rs
deleted file mode 100644
index 5e40709f..00000000
--- a/crates/pgt_schema_cache/src/functions.rs
+++ /dev/null
@@ -1,117 +0,0 @@
-use serde::{Deserialize, Serialize};
-use sqlx::PgPool;
-use sqlx::types::JsonValue;
-
-use crate::schema_cache::SchemaCacheItem;
-
-/// `Behavior` describes the characteristics of the function. Is it deterministic? Does it change due to side effects, and if so, when?
-#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Default)]
-pub enum Behavior {
-    /// The function is a pure function (the same input always leads to the same output).
-    Immutable,
-
-    /// The results of the function do not change within a scan.
-    Stable,
-
-    /// The results of the function might change at any time.
-    #[default]
-    Volatile,
-}
-
-impl From<Option<String>> for Behavior {
-    fn from(s: Option<String>) -> Self {
-        match s {
-            Some(s) => match s.as_str() {
-                "IMMUTABLE" => Behavior::Immutable,
-                "STABLE" => Behavior::Stable,
-                "VOLATILE" => Behavior::Volatile,
-                _ => panic!("Invalid behavior"),
-            },
-            None => Behavior::Volatile,
-        }
-    }
-}
-
-#[derive(Debug, Clone, Default, Serialize, Deserialize)]
-pub struct FunctionArg {
-    /// `in`, `out`, `inout`, `variadic`, or `table`.
-    pub mode: String,
-
-    pub name: String,
-
-    /// Refers to the argument type's ID in the `pg_type` table.
-    pub type_id: i64,
-
-    pub has_default: Option<bool>,
-}
-
-#[derive(Debug, Clone, Default, Serialize, Deserialize)]
-pub struct FunctionArgs {
-    pub args: Vec<FunctionArg>,
-}
-
-impl From<Option<JsonValue>> for FunctionArgs {
-    fn from(s: Option<JsonValue>) -> Self {
-        let args: Vec<FunctionArg> =
-            serde_json::from_value(s.unwrap_or(JsonValue::Array(vec![]))).unwrap();
-        FunctionArgs { args }
-    }
-}
-
-#[derive(Debug, Default, Serialize, Deserialize)]
-pub struct Function {
-    /// The ID (`oid`).
-    pub id: i64,
-
-    /// The name of the schema the function belongs to.
-    pub schema: String,
-
-    /// The name of the function.
-    pub name: String,
-
-    /// e.g. `plpgsql/sql` or `internal`.
-    pub language: String,
-
-    /// The body of the function – the `declare [..] begin [..] end [..]` block. Not set for internal functions.
-    pub body: Option<String>,
-
-    /// The full definition of the function, including the full `CREATE OR REPLACE ...` statement. Not set for internal functions.
-    pub definition: Option<String>,
-
-    /// The Rust representation of the function's arguments.
-    pub args: FunctionArgs,
-
-    /// Comma-separated list of argument types, in the form required for a CREATE FUNCTION statement. For example, `"text, smallint"`. `None` if the function doesn't take any arguments.
-    pub argument_types: Option<String>,
-
-    /// Comma-separated list of argument types, in the form required to identify a function in an ALTER FUNCTION statement. For example, `"text, smallint"`. `None` if the function doesn't take any arguments.
-    pub identity_argument_types: Option<String>,
-
-    /// An ID identifying the return type. For example, `2275` refers to `cstring`, and `2278` refers to `void`.
-    pub return_type_id: i64,
-
-    /// The return type, for example "text", "trigger", or "void".
-    pub return_type: String,
-
-    /// If the return type is a composite type, this will point to the matching entry's `oid` column in the `pg_class` table. `None` if the function does not return a composite type.
-    pub return_type_relation_id: Option<i64>,
-
-    /// Does the function return multiple values of a data type?
-    pub is_set_returning_function: bool,
-
-    /// See `Behavior`.
-    pub behavior: Behavior,
-
-    /// Is the function's security set to `Definer` (true) or `Invoker` (false)?
-    pub security_definer: bool,
-}
-
-impl SchemaCacheItem for Function {
-    type Item = Function;
-
-    async fn load(pool: &PgPool) -> Result<Vec<Function>, sqlx::Error> {
-        sqlx::query_file_as!(Function, "src/queries/functions.sql")
-            .fetch_all(pool)
-            .await
-    }
-}
diff --git a/crates/pgt_schema_cache/src/lib.rs b/crates/pgt_schema_cache/src/lib.rs
deleted file mode 100644
index 9beb2f8a..00000000
--- a/crates/pgt_schema_cache/src/lib.rs
+++ /dev/null
@@ -1,24 +0,0 @@
-//! The schema cache
-
-#![allow(dead_code)]
-
-mod columns;
-mod functions;
-mod policies;
-mod roles;
-mod schema_cache;
-mod schemas;
-mod tables;
-mod triggers;
-mod types;
-mod versions;
-
-pub use columns::*;
-pub use functions::{Behavior, Function, FunctionArg, FunctionArgs};
-pub use policies::{Policy, PolicyCommand};
-pub use roles::*;
-pub use schema_cache::SchemaCache;
-pub use schemas::Schema;
-pub use tables::{ReplicaIdentity, Table, TableKind};
-pub use triggers::{Trigger, TriggerAffected, TriggerEvent};
-pub use types::{PostgresType, PostgresTypeAttribute};
diff --git a/crates/pgt_schema_cache/src/policies.rs b/crates/pgt_schema_cache/src/policies.rs
deleted file mode 100644
index 85cd7821..00000000
--- a/crates/pgt_schema_cache/src/policies.rs
+++ /dev/null
@@ -1,225 +0,0 @@
-use crate::schema_cache::SchemaCacheItem;
-
-#[derive(Debug, Clone, PartialEq, Eq)]
-pub enum PolicyCommand {
-    Select,
-    Insert,
-    Update,
-    Delete,
-    All,
-}
-
-impl From<&str> for PolicyCommand {
-    fn from(value: &str) -> Self {
-        match value {
-            "SELECT" => PolicyCommand::Select,
-            "INSERT" => PolicyCommand::Insert,
-            "UPDATE" => PolicyCommand::Update,
-            "DELETE" => PolicyCommand::Delete,
-            "ALL" => PolicyCommand::All,
-            _ => panic!("Invalid Policy Command {}", value),
-        }
-    }
-}
-impl From<String> for PolicyCommand {
-    fn from(value: String) -> Self {
-        PolicyCommand::from(value.as_str())
-    }
-}
-
-#[derive(Debug, Clone, PartialEq, Eq)]
-struct PolicyQueried {
-    name: String,
-    table_name: String,
-    schema_name: String,
-    is_permissive: String,
-    command: String,
-    role_names: Option<Vec<String>>,
-    security_qualification: Option<String>,
-    with_check: Option<String>,
-}
-
-impl From<PolicyQueried> for Policy {
-    fn from(value: PolicyQueried) -> Self {
-        Self {
-            name: value.name,
-            table_name: value.table_name,
-            schema_name: value.schema_name,
-            is_permissive: value.is_permissive == "PERMISSIVE",
-            command: PolicyCommand::from(value.command),
-            role_names: value.role_names.unwrap_or_default(),
-            security_qualification: value.security_qualification,
-            with_check: value.with_check,
-        }
-    }
-}
-
-#[derive(Debug, PartialEq, Eq)]
-pub struct Policy {
-    pub
name: String, - pub table_name: String, - pub schema_name: String, - pub is_permissive: bool, - pub command: PolicyCommand, - pub role_names: Vec, - pub security_qualification: Option, - pub with_check: Option, -} - -impl SchemaCacheItem for Policy { - type Item = Policy; - - async fn load(pool: &sqlx::PgPool) -> Result, sqlx::Error> { - let policies = sqlx::query_file_as!(PolicyQueried, "src/queries/policies.sql") - .fetch_all(pool) - .await?; - - Ok(policies.into_iter().map(Policy::from).collect()) - } -} - -#[cfg(test)] -mod tests { - use pgt_test_utils::test_database::get_new_test_db; - use sqlx::Executor; - - use crate::{SchemaCache, policies::PolicyCommand}; - - #[tokio::test] - async fn loads_policies() { - let test_db = get_new_test_db().await; - - let setup = r#" - do $$ - begin - if not exists ( - select from pg_catalog.pg_roles - where rolname = 'admin' - ) then - create role admin; - end if; - end $$; - - - create table public.users ( - id serial primary key, - name varchar(255) not null - ); - - -- multiple policies to test various commands - create policy public_policy - on public.users - for select - to public - using (true); - - create policy public_policy_del - on public.users - for delete - to public - using (true); - - create policy public_policy_ins - on public.users - for insert - to public - with check (true); - - create policy admin_policy - on public.users - for all - to admin - with check (true); - - do $$ - begin - if not exists ( - select from pg_catalog.pg_roles - where rolname = 'owner' - ) then - create role owner; - end if; - end $$; - - create schema real_estate; - - create table real_estate.properties ( - id serial primary key, - owner_id int not null - ); - - create policy owner_policy - on real_estate.properties - for update - to owner - using (owner_id = current_user::int); - "#; - - test_db - .execute(setup) - .await - .expect("Failed to setup test database"); - - let cache = SchemaCache::load(&test_db) - .await - .expect("Failed to load Schema Cache"); - - let public_policies = cache - .policies - .iter() - .filter(|p| p.schema_name == "public") - .count(); - - assert_eq!(public_policies, 4); - - let real_estate_policies = cache - .policies - .iter() - .filter(|p| p.schema_name == "real_estate") - .count(); - - assert_eq!(real_estate_policies, 1); - - let public_policy = cache - .policies - .iter() - .find(|p| p.name == "public_policy") - .unwrap(); - assert_eq!(public_policy.table_name, "users"); - assert_eq!(public_policy.schema_name, "public"); - assert!(public_policy.is_permissive); - assert_eq!(public_policy.command, PolicyCommand::Select); - assert_eq!(public_policy.role_names, vec!["public"]); - assert_eq!(public_policy.security_qualification, Some("true".into())); - assert_eq!(public_policy.with_check, None); - - let admin_policy = cache - .policies - .iter() - .find(|p| p.name == "admin_policy") - .unwrap(); - assert_eq!(admin_policy.table_name, "users"); - assert_eq!(admin_policy.schema_name, "public"); - assert!(admin_policy.is_permissive); - assert_eq!(admin_policy.command, PolicyCommand::All); - assert_eq!(admin_policy.role_names, vec!["admin"]); - assert_eq!(admin_policy.security_qualification, None); - assert_eq!(admin_policy.with_check, Some("true".into())); - - let owner_policy = cache - .policies - .iter() - .find(|p| p.name == "owner_policy") - .unwrap(); - assert_eq!(owner_policy.table_name, "properties"); - assert_eq!(owner_policy.schema_name, "real_estate"); - assert!(owner_policy.is_permissive); - assert_eq!(owner_policy.command, 
PolicyCommand::Update); - assert_eq!(owner_policy.role_names, vec!["owner"]); - assert_eq!( - owner_policy.security_qualification, - Some("(owner_id = (CURRENT_USER)::integer)".into()) - ); - assert_eq!(owner_policy.with_check, None); - } -} diff --git a/crates/pgt_schema_cache/src/queries/columns.sql b/crates/pgt_schema_cache/src/queries/columns.sql deleted file mode 100644 index 14b32cb2..00000000 --- a/crates/pgt_schema_cache/src/queries/columns.sql +++ /dev/null @@ -1,62 +0,0 @@ -with - available_tables as ( - select - c.relname as table_name, - c.oid as table_oid, - c.relkind as class_kind, - n.nspname as schema_name - from - pg_catalog.pg_class c - join pg_catalog.pg_namespace n on n.oid = c.relnamespace - where - -- r: normal tables - -- v: views - -- m: materialized views - -- f: foreign tables - -- p: partitioned tables - c.relkind in ('r', 'v', 'm', 'f', 'p') - ), - available_indexes as ( - select - unnest (ix.indkey) as attnum, - ix.indisprimary as is_primary, - ix.indisunique as is_unique, - ix.indrelid as table_oid - from - pg_catalog.pg_class c - join pg_catalog.pg_index ix on c.oid = ix.indexrelid - where - c.relkind = 'i' - ) -select - atts.attname as name, - ts.table_name, - ts.table_oid :: int8 as "table_oid!", - ts.class_kind :: char as "class_kind!", - ts.schema_name, - atts.atttypid :: int8 as "type_id!", - tps.typname as "type_name", - not atts.attnotnull as "is_nullable!", - nullif( - information_schema._pg_char_max_length (atts.atttypid, atts.atttypmod), - -1 - ) as varchar_length, - pg_get_expr (def.adbin, def.adrelid) as default_expr, - coalesce(ix.is_primary, false) as "is_primary_key!", - coalesce(ix.is_unique, false) as "is_unique!", - pg_catalog.col_description (ts.table_oid, atts.attnum) as comment -from - pg_catalog.pg_attribute atts - join available_tables ts on atts.attrelid = ts.table_oid - left join available_indexes ix on atts.attrelid = ix.table_oid - and atts.attnum = ix.attnum - left join pg_catalog.pg_attrdef def on atts.attrelid = def.adrelid - and atts.attnum = def.adnum - left join pg_catalog.pg_type tps on atts.atttypid = tps.oid -where - -- system columns, such as `cmax` or `tableoid`, have negative `attnum`s - atts.attnum >= 0 and atts.atttypid is not null and tps.oid is not null -order by - schema_name desc, - table_name, - atts.attnum; \ No newline at end of file diff --git a/crates/pgt_schema_cache/src/queries/functions.sql b/crates/pgt_schema_cache/src/queries/functions.sql deleted file mode 100644 index f78ba91e..00000000 --- a/crates/pgt_schema_cache/src/queries/functions.sql +++ /dev/null @@ -1,128 +0,0 @@ -with functions as ( - select - oid, - proname, - prosrc, - prorettype, - proretset, - provolatile, - prosecdef, - prolang, - pronamespace, - proconfig, - -- proargmodes is null when all arg modes are IN - coalesce( - p.proargmodes, - array_fill( - 'i' :: text, - array [cardinality(coalesce(p.proallargtypes, p.proargtypes))] - ) - ) as arg_modes, - -- proargnames is null when all args are unnamed - coalesce( - p.proargnames, - array_fill( - '' :: text, - array [cardinality(coalesce(p.proallargtypes, p.proargtypes))] - ) - ) as arg_names, - -- proallargtypes is null when all arg modes are IN - coalesce(p.proallargtypes, p.proargtypes) as arg_types, - array_cat( - array_fill(false, array [pronargs - pronargdefaults]), - array_fill(true, array [pronargdefaults]) - ) as arg_has_defaults - from - pg_proc as p - where - p.prokind = 'f' -) -select - f.oid :: int8 as "id!", - n.nspname as "schema!", - f.proname as "name!", - l.lanname as 
"language!", - case - when l.lanname = 'internal' then null - else f.prosrc - end as body, - case - when l.lanname = 'internal' then null - else pg_get_functiondef(f.oid) - end as definition, - coalesce(f_args.args, '[]') as args, - nullif(pg_get_function_arguments(f.oid), '') as argument_types, - nullif(pg_get_function_identity_arguments(f.oid), '') as identity_argument_types, - f.prorettype :: int8 as "return_type_id!", - pg_get_function_result(f.oid) as "return_type!", - nullif(rt.typrelid :: int8, 0) as return_type_relation_id, - f.proretset as is_set_returning_function, - case - when f.provolatile = 'i' then 'IMMUTABLE' - when f.provolatile = 's' then 'STABLE' - when f.provolatile = 'v' then 'VOLATILE' - end as behavior, - f.prosecdef as security_definer -from - functions f - left join pg_namespace n on f.pronamespace = n.oid - left join pg_language l on f.prolang = l.oid - left join pg_type rt on rt.oid = f.prorettype - left join ( - select - oid, - jsonb_object_agg(param, value) filter ( - where - param is not null - ) as config_params - from - ( - select - oid, - (string_to_array(unnest(proconfig), '=')) [1] as param, - (string_to_array(unnest(proconfig), '=')) [2] as value - from - functions - ) as t - group by - oid - ) f_config on f_config.oid = f.oid - left join ( - select - oid, - jsonb_agg( - jsonb_build_object( - 'mode', - t2.mode, - 'name', - name, - 'type_id', - type_id, - 'has_default', - has_default - ) - ) as args - from - ( - select - oid, - unnest(arg_modes) as mode, - unnest(arg_names) as name, - unnest(arg_types) :: int8 as type_id, - unnest(arg_has_defaults) as has_default - from - functions - ) as t1, - lateral ( - select - case - when t1.mode = 'i' then 'in' - when t1.mode = 'o' then 'out' - when t1.mode = 'b' then 'inout' - when t1.mode = 'v' then 'variadic' - else 'table' - end as mode - ) as t2 - group by - t1.oid - ) f_args on f_args.oid = f.oid; \ No newline at end of file diff --git a/crates/pgt_schema_cache/src/queries/policies.sql b/crates/pgt_schema_cache/src/queries/policies.sql deleted file mode 100644 index 2c0af39f..00000000 --- a/crates/pgt_schema_cache/src/queries/policies.sql +++ /dev/null @@ -1,11 +0,0 @@ -select - schemaname as "schema_name!", - tablename as "table_name!", - policyname as "name!", - permissive as "is_permissive!", - roles as "role_names!", - cmd as "command!", - qual as "security_qualification", - with_check -from - pg_catalog.pg_policies; \ No newline at end of file diff --git a/crates/pgt_schema_cache/src/queries/roles.sql b/crates/pgt_schema_cache/src/queries/roles.sql deleted file mode 100644 index da5d0bfc..00000000 --- a/crates/pgt_schema_cache/src/queries/roles.sql +++ /dev/null @@ -1,7 +0,0 @@ -select - rolname as "name!", - rolsuper as "is_super_user!", - rolcreatedb as "can_create_db!", - rolcanlogin as "can_login!", - rolbypassrls as "can_bypass_rls!" -from pg_catalog.pg_roles; \ No newline at end of file diff --git a/crates/pgt_schema_cache/src/queries/schemas.sql b/crates/pgt_schema_cache/src/queries/schemas.sql deleted file mode 100644 index 55e1824b..00000000 --- a/crates/pgt_schema_cache/src/queries/schemas.sql +++ /dev/null @@ -1,15 +0,0 @@ -select - n.oid :: int8 as "id!", - n.nspname as name, - u.rolname as "owner!" 
-from - pg_namespace n, - pg_roles u -where - n.nspowner = u.oid - and ( - pg_has_role(n.nspowner, 'USAGE') - or has_schema_privilege(n.oid, 'CREATE, USAGE') - ) - and not pg_catalog.starts_with(n.nspname, 'pg_temp_') - and not pg_catalog.starts_with(n.nspname, 'pg_toast_temp_'); \ No newline at end of file diff --git a/crates/pgt_schema_cache/src/queries/tables.sql b/crates/pgt_schema_cache/src/queries/tables.sql deleted file mode 100644 index 6e6865a2..00000000 --- a/crates/pgt_schema_cache/src/queries/tables.sql +++ /dev/null @@ -1,41 +0,0 @@ -select - c.oid :: int8 as "id!", - nc.nspname as schema, - c.relname as name, - c.relkind as table_kind, - c.relrowsecurity as rls_enabled, - c.relforcerowsecurity as rls_forced, - case - when c.relreplident = 'd' then 'DEFAULT' - when c.relreplident = 'i' then 'INDEX' - when c.relreplident = 'f' then 'FULL' - else 'NOTHING' - end as "replica_identity!", - pg_total_relation_size(format('%I.%I', nc.nspname, c.relname)) :: int8 as "bytes!", - pg_size_pretty( - pg_total_relation_size(format('%I.%I', nc.nspname, c.relname)) - ) as "size!", - pg_stat_get_live_tuples(c.oid) as "live_rows_estimate!", - pg_stat_get_dead_tuples(c.oid) as "dead_rows_estimate!", - obj_description(c.oid) as comment -from - pg_namespace nc - join pg_class c on nc.oid = c.relnamespace -where - c.relkind in ('r', 'p', 'v', 'm') - and not pg_is_other_temp_schema(nc.oid) - and ( - pg_has_role(c.relowner, 'USAGE') - or has_table_privilege( - c.oid, - 'SELECT, INSERT, UPDATE, DELETE, TRUNCATE, REFERENCES, TRIGGER' - ) - or has_any_column_privilege(c.oid, 'SELECT, INSERT, UPDATE, REFERENCES') - ) -group by - c.oid, - c.relname, - c.relrowsecurity, - c.relforcerowsecurity, - c.relreplident, - nc.nspname; \ No newline at end of file diff --git a/crates/pgt_schema_cache/src/queries/triggers.sql b/crates/pgt_schema_cache/src/queries/triggers.sql deleted file mode 100644 index c28cc39f..00000000 --- a/crates/pgt_schema_cache/src/queries/triggers.sql +++ /dev/null @@ -1,17 +0,0 @@ --- we need to join tables from the pg_catalog since "TRUNCATE" triggers are --- not available in the information_schema.trigger table. -select - t.tgname as "name!", - c.relname as "table_name!", - p.proname as "proc_name!", - n.nspname as "schema_name!", - t.tgtype as "details_bitmask!" -from - pg_catalog.pg_trigger t - left join pg_catalog.pg_proc p on t.tgfoid = p.oid - left join pg_catalog.pg_class c on t.tgrelid = c.oid - left join pg_catalog.pg_namespace n on c.relnamespace = n.oid -where - -- triggers enforcing constraints (e.g. unique fields) should not be included. 
- t.tgisinternal = false and - t.tgconstraint = 0; diff --git a/crates/pgt_schema_cache/src/queries/types.sql b/crates/pgt_schema_cache/src/queries/types.sql deleted file mode 100644 index 39f6b71c..00000000 --- a/crates/pgt_schema_cache/src/queries/types.sql +++ /dev/null @@ -1,53 +0,0 @@ -select - t.oid :: int8 as "id!", - t.typname as name, - n.nspname as "schema!", - format_type (t.oid, null) as "format!", - coalesce(t_enums.enums, '[]') as enums, - coalesce(t_attributes.attributes, '[]') as attributes, - obj_description (t.oid, 'pg_type') as comment -from - pg_type t - left join pg_namespace n on n.oid = t.typnamespace - left join ( - select - enumtypid, - jsonb_agg( - enumlabel - order by - enumsortorder - ) as enums - from - pg_enum - group by - enumtypid - ) as t_enums on t_enums.enumtypid = t.oid - left join ( - select - oid, - jsonb_agg( - jsonb_build_object('name', a.attname, 'type_id', a.atttypid :: int8) - order by - a.attnum asc - ) as attributes - from - pg_class c - join pg_attribute a on a.attrelid = c.oid - where - c.relkind = 'c' - and not a.attisdropped - group by - c.oid - ) as t_attributes on t_attributes.oid = t.typrelid -where - ( - t.typrelid = 0 - or ( - select - c.relkind = 'c' - from - pg_class c - where - c.oid = t.typrelid - ) - ); \ No newline at end of file diff --git a/crates/pgt_schema_cache/src/queries/versions.sql b/crates/pgt_schema_cache/src/queries/versions.sql deleted file mode 100644 index c756e9c5..00000000 --- a/crates/pgt_schema_cache/src/queries/versions.sql +++ /dev/null @@ -1,10 +0,0 @@ -select - version(), - current_setting('server_version_num') :: int8 AS version_num, - ( - select - count(*) :: int8 AS active_connections - FROM - pg_stat_activity - ) AS active_connections, - current_setting('max_connections') :: int8 AS max_connections; \ No newline at end of file diff --git a/crates/pgt_schema_cache/src/roles.rs b/crates/pgt_schema_cache/src/roles.rs deleted file mode 100644 index c212b791..00000000 --- a/crates/pgt_schema_cache/src/roles.rs +++ /dev/null @@ -1,85 +0,0 @@ -use crate::schema_cache::SchemaCacheItem; - -#[derive(Debug, PartialEq, Eq)] -pub struct Role { - pub name: String, - pub is_super_user: bool, - pub can_create_db: bool, - pub can_login: bool, - pub can_bypass_rls: bool, -} - -impl SchemaCacheItem for Role { - type Item = Role; - - async fn load(pool: &sqlx::PgPool) -> Result, sqlx::Error> { - sqlx::query_file_as!(Role, "src/queries/roles.sql") - .fetch_all(pool) - .await - } -} - -#[cfg(test)] -mod tests { - use crate::SchemaCache; - use pgt_test_utils::test_database::get_new_test_db; - use sqlx::Executor; - - #[tokio::test] - async fn loads_roles() { - let test_db = get_new_test_db().await; - - let setup = r#" - do $$ - begin - if not exists ( - select from pg_catalog.pg_roles - where rolname = 'test_super' - ) then - create role test_super superuser createdb login bypassrls; - end if; - if not exists ( - select from pg_catalog.pg_roles - where rolname = 'test_nologin' - ) then - create role test_nologin; - end if; - if not exists ( - select from pg_catalog.pg_roles - where rolname = 'test_login' - ) then - create role test_login login; - end if; - end $$; - "#; - - test_db - .execute(setup) - .await - .expect("Failed to setup test database"); - - let cache = SchemaCache::load(&test_db) - .await - .expect("Failed to load Schema Cache"); - - let roles = &cache.roles; - - let super_role = roles.iter().find(|r| r.name == "test_super").unwrap(); - assert!(super_role.is_super_user); - assert!(super_role.can_create_db); - 
assert!(super_role.can_login); - assert!(super_role.can_bypass_rls); - - let nologin_role = roles.iter().find(|r| r.name == "test_nologin").unwrap(); - assert!(!nologin_role.is_super_user); - assert!(!nologin_role.can_create_db); - assert!(!nologin_role.can_login); - assert!(!nologin_role.can_bypass_rls); - - let login_role = roles.iter().find(|r| r.name == "test_login").unwrap(); - assert!(!login_role.is_super_user); - assert!(!login_role.can_create_db); - assert!(login_role.can_login); - assert!(!login_role.can_bypass_rls); - } -} diff --git a/crates/pgt_schema_cache/src/schema_cache.rs b/crates/pgt_schema_cache/src/schema_cache.rs deleted file mode 100644 index 516b37e6..00000000 --- a/crates/pgt_schema_cache/src/schema_cache.rs +++ /dev/null @@ -1,108 +0,0 @@ -use sqlx::postgres::PgPool; - -use crate::columns::Column; -use crate::functions::Function; -use crate::policies::Policy; -use crate::schemas::Schema; -use crate::tables::Table; -use crate::types::PostgresType; -use crate::versions::Version; -use crate::{Role, Trigger}; - -#[derive(Debug, Default)] -pub struct SchemaCache { - pub schemas: Vec, - pub tables: Vec, - pub functions: Vec, - pub types: Vec, - pub versions: Vec, - pub columns: Vec, - pub policies: Vec, - pub triggers: Vec, - pub roles: Vec, -} - -impl SchemaCache { - pub async fn load(pool: &PgPool) -> Result { - let (schemas, tables, functions, types, versions, columns, policies, triggers, roles) = futures_util::try_join!( - Schema::load(pool), - Table::load(pool), - Function::load(pool), - PostgresType::load(pool), - Version::load(pool), - Column::load(pool), - Policy::load(pool), - Trigger::load(pool), - Role::load(pool) - )?; - - Ok(SchemaCache { - schemas, - tables, - functions, - types, - versions, - columns, - policies, - triggers, - roles, - }) - } - - /// Applies an AST node to the repository - /// - /// For example, alter table add column will add the column to the table if it does not exist - /// yet - pub fn mutate(&mut self) { - unimplemented!(); - } - - pub fn find_table(&self, name: &str, schema: Option<&str>) -> Option<&Table> { - self.tables - .iter() - .find(|t| t.name == name && schema.is_none() || Some(t.schema.as_str()) == schema) - } - - pub fn find_type(&self, name: &str, schema: Option<&str>) -> Option<&PostgresType> { - self.types - .iter() - .find(|t| t.name == name && schema.is_none() || Some(t.schema.as_str()) == schema) - } - - pub fn find_col(&self, name: &str, table: &str, schema: Option<&str>) -> Option<&Column> { - self.columns.iter().find(|c| { - c.name.as_str() == name - && c.table_name.as_str() == table - && schema.is_none_or(|s| s == c.schema_name.as_str()) - }) - } - - pub fn find_types(&self, name: &str, schema: Option<&str>) -> Vec<&PostgresType> { - self.types - .iter() - .filter(|t| t.name == name && schema.is_none() || Some(t.schema.as_str()) == schema) - .collect() - } -} - -pub trait SchemaCacheItem { - type Item; - - async fn load(pool: &PgPool) -> Result, sqlx::Error>; -} - -#[cfg(test)] -mod tests { - use pgt_test_utils::test_database::get_new_test_db; - - use crate::SchemaCache; - - #[tokio::test] - async fn it_loads() { - let test_db = get_new_test_db().await; - - SchemaCache::load(&test_db) - .await - .expect("Couldnt' load Schema Cache"); - } -} diff --git a/crates/pgt_schema_cache/src/schemas.rs b/crates/pgt_schema_cache/src/schemas.rs deleted file mode 100644 index 5a007e51..00000000 --- a/crates/pgt_schema_cache/src/schemas.rs +++ /dev/null @@ -1,20 +0,0 @@ -use sqlx::PgPool; - -use 
crate::schema_cache::SchemaCacheItem; - -#[derive(Debug, Default)] -pub struct Schema { - pub id: i64, - pub name: String, - pub owner: String, -} - -impl SchemaCacheItem for Schema { - type Item = Schema; - - async fn load(pool: &PgPool) -> Result, sqlx::Error> { - sqlx::query_file_as!(Schema, "src/queries/schemas.sql") - .fetch_all(pool) - .await - } -} diff --git a/crates/pgt_schema_cache/src/tables.rs b/crates/pgt_schema_cache/src/tables.rs deleted file mode 100644 index a0a40d6a..00000000 --- a/crates/pgt_schema_cache/src/tables.rs +++ /dev/null @@ -1,151 +0,0 @@ -use sqlx::PgPool; - -use crate::schema_cache::SchemaCacheItem; - -#[derive(Debug, Clone, PartialEq, Eq, Default)] -pub enum ReplicaIdentity { - #[default] - Default, - Index, - Full, - Nothing, -} - -impl From for ReplicaIdentity { - fn from(s: String) -> Self { - match s.as_str() { - "DEFAULT" => ReplicaIdentity::Default, - "INDEX" => ReplicaIdentity::Index, - "FULL" => ReplicaIdentity::Full, - "NOTHING" => ReplicaIdentity::Nothing, - _ => panic!("Invalid replica identity"), - } - } -} - -#[derive(Debug, Clone, PartialEq, Eq, Default)] -pub enum TableKind { - #[default] - Ordinary, - View, - MaterializedView, - Partitioned, -} - -impl From for TableKind { - fn from(s: char) -> Self { - match s { - 'r' => Self::Ordinary, - 'p' => Self::Partitioned, - 'v' => Self::View, - 'm' => Self::MaterializedView, - _ => panic!("Invalid table kind"), - } - } -} - -impl From for TableKind { - fn from(s: i8) -> Self { - let c = char::from(u8::try_from(s).unwrap()); - c.into() - } -} - -#[derive(Debug, Default, PartialEq, Eq)] -pub struct Table { - pub id: i64, - pub schema: String, - pub name: String, - pub rls_enabled: bool, - pub rls_forced: bool, - pub replica_identity: ReplicaIdentity, - pub table_kind: TableKind, - pub bytes: i64, - pub size: String, - pub live_rows_estimate: i64, - pub dead_rows_estimate: i64, - pub comment: Option, -} - -impl SchemaCacheItem for Table { - type Item = Table; - - async fn load(pool: &PgPool) -> Result, sqlx::Error> { - sqlx::query_file_as!(Table, "src/queries/tables.sql") - .fetch_all(pool) - .await - } -} - -#[cfg(test)] -mod tests { - use crate::{SchemaCache, tables::TableKind}; - use pgt_test_utils::test_database::get_new_test_db; - use sqlx::Executor; - - #[tokio::test] - async fn includes_views_in_query() { - let test_db = get_new_test_db().await; - - let setup = r#" - create table public.base_table ( - id serial primary key, - value text - ); - - create view public.my_view as - select * from public.base_table; - "#; - - test_db - .execute(setup) - .await - .expect("Failed to setup test database"); - - let cache = SchemaCache::load(&test_db) - .await - .expect("Failed to load Schema Cache"); - - let view = cache - .tables - .iter() - .find(|t| t.name == "my_view") - .expect("View not found"); - - assert_eq!(view.table_kind, TableKind::View); - assert_eq!(view.schema, "public"); - } - - #[tokio::test] - async fn includes_materialized_views_in_query() { - let test_db = get_new_test_db().await; - - let setup = r#" - create table public.base_table ( - id serial primary key, - value text - ); - - create materialized view public.my_mat_view as - select * from public.base_table; - "#; - - test_db - .execute(setup) - .await - .expect("Failed to setup test database"); - - let cache = SchemaCache::load(&test_db) - .await - .expect("Failed to load Schema Cache"); - - let mat_view = cache - .tables - .iter() - .find(|t| t.name == "my_mat_view") - .expect("Materialized view not found"); - - 
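The `relkind` byte from `pg_class` arrives through sqlx as an `i8` (Postgres's one-byte "char" type), which is why `TableKind` implements both `From<char>` and `From<i8>`. A minimal sketch of the conversion chain, assuming the crate's `TableKind` is in scope:

```rust
// 109 is the ASCII code for 'm', the relkind of a materialized view.
let relkind: i8 = 109;
assert_eq!(TableKind::from(relkind), TableKind::MaterializedView);

// The `From<i8>` impl routes through `char`; the `char` impl does the matching.
assert_eq!(TableKind::from('r'), TableKind::Ordinary);
assert_eq!(TableKind::from('p'), TableKind::Partitioned);
```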
assert_eq!(mat_view.table_kind, TableKind::MaterializedView); - assert_eq!(mat_view.schema, "public"); - } -} diff --git a/crates/pgt_schema_cache/src/triggers.rs b/crates/pgt_schema_cache/src/triggers.rs deleted file mode 100644 index 0a5241d6..00000000 --- a/crates/pgt_schema_cache/src/triggers.rs +++ /dev/null @@ -1,300 +0,0 @@ -use crate::schema_cache::SchemaCacheItem; -use strum::{EnumIter, IntoEnumIterator}; - -#[derive(Debug, PartialEq, Eq)] -pub enum TriggerAffected { - Row, - Statement, -} - -impl From for TriggerAffected { - fn from(value: i16) -> Self { - let is_row = 0b0000_0001; - if value & is_row == is_row { - Self::Row - } else { - Self::Statement - } - } -} - -#[derive(Debug, PartialEq, Eq, EnumIter)] -pub enum TriggerEvent { - Insert, - Delete, - Update, - Truncate, -} - -struct TriggerEvents(Vec); - -impl From for TriggerEvents { - fn from(value: i16) -> Self { - Self( - TriggerEvent::iter() - .filter(|variant| { - #[rustfmt::skip] - let mask = match variant { - TriggerEvent::Insert => 0b0000_0100, - TriggerEvent::Delete => 0b0000_1000, - TriggerEvent::Update => 0b0001_0000, - TriggerEvent::Truncate => 0b0010_0000, - }; - mask & value == mask - }) - .collect(), - ) - } -} - -#[derive(Debug, PartialEq, Eq, EnumIter)] -pub enum TriggerTiming { - Before, - After, - Instead, -} - -impl TryFrom for TriggerTiming { - type Error = (); - fn try_from(value: i16) -> Result { - TriggerTiming::iter() - .find(|variant| { - match variant { - TriggerTiming::Instead => { - let mask = 0b0100_0000; - mask & value == mask - } - TriggerTiming::Before => { - let mask = 0b0000_0010; - mask & value == mask - } - TriggerTiming::After => { - let mask = 0b1011_1101; - // timing is "AFTER" if neither INSTEAD nor BEFORE bit are set. - mask | value == mask - } - } - }) - .ok_or(()) - } -} - -pub struct TriggerQueried { - name: String, - table_name: String, - schema_name: String, - proc_name: String, - details_bitmask: i16, -} - -#[derive(Debug, PartialEq, Eq)] -pub struct Trigger { - name: String, - table_name: String, - schema_name: String, - proc_name: String, - affected: TriggerAffected, - timing: TriggerTiming, - events: Vec, -} - -impl From for Trigger { - fn from(value: TriggerQueried) -> Self { - Self { - name: value.name, - table_name: value.table_name, - proc_name: value.proc_name, - schema_name: value.schema_name, - affected: value.details_bitmask.into(), - timing: value.details_bitmask.try_into().unwrap(), - events: TriggerEvents::from(value.details_bitmask).0, - } - } -} - -impl SchemaCacheItem for Trigger { - type Item = Trigger; - - async fn load(pool: &sqlx::PgPool) -> Result, sqlx::Error> { - let results = sqlx::query_file_as!(TriggerQueried, "src/queries/triggers.sql") - .fetch_all(pool) - .await?; - - Ok(results.into_iter().map(|r| r.into()).collect()) - } -} - -#[cfg(test)] -mod tests { - use pgt_test_utils::test_database::get_new_test_db; - use sqlx::Executor; - - use crate::{ - SchemaCache, - triggers::{TriggerAffected, TriggerEvent, TriggerTiming}, - }; - - #[tokio::test] - async fn loads_triggers() { - let test_db = get_new_test_db().await; - - let setup = r#" - create table public.users ( - id serial primary key, - name text - ); - - create or replace function public.log_user_insert() - returns trigger as $$ - begin - -- dummy body - return new; - end; - $$ language plpgsql; - - create trigger trg_users_insert - before insert on public.users - for each row - execute function public.log_user_insert(); - - create trigger trg_users_update - after update or insert on public.users 
- for each statement - execute function public.log_user_insert(); - - create trigger trg_users_delete - before delete on public.users - for each row - execute function public.log_user_insert(); - "#; - - test_db - .execute(setup) - .await - .expect("Failed to setup test database"); - - let cache = SchemaCache::load(&test_db) - .await - .expect("Failed to load Schema Cache"); - - let triggers: Vec<_> = cache - .triggers - .iter() - .filter(|t| t.table_name == "users") - .collect(); - assert_eq!(triggers.len(), 3); - - let insert_trigger = triggers - .iter() - .find(|t| t.name == "trg_users_insert") - .unwrap(); - assert_eq!(insert_trigger.schema_name, "public"); - assert_eq!(insert_trigger.table_name, "users"); - assert_eq!(insert_trigger.timing, TriggerTiming::Before); - assert_eq!(insert_trigger.affected, TriggerAffected::Row); - assert!(insert_trigger.events.contains(&TriggerEvent::Insert)); - assert_eq!(insert_trigger.proc_name, "log_user_insert"); - - let update_trigger = triggers - .iter() - .find(|t| t.name == "trg_users_update") - .unwrap(); - assert_eq!(insert_trigger.schema_name, "public"); - assert_eq!(insert_trigger.table_name, "users"); - assert_eq!(update_trigger.timing, TriggerTiming::After); - assert_eq!(update_trigger.affected, TriggerAffected::Statement); - assert!(update_trigger.events.contains(&TriggerEvent::Update)); - assert!(update_trigger.events.contains(&TriggerEvent::Insert)); - assert_eq!(update_trigger.proc_name, "log_user_insert"); - - let delete_trigger = triggers - .iter() - .find(|t| t.name == "trg_users_delete") - .unwrap(); - assert_eq!(insert_trigger.schema_name, "public"); - assert_eq!(insert_trigger.table_name, "users"); - assert_eq!(delete_trigger.timing, TriggerTiming::Before); - assert_eq!(delete_trigger.affected, TriggerAffected::Row); - assert!(delete_trigger.events.contains(&TriggerEvent::Delete)); - assert_eq!(delete_trigger.proc_name, "log_user_insert"); - } - - #[tokio::test] - async fn loads_instead_and_truncate_triggers() { - let test_db = get_new_test_db().await; - - let setup = r#" - create table public.docs ( - id serial primary key, - content text - ); - - create view public.docs_view as - select * from public.docs; - - create or replace function public.docs_instead_of_update() - returns trigger as $$ - begin - -- dummy body - return new; - end; - $$ language plpgsql; - - create trigger trg_docs_instead_update - instead of update on public.docs_view - for each row - execute function public.docs_instead_of_update(); - - create or replace function public.docs_truncate() - returns trigger as $$ - begin - -- dummy body - return null; - end; - $$ language plpgsql; - - create trigger trg_docs_truncate - after truncate on public.docs - for each statement - execute function public.docs_truncate(); - "#; - - test_db - .execute(setup) - .await - .expect("Failed to setup test database"); - - let cache = SchemaCache::load(&test_db) - .await - .expect("Failed to load Schema Cache"); - - let triggers: Vec<_> = cache - .triggers - .iter() - .filter(|t| t.table_name == "docs" || t.table_name == "docs_view") - .collect(); - assert_eq!(triggers.len(), 2); - - let instead_trigger = triggers - .iter() - .find(|t| t.name == "trg_docs_instead_update") - .unwrap(); - assert_eq!(instead_trigger.schema_name, "public"); - assert_eq!(instead_trigger.table_name, "docs_view"); - assert_eq!(instead_trigger.timing, TriggerTiming::Instead); - assert_eq!(instead_trigger.affected, TriggerAffected::Row); - assert!(instead_trigger.events.contains(&TriggerEvent::Update)); - 
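Everything these trigger tests assert is decoded from a single smallint: `pg_trigger.tgtype` packs scope, timing, and events into one bitmask, which the `From<i16>`/`TryFrom<i16>` impls above unpack. A worked sketch for a `before insert or update ... for each row` trigger, with mask values taken from the constants in this file (`TriggerEvents` is the file-private wrapper):

```rust
// ROW (0b0000_0001) | BEFORE (0b0000_0010) | INSERT (0b0000_0100) | UPDATE (0b0001_0000)
let tgtype: i16 = 0b0001_0111;

assert_eq!(TriggerAffected::from(tgtype), TriggerAffected::Row);
assert_eq!(TriggerTiming::try_from(tgtype), Ok(TriggerTiming::Before));
assert_eq!(
    TriggerEvents::from(tgtype).0,
    vec![TriggerEvent::Insert, TriggerEvent::Update]
);
```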
assert_eq!(instead_trigger.proc_name, "docs_instead_of_update"); - - let truncate_trigger = triggers - .iter() - .find(|t| t.name == "trg_docs_truncate") - .unwrap(); - assert_eq!(truncate_trigger.schema_name, "public"); - assert_eq!(truncate_trigger.table_name, "docs"); - assert_eq!(truncate_trigger.timing, TriggerTiming::After); - assert_eq!(truncate_trigger.affected, TriggerAffected::Statement); - assert!(truncate_trigger.events.contains(&TriggerEvent::Truncate)); - assert_eq!(truncate_trigger.proc_name, "docs_truncate"); - } -} diff --git a/crates/pgt_schema_cache/src/types.rs b/crates/pgt_schema_cache/src/types.rs deleted file mode 100644 index d540c363..00000000 --- a/crates/pgt_schema_cache/src/types.rs +++ /dev/null @@ -1,58 +0,0 @@ -use serde::Deserialize; -use sqlx::PgPool; -use sqlx::types::JsonValue; - -use crate::schema_cache::SchemaCacheItem; - -#[derive(Debug, Clone, Default)] -pub struct TypeAttributes { - pub attrs: Vec, -} - -#[derive(Debug, Clone, Default, Deserialize)] -pub struct PostgresTypeAttribute { - pub name: String, - pub type_id: i64, -} - -impl From> for TypeAttributes { - fn from(s: Option) -> Self { - let values: Vec = - serde_json::from_value(s.unwrap_or(JsonValue::Array(vec![]))).unwrap(); - TypeAttributes { attrs: values } - } -} - -#[derive(Debug, Clone, Default)] -pub struct Enums { - pub values: Vec, -} - -impl From> for Enums { - fn from(s: Option) -> Self { - let values: Vec = - serde_json::from_value(s.unwrap_or(JsonValue::Array(vec![]))).unwrap(); - Enums { values } - } -} - -#[derive(Debug, Default)] -pub struct PostgresType { - pub id: i64, - pub name: String, - pub schema: String, - pub format: String, - pub enums: Enums, - pub attributes: TypeAttributes, - pub comment: Option, -} - -impl SchemaCacheItem for PostgresType { - type Item = PostgresType; - - async fn load(pool: &PgPool) -> Result, sqlx::Error> { - sqlx::query_file_as!(PostgresType, "src/queries/types.sql") - .fetch_all(pool) - .await - } -} diff --git a/crates/pgt_schema_cache/src/versions.rs b/crates/pgt_schema_cache/src/versions.rs deleted file mode 100644 index a4769c55..00000000 --- a/crates/pgt_schema_cache/src/versions.rs +++ /dev/null @@ -1,30 +0,0 @@ -use sqlx::PgPool; - -use crate::schema_cache::SchemaCacheItem; - -#[derive(Debug, Default)] -pub struct Version { - pub version: Option, - pub version_num: Option, - pub active_connections: Option, - pub max_connections: Option, -} - -impl SchemaCacheItem for Version { - type Item = Version; - - async fn load(pool: &PgPool) -> Result, sqlx::Error> { - sqlx::query_file_as!(Version, "src/queries/versions.sql") - .fetch_all(pool) - .await - } - - /* - Sample Output: - -[ RECORD 1 ]------+-------------------------------------------------------------------------------------------------------------------------- - version | PostgreSQL 15.7 (Debian 15.7-1.pgdg120+1) on aarch64-unknown-linux-gnu, compiled by gcc (Debian 12.2.0-14) 12.2.0, 64-bit - version_num | 150007 - active_connections | 8 - max_connections | 100 - */ -} diff --git a/crates/pgt_statement_splitter/Cargo.toml b/crates/pgt_statement_splitter/Cargo.toml deleted file mode 100644 index deea07bb..00000000 --- a/crates/pgt_statement_splitter/Cargo.toml +++ /dev/null @@ -1,22 +0,0 @@ -[package] -authors.workspace = true -categories.workspace = true -description = "" -edition.workspace = true -homepage.workspace = true -keywords.workspace = true -license.workspace = true -name = "pgt_statement_splitter" -repository.workspace = true -version = "0.0.0" - - -[dependencies] 
-pgt_diagnostics = { workspace = true }
-pgt_lexer.workspace = true
-pgt_query_ext.workspace = true
-pgt_text_size.workspace = true
-regex.workspace = true
-
-[dev-dependencies]
-ntest = "0.9.3"
diff --git a/crates/pgt_statement_splitter/src/diagnostics.rs b/crates/pgt_statement_splitter/src/diagnostics.rs
deleted file mode 100644
index bcff6e80..00000000
--- a/crates/pgt_statement_splitter/src/diagnostics.rs
+++ /dev/null
@@ -1,25 +0,0 @@
-use pgt_diagnostics::{Diagnostic, MessageAndDescription};
-use pgt_text_size::TextRange;
-
-/// A specialized diagnostic for the statement splitter parser.
-///
-/// Parser diagnostics are always **errors**.
-#[derive(Clone, Debug, Diagnostic, PartialEq)]
-#[diagnostic(category = "syntax", severity = Error)]
-pub struct SplitDiagnostic {
-    /// The location where the error occurred
-    #[location(span)]
-    span: Option<TextRange>,
-    #[message]
-    #[description]
-    pub message: MessageAndDescription,
-}
-
-impl SplitDiagnostic {
-    pub fn new(message: impl Into<String>, range: TextRange) -> Self {
-        Self {
-            span: Some(range),
-            message: MessageAndDescription::from(message.into()),
-        }
-    }
-}
diff --git a/crates/pgt_statement_splitter/src/lib.rs b/crates/pgt_statement_splitter/src/lib.rs
deleted file mode 100644
index e43a1095..00000000
--- a/crates/pgt_statement_splitter/src/lib.rs
+++ /dev/null
@@ -1,369 +0,0 @@
-//! Postgres Statement Splitter
-//!
-//! This crate provides a function to split a SQL source string into individual statements.
-pub mod diagnostics;
-mod parser;
-
-use parser::{Parser, ParserResult, source};
-use pgt_lexer::diagnostics::ScanError;
-
-pub fn split(sql: &str) -> Result<ParserResult, Vec<ScanError>> {
-    let tokens = pgt_lexer::lex(sql)?;
-
-    let mut parser = Parser::new(tokens);
-
-    source(&mut parser);
-
-    Ok(parser.finish())
-}
-
-#[cfg(test)]
-mod tests {
-    use diagnostics::SplitDiagnostic;
-    use ntest::timeout;
-    use pgt_lexer::SyntaxKind;
-    use pgt_text_size::TextRange;
-
-    use super::*;
-
-    struct Tester {
-        input: String,
-        parse: ParserResult,
-    }
-
-    impl From<&str> for Tester {
-        fn from(input: &str) -> Self {
-            Tester {
-                parse: split(input).expect("Failed to split"),
-                input: input.to_string(),
-            }
-        }
-    }
-
-    impl Tester {
-        fn expect_statements(&self, expected: Vec<&str>) -> &Self {
-            assert_eq!(
-                self.parse.ranges.len(),
-                expected.len(),
-                "Expected {} statements, got {}: {:?}",
-                expected.len(),
-                self.parse.ranges.len(),
-                self.parse
-                    .ranges
-                    .iter()
-                    .map(|r| &self.input[*r])
-                    .collect::<Vec<_>>()
-            );
-
-            for (range, expected) in self.parse.ranges.iter().zip(expected.iter()) {
-                assert_eq!(*expected, self.input[*range].to_string());
-            }
-
-            assert!(
-                self.parse.ranges.is_sorted_by_key(|r| r.start()),
-                "Ranges are not sorted"
-            );
-
-            self
-        }
-
-        fn expect_errors(&self, expected: Vec<SplitDiagnostic>) -> &Self {
-            assert_eq!(
-                self.parse.errors.len(),
-                expected.len(),
-                "Expected {} errors, got {}: {:?}",
-                expected.len(),
-                self.parse.errors.len(),
-                self.parse.errors
-            );
-
-            for (err, expected) in self.parse.errors.iter().zip(expected.iter()) {
-                assert_eq!(expected, err);
-            }
-
-            self
-        }
-    }
-
-    #[test]
-    fn ts_with_timezone() {
-        Tester::from("alter table foo add column bar timestamp with time zone;").expect_statements(
-            vec!["alter table foo add column bar timestamp with time zone;"],
-        );
-    }
-
-    #[test]
-    fn failing_lexer() {
-        let input = "select 1443ddwwd33djwdkjw13331333333333";
-        let res = split(input).unwrap_err();
-        assert!(!res.is_empty());
-    }
-
-    #[test]
-    #[timeout(1000)]
-    fn basic() {
-        Tester::from("select 1 from contact; select 1;")
.expect_statements(vec!["select 1 from contact;", "select 1;"]); - } - - #[test] - fn no_semicolons() { - Tester::from("select 1 from contact\nselect 1") - .expect_statements(vec!["select 1 from contact", "select 1"]); - } - - #[test] - fn grant() { - Tester::from("GRANT SELECT ON TABLE \"public\".\"my_table\" TO \"my_role\";") - .expect_statements(vec![ - "GRANT SELECT ON TABLE \"public\".\"my_table\" TO \"my_role\";", - ]); - } - - #[test] - fn double_newlines() { - Tester::from("select 1 from contact\n\nselect 1\n\nselect 3").expect_statements(vec![ - "select 1 from contact", - "select 1", - "select 3", - ]); - } - - #[test] - fn single_newlines() { - Tester::from("select 1\nfrom contact\n\nselect 3") - .expect_statements(vec!["select 1\nfrom contact", "select 3"]); - } - - #[test] - fn alter_column() { - Tester::from("alter table users alter column email drop not null;") - .expect_statements(vec!["alter table users alter column email drop not null;"]); - } - - #[test] - fn insert_expect_error() { - Tester::from("\ninsert select 1\n\nselect 3") - .expect_statements(vec!["insert select 1", "select 3"]) - .expect_errors(vec![SplitDiagnostic::new( - format!("Expected {:?}", SyntaxKind::Into), - TextRange::new(8.into(), 14.into()), - )]); - } - - #[test] - fn command_between_not_starting() { - Tester::from("select 1\n \\com test\nselect 2") - .expect_statements(vec!["select 1", "select 2"]); - } - - #[test] - fn command_between() { - Tester::from("select 1\n\\com test\nselect 2") - .expect_statements(vec!["select 1", "select 2"]); - } - - #[test] - fn command_standalone() { - Tester::from("select 1\n\n\\com test\n\nselect 2") - .expect_statements(vec!["select 1", "select 2"]); - } - - #[test] - fn insert_with_select() { - Tester::from("\ninsert into tbl (id) select 1\n\nselect 3") - .expect_statements(vec!["insert into tbl (id) select 1", "select 3"]); - } - - #[test] - fn c_style_comments() { - Tester::from("/* this is a test */\nselect 1").expect_statements(vec!["select 1"]); - } - - #[test] - fn trigger_instead_of() { - Tester::from( - "CREATE OR REPLACE TRIGGER my_trigger - INSTEAD OF INSERT ON my_table - FOR EACH ROW - EXECUTE FUNCTION my_table_trigger_fn();", - ) - .expect_statements(vec![ - "CREATE OR REPLACE TRIGGER my_trigger - INSTEAD OF INSERT ON my_table - FOR EACH ROW - EXECUTE FUNCTION my_table_trigger_fn();", - ]); - } - - #[test] - fn with_check() { - Tester::from("create policy employee_insert on journey_execution for insert to authenticated with check ((select private.organisation_id()) = organisation_id);") - .expect_statements(vec!["create policy employee_insert on journey_execution for insert to authenticated with check ((select private.organisation_id()) = organisation_id);"]); - } - - #[test] - fn nested_parenthesis() { - Tester::from( - "create table if not exists journey_node_execution ( - id uuid default gen_random_uuid() not null primary key, - - constraint uq_node_exec unique (journey_execution_id, journey_node_id) -);", - ) - .expect_statements(vec![ - "create table if not exists journey_node_execution ( - id uuid default gen_random_uuid() not null primary key, - - constraint uq_node_exec unique (journey_execution_id, journey_node_id) -);", - ]); - } - - #[test] - fn with_cte() { - Tester::from("with test as (select 1 as id) select * from test;") - .expect_statements(vec!["with test as (select 1 as id) select * from test;"]); - } - - #[test] - fn case() { - Tester::from("select case when select 2 then 1 else 0 end") - .expect_statements(vec!["select case when 
select 2 then 1 else 0 end"]); - } - - #[test] - fn with_security_invoker() { - Tester::from( - "create view api.my_view with (security_invoker) as select id from public.my_table;", - ) - .expect_statements(vec![ - "create view api.my_view with (security_invoker) as select id from public.my_table;", - ]); - } - - #[test] - fn create_trigger() { - Tester::from("alter table appointment_status add constraint valid_key check (private.strip_special_chars(key) = key and length(key) > 0 and length(key) < 60); - -create trigger default_key before insert on appointment_type for each row when (new.key is null) execute procedure default_key (); - -create trigger default_key before insert or update on appointment_status for each row when (new.key is null) execute procedure default_key (); - -alter table deal_type add column key text not null; -") - .expect_statements(vec!["alter table appointment_status add constraint valid_key check (private.strip_special_chars(key) = key and length(key) > 0 and length(key) < 60);", - "create trigger default_key before insert on appointment_type for each row when (new.key is null) execute procedure default_key ();", - "create trigger default_key before insert or update on appointment_status for each row when (new.key is null) execute procedure default_key ();", - "alter table deal_type add column key text not null;", - ]); - } - - #[test] - fn policy() { - Tester::from("create policy employee_tokenauthed_select on provider_template_approval for select to authenticated, tokenauthed using ( select true );") - .expect_statements(vec!["create policy employee_tokenauthed_select on provider_template_approval for select to authenticated, tokenauthed using ( select true );"]); - } - - #[test] - #[timeout(1000)] - fn simple_select() { - Tester::from( - " -select id, name, test1231234123, unknown from co; - -select 14433313331333 - -alter table test drop column id; - -select lower('test'); -", - ) - .expect_statements(vec![ - "select id, name, test1231234123, unknown from co;", - "select 14433313331333", - "alter table test drop column id;", - "select lower('test');", - ]); - } - - #[test] - fn create_rule() { - Tester::from( - "create rule log_employee_insert as -on insert to employees -do also insert into employee_log (action, employee_id, log_time) -values ('insert', new.id, now());", - ) - .expect_statements(vec![ - "create rule log_employee_insert as -on insert to employees -do also insert into employee_log (action, employee_id, log_time) -values ('insert', new.id, now());", - ]); - } - - #[test] - fn insert_into() { - Tester::from("randomness\ninsert into tbl (id) values (1)\nselect 3").expect_statements( - vec!["randomness", "insert into tbl (id) values (1)\nselect 3"], - ); - } - - #[test] - fn update() { - Tester::from("more randomness\nupdate tbl set col = '1'\n\nselect 3").expect_statements( - vec!["more randomness", "update tbl set col = '1'", "select 3"], - ); - } - - #[test] - fn delete_from() { - Tester::from("more randomness\ndelete from test where id = 1\n\nselect 3") - .expect_statements(vec![ - "more randomness", - "delete from test where id = 1", - "select 3", - ]); - } - - #[test] - fn with_ordinality() { - Tester::from("insert into table (col) select 1 from other t cross join lateral jsonb_array_elements(t.buttons) with ordinality as a(b, nr) where t.buttons is not null;").expect_statements(vec!["insert into table (col) select 1 from other t cross join lateral jsonb_array_elements(t.buttons) with ordinality as a(b, nr) where t.buttons is not null;"]); - } 
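All of the cases above run through the `Tester` harness; for orientation, a minimal sketch of calling the public `split` entry point directly (the range indexing mirrors what `expect_statements` does internally):

```rust
let sql = "select 1 from contact;\n\nselect 2;";

// `split` only returns `Err` when the lexer itself fails (see `failing_lexer`).
let result = split(sql).expect("lexer failed");

// Each returned range slices a statement back out of the original input.
for range in &result.ranges {
    println!("statement: {}", &sql[*range]);
}

// Parser problems are collected as diagnostics instead of aborting the split.
for err in &result.errors {
    eprintln!("{err:?}");
}
```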
- - #[test] - fn unknown() { - Tester::from("random stuff\n\nmore randomness\n\nselect 3").expect_statements(vec![ - "random stuff", - "more randomness", - "select 3", - ]); - } - - #[test] - fn unknown_2() { - Tester::from("random stuff\nselect 1\n\nselect 3").expect_statements(vec![ - "random stuff", - "select 1", - "select 3", - ]); - } - - #[test] - fn commas_and_newlines() { - Tester::from( - " - select - email, - - - from - auth.users; - ", - ) - .expect_statements(vec![ - "select\n email,\n\n\n from\n auth.users;", - ]); - } -} diff --git a/crates/pgt_statement_splitter/src/parser.rs b/crates/pgt_statement_splitter/src/parser.rs deleted file mode 100644 index 241d0c70..00000000 --- a/crates/pgt_statement_splitter/src/parser.rs +++ /dev/null @@ -1,237 +0,0 @@ -mod common; -mod data; -mod ddl; -mod dml; - -pub use common::source; - -use pgt_lexer::{SyntaxKind, Token, WHITESPACE_TOKENS}; -use pgt_text_size::{TextRange, TextSize}; - -use crate::diagnostics::SplitDiagnostic; - -/// Main parser that exposes the `cstree` api, and collects errors and statements -/// It is modelled after a Pratt Parser. For a gentle introduction to Pratt Parsing, see https://matklad.github.io/2020/04/13/simple-but-powerful-pratt-parsing.html -pub struct Parser { - /// The statement ranges are defined by the indices of the start/end tokens - stmt_ranges: Vec<(usize, usize)>, - - /// The syntax errors accumulated during parsing - errors: Vec, - - current_stmt_start: Option, - - tokens: Vec, - - eof_token: Token, - - current_pos: usize, -} - -#[derive(Debug)] -pub struct ParserResult { - /// The ranges of the parsed statements - pub ranges: Vec, - /// The syntax errors accumulated during parsing - pub errors: Vec, -} - -impl Parser { - pub fn new(tokens: Vec) -> Self { - let eof_token = Token::eof(usize::from( - tokens - .last() - .map(|t| t.span.end()) - .unwrap_or(TextSize::from(0)), - )); - - // Place `current_pos` on the first relevant token - let mut current_pos = 0; - while is_irrelevant_token(tokens.get(current_pos).unwrap_or(&eof_token)) { - current_pos += 1; - } - - Self { - stmt_ranges: Vec::new(), - eof_token, - errors: Vec::new(), - current_stmt_start: None, - tokens, - current_pos, - } - } - - pub fn finish(self) -> ParserResult { - ParserResult { - ranges: self - .stmt_ranges - .iter() - .map(|(start_token_pos, end_token_pos)| { - let from = self.tokens.get(*start_token_pos); - let to = self.tokens.get(*end_token_pos).unwrap_or(&self.eof_token); - - TextRange::new(from.unwrap().span.start(), to.span.end()) - }) - .collect(), - errors: self.errors, - } - } - - pub fn start_stmt(&mut self) { - assert!( - self.current_stmt_start.is_none(), - "cannot start statement within statement at {:?}", - self.tokens.get(self.current_stmt_start.unwrap()) - ); - self.current_stmt_start = Some(self.current_pos); - } - - pub fn close_stmt(&mut self) { - assert!( - self.current_stmt_start.is_some(), - "Must start statement before closing it." - ); - - let start_token_pos = self.current_stmt_start.unwrap(); - - assert!( - self.current_pos > start_token_pos, - "Must close the statement on a token that's later than the start token." - ); - - let (end_token_pos, _) = self.find_last_relevant().unwrap(); - - self.stmt_ranges.push((start_token_pos, end_token_pos)); - - self.current_stmt_start = None; - } - - fn current(&self) -> &Token { - match self.tokens.get(self.current_pos) { - Some(token) => token, - None => &self.eof_token, - } - } - - /// Advances the parser to the next relevant token and returns it. 
- /// - /// NOTE: This will skip irrelevant tokens. - fn advance(&mut self) -> &Token { - // can't reuse any `find_next_relevant` logic because of Mr. Borrow Checker - let (pos, token) = self - .tokens - .iter() - .enumerate() - .skip(self.current_pos + 1) - .find(|(_, t)| is_relevant(t)) - .unwrap_or((self.tokens.len(), &self.eof_token)); - - self.current_pos = pos; - token - } - - fn look_ahead(&self) -> Option<&Token> { - self.tokens - .iter() - .skip(self.current_pos + 1) - .find(|t| is_relevant(t)) - } - - /// Returns `None` if there are no previous relevant tokens - fn look_back(&self) -> Option<&Token> { - self.find_last_relevant().map(|it| it.1) - } - - /// Will advance if the `kind` matches the current token. - /// Otherwise, will add a diagnostic to the internal `errors`. - pub fn expect(&mut self, kind: SyntaxKind) { - if self.current().kind == kind { - self.advance(); - } else { - self.errors.push(SplitDiagnostic::new( - format!("Expected {:#?}", kind), - self.current().span, - )); - } - } - - fn find_last_relevant(&self) -> Option<(usize, &Token)> { - self.tokens - .iter() - .enumerate() - .take(self.current_pos) - .rfind(|(_, t)| is_relevant(t)) - } -} - -#[cfg(windows)] -/// Returns true if the token is relevant for the parsing process -/// -/// On windows, a newline is represented by `\r\n` which is two characters. -fn is_irrelevant_token(t: &Token) -> bool { - WHITESPACE_TOKENS.contains(&t.kind) - // double new lines are relevant, single ones are not - && (t.kind != SyntaxKind::Newline || t.text == "\r\n" || t.text.chars().count() == 1) -} - -#[cfg(not(windows))] -/// Returns true if the token is relevant for the parsing process -fn is_irrelevant_token(t: &Token) -> bool { - WHITESPACE_TOKENS.contains(&t.kind) - // double new lines are relevant, single ones are not - && (t.kind != SyntaxKind::Newline || t.text.chars().count() == 1) -} - -fn is_relevant(t: &Token) -> bool { - !is_irrelevant_token(t) -} - -#[cfg(test)] -mod tests { - use pgt_lexer::SyntaxKind; - - use crate::parser::Parser; - - #[test] - fn advance_works_as_expected() { - let sql = r#" - create table users ( - id serial primary key, - name text, - email text - ); - "#; - let tokens = pgt_lexer::lex(sql).unwrap(); - let total_num_tokens = tokens.len(); - - let mut parser = Parser::new(tokens); - - let expected = vec![ - (SyntaxKind::Create, 2), - (SyntaxKind::Table, 4), - (SyntaxKind::Ident, 6), - (SyntaxKind::Ascii40, 8), - (SyntaxKind::Ident, 11), - (SyntaxKind::Ident, 13), - (SyntaxKind::Primary, 15), - (SyntaxKind::Key, 17), - (SyntaxKind::Ascii44, 18), - (SyntaxKind::NameP, 21), - (SyntaxKind::TextP, 23), - (SyntaxKind::Ascii44, 24), - (SyntaxKind::Ident, 27), - (SyntaxKind::TextP, 29), - (SyntaxKind::Ascii41, 32), - (SyntaxKind::Ascii59, 33), - ]; - - for (kind, pos) in expected { - assert_eq!(parser.current().kind, kind); - assert_eq!(parser.current_pos, pos); - parser.advance(); - } - - assert_eq!(parser.current().kind, SyntaxKind::Eof); - assert_eq!(parser.current_pos, total_num_tokens); - } -} diff --git a/crates/pgt_statement_splitter/src/parser/common.rs b/crates/pgt_statement_splitter/src/parser/common.rs deleted file mode 100644 index a5d68df1..00000000 --- a/crates/pgt_statement_splitter/src/parser/common.rs +++ /dev/null @@ -1,283 +0,0 @@ -use pgt_lexer::{SyntaxKind, Token, TokenType, WHITESPACE_TOKENS}; - -use super::{ - Parser, - data::at_statement_start, - ddl::{alter, create}, - dml::{cte, delete, insert, select, update}, -}; - -pub fn source(p: &mut Parser) { - loop { - match p.current() 
{
-            Token {
-                kind: SyntaxKind::Eof,
-                ..
-            } => {
-                break;
-            }
-            Token {
-                // we might want to ignore TokenType::NoKeyword here too,
-                // but that would lead to invalid statements not being picked up
-                token_type: TokenType::Whitespace,
-                ..
-            } => {
-                p.advance();
-            }
-            Token {
-                kind: SyntaxKind::Ascii92,
-                ..
-            } => {
-                plpgsql_command(p);
-            }
-            _ => {
-                statement(p);
-            }
-        }
-    }
-}
-
-pub(crate) fn statement(p: &mut Parser) {
-    p.start_stmt();
-    match p.current().kind {
-        SyntaxKind::With => {
-            cte(p);
-        }
-        SyntaxKind::Select => {
-            select(p);
-        }
-        SyntaxKind::Insert => {
-            insert(p);
-        }
-        SyntaxKind::Update => {
-            update(p);
-        }
-        SyntaxKind::DeleteP => {
-            delete(p);
-        }
-        SyntaxKind::Create => {
-            create(p);
-        }
-        SyntaxKind::Alter => {
-            alter(p);
-        }
-        _ => {
-            unknown(p, &[]);
-        }
-    }
-    p.close_stmt();
-}
-
-pub(crate) fn parenthesis(p: &mut Parser) {
-    p.expect(SyntaxKind::Ascii40);
-
-    let mut depth = 1;
-
-    loop {
-        match p.current().kind {
-            SyntaxKind::Ascii40 => {
-                p.advance();
-                depth += 1;
-            }
-            SyntaxKind::Ascii41 | SyntaxKind::Eof => {
-                p.advance();
-                depth -= 1;
-                if depth == 0 {
-                    break;
-                }
-            }
-            _ => {
-                p.advance();
-            }
-        }
-    }
-}
-
-pub(crate) fn plpgsql_command(p: &mut Parser) {
-    p.expect(SyntaxKind::Ascii92);
-
-    loop {
-        match p.current().kind {
-            SyntaxKind::Newline => {
-                p.advance();
-                break;
-            }
-            _ => {
-                // move to the next token without skipping irrelevant tokens;
-                // `advance()` would skip the newline we are waiting for
-                p.current_pos += 1;
-            }
-        }
-    }
-}
-
-pub(crate) fn case(p: &mut Parser) {
-    p.expect(SyntaxKind::Case);
-
-    loop {
-        match p.current().kind {
-            SyntaxKind::EndP => {
-                p.advance();
-                break;
-            }
-            _ => {
-                p.advance();
-            }
-        }
-    }
-}
-
-pub(crate) fn unknown(p: &mut Parser, exclude: &[SyntaxKind]) {
-    loop {
-        match p.current() {
-            Token {
-                kind: SyntaxKind::Ascii59,
-                ..
-            } => {
-                p.advance();
-                break;
-            }
-            Token {
-                kind: SyntaxKind::Eof,
-                ..
-            } => {
-                break;
-            }
-            Token {
-                kind: SyntaxKind::Newline,
-                ..
-            } => {
-                if p.look_back().is_some_and(|t| t.kind == SyntaxKind::Ascii44) {
-                    p.advance();
-                } else {
-                    break;
-                }
-            }
-            Token {
-                kind: SyntaxKind::Case,
-                ..
-            } => {
-                case(p);
-            }
-            Token {
-                kind: SyntaxKind::Ascii92,
-                ..
-            } => {
-                // psql commands, e.g.
-                //
-                // ```
-                // \if test
-                // ```
-                //
-                // we wait for "\" and check if the previous token is a newline
-
-                // a newline is whitespace, but we do not want to ignore it here
-                let irrelevant = WHITESPACE_TOKENS
-                    .iter()
-                    .filter(|t| **t != SyntaxKind::Newline)
-                    .collect::<Vec<_>>();
-
-                // go back from the current position without ignoring irrelevant tokens
-                if p.tokens
-                    .iter()
-                    .take(p.current_pos)
-                    .rev()
-                    .find(|t| !irrelevant.contains(&&t.kind))
-                    .is_some_and(|t| t.kind == SyntaxKind::Newline)
-                {
-                    break;
-                }
-                p.advance();
-            }
-            Token {
-                kind: SyntaxKind::Ascii40,
-                ..
-            } => {
-                parenthesis(p);
-            }
-            t => match at_statement_start(t.kind, exclude) {
-                Some(SyntaxKind::Select) => {
-                    let prev = p.look_back().map(|t| t.kind);
-                    if [
-                        // for policies, with for select
-                        SyntaxKind::For,
-                        // for create view / table as
-                        SyntaxKind::As,
-                        // for create rule
-                        SyntaxKind::On,
-                        // for create rule
-                        SyntaxKind::Also,
-                        // for create rule
-                        SyntaxKind::Instead,
-                        // for UNION
-                        SyntaxKind::Union,
-                        // for UNION ALL
-                        SyntaxKind::All,
-                        // for UNION ...
EXCEPT - SyntaxKind::Except, - // for grant - SyntaxKind::Grant, - ] - .iter() - .all(|x| Some(x) != prev.as_ref()) - { - break; - } - - p.advance(); - } - Some(SyntaxKind::Insert) | Some(SyntaxKind::Update) | Some(SyntaxKind::DeleteP) => { - let prev = p.look_back().map(|t| t.kind); - if [ - // for create trigger - SyntaxKind::Before, - SyntaxKind::After, - // for policies, e.g. for insert - SyntaxKind::For, - // e.g. on insert or delete - SyntaxKind::Or, - // e.g. INSTEAD OF INSERT - SyntaxKind::Of, - // for create rule - SyntaxKind::On, - // for create rule - SyntaxKind::Also, - // for create rule - SyntaxKind::Instead, - // for grant - SyntaxKind::Grant, - ] - .iter() - .all(|x| Some(x) != prev.as_ref()) - { - break; - } - p.advance(); - } - Some(SyntaxKind::With) => { - let next = p.look_ahead().map(|t| t.kind); - if [ - // WITH ORDINALITY should not start a new statement - SyntaxKind::Ordinality, - // WITH CHECK should not start a new statement - SyntaxKind::Check, - // TIMESTAMP WITH TIME ZONE should not start a new statement - SyntaxKind::Time, - ] - .iter() - .all(|x| Some(x) != next.as_ref()) - { - break; - } - p.advance(); - } - Some(_) => { - break; - } - None => { - p.advance(); - } - }, - } - } -} diff --git a/crates/pgt_statement_splitter/src/parser/data.rs b/crates/pgt_statement_splitter/src/parser/data.rs deleted file mode 100644 index c0792c39..00000000 --- a/crates/pgt_statement_splitter/src/parser/data.rs +++ /dev/null @@ -1,22 +0,0 @@ -use pgt_lexer::SyntaxKind; - -// All tokens listed here must be explicitly handled in the `unknown` function to ensure that we do -// not break in the middle of another statement that contains a statement start token. -// -// All of these statements must have a dedicated parser function called from the `statement` function -static STATEMENT_START_TOKENS: &[SyntaxKind] = &[ - SyntaxKind::With, - SyntaxKind::Select, - SyntaxKind::Insert, - SyntaxKind::Update, - SyntaxKind::DeleteP, - SyntaxKind::Create, - SyntaxKind::Alter, -]; - -pub(crate) fn at_statement_start(kind: SyntaxKind, exclude: &[SyntaxKind]) -> Option<&SyntaxKind> { - STATEMENT_START_TOKENS - .iter() - .filter(|&x| !exclude.contains(x)) - .find(|&x| x == &kind) -} diff --git a/crates/pgt_statement_splitter/src/parser/ddl.rs b/crates/pgt_statement_splitter/src/parser/ddl.rs deleted file mode 100644 index d9f233c2..00000000 --- a/crates/pgt_statement_splitter/src/parser/ddl.rs +++ /dev/null @@ -1,15 +0,0 @@ -use pgt_lexer::SyntaxKind; - -use super::{Parser, common::unknown}; - -pub(crate) fn create(p: &mut Parser) { - p.expect(SyntaxKind::Create); - - unknown(p, &[SyntaxKind::With]); -} - -pub(crate) fn alter(p: &mut Parser) { - p.expect(SyntaxKind::Alter); - - unknown(p, &[SyntaxKind::Alter]); -} diff --git a/crates/pgt_statement_splitter/src/parser/dml.rs b/crates/pgt_statement_splitter/src/parser/dml.rs deleted file mode 100644 index 015c50b6..00000000 --- a/crates/pgt_statement_splitter/src/parser/dml.rs +++ /dev/null @@ -1,59 +0,0 @@ -use pgt_lexer::SyntaxKind; - -use super::{ - Parser, - common::{parenthesis, unknown}, -}; - -pub(crate) fn cte(p: &mut Parser) { - p.expect(SyntaxKind::With); - - loop { - p.expect(SyntaxKind::Ident); - p.expect(SyntaxKind::As); - parenthesis(p); - - if p.current().kind == SyntaxKind::Ascii44 { - p.advance(); - } else { - break; - } - } - - unknown( - p, - &[ - SyntaxKind::Select, - SyntaxKind::Insert, - SyntaxKind::Update, - SyntaxKind::DeleteP, - SyntaxKind::Merge, - ], - ); -} - -pub(crate) fn select(p: &mut Parser) { - 
p.expect(SyntaxKind::Select);
-
-    unknown(p, &[]);
-}
-
-pub(crate) fn insert(p: &mut Parser) {
-    p.expect(SyntaxKind::Insert);
-    p.expect(SyntaxKind::Into);
-
-    unknown(p, &[SyntaxKind::Select]);
-}
-
-pub(crate) fn update(p: &mut Parser) {
-    p.expect(SyntaxKind::Update);
-
-    unknown(p, &[]);
-}
-
-pub(crate) fn delete(p: &mut Parser) {
-    p.expect(SyntaxKind::DeleteP);
-    p.expect(SyntaxKind::From);
-
-    unknown(p, &[]);
-}
diff --git a/crates/pgt_statement_splitter/tests/data/simple_select__4.sql b/crates/pgt_statement_splitter/tests/data/simple_select__4.sql
deleted file mode 100644
index bfd9e429..00000000
--- a/crates/pgt_statement_splitter/tests/data/simple_select__4.sql
+++ /dev/null
@@ -1,8 +0,0 @@
-select id, name, test1231234123, unknown from co;
-
-select 14433313331333
-
-alter table test drop column id;
-
-select lower('test');
-
diff --git a/crates/pgt_statement_splitter/tests/data/simple_union__4.sql b/crates/pgt_statement_splitter/tests/data/simple_union__4.sql
deleted file mode 100644
index 100b59ea..00000000
--- a/crates/pgt_statement_splitter/tests/data/simple_union__4.sql
+++ /dev/null
@@ -1,7 +0,0 @@
-select 1 union all select 2;
-
-select 1 union select 2;
-
-select 1 union select 2 except select 3;
-
-select 1 union all select 2 except select 3;
\ No newline at end of file
diff --git a/crates/pgt_statement_splitter/tests/data/with_comments__4.sql b/crates/pgt_statement_splitter/tests/data/with_comments__4.sql
deleted file mode 100644
index 652185cd..00000000
--- a/crates/pgt_statement_splitter/tests/data/with_comments__4.sql
+++ /dev/null
@@ -1,13 +0,0 @@
--- test
-select id, name, test1231234123, unknown from co;
-
--- in between two statements
-
-select 14433313331333 -- after a statement
-
-alter table --within a statement
-test drop column id;
-
-select lower('test');
---after a statement
-
diff --git a/crates/pgt_statement_splitter/tests/statement_splitter_tests.rs b/crates/pgt_statement_splitter/tests/statement_splitter_tests.rs
deleted file mode 100644
index e0534725..00000000
--- a/crates/pgt_statement_splitter/tests/statement_splitter_tests.rs
+++ /dev/null
@@ -1,36 +0,0 @@
-use std::fs::{self};
-
-const DATA_DIR_PATH: &str = "tests/data/";
-
-#[test]
-fn test_statement_splitter() {
-    let mut paths: Vec<_> = fs::read_dir(DATA_DIR_PATH)
-        .unwrap()
-        .map(|r| r.unwrap())
-        .collect();
-    paths.sort_by_key(|dir| dir.path());
-
-    for f in paths.iter() {
-        let path = f.path();
-        let test_name = path.file_stem().unwrap().to_str().unwrap();
-        let expected_count = test_name
-            .split("__")
-            .last()
-            .unwrap()
-            .parse::<usize>()
-            .unwrap();
-
-        let contents = fs::read_to_string(&path).unwrap();
-
-        let split = pgt_statement_splitter::split(&contents).expect("Failed to split");
-
-        assert_eq!(
-            split.ranges.len(),
-            expected_count,
-            "Mismatch in statement count for file {}: Expected {} statements, got {}",
-            test_name,
-            expected_count,
-            split.ranges.len()
-        );
-    }
-}
diff --git a/crates/pgt_test_macros/Cargo.toml b/crates/pgt_test_macros/Cargo.toml
deleted file mode 100644
index 7f1f8b1a..00000000
--- a/crates/pgt_test_macros/Cargo.toml
+++ /dev/null
@@ -1,22 +0,0 @@
-
-[package]
-authors.workspace = true
-categories.workspace = true
-description = ""
-edition.workspace = true
-homepage.workspace = true
-keywords.workspace = true
-license.workspace = true
-name = "pgt_test_macros"
-repository.workspace = true
-version = "0.0.0"
-
-[lib]
-proc-macro = true
-
-[dependencies]
-globwalk = { version = "0.9.1" }
-proc-macro-error = { version = "1.0.4" }
-proc-macro2 = {
version = '1.0.93' } -quote = { workspace = true } -syn = { workspace = true } diff --git a/crates/pgt_test_macros/README.md b/crates/pgt_test_macros/README.md deleted file mode 100644 index aa05cd82..00000000 --- a/crates/pgt_test_macros/README.md +++ /dev/null @@ -1,67 +0,0 @@ -# Tests macros - -Macros to help auto-generate tests based on files. - -## Usage - -Pass a glob pattern that'll identify your files and a test-function that'll run for each file. The glob pattern has to start at the root of your crate. - -You can add a `.expected.` file next to your test file. Its path will be passed to your test function so you can make outcome-based assertions. (Alternatively, write snapshot tests.) - -Given the following file structure: - -```txt -crate/ -|-- src/ -|-- tests/ - |-- queries/ - |-- test.sql - |-- test.expected.sql - |-- querytest.rs -``` - -You can generate tests like so: - -```rust - // crate/tests/querytest.rs - - tests_macros::gen_tests!{ - "tests/queries/*.sql", - crate::run_test // use `crate::` if the linter complains. - } - - fn run_test( - test_path: &str, // absolute path on the machine - expected_path: &str, // absolute path of .expected file - test_dir: &str // absolute path of the test file's parent - ) { - // your logic - } -``` - -Given a `crate/tests/queries/some_test_abc.sql` file, this will generate the following: - -```rust -#[test] -pub fn some_test_abc() -{ - let test_file = "/tests/queries/some_test_abc.sql"; - let test_expected_file = "/tests/queries/some_test_abc.expected.sql"; - let parent = "/tests/queries"; - run_test(test_file, test_expected_file, parent); -} -``` - -This will be replicated for each file matched by the glob pattern. - -## Pitfalls - -- If you use a Rust-keyword as a file name, this'll result in invalid syntax for the generated tests. -- You might get linting errors if your test files aren't snake case. -- All files of the glob-pattern must (currently) be `.sql` files. -- The `.expected.sql` file-name will always be passed, even if the file doesn't exist. -- The macro will wrap your tests in a `mod tests { .. }` module. If you need multiple generations, wrap them in modules like so: `mod some_test { tests_macros::gen_tests! { .. } }`. - -## How to run - -Simply run your `cargo test` commands as usual. diff --git a/crates/pgt_test_macros/src/lib.rs b/crates/pgt_test_macros/src/lib.rs deleted file mode 100644 index e8b954cf..00000000 --- a/crates/pgt_test_macros/src/lib.rs +++ /dev/null @@ -1,246 +0,0 @@ -use globwalk::GlobWalkerBuilder; -use proc_macro::TokenStream; -use proc_macro_error::*; -use quote::*; -use std::{ - collections::HashMap, - ffi::OsStr, - path::{Component, Path, PathBuf}, -}; - -#[proc_macro] -#[proc_macro_error] -pub fn gen_tests(input: TokenStream) -> TokenStream { - let args = syn::parse_macro_input!(input as Arguments); - - match args.generate() { - Ok(tokens) => tokens, - Err(e) => abort!(e, "{}", e), - } -} - -/// A Recursive Tree Structure that stores tests per *part* of a Path. -/// -/* foo - * ├── bar - * │ ├── testA.sql - * ├── testB.sql - * - * Results in: - * - * TestModules { - * modules: { - * "foo": TestModules { - * modules: { - * "bar": TestModules { - * modules: {}, - * tests: [stream->testA.sql] - * } - * } - * tests: [stream->testB.sql] - * } - * } - * tests: [] - * } - * - * Note that `tests` does not hold actual files but the TokenStreams for the tests for those files. 
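 * Editor's sketch (added for illustration, not in the original comment): for the
 * tree above, `TestModules::print` would emit nested modules roughly like
 *
 *     mod foo {
 *         mod bar {
 *             // test generated for testA.sql
 *         }
 *         // test generated for testB.sql
 *     }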
-*/
-#[derive(Default)]
-struct TestModules {
-    modules: HashMap<String, TestModules>,
-    tests: Vec<proc_macro2::TokenStream>,
-}
-
-impl TestModules {
-    fn insert<'a>(
-        &mut self,
-        mut path: impl Iterator<Item = &'a str>,
-        test: proc_macro2::TokenStream,
-    ) {
-        match path.next() {
-            Some(part) => {
-                let module = self.modules.entry(part.into()).or_default();
-                module.insert(path, test);
-            }
-            None => {
-                self.tests.push(test);
-            }
-        }
-    }
-
-    fn print(self, output: &mut proc_macro2::TokenStream) {
-        for (name, sub_module) in self.modules {
-            let name = syn::Ident::new(&name, proc_macro2::Span::call_site());
-
-            let mut sub_module_stream = proc_macro2::TokenStream::new();
-            sub_module.print(&mut sub_module_stream);
-
-            // wrap the submodule tests in a `mod`
-            output.extend(quote! {
-                mod #name { #sub_module_stream }
-            });
-        }
-        output.extend(self.tests)
-    }
-}
-
-struct Arguments {
-    pattern: syn::ExprLit,
-    test_function: syn::Path,
-}
-
-impl syn::parse::Parse for Arguments {
-    fn parse(input: syn::parse::ParseStream) -> syn::Result<Self> {
-        let pattern = input.parse()?;
-        let _: syn::Token!(,) = input.parse()?;
-        let test_function = input.parse()?;
-        Ok(Arguments {
-            pattern,
-            test_function,
-        })
-    }
-}
-
-impl Arguments {
-    fn get_filepaths(&self) -> Result<Vec<PathBuf>, &'static str> {
-        let base = std::env::var("CARGO_MANIFEST_DIR")
-            .map_err(|_| "Cannot find CARGO_MANIFEST_DIR. Are you using cargo?")?;
-
-        let pattern = match &self.pattern.lit {
-            syn::Lit::Str(s) => s.value(),
-            _ => return Err("Invalid pattern."),
-        };
-
-        let walker = GlobWalkerBuilder::new(base, pattern)
-            .build()
-            .map_err(|_| "Cannot build walker.")?;
-
-        let mut paths = Vec::new();
-
-        for entry in walker {
-            let entry = entry.map_err(|_| "Error iterating over entry.")?;
-
-            let filename = entry
-                .file_name()
-                .to_str()
-                .ok_or("Cannot convert filename to string.")?;
-
-            if filename.ends_with(".expected.sql") {
-                continue;
-            }
-
-            let meta = entry.metadata().map_err(|_| "Cannot open file.")?;
-
-            if meta.is_file() {
-                paths.push(entry.path().to_path_buf());
-            }
-        }
-
-        Ok(paths)
-    }
-
-    fn generate(self) -> Result<TokenStream, &'static str> {
-        let files = self.get_filepaths()?;
-        let mut modules = TestModules::default();
-
-        for file in files {
-            let Variables {
-                test_name,
-                test_fullpath,
-                test_expected_fullpath,
-                test_dir,
-            } = file.try_into()?;
-
-            let path = Path::new(&test_fullpath)
-                .parent()
-                .expect("Do not put tests in root directory.")
-                .components()
-                .map(Component::as_os_str)
-                .skip_while(|c| {
-                    let bytes = c.as_encoded_bytes();
-                    bytes != b"specs" && bytes != b"tests"
-                })
-                .filter_map(OsStr::to_str);
-
-            let span = self.pattern.lit.span();
-            let test_name = syn::Ident::new(&test_name, span);
-            let func = &self.test_function;
-
-            modules.insert(
-                path,
-                quote! {
-                    #[test]
-                    pub fn #test_name () {
-                        let test_fullpath = #test_fullpath;
-                        let test_expected_fullpath = #test_expected_fullpath;
-                        let test_dir = #test_dir;
-                        #func(test_fullpath, test_expected_fullpath, test_dir);
-                    }
-                },
-            )
-        }
-
-        let mut output = proc_macro2::TokenStream::new();
-        modules.print(&mut output);
-
-        Ok(output.into())
-    }
-}
-
-struct Variables {
-    test_name: String,
-    test_fullpath: String,
-    test_expected_fullpath: String,
-    test_dir: String,
-}
-
-impl TryFrom<PathBuf> for Variables {
-    type Error = &'static str;
-
-    fn try_from(mut path: PathBuf) -> Result<Self, Self::Error> {
-        let test_name: String = path
-            .file_stem()
-            .ok_or("Cannot get file stem.")?
-            .to_str()
-            .ok_or("Cannot convert file stem to string.")?
-            .into();
-
-        let ext: String = path
-            .extension()
-            .ok_or("Cannot get extension.")?
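        // worked example (editor's note, derived from the format! call further
        // below): for "tests/queries/foo.sql" the stem is "foo" and the extension
        // "sql", yielding the sibling expected path "tests/queries/foo.expected.sql"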
-            .to_str()
-            .ok_or("Cannot convert extension to string.")?
-            .into();
-        assert_eq!(ext, "sql", "Expected .sql extension but received: {}", ext);
-
-        let test_dir: String = path
-            .parent()
-            .ok_or("Cannot get parent directory.")?
-            .to_str()
-            .ok_or("Cannot convert parent directory to string.")?
-            .into();
-
-        let test_fullpath: String = path
-            .as_os_str()
-            .to_str()
-            .ok_or("Cannot convert path to string.")?
-            .into();
-
-        path.set_extension(OsStr::new(""));
-
-        let without_ext: String = path
-            .as_os_str()
-            .to_str()
-            .ok_or("Cannot convert path to string.")?
-            .into();
-
-        let test_expected_fullpath = format!("{}.expected.{}", without_ext, ext);
-
-        Ok(Variables {
-            test_name,
-            test_fullpath,
-            test_expected_fullpath,
-            test_dir,
-        })
-    }
-}
diff --git a/crates/pgt_test_utils/Cargo.toml b/crates/pgt_test_utils/Cargo.toml
deleted file mode 100644
index dcf77688..00000000
--- a/crates/pgt_test_utils/Cargo.toml
+++ /dev/null
@@ -1,26 +0,0 @@
-[package]
-authors.workspace = true
-categories.workspace = true
-description = ""
-edition.workspace = true
-homepage.workspace = true
-keywords.workspace = true
-license.workspace = true
-name = "pgt_test_utils"
-repository.workspace = true
-version = "0.0.0"
-
-
-[[bin]]
-name = "tree_print"
-path = "src/bin/tree_print.rs"
-
-[dependencies]
-anyhow = "1.0.81"
-clap = { version = "4.5.23", features = ["derive"] }
-dotenv = "0.15.0"
-uuid = { version = "1.11.0", features = ["v4"] }
-
-sqlx.workspace = true
-tree-sitter.workspace = true
-tree_sitter_sql.workspace = true
diff --git a/crates/pgt_test_utils/src/bin/tree_print.rs b/crates/pgt_test_utils/src/bin/tree_print.rs
deleted file mode 100644
index 469dcc8e..00000000
--- a/crates/pgt_test_utils/src/bin/tree_print.rs
+++ /dev/null
@@ -1,53 +0,0 @@
-use clap::*;
-
-#[derive(Parser)]
-#[command(
-    name = "tree-printer",
-    about = "Prints the TreeSitter tree of the given file."
-)]
-struct Args {
-    #[arg(long = "file", short = 'f')]
-    file: String,
-}
-
-fn main() {
-    let args = Args::parse();
-
-    let query = std::fs::read_to_string(&args.file).expect("Failed to read file.");
-
-    let mut parser = tree_sitter::Parser::new();
-    let lang = tree_sitter_sql::language();
-
-    parser.set_language(lang).expect("Setting Language failed.");
-
-    let tree = parser
-        .parse(query.clone(), None)
-        .expect("Failed to parse query.");
-
-    print_tree(&tree.root_node(), &query, 0);
-}
-
-fn print_tree(node: &tree_sitter::Node, source: &str, level: usize) {
-    let indent = " ".repeat(level);
-
-    let node_text = node
-        .utf8_text(source.as_bytes())
-        .unwrap_or("NO_NAME")
-        .split_whitespace()
-        .collect::<Vec<_>>()
-        .join(" ");
-
-    println!(
-        "{}{} [{}..{}] '{}'",
-        indent,
-        node.kind(),
-        node.start_position().column,
-        node.end_position().column,
-        node_text
-    );
-
-    let mut cursor = node.walk();
-    for child in node.children(&mut cursor) {
-        print_tree(&child, source, level + 1);
-    }
-}
diff --git a/crates/pgt_test_utils/src/lib.rs b/crates/pgt_test_utils/src/lib.rs
deleted file mode 100644
index 4d6d3070..00000000
--- a/crates/pgt_test_utils/src/lib.rs
+++ /dev/null
@@ -1 +0,0 @@
-pub mod test_database;
diff --git a/crates/pgt_test_utils/src/test_database.rs b/crates/pgt_test_utils/src/test_database.rs
deleted file mode 100644
index 67415c4a..00000000
--- a/crates/pgt_test_utils/src/test_database.rs
+++ /dev/null
@@ -1,42 +0,0 @@
-use sqlx::{Executor, PgPool, postgres::PgConnectOptions};
-use uuid::Uuid;
-
-// TODO: Work with proper config objects instead of a connection_string.
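// Editor's sketch of intended usage (assumes a tokio test runtime and a local
// DATABASE_URL in .env, per the checks below; not part of the original file):
//
//     #[tokio::test]
//     async fn my_test() {
//         let pool = get_new_test_db().await; // fresh, uniquely named database
//         sqlx::query("select 1").execute(&pool).await.unwrap();
//     }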
-// With the current implementation, we can't parse the password from the connection string. -pub async fn get_new_test_db() -> PgPool { - dotenv::dotenv().expect("Unable to load .env file for tests"); - - let connection_string = std::env::var("DATABASE_URL").expect("DATABASE_URL not set"); - let password = std::env::var("DB_PASSWORD").unwrap_or("postgres".into()); - - let options_from_conn_str: PgConnectOptions = connection_string - .parse() - .expect("Invalid Connection String"); - - let host = options_from_conn_str.get_host(); - assert!( - host == "localhost" || host == "127.0.0.1", - "Running tests against non-local database!" - ); - - let options_without_db_name = PgConnectOptions::new() - .host(host) - .port(options_from_conn_str.get_port()) - .username(options_from_conn_str.get_username()) - .password(&password); - - let postgres = sqlx::PgPool::connect_with(options_without_db_name.clone()) - .await - .expect("Unable to connect to test postgres instance"); - - let database_name = Uuid::new_v4().to_string(); - - postgres - .execute(format!(r#"create database "{}";"#, database_name).as_str()) - .await - .expect("Failed to create test database."); - - sqlx::PgPool::connect_with(options_without_db_name.database(&database_name)) - .await - .expect("Could not connect to test database") -} diff --git a/crates/pgt_text_edit/Cargo.toml b/crates/pgt_text_edit/Cargo.toml deleted file mode 100644 index e0dc83a6..00000000 --- a/crates/pgt_text_edit/Cargo.toml +++ /dev/null @@ -1,27 +0,0 @@ -[package] -authors.workspace = true -categories.workspace = true -description = "" -edition.workspace = true -homepage.workspace = true -keywords.workspace = true -license.workspace = true -name = "pgt_text_edit" -repository.workspace = true -version = "0.0.0" - - -[dependencies] -pgt_text_size = { workspace = true } -schemars = { workspace = true, optional = true } -serde = { workspace = true, features = ["derive"], optional = true } -similar = { workspace = true, features = ["unicode"] } - -[features] -schema = ["dep:schemars", "pgt_text_size/schema"] -serde = ["dep:serde", "pgt_text_size/serde"] - -[dev-dependencies] - -[lib] -doctest = false diff --git a/crates/pgt_text_edit/src/lib.rs b/crates/pgt_text_edit/src/lib.rs deleted file mode 100644 index ba7f3c84..00000000 --- a/crates/pgt_text_edit/src/lib.rs +++ /dev/null @@ -1,386 +0,0 @@ -//! Representation of a `TextEdit`. -//! -//! 
This is taken from [biome's text_edit crate](https://github.com/biomejs/biome)
-
-#![warn(
-    rust_2018_idioms,
-    unused_lifetimes,
-    semicolon_in_expressions_from_macros
-)]
-
-use std::{cmp::Ordering, num::NonZeroU32};
-
-use pgt_text_size::{TextRange, TextSize};
-pub use similar::ChangeTag;
-use similar::{TextDiff, utils::TextDiffRemapper};
-
-#[derive(Default, Debug, Clone, PartialEq, Eq, Hash)]
-#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
-#[cfg_attr(
-    feature = "serde",
-    derive(serde::Serialize, serde::Deserialize),
-    serde(rename_all = "camelCase")
-)]
-pub struct TextEdit {
-    dictionary: String,
-    ops: Vec<CompressedOp>,
-}
-
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
-#[cfg_attr(
-    feature = "serde",
-    derive(serde::Serialize, serde::Deserialize),
-    serde(rename_all = "camelCase")
-)]
-pub enum CompressedOp {
-    DiffOp(DiffOp),
-    EqualLines { line_count: NonZeroU32 },
-}
-
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
-#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
-#[cfg_attr(
-    feature = "serde",
-    derive(serde::Serialize, serde::Deserialize),
-    serde(rename_all = "camelCase")
-)]
-pub enum DiffOp {
-    Equal { range: TextRange },
-    Insert { range: TextRange },
-    Delete { range: TextRange },
-}
-
-impl DiffOp {
-    pub fn tag(self) -> ChangeTag {
-        match self {
-            DiffOp::Equal { .. } => ChangeTag::Equal,
-            DiffOp::Insert { .. } => ChangeTag::Insert,
-            DiffOp::Delete { .. } => ChangeTag::Delete,
-        }
-    }
-
-    pub fn text(self, diff: &TextEdit) -> &str {
-        let range = match self {
-            DiffOp::Equal { range } => range,
-            DiffOp::Insert { range } => range,
-            DiffOp::Delete { range } => range,
-        };
-
-        diff.get_text(range)
-    }
-}
-
-#[derive(Debug, Default, Clone)]
-pub struct TextEditBuilder {
-    index: Vec<TextRange>,
-    edit: TextEdit,
-}
-
-impl TextEdit {
-    /// Convenience method for creating a new [TextEditBuilder]
-    pub fn builder() -> TextEditBuilder {
-        TextEditBuilder::default()
-    }
-
-    /// Create a diff of `old` to `new`, tokenized by Unicode words
-    pub fn from_unicode_words(old: &str, new: &str) -> Self {
-        let mut builder = Self::builder();
-        builder.with_unicode_words_diff(old, new);
-        builder.finish()
-    }
-
-    /// Returns the number of [DiffOp] in this [TextEdit]
-    pub fn len(&self) -> usize {
-        self.ops.len()
-    }
-
-    /// Returns `true` if this [TextEdit] doesn't contain any [DiffOp]
-    pub fn is_empty(&self) -> bool {
-        self.ops.is_empty()
-    }
-
-    /// Returns an [Iterator] over the [DiffOp] of this [TextEdit]
-    pub fn iter(&self) -> std::slice::Iter<'_, CompressedOp> {
-        self.into_iter()
-    }
-
-    /// Returns the text value of a range interned in this [TextEdit]'s dictionary
-    pub fn get_text(&self, range: TextRange) -> &str {
-        &self.dictionary[range]
-    }
-
-    /// Return the content of the "new" revision of the text represented in
-    /// this [TextEdit].
-    /// This method needs to be provided with the "old"
-    /// revision of the string since [TextEdit] doesn't store the content of
-    /// text sections that are equal between revisions
-    pub fn new_string(&self, old_string: &str) -> String {
-        let mut output = String::new();
-        let mut input_position = TextSize::from(0);
-
-        for op in &self.ops {
-            match op {
-                CompressedOp::DiffOp(DiffOp::Equal { range }) => {
-                    output.push_str(&self.dictionary[*range]);
-                    input_position += range.len();
-                }
-                CompressedOp::DiffOp(DiffOp::Insert { range }) => {
-                    output.push_str(&self.dictionary[*range]);
-                }
-                CompressedOp::DiffOp(DiffOp::Delete { range }) => {
-                    input_position += range.len();
-                }
-                CompressedOp::EqualLines { line_count } => {
-                    let start = u32::from(input_position) as usize;
-                    let input = &old_string[start..];
-
-                    let line_break_count = line_count.get() as usize + 1;
-                    for line in input.split_inclusive('\n').take(line_break_count) {
-                        output.push_str(line);
-                        input_position += TextSize::of(line);
-                    }
-                }
-            }
-        }
-
-        output
-    }
-}
-
-impl IntoIterator for TextEdit {
-    type Item = CompressedOp;
-    type IntoIter = std::vec::IntoIter<CompressedOp>;
-
-    fn into_iter(self) -> Self::IntoIter {
-        self.ops.into_iter()
-    }
-}
-
-impl<'a> IntoIterator for &'a TextEdit {
-    type Item = &'a CompressedOp;
-    type IntoIter = std::slice::Iter<'a, CompressedOp>;
-
-    fn into_iter(self) -> Self::IntoIter {
-        self.ops.iter()
-    }
-}
-
-impl TextEditBuilder {
-    pub fn is_empty(&self) -> bool {
-        self.edit.ops.is_empty()
-    }
-
-    /// Add a piece of string to the dictionary, returning the corresponding
-    /// range in the dictionary string
-    fn intern(&mut self, value: &str) -> TextRange {
-        let value_bytes = value.as_bytes();
-        let value_len = TextSize::of(value);
-
-        let index = self.index.binary_search_by(|range| {
-            let entry = self.edit.dictionary[*range].as_bytes();
-
-            for (lhs, rhs) in entry.iter().zip(value_bytes) {
-                match lhs.cmp(rhs) {
-                    Ordering::Equal => continue,
-                    ordering => return ordering,
-                }
-            }
-
-            match entry.len().cmp(&value_bytes.len()) {
-                // If all bytes in the shared sub-slice match, the dictionary
-                // entry is allowed to be longer than the text being inserted
-                Ordering::Greater => Ordering::Equal,
-                ordering => ordering,
-            }
-        });
-
-        match index {
-            Ok(index) => {
-                let range = self.index[index];
-                let len = value_len.min(range.len());
-                TextRange::at(range.start(), len)
-            }
-            Err(index) => {
-                let start = TextSize::of(&self.edit.dictionary);
-                self.edit.dictionary.push_str(value);
-
-                let range = TextRange::at(start, value_len);
-                self.index.insert(index, range);
-                range
-            }
-        }
-    }
-
-    pub fn equal(&mut self, text: &str) {
-        match compress_equal_op(text) {
-            Some((start, mid, end)) => {
-                let start = self.intern(start);
-                self.edit
-                    .ops
-                    .push(CompressedOp::DiffOp(DiffOp::Equal { range: start }));
-
-                self.edit
-                    .ops
-                    .push(CompressedOp::EqualLines { line_count: mid });
-
-                let end = self.intern(end);
-                self.edit
-                    .ops
-                    .push(CompressedOp::DiffOp(DiffOp::Equal { range: end }));
-            }
-            None => {
-                let range = self.intern(text);
-                self.edit
-                    .ops
-                    .push(CompressedOp::DiffOp(DiffOp::Equal { range }));
-            }
-        }
-    }
-
-    pub fn insert(&mut self, text: &str) {
-        let range = self.intern(text);
-        self.edit
-            .ops
-            .push(CompressedOp::DiffOp(DiffOp::Insert { range }));
-    }
-
-    pub fn delete(&mut self, text: &str) {
-        let range = self.intern(text);
-        self.edit
-            .ops
-            .push(CompressedOp::DiffOp(DiffOp::Delete { range }));
-    }
-
-    pub fn replace(&mut self, old: &str, new: &str) {
-        self.delete(old);
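        // editor's note: a replace is recorded as a delete of `old` directly
        // followed by an insert of `new`; both slices are interned in the shared
        // dictionary, and `new_string` later replays the ops in order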
-        self.insert(new);
-    }
-
-    pub fn finish(self) -> TextEdit {
-        self.edit
-    }
-
-    /// A higher level utility function for the text edit builder to generate
-    /// multiple text edit steps (equal, delete and insert) to represent the
-    /// diff from the old string to the new string.
-    pub fn with_unicode_words_diff(&mut self, old: &str, new: &str) {
-        let diff = TextDiff::configure()
-            .newline_terminated(true)
-            .diff_unicode_words(old, new);
-
-        let remapper = TextDiffRemapper::from_text_diff(&diff, old, new);
-
-        for (tag, text) in diff.ops().iter().flat_map(|op| remapper.iter_slices(op)) {
-            match tag {
-                ChangeTag::Equal => {
-                    self.equal(text);
-                }
-                ChangeTag::Delete => {
-                    self.delete(text);
-                }
-                ChangeTag::Insert => {
-                    self.insert(text);
-                }
-            }
-        }
-    }
-}
-
-/// Number of lines to keep as [DiffOp::Equal] operations around a
-/// [CompressedOp::EqualLines] operation. This has the effect of
-/// making the compressed diff retain a few lines of equal content around
-/// changes, which is useful for display as it makes it possible to print a few
-/// context lines around changes without having to keep the full original text
-/// around.
-const COMPRESSED_DIFFS_CONTEXT_LINES: usize = 2;
-
-fn compress_equal_op(text: &str) -> Option<(&str, NonZeroU32, &str)> {
-    let mut iter = text.split('\n');
-
-    let mut leading_len = COMPRESSED_DIFFS_CONTEXT_LINES;
-    for _ in 0..=COMPRESSED_DIFFS_CONTEXT_LINES {
-        leading_len += iter.next()?.len();
-    }
-
-    let mut trailing_len = COMPRESSED_DIFFS_CONTEXT_LINES;
-    for _ in 0..=COMPRESSED_DIFFS_CONTEXT_LINES {
-        trailing_len += iter.next_back()?.len();
-    }
-
-    let mid_count = iter.count();
-    let mid_count = u32::try_from(mid_count).ok()?;
-    let mid_count = NonZeroU32::new(mid_count)?;
-
-    let trailing_start = text.len().saturating_sub(trailing_len);
-
-    Some((&text[..leading_len], mid_count, &text[trailing_start..]))
-}
-
-#[cfg(test)]
-mod tests {
-    use std::num::NonZeroU32;
-
-    use crate::{TextEdit, compress_equal_op};
-
-    #[test]
-    fn compress_short() {
-        let output = compress_equal_op(
-            "
-start 1
-start 2
-end 1
-end 2
-",
-        );
-
-        assert_eq!(output, None);
-    }
-
-    #[test]
-    fn compress_long() {
-        let output = compress_equal_op(
-            "
-start 1
-start 2
-mid 1
-mid 2
-mid 3
-end 1
-end 2
-",
-        );
-
-        assert_eq!(
-            output,
-            Some((
-                "\nstart 1\nstart 2",
-                NonZeroU32::new(3).unwrap(),
-                "end 1\nend 2\n"
-            ))
-        );
-    }
-
-    #[test]
-    fn new_string_compressed() {
-        const OLD: &str = "line 1 old
-line 2
-line 3
-line 4
-line 5
-line 6
-line 7 old";
-
-        const NEW: &str = "line 1 new
-line 2
-line 3
-line 4
-line 5
-line 6
-line 7 new";
-
-        let diff = TextEdit::from_unicode_words(OLD, NEW);
-        let new_string = diff.new_string(OLD);
-
-        assert_eq!(new_string, NEW);
-    }
-}
diff --git a/crates/pgt_text_size/Cargo.toml b/crates/pgt_text_size/Cargo.toml
deleted file mode 100644
index 8e897832..00000000
--- a/crates/pgt_text_size/Cargo.toml
+++ /dev/null
@@ -1,28 +0,0 @@
-[package]
-authors.workspace = true
-categories.workspace = true
-description = ""
-edition.workspace = true
-homepage.workspace = true
-keywords.workspace = true
-license.workspace = true
-name = "pgt_text_size"
-repository.workspace = true
-version = "0.0.0"
-
-[dependencies]
-schemars = { workspace = true, optional = true }
-serde = { workspace = true, optional = true }
-
-[features]
-schema = ["dep:schemars"]
-serde = ["dep:serde"]
-
-[dev-dependencies]
-serde_test = "1.0"
-static_assertions = "1.1"
-
-[[test]]
-name = "serde"
-path = "tests/serde.rs"
-required-features = ["serde"]
diff
--git a/crates/pgt_text_size/src/lib.rs b/crates/pgt_text_size/src/lib.rs deleted file mode 100644 index 133f6192..00000000 --- a/crates/pgt_text_size/src/lib.rs +++ /dev/null @@ -1,35 +0,0 @@ -//! Newtypes for working with text sizes/ranges in a more type-safe manner. -//! -//! This library can help with two things: -//! * Reducing storage requirements for offsets and ranges, under the -//! assumption that 32 bits is enough. -//! * Providing standard vocabulary types for applications where text ranges -//! are pervasive. -//! -//! However, you should not use this library simply because you work with -//! strings. In the overwhelming majority of cases, using `usize` and -//! `std::ops::Range` is better. In particular, if you are publishing a -//! library, using only std types in the interface would make it more -//! interoperable. Similarly, if you are writing something like a lexer, which -//! produces, but does not *store* text ranges, then sticking to `usize` would -//! be better. -//! -//! Minimal Supported Rust Version: latest stable. - -#![forbid(unsafe_code)] -#![warn(missing_debug_implementations, missing_docs)] - -mod range; -mod size; -mod traits; - -#[cfg(feature = "serde")] -mod serde_impls; - -#[cfg(feature = "schema")] -mod schemars_impls; - -pub use crate::{range::TextRange, size::TextSize, traits::TextLen}; - -#[cfg(target_pointer_width = "16")] -compile_error!("text-size assumes usize >= u32 and does not work on 16-bit targets"); diff --git a/crates/pgt_text_size/src/range.rs b/crates/pgt_text_size/src/range.rs deleted file mode 100644 index baab91e9..00000000 --- a/crates/pgt_text_size/src/range.rs +++ /dev/null @@ -1,507 +0,0 @@ -use cmp::Ordering; - -use { - crate::TextSize, - std::{ - cmp, fmt, - ops::{Add, AddAssign, Bound, Index, IndexMut, Range, RangeBounds, Sub, SubAssign}, - }, -}; - -/// A range in text, represented as a pair of [`TextSize`][struct@TextSize]. -/// -/// It is a logic error for `start` to be greater than `end`. -#[derive(Default, Copy, Clone, Eq, PartialEq, Hash)] -pub struct TextRange { - // Invariant: start <= end - start: TextSize, - end: TextSize, -} - -impl fmt::Debug for TextRange { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{}..{}", self.start().raw, self.end().raw) - } -} - -impl TextRange { - /// Creates a new `TextRange` with the given `start` and `end` (`start..end`). - /// - /// # Panics - /// - /// Panics if `end < start`. - /// - /// # Examples - /// - /// ```rust - /// # use pgt_text_size::*; - /// let start = TextSize::from(5); - /// let end = TextSize::from(10); - /// let range = TextRange::new(start, end); - /// - /// assert_eq!(range.start(), start); - /// assert_eq!(range.end(), end); - /// assert_eq!(range.len(), end - start); - /// ``` - #[inline] - pub const fn new(start: TextSize, end: TextSize) -> TextRange { - assert!(start.raw <= end.raw); - TextRange { start, end } - } - - /// Create a new `TextRange` with the given `offset` and `len` (`offset..offset + len`). 
- /// - /// # Examples - /// - /// ```rust - /// # use pgt_text_size::*; - /// let text = "0123456789"; - /// - /// let offset = TextSize::from(2); - /// let length = TextSize::from(5); - /// let range = TextRange::at(offset, length); - /// - /// assert_eq!(range, TextRange::new(offset, offset + length)); - /// assert_eq!(&text[range], "23456") - /// ``` - #[inline] - pub const fn at(offset: TextSize, len: TextSize) -> TextRange { - TextRange::new(offset, TextSize::new(offset.raw + len.raw)) - } - - /// Create a zero-length range at the specified offset (`offset..offset`). - /// - /// # Examples - /// - /// ```rust - /// # use pgt_text_size::*; - /// let point: TextSize; - /// # point = TextSize::from(3); - /// let range = TextRange::empty(point); - /// assert!(range.is_empty()); - /// assert_eq!(range, TextRange::new(point, point)); - /// ``` - #[inline] - pub const fn empty(offset: TextSize) -> TextRange { - TextRange { - start: offset, - end: offset, - } - } - - /// Create a range up to the given end (`..end`). - /// - /// # Examples - /// - /// ```rust - /// # use pgt_text_size::*; - /// let point: TextSize; - /// # point = TextSize::from(12); - /// let range = TextRange::up_to(point); - /// - /// assert_eq!(range.len(), point); - /// assert_eq!(range, TextRange::new(0.into(), point)); - /// assert_eq!(range, TextRange::at(0.into(), point)); - /// ``` - #[inline] - pub const fn up_to(end: TextSize) -> TextRange { - TextRange { - start: TextSize::new(0), - end, - } - } -} - -/// Identity methods. -impl TextRange { - /// The start point of this range. - #[inline] - pub const fn start(self) -> TextSize { - self.start - } - - /// The end point of this range. - #[inline] - pub const fn end(self) -> TextSize { - self.end - } - - /// The size of this range. - #[inline] - pub const fn len(self) -> TextSize { - // HACK for const fn: math on primitives only - TextSize { - raw: self.end().raw - self.start().raw, - } - } - - /// Check if this range is empty. - #[inline] - pub const fn is_empty(self) -> bool { - // HACK for const fn: math on primitives only - self.start().raw == self.end().raw - } -} - -/// Manipulation methods. -impl TextRange { - /// Check if this range contains an offset. - /// - /// The end index is considered excluded. - /// - /// # Examples - /// - /// ```rust - /// # use pgt_text_size::*; - /// let (start, end): (TextSize, TextSize); - /// # start = 10.into(); end = 20.into(); - /// let range = TextRange::new(start, end); - /// assert!(range.contains(start)); - /// assert!(!range.contains(end)); - /// ``` - #[inline] - pub fn contains(self, offset: TextSize) -> bool { - self.start() <= offset && offset < self.end() - } - - /// Check if this range contains an offset. - /// - /// The end index is considered included. - /// - /// # Examples - /// - /// ```rust - /// # use pgt_text_size::*; - /// let (start, end): (TextSize, TextSize); - /// # start = 10.into(); end = 20.into(); - /// let range = TextRange::new(start, end); - /// assert!(range.contains_inclusive(start)); - /// assert!(range.contains_inclusive(end)); - /// ``` - #[inline] - pub fn contains_inclusive(self, offset: TextSize) -> bool { - self.start() <= offset && offset <= self.end() - } - - /// Check if this range completely contains another range. 
-    ///
-    /// # Examples
-    ///
-    /// ```rust
-    /// # use pgt_text_size::*;
-    /// let larger = TextRange::new(0.into(), 20.into());
-    /// let smaller = TextRange::new(5.into(), 15.into());
-    /// assert!(larger.contains_range(smaller));
-    /// assert!(!smaller.contains_range(larger));
-    ///
-    /// // a range always contains itself
-    /// assert!(larger.contains_range(larger));
-    /// assert!(smaller.contains_range(smaller));
-    /// ```
-    #[inline]
-    pub fn contains_range(self, other: TextRange) -> bool {
-        self.start() <= other.start() && other.end() <= self.end()
-    }
-
-    /// The range covered by both ranges, if it exists.
-    /// If the ranges touch but do not overlap, the output range is empty.
-    ///
-    /// # Examples
-    ///
-    /// ```rust
-    /// # use pgt_text_size::*;
-    /// assert_eq!(
-    ///     TextRange::intersect(
-    ///         TextRange::new(0.into(), 10.into()),
-    ///         TextRange::new(5.into(), 15.into()),
-    ///     ),
-    ///     Some(TextRange::new(5.into(), 10.into())),
-    /// );
-    /// ```
-    #[inline]
-    pub fn intersect(self, other: TextRange) -> Option<TextRange> {
-        let start = cmp::max(self.start(), other.start());
-        let end = cmp::min(self.end(), other.end());
-        if end < start {
-            return None;
-        }
-        Some(TextRange::new(start, end))
-    }
-
-    /// Extends the range to cover `other` as well.
-    ///
-    /// # Examples
-    ///
-    /// ```rust
-    /// # use pgt_text_size::*;
-    /// assert_eq!(
-    ///     TextRange::cover(
-    ///         TextRange::new(0.into(), 5.into()),
-    ///         TextRange::new(15.into(), 20.into()),
-    ///     ),
-    ///     TextRange::new(0.into(), 20.into()),
-    /// );
-    /// ```
-    #[inline]
-    pub fn cover(self, other: TextRange) -> TextRange {
-        let start = cmp::min(self.start(), other.start());
-        let end = cmp::max(self.end(), other.end());
-        TextRange::new(start, end)
-    }
-
-    /// Extends the range to cover `other` offsets as well.
-    ///
-    /// # Examples
-    ///
-    /// ```rust
-    /// # use pgt_text_size::*;
-    /// assert_eq!(
-    ///     TextRange::empty(0.into()).cover_offset(20.into()),
-    ///     TextRange::new(0.into(), 20.into()),
-    /// )
-    /// ```
-    #[inline]
-    pub fn cover_offset(self, offset: TextSize) -> TextRange {
-        self.cover(TextRange::empty(offset))
-    }
-
-    /// Add an offset to this range.
-    ///
-    /// Note that this is not appropriate for changing where a `TextRange` is
-    /// within some string; rather, it is for changing the reference anchor
-    /// that the `TextRange` is measured against.
-    ///
-    /// The unchecked version (`Add::add`) will _always_ panic on overflow,
-    /// in contrast to primitive integers, which check in debug mode only.
-    #[inline]
-    pub fn checked_add(self, offset: TextSize) -> Option<TextRange> {
-        Some(TextRange {
-            start: self.start.checked_add(offset)?,
-            end: self.end.checked_add(offset)?,
-        })
-    }
-
-    /// Expand the range's end by the given offset.
-    ///
-    /// # Examples
-    ///
-    /// ```rust
-    /// # use pgt_text_size::*;
-    /// assert_eq!(
-    ///     TextRange::new(2.into(), 4.into()).checked_expand_end(16.into()).unwrap(),
-    ///     TextRange::new(2.into(), 20.into()),
-    /// );
-    /// ```
-    #[inline]
-    pub fn checked_expand_end(self, offset: TextSize) -> Option<TextRange> {
-        Some(TextRange {
-            start: self.start,
-            end: self.end.checked_add(offset)?,
-        })
-    }
-
-    /// Expand the range's start by the given offset.
-    /// The start will never exceed the range's end.
-    ///
-    /// # Examples
-    ///
-    /// ```rust
-    /// # use pgt_text_size::*;
-    /// assert_eq!(
-    ///     TextRange::new(2.into(), 12.into()).checked_expand_start(4.into()).unwrap(),
-    ///     TextRange::new(6.into(), 12.into()),
-    /// );
-    ///
-    /// assert_eq!(
-    ///     TextRange::new(2.into(), 12.into()).checked_expand_start(12.into()).unwrap(),
-    ///     TextRange::new(12.into(), 12.into()),
-    /// );
-    /// ```
-    #[inline]
-    pub fn checked_expand_start(self, offset: TextSize) -> Option<TextRange> {
-        let new_start = self.start.checked_add(offset)?;
-        let end = self.end;
-
-        if new_start > end {
-            Some(TextRange { start: end, end })
-        } else {
-            Some(TextRange {
-                start: new_start,
-                end,
-            })
-        }
-    }
-
-    /// Subtract an offset from this range.
-    ///
-    /// Note that this is not appropriate for changing where a `TextRange` is
-    /// within some string; rather, it is for changing the reference anchor
-    /// that the `TextRange` is measured against.
-    ///
-    /// The unchecked version (`Sub::sub`) will _always_ panic on overflow,
-    /// in contrast to primitive integers, which check in debug mode only.
-    #[inline]
-    pub fn checked_sub(self, offset: TextSize) -> Option<TextRange> {
-        Some(TextRange {
-            start: self.start.checked_sub(offset)?,
-            end: self.end.checked_sub(offset)?,
-        })
-    }
-
-    /// Relative order of the two ranges (overlapping ranges are considered
-    /// equal).
-    ///
-    /// This is useful when, for example, binary searching an array of disjoint
-    /// ranges.
-    ///
-    /// # Examples
-    ///
-    /// ```
-    /// # use pgt_text_size::*;
-    /// # use std::cmp::Ordering;
-    ///
-    /// let a = TextRange::new(0.into(), 3.into());
-    /// let b = TextRange::new(4.into(), 5.into());
-    /// assert_eq!(a.ordering(b), Ordering::Less);
-    ///
-    /// let a = TextRange::new(0.into(), 3.into());
-    /// let b = TextRange::new(3.into(), 5.into());
-    /// assert_eq!(a.ordering(b), Ordering::Less);
-    ///
-    /// let a = TextRange::new(0.into(), 3.into());
-    /// let b = TextRange::new(2.into(), 5.into());
-    /// assert_eq!(a.ordering(b), Ordering::Equal);
-    ///
-    /// let a = TextRange::new(0.into(), 3.into());
-    /// let b = TextRange::new(2.into(), 2.into());
-    /// assert_eq!(a.ordering(b), Ordering::Equal);
-    ///
-    /// let a = TextRange::new(2.into(), 3.into());
-    /// let b = TextRange::new(2.into(), 2.into());
-    /// assert_eq!(a.ordering(b), Ordering::Greater);
-    /// ```
-    #[inline]
-    pub fn ordering(self, other: TextRange) -> Ordering {
-        if self.end() <= other.start() {
-            Ordering::Less
-        } else if other.end() <= self.start() {
-            Ordering::Greater
-        } else {
-            Ordering::Equal
-        }
-    }
-}
-
-impl Index<TextRange> for str {
-    type Output = str;
-    #[inline]
-    fn index(&self, index: TextRange) -> &str {
-        &self[Range::<usize>::from(index)]
-    }
-}
-
-impl Index<TextRange> for String {
-    type Output = str;
-    #[inline]
-    fn index(&self, index: TextRange) -> &str {
-        &self[Range::<usize>::from(index)]
-    }
-}
-
-impl IndexMut<TextRange> for str {
-    #[inline]
-    fn index_mut(&mut self, index: TextRange) -> &mut str {
-        &mut self[Range::<usize>::from(index)]
-    }
-}
-
-impl IndexMut<TextRange> for String {
-    #[inline]
-    fn index_mut(&mut self, index: TextRange) -> &mut str {
-        &mut self[Range::<usize>::from(index)]
-    }
-}
-
-impl RangeBounds<TextSize> for TextRange {
-    fn start_bound(&self) -> Bound<&TextSize> {
-        Bound::Included(&self.start)
-    }
-
-    fn end_bound(&self) -> Bound<&TextSize> {
-        Bound::Excluded(&self.end)
-    }
-}
-
-impl<T> From<TextRange> for Range<T>
-where
-    T: From<TextSize>,
-{
-    #[inline]
-    fn from(r: TextRange) -> Self {
-        r.start().into()..r.end().into()
-    }
-}
-
-macro_rules! ops {
-    (impl $Op:ident for TextRange by fn $f:ident = $op:tt) => {
-        impl $Op<&TextSize> for TextRange {
-            type Output = TextRange;
-            #[inline]
-            fn $f(self, other: &TextSize) -> TextRange {
-                self $op *other
-            }
-        }
-        impl<T> $Op<T> for &TextRange
-        where
-            TextRange: $Op<T, Output = TextRange>,
-        {
-            type Output = TextRange;
-            #[inline]
-            fn $f(self, other: T) -> TextRange {
-                *self $op other
-            }
-        }
-    };
-}
-
-impl Add<TextSize> for TextRange {
-    type Output = TextRange;
-    #[inline]
-    fn add(self, offset: TextSize) -> TextRange {
-        self.checked_add(offset)
-            .expect("TextRange +offset overflowed")
-    }
-}
-
-impl Sub<TextSize> for TextRange {
-    type Output = TextRange;
-    #[inline]
-    fn sub(self, offset: TextSize) -> TextRange {
-        self.checked_sub(offset)
-            .expect("TextRange -offset overflowed")
-    }
-}
-
-ops!(impl Add for TextRange by fn add = +);
-ops!(impl Sub for TextRange by fn sub = -);
-
-impl<A> AddAssign<A> for TextRange
-where
-    TextRange: Add<A, Output = TextRange>,
-{
-    #[inline]
-    fn add_assign(&mut self, rhs: A) {
-        *self = *self + rhs
-    }
-}
-
-impl<S> SubAssign<S> for TextRange
-where
-    TextRange: Sub<S, Output = TextRange>,
-{
-    #[inline]
-    fn sub_assign(&mut self, rhs: S) {
-        *self = *self - rhs
-    }
-}
diff --git a/crates/pgt_text_size/src/schemars_impls.rs b/crates/pgt_text_size/src/schemars_impls.rs
deleted file mode 100644
index 8c216ccd..00000000
--- a/crates/pgt_text_size/src/schemars_impls.rs
+++ /dev/null
@@ -1,32 +0,0 @@
-//! This module implements the [JsonSchema] trait from the [schemars] crate for
-//! [TextSize] and [TextRange] if the `schemars` feature is enabled. This trait
-//! exposes meta-information on how a given type is serialized and deserialized
-//! using `serde`, and is currently used to generate TypeScript types for the node.js and wasm
-//! bindings to the Workspace API
-
-use crate::{TextRange, TextSize};
-use schemars::{JsonSchema, r#gen::SchemaGenerator, schema::Schema};
-
-impl JsonSchema for TextSize {
-    fn schema_name() -> String {
-        String::from("TextSize")
-    }
-
-    fn json_schema(generator: &mut SchemaGenerator) -> Schema {
-        // TextSize is represented as a raw u32, see serde_impls.rs for the
-        // actual implementation
-        <u32>::json_schema(generator)
-    }
-}
-
-impl JsonSchema for TextRange {
-    fn schema_name() -> String {
-        String::from("TextRange")
-    }
-
-    fn json_schema(generator: &mut SchemaGenerator) -> Schema {
-        // TextSize is represented as (TextSize, TextSize), see serde_impls.rs
-        // for the actual implementation
-        <(TextSize, TextSize)>::json_schema(generator)
-    }
-}
diff --git a/crates/pgt_text_size/src/serde_impls.rs b/crates/pgt_text_size/src/serde_impls.rs
deleted file mode 100644
index a422c75f..00000000
--- a/crates/pgt_text_size/src/serde_impls.rs
+++ /dev/null
@@ -1,48 +0,0 @@
-use {
-    crate::{TextRange, TextSize},
-    serde::{Deserialize, Deserializer, Serialize, Serializer, de},
-};
-
-impl Serialize for TextSize {
-    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
-    where
-        S: Serializer,
-    {
-        self.raw.serialize(serializer)
-    }
-}
-
-impl<'de> Deserialize<'de> for TextSize {
-    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
-    where
-        D: Deserializer<'de>,
-    {
-        u32::deserialize(deserializer).map(TextSize::from)
-    }
-}
-
-impl Serialize for TextRange {
-    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
-    where
-        S: Serializer,
-    {
-        (self.start(), self.end()).serialize(serializer)
-    }
-}
-
-impl<'de> Deserialize<'de> for TextRange {
-    #[allow(clippy::nonminimal_bool)]
-    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
-    where
-        D: Deserializer<'de>,
-    {
-        let (start, end) = Deserialize::deserialize(deserializer)?;
-        if !(start <= end) {
-            return Err(de::Error::custom(format!(
-                "invalid range: {:?}..{:?}",
-                start, end
-            )));
-        }
-        Ok(TextRange::new(start, end))
-    }
-}
diff --git a/crates/pgt_text_size/src/size.rs b/crates/pgt_text_size/src/size.rs
deleted file mode 100644
index 1082485e..00000000
--- a/crates/pgt_text_size/src/size.rs
+++ /dev/null
@@ -1,179 +0,0 @@
-use {
-    crate::TextLen,
-    std::{
-        convert::TryFrom,
-        fmt::{self, Display},
-        iter,
-        num::TryFromIntError,
-        ops::{Add, AddAssign, Sub, SubAssign},
-    },
-};
-
-/// A measure of text length. Also, equivalently, an index into text.
-///
-/// This is a UTF-8 bytes offset stored as `u32`, but
-/// most clients should treat it as an opaque measure.
-///
-/// For cases that need to escape `TextSize` and return to working directly
-/// with primitive integers, `TextSize` can be converted losslessly to/from
-/// `u32` via [`From`] conversions as well as losslessly be converted [`Into`]
-/// `usize`. The `usize -> TextSize` direction can be done via [`TryFrom`].
-///
-/// These escape hatches are primarily required for unit testing and when
-/// converting from UTF-8 size to another coordinate space, such as UTF-16.
-#[derive(Clone, Copy, Default, PartialEq, Eq, PartialOrd, Ord, Hash)]
-pub struct TextSize {
-    pub(crate) raw: u32,
-}
-
-impl fmt::Debug for TextSize {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(f, "{}", self.raw)
-    }
-}
-
-impl Display for TextSize {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(f, "{}", self.raw)
-    }
-}
-
-impl TextSize {
-    /// Creates a new instance of `TextSize` from a raw `u32`.
-    #[inline]
-    pub const fn new(raw: u32) -> TextSize {
-        TextSize { raw }
-    }
-
-    /// The text size of some primitive text-like object.
-    ///
-    /// Accepts `char`, `&str`, and `&String`.
-    ///
-    /// # Examples
-    ///
-    /// ```rust
-    /// # use pgt_text_size::*;
-    /// let char_size = TextSize::of('🦀');
-    /// assert_eq!(char_size, TextSize::from(4));
-    ///
-    /// let str_size = TextSize::of("rust-analyzer");
-    /// assert_eq!(str_size, TextSize::from(13));
-    /// ```
-    #[inline]
-    pub fn of<T: TextLen>(text: T) -> TextSize {
-        text.text_len()
-    }
-}
-
-/// Methods to act like a primitive integer type, where reasonably applicable.
-// Last updated for parity with Rust 1.42.0.
-impl TextSize {
-    /// Checked addition. Returns `None` if overflow occurred.
-    #[inline]
-    pub const fn checked_add(self, rhs: TextSize) -> Option<TextSize> {
-        match self.raw.checked_add(rhs.raw) {
-            Some(raw) => Some(TextSize { raw }),
-            None => None,
-        }
-    }
-
-    /// Checked subtraction. Returns `None` if overflow occurred.
-    #[inline]
-    pub const fn checked_sub(self, rhs: TextSize) -> Option<TextSize> {
-        match self.raw.checked_sub(rhs.raw) {
-            Some(raw) => Some(TextSize { raw }),
-            None => None,
-        }
-    }
-}
-
-impl From<u32> for TextSize {
-    #[inline]
-    fn from(raw: u32) -> Self {
-        TextSize { raw }
-    }
-}
-
-impl From<TextSize> for u32 {
-    #[inline]
-    fn from(value: TextSize) -> Self {
-        value.raw
-    }
-}
-
-impl TryFrom<usize> for TextSize {
-    type Error = TryFromIntError;
-    #[inline]
-    fn try_from(value: usize) -> Result<Self, Self::Error> {
-        Ok(u32::try_from(value)?.into())
-    }
-}
-
-impl From<TextSize> for usize {
-    #[inline]
-    fn from(value: TextSize) -> Self {
-        value.raw as usize
-    }
-}
-
-macro_rules! ops {
-    (impl $Op:ident for TextSize by fn $f:ident = $op:tt) => {
-        impl $Op<TextSize> for TextSize {
-            type Output = TextSize;
-            #[inline]
-            fn $f(self, other: TextSize) -> TextSize {
-                TextSize { raw: self.raw $op other.raw }
-            }
-        }
-        impl $Op<&TextSize> for TextSize {
-            type Output = TextSize;
-            #[inline]
-            fn $f(self, other: &TextSize) -> TextSize {
-                self $op *other
-            }
-        }
-        impl<T> $Op<T> for &TextSize
-        where
-            TextSize: $Op<T, Output = TextSize>,
-        {
-            type Output = TextSize;
-            #[inline]
-            fn $f(self, other: T) -> TextSize {
-                *self $op other
-            }
-        }
-    };
-}
-
-ops!(impl Add for TextSize by fn add = +);
-ops!(impl Sub for TextSize by fn sub = -);
-
-impl<A> AddAssign<A> for TextSize
-where
-    TextSize: Add<A, Output = TextSize>,
-{
-    #[inline]
-    fn add_assign(&mut self, rhs: A) {
-        *self = *self + rhs
-    }
-}
-
-impl<S> SubAssign<S> for TextSize
-where
-    TextSize: Sub<S, Output = TextSize>,
-{
-    #[inline]
-    fn sub_assign(&mut self, rhs: S) {
-        *self = *self - rhs
-    }
-}
-
-impl<A> iter::Sum<A> for TextSize
-where
-    TextSize: Add<A, Output = TextSize>,
-{
-    #[inline]
-    fn sum<I: Iterator<Item = A>>(iter: I) -> TextSize {
-        iter.fold(0.into(), Add::add)
-    }
-}
diff --git a/crates/pgt_text_size/src/traits.rs b/crates/pgt_text_size/src/traits.rs
deleted file mode 100644
index d0bb6c1f..00000000
--- a/crates/pgt_text_size/src/traits.rs
+++ /dev/null
@@ -1,36 +0,0 @@
-use {crate::TextSize, std::convert::TryInto};
-
-use priv_in_pub::Sealed;
-mod priv_in_pub {
-    pub trait Sealed {}
-}
-
-/// Primitives with a textual length that can be passed to [`TextSize::of`].
-pub trait TextLen: Copy + Sealed {
-    /// The textual length of this primitive.
-    fn text_len(self) -> TextSize;
-}
-
-impl Sealed for &'_ str {}
-impl TextLen for &'_ str {
-    #[inline]
-    fn text_len(self) -> TextSize {
-        self.len().try_into().unwrap()
-    }
-}
-
-impl Sealed for &'_ String {}
-impl TextLen for &'_ String {
-    #[inline]
-    fn text_len(self) -> TextSize {
-        self.as_str().text_len()
-    }
-}
-
-impl Sealed for char {}
-impl TextLen for char {
-    #[inline]
-    fn text_len(self) -> TextSize {
-        (self.len_utf8() as u32).into()
-    }
-}
diff --git a/crates/pgt_text_size/tests/auto_traits.rs b/crates/pgt_text_size/tests/auto_traits.rs
deleted file mode 100644
index b3a11a75..00000000
--- a/crates/pgt_text_size/tests/auto_traits.rs
+++ /dev/null
@@ -1,18 +0,0 @@
-use {
-    pgt_text_size::*,
-    static_assertions::*,
-    std::{
-        fmt::Debug,
-        hash::Hash,
-        marker::{Send, Sync},
-        panic::{RefUnwindSafe, UnwindSafe},
-    },
-};
-
-// auto traits
-assert_impl_all!(TextSize: Send, Sync, Unpin, UnwindSafe, RefUnwindSafe);
-assert_impl_all!(TextRange: Send, Sync, Unpin, UnwindSafe, RefUnwindSafe);
-
-// common traits
-assert_impl_all!(TextSize: Copy, Debug, Default, Hash, Ord);
-assert_impl_all!(TextRange: Copy, Debug, Default, Hash, Eq);
diff --git a/crates/pgt_text_size/tests/constructors.rs b/crates/pgt_text_size/tests/constructors.rs
deleted file mode 100644
index e2b66295..00000000
--- a/crates/pgt_text_size/tests/constructors.rs
+++ /dev/null
@@ -1,24 +0,0 @@
-use pgt_text_size::TextSize;
-
-#[derive(Copy, Clone)]
-struct BadRope<'a>(&'a [&'a str]);
-
-impl BadRope<'_> {
-    fn text_len(self) -> TextSize {
-        self.0.iter().copied().map(TextSize::of).sum()
-    }
-}
-
-#[test]
-fn main() {
-    let x: char = 'c';
-    let _ = TextSize::of(x);
-
-    let x: &str = "hello";
-    let _ = TextSize::of(x);
-
-    let x: &String = &"hello".into();
-    let _ = TextSize::of(x);
-
-    let _ = BadRope(&[""]).text_len();
-}
diff --git a/crates/pgt_text_size/tests/indexing.rs b/crates/pgt_text_size/tests/indexing.rs
deleted file mode 100644
index 286884f4..00000000
--- a/crates/pgt_text_size/tests/indexing.rs
+++ /dev/null
@@ -1,8 +0,0 @@
-use pgt_text_size::*;
-
-#[test]
-fn main() {
-    let range = TextRange::default();
-    _ = &""[range];
-    _ = &String::new()[range];
-}
diff --git a/crates/pgt_text_size/tests/main.rs b/crates/pgt_text_size/tests/main.rs
deleted file mode 100644
index 1f84ce0e..00000000
--- a/crates/pgt_text_size/tests/main.rs
+++ /dev/null
@@ -1,76 +0,0 @@
-use {pgt_text_size::*, std::ops};
-
-fn size(x: u32) -> TextSize {
-    TextSize::from(x)
-}
-
-fn range(x: ops::Range<u32>) -> TextRange {
-    TextRange::new(x.start.into(), x.end.into())
-}
-
-#[test]
-fn sum() {
-    let xs: Vec<TextSize> = vec![size(0), size(1), size(2)];
-    assert_eq!(xs.iter().sum::<TextSize>(), size(3));
-    assert_eq!(xs.into_iter().sum::<TextSize>(), size(3));
-}
-
-#[test]
-fn math() {
-    assert_eq!(size(10) + size(5), size(15));
-    assert_eq!(size(10) - size(5), size(5));
-}
-
-#[test]
-fn checked_math() {
-    assert_eq!(size(1).checked_add(size(1)), Some(size(2)));
-    assert_eq!(size(1).checked_sub(size(1)), Some(size(0)));
-    assert_eq!(size(1).checked_sub(size(2)), None);
-    assert_eq!(size(!0).checked_add(size(1)), None);
-}
-
-#[test]
-#[rustfmt::skip]
-fn contains() {
-    assert!(   range(2..4).contains_range(range(2..3)));
-    assert!( ! range(2..4).contains_range(range(1..3)));
-}
-
-#[test]
-fn intersect() {
-    assert_eq!(range(1..2).intersect(range(2..3)), Some(range(2..2)));
-    assert_eq!(range(1..5).intersect(range(2..3)), Some(range(2..3)));
-    assert_eq!(range(1..2).intersect(range(3..4)), None);
-}
-
-#[test]
-fn cover() {
-    assert_eq!(range(1..2).cover(range(2..3)), range(1..3));
-    assert_eq!(range(1..5).cover(range(2..3)), range(1..5));
-    assert_eq!(range(1..2).cover(range(4..5)), range(1..5));
-}
-
-#[test]
-fn cover_offset() {
-    assert_eq!(range(1..3).cover_offset(size(0)), range(0..3));
-    assert_eq!(range(1..3).cover_offset(size(1)), range(1..3));
-    assert_eq!(range(1..3).cover_offset(size(2)), range(1..3));
-    assert_eq!(range(1..3).cover_offset(size(3)), range(1..3));
-    assert_eq!(range(1..3).cover_offset(size(4)), range(1..4));
-}
-
-#[test]
-#[rustfmt::skip]
-fn contains_point() {
-    assert!( ! range(1..3).contains(size(0)));
-    assert!(   range(1..3).contains(size(1)));
-    assert!(   range(1..3).contains(size(2)));
-    assert!( ! range(1..3).contains(size(3)));
-    assert!( ! range(1..3).contains(size(4)));
-
-    assert!( ! range(1..3).contains_inclusive(size(0)));
-    assert!(   range(1..3).contains_inclusive(size(1)));
-    assert!(   range(1..3).contains_inclusive(size(2)));
-    assert!(   range(1..3).contains_inclusive(size(3)));
-    assert!( ! range(1..3).contains_inclusive(size(4)));
diff --git a/crates/pgt_text_size/tests/serde.rs b/crates/pgt_text_size/tests/serde.rs deleted file mode 100644 index 4eaf366d..00000000 --- a/crates/pgt_text_size/tests/serde.rs +++ /dev/null @@ -1,79 +0,0 @@ -use {pgt_text_size::*, serde_test::*, std::ops}; - -fn size(x: u32) -> TextSize { - TextSize::from(x) -} - -fn range(x: ops::Range<u32>) -> TextRange { - TextRange::new(x.start.into(), x.end.into()) -} - -#[test] -fn size_serialization() { - assert_tokens(&size(00), &[Token::U32(00)]); - assert_tokens(&size(10), &[Token::U32(10)]); - assert_tokens(&size(20), &[Token::U32(20)]); - assert_tokens(&size(30), &[Token::U32(30)]); -} - -#[test] -fn range_serialization() { - assert_tokens( - &range(00..10), - &[ - Token::Tuple { len: 2 }, - Token::U32(00), - Token::U32(10), - Token::TupleEnd, - ], - ); - assert_tokens( - &range(10..20), - &[ - Token::Tuple { len: 2 }, - Token::U32(10), - Token::U32(20), - Token::TupleEnd, - ], - ); - assert_tokens( - &range(20..30), - &[ - Token::Tuple { len: 2 }, - Token::U32(20), - Token::U32(30), - Token::TupleEnd, - ], - ); - assert_tokens( - &range(30..40), - &[ - Token::Tuple { len: 2 }, - Token::U32(30), - Token::U32(40), - Token::TupleEnd, - ], - ); -} - -#[test] -fn invalid_range_deserialization() { - assert_tokens::<TextRange>( - &range(62..92), - &[ - Token::Tuple { len: 2 }, - Token::U32(62), - Token::U32(92), - Token::TupleEnd, - ], - ); - assert_de_tokens_error::<TextRange>( - &[ - Token::Tuple { len: 2 }, - Token::U32(92), - Token::U32(62), - Token::TupleEnd, - ], - "invalid range: 92..62", - ); -} diff --git a/crates/pgt_treesitter_queries/Cargo.toml b/crates/pgt_treesitter_queries/Cargo.toml deleted file mode 100644 index 5806861f..00000000 --- a/crates/pgt_treesitter_queries/Cargo.toml +++ /dev/null @@ -1,22 +0,0 @@ -[package] -authors.workspace = true -categories.workspace = true -description = "" -edition.workspace = true -homepage.workspace = true -keywords.workspace = true -license.workspace = true -name = "pgt_treesitter_queries" -repository.workspace = true -version = "0.0.0" - - -[dependencies] -clap = { version = "4.5.23", features = ["derive"] } -tree-sitter.workspace = true -tree_sitter_sql.workspace = true - -[dev-dependencies] - -[lib] -doctest = false diff --git a/crates/pgt_treesitter_queries/src/lib.rs b/crates/pgt_treesitter_queries/src/lib.rs deleted file mode 100644 index 4bf71e74..00000000 --- a/crates/pgt_treesitter_queries/src/lib.rs +++ /dev/null @@ -1,287 +0,0 @@ -pub mod queries; - -use std::slice::Iter; - -use queries::{Query, QueryResult}; - -pub struct TreeSitterQueriesExecutor<'a> { - root_node: tree_sitter::Node<'a>, - stmt: &'a str, - results: Vec<QueryResult<'a>>, -} - -impl<'a> TreeSitterQueriesExecutor<'a> { - pub fn new(root_node: tree_sitter::Node<'a>, stmt: &'a str) -> Self { - Self { - root_node, - stmt, - results: vec![], - } - } - - #[allow(private_bounds)] - pub fn add_query_results<Q: Query<'a>>(&mut self) { - let mut results = Q::execute(self.root_node, self.stmt); - self.results.append(&mut results); - } - - pub fn get_iter(&self, range: Option<&'a tree_sitter::Range>) -> QueryResultIter { - match range { - Some(r) => QueryResultIter::new(&self.results).within_range(r), - None => QueryResultIter::new(&self.results), - } - } -} - -pub struct QueryResultIter<'a> { - inner: Iter<'a, QueryResult<'a>>, - range: Option<&'a tree_sitter::Range>, -} - -impl<'a> QueryResultIter<'a> { - pub(crate) fn new(results: &'a Vec<QueryResult<'a>>) -> Self { - Self { - inner: results.iter(), - range: None, - } - } - - fn within_range(mut self,
r: &'a tree_sitter::Range) -> Self { - self.range = Some(r); - self - } -} - -impl<'a> Iterator for QueryResultIter<'a> { - type Item = &'a QueryResult<'a>; - fn next(&mut self) -> Option { - let next = self.inner.next()?; - - if self.range.as_ref().is_some_and(|r| !next.within_range(r)) { - return self.next(); - } - - Some(next) - } -} - -#[cfg(test)] -mod tests { - - use crate::{ - TreeSitterQueriesExecutor, - queries::{ParameterMatch, RelationMatch, TableAliasMatch}, - }; - - #[test] - fn finds_all_table_aliases() { - let sql = r#" -select - * -from - ( - select - something - from - public.cool_table pu - join private.cool_tableau pr on pu.id = pr.id - where - x = '123' - union - select - something_else - from - another_table puat - inner join private.another_tableau prat on puat.id = prat.id - union - select - x, - y - from - public.get_something_cool () - ) as cool - join users u on u.id = cool.something -where - col = 17; -"#; - - let mut parser = tree_sitter::Parser::new(); - parser.set_language(tree_sitter_sql::language()).unwrap(); - - let tree = parser.parse(sql, None).unwrap(); - - let mut executor = TreeSitterQueriesExecutor::new(tree.root_node(), sql); - - executor.add_query_results::(); - - let results: Vec<&TableAliasMatch> = executor - .get_iter(None) - .filter_map(|q| q.try_into().ok()) - .collect(); - - assert_eq!(results[0].get_schema(sql), Some("public".into())); - assert_eq!(results[0].get_table(sql), "cool_table"); - assert_eq!(results[0].get_alias(sql), "pu"); - - assert_eq!(results[1].get_schema(sql), Some("private".into())); - assert_eq!(results[1].get_table(sql), "cool_tableau"); - assert_eq!(results[1].get_alias(sql), "pr"); - - assert_eq!(results[2].get_schema(sql), None); - assert_eq!(results[2].get_table(sql), "another_table"); - assert_eq!(results[2].get_alias(sql), "puat"); - - assert_eq!(results[3].get_schema(sql), Some("private".into())); - assert_eq!(results[3].get_table(sql), "another_tableau"); - assert_eq!(results[3].get_alias(sql), "prat"); - - assert_eq!(results[4].get_schema(sql), None); - assert_eq!(results[4].get_table(sql), "users"); - assert_eq!(results[4].get_alias(sql), "u"); - } - - #[test] - fn finds_all_relations_and_ignores_functions() { - let sql = r#" -select - * -from - ( - select - something - from - public.cool_table pu - join private.cool_tableau pr on pu.id = pr.id - where - x = '123' - union - select - something_else - from - another_table puat - inner join private.another_tableau prat on puat.id = prat.id - union - select - x, - y - from - public.get_something_cool () - ) -where - col = 17; -"#; - - let mut parser = tree_sitter::Parser::new(); - parser.set_language(tree_sitter_sql::language()).unwrap(); - - let tree = parser.parse(sql, None).unwrap(); - - let mut executor = TreeSitterQueriesExecutor::new(tree.root_node(), sql); - - executor.add_query_results::(); - - let results: Vec<&RelationMatch> = executor - .get_iter(None) - .filter_map(|q| q.try_into().ok()) - .collect(); - - assert_eq!(results[0].get_schema(sql), Some("public".into())); - assert_eq!(results[0].get_table(sql), "cool_table"); - - assert_eq!(results[1].get_schema(sql), Some("private".into())); - assert_eq!(results[1].get_table(sql), "cool_tableau"); - - assert_eq!(results[2].get_schema(sql), None); - assert_eq!(results[2].get_table(sql), "another_table"); - - assert_eq!(results[3].get_schema(sql), Some("private".into())); - assert_eq!(results[3].get_table(sql), "another_tableau"); - - // we have exhausted the matches: function invocations are ignored. 
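The assertion below holds because of grammar shape, not query filtering: in tree-sitter-sql a FROM/JOIN item is a `relation` node, while `public.get_something_cool()` parses as an `invocation`, which the relations query never captures. A sketch of inspecting that distinction directly (the helper name is invented for illustration):

```rust
// Hypothetical debugging helper: print every relation/invocation node so the
// grammar-level split relied on above is visible.
fn dump_relations_and_invocations(sql: &str) {
    let mut parser = tree_sitter::Parser::new();
    parser.set_language(tree_sitter_sql::language()).unwrap();
    let tree = parser.parse(sql, None).unwrap();

    let mut cursor = tree.root_node().walk();
    let mut stack = vec![tree.root_node()];
    while let Some(node) = stack.pop() {
        if node.kind() == "relation" || node.kind() == "invocation" {
            println!("{}: {}", node.kind(), &sql[node.byte_range()]);
        }
        // `named_children` skips punctuation tokens; push children for a
        // depth-first walk of the whole statement.
        for child in node.named_children(&mut cursor) {
            stack.push(child);
        }
    }
}
```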
- assert!(results.len() == 4); - } - - #[test] - fn only_considers_nodes_in_requested_range() { - let sql = r#" -select - * -from ( - select * - from ( - select * - from private.something - ) as sq2 - join private.tableau pt1 - on sq2.id = pt1.id - ) as sq1 -join private.table pt -on sq1.id = pt.id; -"#; - - let mut parser = tree_sitter::Parser::new(); - parser.set_language(tree_sitter_sql::language()).unwrap(); - - let tree = parser.parse(sql, None).unwrap(); - - // trust me bro - let range = { - let mut cursor = tree.root_node().walk(); - cursor.goto_first_child(); // statement - cursor.goto_first_child(); // select - cursor.goto_next_sibling(); // from - cursor.goto_first_child(); // keyword_from - cursor.goto_next_sibling(); // relation - cursor.goto_first_child(); // subquery (1) - cursor.goto_first_child(); // "(" - cursor.goto_next_sibling(); // select - cursor.goto_next_sibling(); // from - cursor.goto_first_child(); // keyword_from - cursor.goto_next_sibling(); // relation - cursor.goto_first_child(); // subquery (2) - cursor.node().range() - }; - - let mut executor = TreeSitterQueriesExecutor::new(tree.root_node(), sql); - - executor.add_query_results::(); - - let results: Vec<&RelationMatch> = executor - .get_iter(Some(&range)) - .filter_map(|q| q.try_into().ok()) - .collect(); - - assert_eq!(results.len(), 1); - assert_eq!(results[0].get_schema(sql), Some("private".into())); - assert_eq!(results[0].get_table(sql), "something"); - } - - #[test] - fn extracts_parameters() { - let sql = r#"select v_test + fn_name.custom_type.v_test2 + $3 + custom_type.v_test3;"#; - - let mut parser = tree_sitter::Parser::new(); - parser.set_language(tree_sitter_sql::language()).unwrap(); - - let tree = parser.parse(sql, None).unwrap(); - - let mut executor = TreeSitterQueriesExecutor::new(tree.root_node(), sql); - - executor.add_query_results::(); - - let results: Vec<&ParameterMatch> = executor - .get_iter(None) - .filter_map(|q| q.try_into().ok()) - .collect(); - - assert_eq!(results.len(), 4); - - assert_eq!(results[0].get_path(sql), "v_test"); - - assert_eq!(results[1].get_path(sql), "fn_name.custom_type.v_test2"); - - assert_eq!(results[2].get_path(sql), "$3"); - - assert_eq!(results[3].get_path(sql), "custom_type.v_test3"); - } -} diff --git a/crates/pgt_treesitter_queries/src/queries/insert_columns.rs b/crates/pgt_treesitter_queries/src/queries/insert_columns.rs deleted file mode 100644 index 3e88d998..00000000 --- a/crates/pgt_treesitter_queries/src/queries/insert_columns.rs +++ /dev/null @@ -1,150 +0,0 @@ -use std::sync::LazyLock; - -use crate::{Query, QueryResult}; - -use super::QueryTryFrom; - -static TS_QUERY: LazyLock = LazyLock::new(|| { - static QUERY_STR: &str = r#" - (insert - (object_reference) - (list - "("? - (column) @column - ","? - ")"? 
- ) - ) -"#; - tree_sitter::Query::new(tree_sitter_sql::language(), QUERY_STR).expect("Invalid TS Query") -}); - -#[derive(Debug)] -pub struct InsertColumnMatch<'a> { - pub(crate) column: tree_sitter::Node<'a>, -} - -impl InsertColumnMatch<'_> { - pub fn get_column(&self, sql: &str) -> String { - self.column - .utf8_text(sql.as_bytes()) - .expect("Failed to get column from ColumnMatch") - .to_string() - } -} - -impl<'a> TryFrom<&'a QueryResult<'a>> for &'a InsertColumnMatch<'a> { - type Error = String; - - fn try_from(q: &'a QueryResult<'a>) -> Result { - match q { - QueryResult::InsertClauseColumns(c) => Ok(c), - - #[allow(unreachable_patterns)] - _ => Err("Invalid QueryResult type".into()), - } - } -} - -impl<'a> QueryTryFrom<'a> for InsertColumnMatch<'a> { - type Ref = &'a InsertColumnMatch<'a>; -} - -impl<'a> Query<'a> for InsertColumnMatch<'a> { - fn execute(root_node: tree_sitter::Node<'a>, stmt: &'a str) -> Vec> { - let mut cursor = tree_sitter::QueryCursor::new(); - - let matches = cursor.matches(&TS_QUERY, root_node, stmt.as_bytes()); - - let mut to_return = vec![]; - - for m in matches { - if m.captures.len() == 1 { - let capture = m.captures[0].node; - to_return.push(QueryResult::InsertClauseColumns(InsertColumnMatch { - column: capture, - })); - } - } - - to_return - } -} -#[cfg(test)] -mod tests { - use super::InsertColumnMatch; - use crate::TreeSitterQueriesExecutor; - - #[test] - fn finds_all_insert_columns() { - let sql = r#"insert into users (id, email, name) values (1, 'a@b.com', 'Alice');"#; - - let mut parser = tree_sitter::Parser::new(); - parser.set_language(tree_sitter_sql::language()).unwrap(); - - let tree = parser.parse(sql, None).unwrap(); - - let mut executor = TreeSitterQueriesExecutor::new(tree.root_node(), sql); - - executor.add_query_results::(); - - let results: Vec<&InsertColumnMatch> = executor - .get_iter(None) - .filter_map(|q| q.try_into().ok()) - .collect(); - - let columns: Vec = results.iter().map(|c| c.get_column(sql)).collect(); - - assert_eq!(columns, vec!["id", "email", "name"]); - } - - #[test] - fn finds_insert_columns_with_whitespace_and_commas() { - let sql = r#" - insert into users ( - id, - email, - name - ) values (1, 'a@b.com', 'Alice'); - "#; - - let mut parser = tree_sitter::Parser::new(); - parser.set_language(tree_sitter_sql::language()).unwrap(); - - let tree = parser.parse(sql, None).unwrap(); - - let mut executor = TreeSitterQueriesExecutor::new(tree.root_node(), sql); - - executor.add_query_results::(); - - let results: Vec<&InsertColumnMatch> = executor - .get_iter(None) - .filter_map(|q| q.try_into().ok()) - .collect(); - - let columns: Vec = results.iter().map(|c| c.get_column(sql)).collect(); - - assert_eq!(columns, vec!["id", "email", "name"]); - } - - #[test] - fn returns_empty_for_insert_without_columns() { - let sql = r#"insert into users values (1, 'a@b.com', 'Alice');"#; - - let mut parser = tree_sitter::Parser::new(); - parser.set_language(tree_sitter_sql::language()).unwrap(); - - let tree = parser.parse(sql, None).unwrap(); - - let mut executor = TreeSitterQueriesExecutor::new(tree.root_node(), sql); - - executor.add_query_results::(); - - let results: Vec<&InsertColumnMatch> = executor - .get_iter(None) - .filter_map(|q| q.try_into().ok()) - .collect(); - - assert!(results.is_empty()); - } -} diff --git a/crates/pgt_treesitter_queries/src/queries/mod.rs b/crates/pgt_treesitter_queries/src/queries/mod.rs deleted file mode 100644 index b9f39aed..00000000 --- a/crates/pgt_treesitter_queries/src/queries/mod.rs +++ 
/dev/null @@ -1,86 +0,0 @@ -mod insert_columns; -mod parameters; -mod relations; -mod select_columns; -mod table_aliases; -mod where_columns; - -pub use insert_columns::*; -pub use parameters::*; -pub use relations::*; -pub use select_columns::*; -pub use table_aliases::*; -pub use where_columns::*; - -#[derive(Debug)] -pub enum QueryResult<'a> { - Relation(RelationMatch<'a>), - Parameter(ParameterMatch<'a>), - TableAliases(TableAliasMatch<'a>), - SelectClauseColumns(SelectColumnMatch<'a>), - InsertClauseColumns(InsertColumnMatch<'a>), - WhereClauseColumns(WhereColumnMatch<'a>), -} - -impl QueryResult<'_> { - pub fn within_range(&self, range: &tree_sitter::Range) -> bool { - match self { - QueryResult::Relation(rm) => { - let start = match rm.schema { - Some(s) => s.start_position(), - None => rm.table.start_position(), - }; - - let end = rm.table.end_position(); - - start >= range.start_point && end <= range.end_point - } - Self::Parameter(pm) => { - let node_range = pm.node.range(); - - node_range.start_point >= range.start_point - && node_range.end_point <= range.end_point - } - QueryResult::TableAliases(m) => { - let start = m.table.start_position(); - let end = m.alias.end_position(); - start >= range.start_point && end <= range.end_point - } - Self::SelectClauseColumns(cm) => { - let start = match cm.alias { - Some(n) => n.start_position(), - None => cm.column.start_position(), - }; - - let end = cm.column.end_position(); - - start >= range.start_point && end <= range.end_point - } - Self::WhereClauseColumns(cm) => { - let start = match cm.alias { - Some(n) => n.start_position(), - None => cm.column.start_position(), - }; - - let end = cm.column.end_position(); - - start >= range.start_point && end <= range.end_point - } - Self::InsertClauseColumns(cm) => { - let start = cm.column.start_position(); - let end = cm.column.end_position(); - start >= range.start_point && end <= range.end_point - } - } - } -} - -// This trait enforces that for any `Self` that implements `Query`, -// its &Self must implement TryFrom<&QueryResult> -pub(crate) trait QueryTryFrom<'a>: Sized { - type Ref: for<'any> TryFrom<&'a QueryResult<'a>, Error = String>; -} - -pub(crate) trait Query<'a>: QueryTryFrom<'a> { - fn execute(root_node: tree_sitter::Node<'a>, stmt: &'a str) -> Vec>; -} diff --git a/crates/pgt_treesitter_queries/src/queries/parameters.rs b/crates/pgt_treesitter_queries/src/queries/parameters.rs deleted file mode 100644 index 85ea9ad2..00000000 --- a/crates/pgt_treesitter_queries/src/queries/parameters.rs +++ /dev/null @@ -1,82 +0,0 @@ -use std::sync::LazyLock; - -use crate::{Query, QueryResult}; - -use super::QueryTryFrom; - -static TS_QUERY: LazyLock = LazyLock::new(|| { - static QUERY_STR: &str = r#" -[ - (field - (identifier)) @reference - (field - (object_reference) - "." 
(identifier)) @reference - (parameter) @parameter -] -"#; - tree_sitter::Query::new(tree_sitter_sql::language(), QUERY_STR).expect("Invalid TS Query") -}); - -#[derive(Debug)] -pub struct ParameterMatch<'a> { - pub(crate) node: tree_sitter::Node<'a>, -} - -impl ParameterMatch<'_> { - pub fn get_path(&self, sql: &str) -> String { - self.node - .utf8_text(sql.as_bytes()) - .expect("Failed to get path from ParameterMatch") - .to_string() - } - - pub fn get_range(&self) -> tree_sitter::Range { - self.node.range() - } - - pub fn get_byte_range(&self) -> std::ops::Range { - let range = self.node.range(); - range.start_byte..range.end_byte - } -} - -impl<'a> TryFrom<&'a QueryResult<'a>> for &'a ParameterMatch<'a> { - type Error = String; - - fn try_from(q: &'a QueryResult<'a>) -> Result { - match q { - QueryResult::Parameter(r) => Ok(r), - - #[allow(unreachable_patterns)] - _ => Err("Invalid QueryResult type".into()), - } - } -} - -impl<'a> QueryTryFrom<'a> for ParameterMatch<'a> { - type Ref = &'a ParameterMatch<'a>; -} - -impl<'a> Query<'a> for ParameterMatch<'a> { - fn execute(root_node: tree_sitter::Node<'a>, stmt: &'a str) -> Vec> { - let mut cursor = tree_sitter::QueryCursor::new(); - - let matches = cursor.matches(&TS_QUERY, root_node, stmt.as_bytes()); - - matches - .filter_map(|m| { - let captures = m.captures; - - // We expect exactly one capture for a parameter - if captures.len() != 1 { - return None; - } - - Some(QueryResult::Parameter(ParameterMatch { - node: captures[0].node, - })) - }) - .collect() - } -} diff --git a/crates/pgt_treesitter_queries/src/queries/relations.rs b/crates/pgt_treesitter_queries/src/queries/relations.rs deleted file mode 100644 index 38fd0513..00000000 --- a/crates/pgt_treesitter_queries/src/queries/relations.rs +++ /dev/null @@ -1,199 +0,0 @@ -use std::sync::LazyLock; - -use crate::{Query, QueryResult}; - -use super::QueryTryFrom; - -static TS_QUERY: LazyLock = LazyLock::new(|| { - static QUERY_STR: &str = r#" - (relation - (object_reference - . - (identifier) @schema_or_table - "."? - (identifier)? @table - )+ - ) - (insert - (object_reference - . - (identifier) @schema_or_table - "."? - (identifier)? @table - )+ - ) -"#; - tree_sitter::Query::new(tree_sitter_sql::language(), QUERY_STR).expect("Invalid TS Query") -}); - -#[derive(Debug)] -pub struct RelationMatch<'a> { - pub(crate) schema: Option>, - pub(crate) table: tree_sitter::Node<'a>, -} - -impl RelationMatch<'_> { - pub fn get_schema(&self, sql: &str) -> Option { - let str = self - .schema - .as_ref()? 
- .utf8_text(sql.as_bytes()) - .expect("Failed to get schema from RelationMatch"); - - Some(str.to_string()) - } - - pub fn get_table(&self, sql: &str) -> String { - self.table - .utf8_text(sql.as_bytes()) - .expect("Failed to get table from RelationMatch") - .to_string() - } -} - -impl<'a> TryFrom<&'a QueryResult<'a>> for &'a RelationMatch<'a> { - type Error = String; - - fn try_from(q: &'a QueryResult<'a>) -> Result { - match q { - QueryResult::Relation(r) => Ok(r), - - #[allow(unreachable_patterns)] - _ => Err("Invalid QueryResult type".into()), - } - } -} - -impl<'a> QueryTryFrom<'a> for RelationMatch<'a> { - type Ref = &'a RelationMatch<'a>; -} - -impl<'a> Query<'a> for RelationMatch<'a> { - fn execute(root_node: tree_sitter::Node<'a>, stmt: &'a str) -> Vec> { - let mut cursor = tree_sitter::QueryCursor::new(); - - let matches = cursor.matches(&TS_QUERY, root_node, stmt.as_bytes()); - - let mut to_return = vec![]; - - for m in matches { - if m.captures.len() == 1 { - let capture = m.captures[0].node; - to_return.push(QueryResult::Relation(RelationMatch { - schema: None, - table: capture, - })); - } - - if m.captures.len() == 2 { - let schema = m.captures[0].node; - let table = m.captures[1].node; - - to_return.push(QueryResult::Relation(RelationMatch { - schema: Some(schema), - table, - })); - } - } - - to_return - } -} - -#[cfg(test)] -mod tests { - use super::RelationMatch; - use crate::TreeSitterQueriesExecutor; - - #[test] - fn finds_table_without_schema() { - let sql = r#"select * from users;"#; - - let mut parser = tree_sitter::Parser::new(); - parser.set_language(tree_sitter_sql::language()).unwrap(); - - let tree = parser.parse(sql, None).unwrap(); - - let mut executor = TreeSitterQueriesExecutor::new(tree.root_node(), sql); - - executor.add_query_results::(); - - let results: Vec<&RelationMatch> = executor - .get_iter(None) - .filter_map(|q| q.try_into().ok()) - .collect(); - - assert_eq!(results.len(), 1); - assert_eq!(results[0].get_schema(sql), None); - assert_eq!(results[0].get_table(sql), "users"); - } - - #[test] - fn finds_table_with_schema() { - let sql = r#"select * from public.users;"#; - - let mut parser = tree_sitter::Parser::new(); - parser.set_language(tree_sitter_sql::language()).unwrap(); - - let tree = parser.parse(sql, None).unwrap(); - - let mut executor = TreeSitterQueriesExecutor::new(tree.root_node(), sql); - - executor.add_query_results::(); - - let results: Vec<&RelationMatch> = executor - .get_iter(None) - .filter_map(|q| q.try_into().ok()) - .collect(); - - assert_eq!(results.len(), 1); - assert_eq!(results[0].get_schema(sql), Some("public".to_string())); - assert_eq!(results[0].get_table(sql), "users"); - } - - #[test] - fn finds_insert_into_with_schema_and_table() { - let sql = r#"insert into auth.accounts (id, email) values (1, 'a@b.com');"#; - - let mut parser = tree_sitter::Parser::new(); - parser.set_language(tree_sitter_sql::language()).unwrap(); - - let tree = parser.parse(sql, None).unwrap(); - - let mut executor = TreeSitterQueriesExecutor::new(tree.root_node(), sql); - - executor.add_query_results::(); - - let results: Vec<&RelationMatch> = executor - .get_iter(None) - .filter_map(|q| q.try_into().ok()) - .collect(); - - assert_eq!(results.len(), 1); - assert_eq!(results[0].get_schema(sql), Some("auth".to_string())); - assert_eq!(results[0].get_table(sql), "accounts"); - } - - #[test] - fn finds_insert_into_without_schema() { - let sql = r#"insert into users (id, email) values (1, 'a@b.com');"#; - - let mut parser = 
tree_sitter::Parser::new(); - parser.set_language(tree_sitter_sql::language()).unwrap(); - - let tree = parser.parse(sql, None).unwrap(); - - let mut executor = TreeSitterQueriesExecutor::new(tree.root_node(), sql); - - executor.add_query_results::(); - - let results: Vec<&RelationMatch> = executor - .get_iter(None) - .filter_map(|q| q.try_into().ok()) - .collect(); - - assert_eq!(results.len(), 1); - assert_eq!(results[0].get_schema(sql), None); - assert_eq!(results[0].get_table(sql), "users"); - } -} diff --git a/crates/pgt_treesitter_queries/src/queries/select_columns.rs b/crates/pgt_treesitter_queries/src/queries/select_columns.rs deleted file mode 100644 index 00b6977d..00000000 --- a/crates/pgt_treesitter_queries/src/queries/select_columns.rs +++ /dev/null @@ -1,172 +0,0 @@ -use std::sync::LazyLock; - -use crate::{Query, QueryResult}; - -use super::QueryTryFrom; - -static TS_QUERY: LazyLock = LazyLock::new(|| { - static QUERY_STR: &str = r#" - (select_expression - (term - (field - (object_reference)? @alias - "."? - (identifier) @column - ) - ) - ","? - ) -"#; - tree_sitter::Query::new(tree_sitter_sql::language(), QUERY_STR).expect("Invalid TS Query") -}); - -#[derive(Debug)] -pub struct SelectColumnMatch<'a> { - pub(crate) alias: Option>, - pub(crate) column: tree_sitter::Node<'a>, -} - -impl SelectColumnMatch<'_> { - pub fn get_alias(&self, sql: &str) -> Option { - let str = self - .alias - .as_ref()? - .utf8_text(sql.as_bytes()) - .expect("Failed to get alias from ColumnMatch"); - - Some(str.to_string()) - } - - pub fn get_column(&self, sql: &str) -> String { - self.column - .utf8_text(sql.as_bytes()) - .expect("Failed to get column from ColumnMatch") - .to_string() - } -} - -impl<'a> TryFrom<&'a QueryResult<'a>> for &'a SelectColumnMatch<'a> { - type Error = String; - - fn try_from(q: &'a QueryResult<'a>) -> Result { - match q { - QueryResult::SelectClauseColumns(c) => Ok(c), - - #[allow(unreachable_patterns)] - _ => Err("Invalid QueryResult type".into()), - } - } -} - -impl<'a> QueryTryFrom<'a> for SelectColumnMatch<'a> { - type Ref = &'a SelectColumnMatch<'a>; -} - -impl<'a> Query<'a> for SelectColumnMatch<'a> { - fn execute(root_node: tree_sitter::Node<'a>, stmt: &'a str) -> Vec> { - let mut cursor = tree_sitter::QueryCursor::new(); - - let matches = cursor.matches(&TS_QUERY, root_node, stmt.as_bytes()); - - let mut to_return = vec![]; - - for m in matches { - if m.captures.len() == 1 { - let capture = m.captures[0].node; - to_return.push(QueryResult::SelectClauseColumns(SelectColumnMatch { - alias: None, - column: capture, - })); - } - - if m.captures.len() == 2 { - let alias = m.captures[0].node; - let column = m.captures[1].node; - - to_return.push(QueryResult::SelectClauseColumns(SelectColumnMatch { - alias: Some(alias), - column, - })); - } - } - - to_return - } -} - -#[cfg(test)] -mod tests { - use crate::TreeSitterQueriesExecutor; - - use super::SelectColumnMatch; - - #[test] - fn finds_all_columns() { - let sql = r#"select aud, id, email from auth.users;"#; - - let mut parser = tree_sitter::Parser::new(); - parser.set_language(tree_sitter_sql::language()).unwrap(); - - let tree = parser.parse(sql, None).unwrap(); - - let mut executor = TreeSitterQueriesExecutor::new(tree.root_node(), sql); - - executor.add_query_results::(); - - let results: Vec<&SelectColumnMatch> = executor - .get_iter(None) - .filter_map(|q| q.try_into().ok()) - .collect(); - - assert_eq!(results[0].get_alias(sql), None); - assert_eq!(results[0].get_column(sql), "aud"); - - 
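All of these match types compose: one executor pass can collect several query kinds, and `get_iter` is filtered per type afterwards. A sketch using the same public API the surrounding tests exercise:

```rust
use pgt_treesitter_queries::{
    TreeSitterQueriesExecutor,
    queries::{RelationMatch, SelectColumnMatch},
};

// Collect relations and select-clause columns in a single pass over `sql`.
fn relations_and_columns(sql: &str) {
    let mut parser = tree_sitter::Parser::new();
    parser.set_language(tree_sitter_sql::language()).unwrap();
    let tree = parser.parse(sql, None).unwrap();

    let mut executor = TreeSitterQueriesExecutor::new(tree.root_node(), sql);
    executor.add_query_results::<RelationMatch>();
    executor.add_query_results::<SelectColumnMatch>();

    // Each concrete match type reclaims itself from &QueryResult via TryFrom,
    // so filtering per kind is a try_into + ok().
    let relations: Vec<&RelationMatch> = executor
        .get_iter(None)
        .filter_map(|q| q.try_into().ok())
        .collect();
    let columns: Vec<&SelectColumnMatch> = executor
        .get_iter(None)
        .filter_map(|q| q.try_into().ok())
        .collect();

    for r in relations {
        println!("table: {}", r.get_table(sql));
    }
    for c in columns {
        println!("column: {}", c.get_column(sql));
    }
}
```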
assert_eq!(results[1].get_alias(sql), None); - assert_eq!(results[1].get_column(sql), "id"); - - assert_eq!(results[2].get_alias(sql), None); - assert_eq!(results[2].get_column(sql), "email"); - } - - #[test] - fn finds_columns_with_aliases() { - let sql = r#" -select - u.id, - u.email, - cs.user_settings, - cs.client_id -from - auth.users u - join public.client_settings cs - on u.id = cs.user_id; - -"#; - - let mut parser = tree_sitter::Parser::new(); - parser.set_language(tree_sitter_sql::language()).unwrap(); - - let tree = parser.parse(sql, None).unwrap(); - - let mut executor = TreeSitterQueriesExecutor::new(tree.root_node(), sql); - - executor.add_query_results::(); - - let results: Vec<&SelectColumnMatch> = executor - .get_iter(None) - .filter_map(|q| q.try_into().ok()) - .collect(); - - assert_eq!(results[0].get_alias(sql), Some("u".into())); - assert_eq!(results[0].get_column(sql), "id"); - - assert_eq!(results[1].get_alias(sql), Some("u".into())); - assert_eq!(results[1].get_column(sql), "email"); - - assert_eq!(results[2].get_alias(sql), Some("cs".into())); - assert_eq!(results[2].get_column(sql), "user_settings"); - - assert_eq!(results[3].get_alias(sql), Some("cs".into())); - assert_eq!(results[3].get_column(sql), "client_id"); - } -} diff --git a/crates/pgt_treesitter_queries/src/queries/table_aliases.rs b/crates/pgt_treesitter_queries/src/queries/table_aliases.rs deleted file mode 100644 index 4297a218..00000000 --- a/crates/pgt_treesitter_queries/src/queries/table_aliases.rs +++ /dev/null @@ -1,106 +0,0 @@ -use std::sync::LazyLock; - -use crate::{Query, QueryResult}; - -use super::QueryTryFrom; - -static TS_QUERY: LazyLock = LazyLock::new(|| { - static QUERY_STR: &str = r#" - (relation - (object_reference - . - (identifier) @schema_or_table - "."? - (identifier)? @table - ) - (keyword_as)? 
- (identifier) @alias - ) -"#; - tree_sitter::Query::new(tree_sitter_sql::language(), QUERY_STR).expect("Invalid TS Query") -}); - -#[derive(Debug)] -pub struct TableAliasMatch<'a> { - pub(crate) table: tree_sitter::Node<'a>, - pub(crate) alias: tree_sitter::Node<'a>, - pub(crate) schema: Option>, -} - -impl TableAliasMatch<'_> { - pub fn get_alias(&self, sql: &str) -> String { - self.alias - .utf8_text(sql.as_bytes()) - .expect("Failed to get alias from TableAliasMatch") - .to_string() - } - - pub fn get_table(&self, sql: &str) -> String { - self.table - .utf8_text(sql.as_bytes()) - .expect("Failed to get table from TableAliasMatch") - .to_string() - } - - pub fn get_schema(&self, sql: &str) -> Option { - self.schema.as_ref().map(|n| { - n.utf8_text(sql.as_bytes()) - .expect("Failed to get table from TableAliasMatch") - .to_string() - }) - } -} - -impl<'a> TryFrom<&'a QueryResult<'a>> for &'a TableAliasMatch<'a> { - type Error = String; - - fn try_from(q: &'a QueryResult<'a>) -> Result { - match q { - QueryResult::TableAliases(t) => Ok(t), - - #[allow(unreachable_patterns)] - _ => Err("Invalid QueryResult type".into()), - } - } -} - -impl<'a> QueryTryFrom<'a> for TableAliasMatch<'a> { - type Ref = &'a TableAliasMatch<'a>; -} - -impl<'a> Query<'a> for TableAliasMatch<'a> { - fn execute(root_node: tree_sitter::Node<'a>, stmt: &'a str) -> Vec> { - let mut cursor = tree_sitter::QueryCursor::new(); - - let matches = cursor.matches(&TS_QUERY, root_node, stmt.as_bytes()); - - let mut to_return = vec![]; - - for m in matches { - if m.captures.len() == 3 { - let schema = m.captures[0].node; - let table = m.captures[1].node; - let alias = m.captures[2].node; - - to_return.push(QueryResult::TableAliases(TableAliasMatch { - table, - alias, - schema: Some(schema), - })); - } - - if m.captures.len() == 2 { - let table = m.captures[0].node; - let alias = m.captures[1].node; - - to_return.push(QueryResult::TableAliases(TableAliasMatch { - table, - alias, - schema: None, - })); - } - } - - to_return - } -} diff --git a/crates/pgt_treesitter_queries/src/queries/where_columns.rs b/crates/pgt_treesitter_queries/src/queries/where_columns.rs deleted file mode 100644 index 8e19590d..00000000 --- a/crates/pgt_treesitter_queries/src/queries/where_columns.rs +++ /dev/null @@ -1,96 +0,0 @@ -use std::sync::LazyLock; - -use crate::{Query, QueryResult}; - -use super::QueryTryFrom; - -static TS_QUERY: LazyLock = LazyLock::new(|| { - static QUERY_STR: &str = r#" - (where - (binary_expression - (binary_expression - (field - (object_reference)? @alias - "."? - (identifier) @column - ) - ) - ) - ) -"#; - tree_sitter::Query::new(tree_sitter_sql::language(), QUERY_STR).expect("Invalid TS Query") -}); - -#[derive(Debug)] -pub struct WhereColumnMatch<'a> { - pub(crate) alias: Option>, - pub(crate) column: tree_sitter::Node<'a>, -} - -impl WhereColumnMatch<'_> { - pub fn get_alias(&self, sql: &str) -> Option { - let str = self - .alias - .as_ref()? 
- .utf8_text(sql.as_bytes()) - .expect("Failed to get alias from ColumnMatch"); - - Some(str.to_string()) - } - - pub fn get_column(&self, sql: &str) -> String { - self.column - .utf8_text(sql.as_bytes()) - .expect("Failed to get column from ColumnMatch") - .to_string() - } -} - -impl<'a> TryFrom<&'a QueryResult<'a>> for &'a WhereColumnMatch<'a> { - type Error = String; - - fn try_from(q: &'a QueryResult<'a>) -> Result { - match q { - QueryResult::WhereClauseColumns(c) => Ok(c), - - #[allow(unreachable_patterns)] - _ => Err("Invalid QueryResult type".into()), - } - } -} - -impl<'a> QueryTryFrom<'a> for WhereColumnMatch<'a> { - type Ref = &'a WhereColumnMatch<'a>; -} - -impl<'a> Query<'a> for WhereColumnMatch<'a> { - fn execute(root_node: tree_sitter::Node<'a>, stmt: &'a str) -> Vec> { - let mut cursor = tree_sitter::QueryCursor::new(); - - let matches = cursor.matches(&TS_QUERY, root_node, stmt.as_bytes()); - - let mut to_return = vec![]; - - for m in matches { - if m.captures.len() == 1 { - let capture = m.captures[0].node; - to_return.push(QueryResult::WhereClauseColumns(WhereColumnMatch { - alias: None, - column: capture, - })); - } - - if m.captures.len() == 2 { - let alias = m.captures[0].node; - let column = m.captures[1].node; - - to_return.push(QueryResult::WhereClauseColumns(WhereColumnMatch { - alias: Some(alias), - column, - })); - } - } - - to_return - } -} diff --git a/crates/pgt_type_resolver/Cargo.toml b/crates/pgt_type_resolver/Cargo.toml deleted file mode 100644 index 5d2a8eb1..00000000 --- a/crates/pgt_type_resolver/Cargo.toml +++ /dev/null @@ -1,21 +0,0 @@ -[package] -authors.workspace = true -categories.workspace = true -description = "" -edition.workspace = true -homepage.workspace = true -keywords.workspace = true -license.workspace = true -name = "pgt_type_resolver" -repository.workspace = true -version = "0.0.0" - - -[dependencies] -pgt_query_ext.workspace = true -pgt_schema_cache.workspace = true - -[dev-dependencies] - -[lib] -doctest = false diff --git a/crates/pgt_type_resolver/src/functions.rs b/crates/pgt_type_resolver/src/functions.rs deleted file mode 100644 index 1b0036b5..00000000 --- a/crates/pgt_type_resolver/src/functions.rs +++ /dev/null @@ -1,88 +0,0 @@ -use pgt_schema_cache::{Function, SchemaCache}; - -use crate::{ - types::{PossibleType, resolve_type}, - util::get_string_from_node, -}; - -pub fn resolve_func_call<'b>( - node: &pgt_query_ext::protobuf::FuncCall, - schema_cache: &'b SchemaCache, -) -> Option<&'b Function> { - let (schema, name) = resolve_func_identifier(node); - - let fns = schema_cache - .functions - .iter() - .filter(|f| { - function_matches( - f, - schema.as_deref(), - name.as_str(), - node.args - .iter() - .map(|a| resolve_type(a.node.as_ref().unwrap(), schema_cache)) - .collect(), - ) - }) - .collect::>(); - - if fns.len() == 1 { Some(fns[0]) } else { None } -} - -fn resolve_func_identifier(node: &pgt_query_ext::protobuf::FuncCall) -> (Option, String) { - match node.funcname.as_slice() { - [name] => (None, get_string_from_node(name)), - [schema, name] => ( - Some(get_string_from_node(schema)), - get_string_from_node(name), - ), - _ => panic!("Function name has more than 2 parts"), - } -} - -fn function_matches( - func: &Function, - schema: Option<&str>, - name: &str, - arg_types: Vec, -) -> bool { - if func.name != name { - return false; - } - - if schema.is_some() && Some(func.schema.as_str()) != schema { - return false; - } - - let arg_count = arg_types.len(); - let args_with_default = func - .args - .args - .iter() - 
.filter(|a| a.has_default.is_some()) - .count(); - let total_args = func.args.args.len(); - - if total_args < arg_count || total_args - args_with_default > arg_count { - return false; - } - - for (i, (func_arg, possible_type)) in func.args.args.iter().zip(arg_types.iter()).enumerate() { - match possible_type { - PossibleType::Null => { - // can be any type - } - PossibleType::AnyOf(types) => { - if types.iter().all(|type_id| *type_id != func_arg.type_id) { - return false; - } - } - } - - if i >= arg_count && !func_arg.has_default.unwrap_or(false) { - return false; - } - } - true -} diff --git a/crates/pgt_type_resolver/src/lib.rs b/crates/pgt_type_resolver/src/lib.rs deleted file mode 100644 index ef8fdca6..00000000 --- a/crates/pgt_type_resolver/src/lib.rs +++ /dev/null @@ -1,5 +0,0 @@ -mod functions; -mod types; -mod util; - -pub use functions::resolve_func_call; diff --git a/crates/pgt_type_resolver/src/types.rs b/crates/pgt_type_resolver/src/types.rs deleted file mode 100644 index b5560114..00000000 --- a/crates/pgt_type_resolver/src/types.rs +++ /dev/null @@ -1,79 +0,0 @@ -use pgt_schema_cache::SchemaCache; - -pub(crate) enum PossibleType { - Null, - AnyOf(Vec), -} - -pub fn resolve_type(node: &pgt_query_ext::NodeEnum, schema_cache: &SchemaCache) -> PossibleType { - match node { - pgt_query_ext::NodeEnum::AConst(n) => { - if n.isnull { - PossibleType::Null - } else { - match n - .val - .as_ref() - .expect("expected non-nullable AConst to have a value") - { - pgt_query_ext::protobuf::a_const::Val::Ival(_) => { - let types: Vec = ["int2", "int4", "int8"] - .iter() - .map(|s| s.to_string()) - .collect(); - - PossibleType::AnyOf( - schema_cache - .types - .iter() - .filter(|t| { - types.iter().any(|i| i == &t.name) && t.schema == "pg_catalog" - }) - .map(|t| t.id) - .collect(), - ) - } - pgt_query_ext::protobuf::a_const::Val::Fval(_) => { - let types: Vec = - ["float4", "float8"].iter().map(|s| s.to_string()).collect(); - - PossibleType::AnyOf( - schema_cache - .types - .iter() - .filter(|t| types.contains(&t.name) && t.schema == "pg_catalog") - .map(|t| t.id) - .collect(), - ) - } - pgt_query_ext::protobuf::a_const::Val::Boolval(_) => PossibleType::AnyOf( - schema_cache - .types - .iter() - .filter(|t| t.name == "bool" && t.schema == "pg_catalog") - .map(|t| t.id) - .collect(), - ), - pgt_query_ext::protobuf::a_const::Val::Sval(v) => { - let types: Vec = - ["text", "varchar"].iter().map(|s| s.to_string()).collect(); - - PossibleType::AnyOf( - schema_cache - .types - .iter() - .filter(|t| { - (types.iter().any(|i| i == &t.name) && t.schema == "pg_catalog") - || t.enums.values.contains(&v.sval) - }) - .map(|t| t.id) - .collect(), - ) - } - pgt_query_ext::protobuf::a_const::Val::Bsval(_) => todo!(), - } - } - } - _ => todo!(), - } -} diff --git a/crates/pgt_type_resolver/src/util.rs b/crates/pgt_type_resolver/src/util.rs deleted file mode 100644 index f10cf5bb..00000000 --- a/crates/pgt_type_resolver/src/util.rs +++ /dev/null @@ -1,6 +0,0 @@ -pub(crate) fn get_string_from_node(node: &pgt_query_ext::protobuf::Node) -> String { - match &node.node { - Some(pgt_query_ext::NodeEnum::String(s)) => s.sval.to_string(), - _ => "".to_string(), - } -} diff --git a/crates/pgt_typecheck/Cargo.toml b/crates/pgt_typecheck/Cargo.toml deleted file mode 100644 index caacc6d1..00000000 --- a/crates/pgt_typecheck/Cargo.toml +++ /dev/null @@ -1,31 +0,0 @@ -[package] -authors.workspace = true -categories.workspace = true -description = "" -edition.workspace = true -homepage.workspace = true -keywords.workspace = 
true -license.workspace = true -name = "pgt_typecheck" -repository.workspace = true -version = "0.0.0" - - -[dependencies] -pgt_console.workspace = true -pgt_diagnostics.workspace = true -pgt_query_ext.workspace = true -pgt_schema_cache.workspace = true -pgt_text_size.workspace = true -pgt_treesitter_queries.workspace = true -sqlx.workspace = true -tokio.workspace = true -tree-sitter.workspace = true -tree_sitter_sql.workspace = true - -[dev-dependencies] -insta.workspace = true -pgt_test_utils.workspace = true - -[lib] -doctest = false diff --git a/crates/pgt_typecheck/src/diagnostics.rs b/crates/pgt_typecheck/src/diagnostics.rs deleted file mode 100644 index 2117adbe..00000000 --- a/crates/pgt_typecheck/src/diagnostics.rs +++ /dev/null @@ -1,212 +0,0 @@ -use std::io; - -use pgt_console::markup; -use pgt_diagnostics::{Advices, Diagnostic, LogCategory, MessageAndDescription, Severity, Visit}; -use pgt_text_size::TextRange; -use sqlx::postgres::{PgDatabaseError, PgSeverity}; - -/// A specialized diagnostic for the typechecker. -/// -/// Type diagnostics are always **errors**. -#[derive(Clone, Debug, Diagnostic)] -#[diagnostic(category = "typecheck")] -pub struct TypecheckDiagnostic { - #[location(span)] - span: Option<TextRange>, - #[description] - #[message] - message: MessageAndDescription, - #[advice] - advices: TypecheckAdvices, - #[severity] - severity: Severity, -} - -#[derive(Debug, Clone)] -struct TypecheckAdvices { - code: String, - schema: Option<String>, - table: Option<String>, - column: Option<String>, - data_type: Option<String>, - constraint: Option<String>, - detail: Option<String>, - where_: Option<String>, - hint: Option<String>, - - #[allow(unused)] - line: Option<usize>, - #[allow(unused)] - file: Option<String>, - #[allow(unused)] - routine: Option<String>, -} - -impl Advices for TypecheckAdvices { - fn record(&self, visitor: &mut dyn Visit) -> io::Result<()> { - // First, show the error code - visitor.record_log( - LogCategory::Error, - &markup! { "Error Code: " {&self.code} }, - )?; - - // Show detailed message if available - if let Some(detail) = &self.detail { - visitor.record_log(LogCategory::Info, &detail)?; - } - - // Show object location information - if let (Some(schema), Some(table)) = (&self.schema, &self.table) { - let mut location = format!("In table: {schema}.{table}"); - if let Some(column) = &self.column { - location.push_str(&format!(", column: {column}")); - } - visitor.record_log(LogCategory::Info, &location)?; - } - - // Show constraint information - if let Some(constraint) = &self.constraint { - visitor.record_log( - LogCategory::Info, - &markup! { "Constraint: " {constraint} }, - )?; - } - - // Show data type information - if let Some(data_type) = &self.data_type { - visitor.record_log( - LogCategory::Info, - &markup! { "Data type: " {data_type} }, - )?; - } - - // Show context information - if let Some(where_) = &self.where_ { - visitor.record_log(LogCategory::Info, &markup! { "Context:\n"{where_}"" })?; - } - - // Show hint if available - if let Some(hint) = &self.hint { - visitor.record_log(LogCategory::Info, &markup!
{ "Hint: "{hint}"" })?; - } - - Ok(()) - } -} - -pub(crate) fn create_type_error( - pg_err: &PgDatabaseError, - ts: &tree_sitter::Tree, - positions_valid: bool, -) -> TypecheckDiagnostic { - let position = pg_err.position().and_then(|pos| match pos { - sqlx::postgres::PgErrorPosition::Original(pos) => Some(pos - 1), - _ => None, - }); - - let range = position.and_then(|pos| { - if positions_valid { - ts.root_node() - .named_descendant_for_byte_range(pos, pos) - .map(|node| { - TextRange::new( - node.start_byte().try_into().unwrap(), - node.end_byte().try_into().unwrap(), - ) - }) - } else { - None - } - }); - - let severity = match pg_err.severity() { - PgSeverity::Panic => Severity::Error, - PgSeverity::Fatal => Severity::Error, - PgSeverity::Error => Severity::Error, - PgSeverity::Warning => Severity::Warning, - PgSeverity::Notice => Severity::Hint, - PgSeverity::Debug => Severity::Hint, - PgSeverity::Info => Severity::Information, - PgSeverity::Log => Severity::Information, - }; - - TypecheckDiagnostic { - message: pg_err.to_string().into(), - severity, - span: range, - advices: TypecheckAdvices { - code: pg_err.code().to_string(), - hint: pg_err.hint().and_then(|s| { - if !s.is_empty() { - Some(s.to_string()) - } else { - None - } - }), - schema: pg_err.schema().and_then(|s| { - if !s.is_empty() { - Some(s.to_string()) - } else { - None - } - }), - table: pg_err.table().and_then(|s| { - if !s.is_empty() { - Some(s.to_string()) - } else { - None - } - }), - detail: pg_err.detail().and_then(|s| { - if !s.is_empty() { - Some(s.to_string()) - } else { - None - } - }), - column: pg_err.column().and_then(|s| { - if !s.is_empty() { - Some(s.to_string()) - } else { - None - } - }), - data_type: pg_err.data_type().and_then(|s| { - if !s.is_empty() { - Some(s.to_string()) - } else { - None - } - }), - constraint: pg_err.constraint().and_then(|s| { - if !s.is_empty() { - Some(s.to_string()) - } else { - None - } - }), - line: pg_err.line(), - file: pg_err.file().and_then(|s| { - if !s.is_empty() { - Some(s.to_string()) - } else { - None - } - }), - routine: pg_err.routine().and_then(|s| { - if !s.is_empty() { - Some(s.to_string()) - } else { - None - } - }), - where_: pg_err.r#where().and_then(|s| { - if !s.is_empty() { - Some(s.to_string()) - } else { - None - } - }), - }, - } -} diff --git a/crates/pgt_typecheck/src/lib.rs b/crates/pgt_typecheck/src/lib.rs deleted file mode 100644 index e1dcd259..00000000 --- a/crates/pgt_typecheck/src/lib.rs +++ /dev/null @@ -1,80 +0,0 @@ -mod diagnostics; -mod typed_identifier; - -pub use diagnostics::TypecheckDiagnostic; -use diagnostics::create_type_error; -use pgt_text_size::TextRange; -use sqlx::postgres::PgDatabaseError; -pub use sqlx::postgres::PgSeverity; -use sqlx::{Executor, PgPool}; -use typed_identifier::apply_identifiers; -pub use typed_identifier::{IdentifierType, TypedIdentifier}; - -#[derive(Debug)] -pub struct TypecheckParams<'a> { - pub conn: &'a PgPool, - pub sql: &'a str, - pub ast: &'a pgt_query_ext::NodeEnum, - pub tree: &'a tree_sitter::Tree, - pub schema_cache: &'a pgt_schema_cache::SchemaCache, - pub identifiers: Vec, -} - -#[derive(Debug, Clone)] -pub struct TypeError { - pub message: String, - pub code: String, - pub severity: PgSeverity, - pub position: Option, - pub range: Option, - pub table: Option, - pub column: Option, - pub data_type: Option, - pub constraint: Option, -} - -pub async fn check_sql( - params: TypecheckParams<'_>, -) -> Result, sqlx::Error> { - // Check if the AST is not a supported statement type - if !matches!( 
- params.ast, - pgt_query_ext::NodeEnum::SelectStmt(_) - | pgt_query_ext::NodeEnum::InsertStmt(_) - | pgt_query_ext::NodeEnum::UpdateStmt(_) - | pgt_query_ext::NodeEnum::DeleteStmt(_) - | pgt_query_ext::NodeEnum::CommonTableExpr(_) - ) { - return Ok(None); - } - - let mut conn = params.conn.acquire().await?; - - // Postgres caches prepared statements within the current DB session (connection). - // This can cause issues if the underlying table schema changes while statements - // are cached. By closing the connection after use, we ensure a fresh state for - // each typecheck operation. - conn.close_on_drop(); - - let (prepared, positions_valid) = apply_identifiers( - params.identifiers, - params.schema_cache, - params.tree, - params.sql, - ); - - let res = conn.prepare(&prepared).await; - - match res { - Ok(_) => Ok(None), - Err(sqlx::Error::Database(err)) => { - let pg_err = err.downcast_ref::(); - Ok(Some(create_type_error( - pg_err, - params.tree, - positions_valid, - ))) - } - Err(err) => Err(err), - } -} diff --git a/crates/pgt_typecheck/src/typed_identifier.rs b/crates/pgt_typecheck/src/typed_identifier.rs deleted file mode 100644 index 5efe0421..00000000 --- a/crates/pgt_typecheck/src/typed_identifier.rs +++ /dev/null @@ -1,342 +0,0 @@ -use pgt_schema_cache::PostgresType; -use pgt_treesitter_queries::{TreeSitterQueriesExecutor, queries::ParameterMatch}; - -/// A typed identifier is a parameter that has a type associated with it. -/// It is used to replace parameters within the SQL string. -#[derive(Debug)] -pub struct TypedIdentifier { - /// The path of the parameter, usually the name of the function. - /// This is because `fn_name.arg_name` is a valid reference within a SQL function. - pub path: String, - /// The name of the argument - pub name: Option, - /// The type of the argument with schema and name - pub type_: IdentifierType, -} - -#[derive(Debug, Clone)] -pub struct IdentifierType { - pub schema: Option, - pub name: String, - pub is_array: bool, -} - -/// Applies the identifiers to the SQL string by replacing them with their default values. 
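For a function argument like `v_test int4`, "applying" means splicing a type-appropriate literal over each reference so the statement can be prepared server-side. A small illustration of the input shape, mirroring the unit test at the end of this file:

```rust
use pgt_typecheck::{IdentifierType, TypedIdentifier};

// `fn_name(v_test int4)`: inside the function body both `v_test` and
// `fn_name.v_test` resolve to this argument.
fn example_identifier() -> TypedIdentifier {
    TypedIdentifier {
        path: "fn_name".to_string(),
        name: Some("v_test".to_string()),
        type_: IdentifierType {
            schema: None,
            name: "int4".to_string(),
            is_array: false,
        },
    }
}

// With this identifier, `select v_test + 1` is rewritten to
// `select 0      + 1`: the int4 default `0` is right-padded to the width of
// the original reference so byte offsets in later errors stay valid.
```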
-pub fn apply_identifiers<'a>( - identifiers: Vec<TypedIdentifier>, - schema_cache: &'a pgt_schema_cache::SchemaCache, - cst: &'a tree_sitter::Tree, - sql: &'a str, -) -> (String, bool) { - let mut executor = TreeSitterQueriesExecutor::new(cst.root_node(), sql); - - executor.add_query_results::<ParameterMatch>(); - - // Collect all replacements first to avoid modifying the string while iterating - let replacements: Vec<_> = executor - .get_iter(None) - .filter_map(|q| { - let m: &ParameterMatch = q.try_into().ok()?; - let path = m.get_path(sql); - let parts: Vec<_> = path.split('.').collect(); - - // Find the matching identifier and its position in the path - let (identifier, position) = find_matching_identifier(&parts, &identifiers)?; - - // Resolve the type based on whether we're accessing a field of a composite type - let type_ = resolve_type(identifier, position, &parts, schema_cache)?; - - Some((m.get_byte_range(), type_, identifier.type_.is_array)) - }) - .collect(); - - let mut result = sql.to_string(); - - let mut valid_positions = true; - - // Apply replacements in reverse order to maintain correct byte offsets - for (range, type_, is_array) in replacements.into_iter().rev() { - let default_value = get_formatted_default_value(type_, is_array); - - // if the default_value is shorter than "range", fill it up with spaces - let default_value = if default_value.len() < range.end - range.start { - format!("{: <width$}", default_value, width = range.end - range.start) - } else { - default_value - }; - - if default_value.len() > range.end - range.start { - valid_positions = false; - } - - result.replace_range(range, &default_value); - } - - (result, valid_positions) -} - -/// Format the default value based on the type and whether it's an array -fn get_formatted_default_value(pg_type: &PostgresType, is_array: bool) -> String { - // Get the base default value for this type - let default = resolve_default_value(pg_type); - - let default = if default.len() > "NULL".len() { - // If the default value is longer than "NULL", use "NULL" instead - "NULL".to_string() - } else { - // Otherwise, use the default value - default - }; - - // For arrays, wrap the default in array syntax - if is_array { - format!("'{{{}}}'", default) - } else { - default - } -} - -/// Resolve the default value for a given Postgres type. -/// -/// * `pg_type`: The type to return the default value for.
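The two rules in `get_formatted_default_value` interact: a base default longer than `NULL` is replaced by `NULL`, and arrays wrap whatever remains. A few concrete outcomes, sketched with assumed types (real `PostgresType` values come from a live schema cache):

```rust
// Illustrative outcomes of get_formatted_default_value:
//
//   int4,        not array -> "0"
//   timestamptz, not array -> "NULL"  (default "'1970-01-01 00:00:00+00'"
//                                      is longer than "NULL")
//   int4,        array     -> "'{0}'" (array literal around the default)
//   enum type,   not array -> "NULL"  (a first variant like 'critical'
//                                      is longer than "NULL")
```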
-pub fn resolve_default_value(pg_type: &PostgresType) -> String { - // Handle ENUM types by returning the first variant - if !pg_type.enums.values.is_empty() { - return format!("'{}'", pg_type.enums.values[0]); - } - - match pg_type.name.as_str() { - // Numeric types - "smallint" | "int2" | "integer" | "int" | "int4" | "bigint" | "int8" | "decimal" - | "numeric" | "real" | "float4" | "double precision" | "float8" | "smallserial" - | "serial2" | "serial" | "serial4" | "bigserial" | "serial8" => "0".to_string(), - - // Boolean type - "boolean" | "bool" => "false".to_string(), - - // Character types - "character" | "char" | "character varying" | "varchar" | "text" => "''".to_string(), - - // Date/time types - "date" => "'1970-01-01'".to_string(), - "time" | "time without time zone" => "'00:00:00'".to_string(), - "time with time zone" | "timetz" => "'00:00:00+00'".to_string(), - "timestamp" | "timestamp without time zone" => "'1970-01-01 00:00:00'".to_string(), - "timestamp with time zone" | "timestamptz" => "'1970-01-01 00:00:00+00'".to_string(), - "interval" => "'0'".to_string(), - - // JSON types - "json" | "jsonb" => "'null'".to_string(), - - // UUID - "uuid" => "'00000000-0000-0000-0000-000000000000'".to_string(), - - // Byte array - "bytea" => "'\\x'".to_string(), - - // Network types - "inet" => "'0.0.0.0'".to_string(), - "cidr" => "'0.0.0.0/0'".to_string(), - "macaddr" => "'00:00:00:00:00:00'".to_string(), - "macaddr8" => "'00:00:00:00:00:00:00:00'".to_string(), - - // Monetary type - "money" => "'0.00'".to_string(), - - // Geometric types - "point" => "'(0,0)'".to_string(), - "line" => "'{0,0,0}'".to_string(), - "lseg" => "'[(0,0),(0,0)]'".to_string(), - "box" => "'((0,0),(0,0))'".to_string(), - "path" => "'((0,0),(0,0))'".to_string(), - "polygon" => "'((0,0),(0,0),(0,0))'".to_string(), - "circle" => "'<(0,0),0>'".to_string(), - - // Text search types - "tsvector" => "''".to_string(), - "tsquery" => "''".to_string(), - - // XML - "xml" => "''".to_string(), - - // Log sequence number - "pg_lsn" => "'0/0'".to_string(), - - // Snapshot types - "txid_snapshot" | "pg_snapshot" => "NULL".to_string(), - - // Fallback for unrecognized types - _ => "NULL".to_string(), - } -} - -// Helper function to find the matching identifier and its position in the path -fn find_matching_identifier<'a>( - parts: &[&str], - identifiers: &'a [TypedIdentifier], -) -> Option<(&'a TypedIdentifier, usize)> { - // Case 1: Parameter reference (e.g., $2) - if parts.len() == 1 && parts[0].starts_with('$') { - let idx = parts[0][1..].parse::().ok()?; - let identifier = identifiers.get(idx - 1)?; - return Some((identifier, idx)); - } - - // Case 2: Named reference (e.g., fn_name.custom_type.v_test2) - identifiers.iter().find_map(|identifier| { - let name = identifier.name.as_ref()?; - - parts - .iter() - .enumerate() - .find(|(_idx, part)| **part == name) - .map(|(idx, _)| (identifier, idx)) - }) -} - -// Helper function to resolve the type based on the identifier and path -fn resolve_type<'a>( - identifier: &TypedIdentifier, - position: usize, - parts: &[&str], - schema_cache: &'a pgt_schema_cache::SchemaCache, -) -> Option<&'a PostgresType> { - if position < parts.len() - 1 { - // Find the composite type - let schema_type = schema_cache.types.iter().find(|t| { - identifier - .type_ - .schema - .as_ref() - .is_none_or(|s| t.schema == *s) - && t.name == *identifier.type_.name - })?; - - // Find the field within the composite type - let field_name = parts.last().unwrap(); - let field = schema_type - .attributes - .attrs 
- .iter() - .find(|a| a.name == *field_name)?; - - // Find the field's type - schema_cache.types.iter().find(|t| t.id == field.type_id) - } else { - // Direct type reference - schema_cache.find_type(&identifier.type_.name, identifier.type_.schema.as_deref()) - } -} - -#[cfg(test)] -mod tests { - use pgt_test_utils::test_database::get_new_test_db; - use sqlx::Executor; - - #[tokio::test] - async fn test_apply_identifiers() { - let input = "select v_test + fn_name.custom_type.v_test2 + $3 + custom_type.v_test3 + fn_name.v_test2 + enum_type"; - - let identifiers = vec![ - super::TypedIdentifier { - path: "fn_name".to_string(), - name: Some("v_test".to_string()), - type_: super::IdentifierType { - schema: None, - name: "int4".to_string(), - is_array: false, - }, - }, - super::TypedIdentifier { - path: "fn_name".to_string(), - name: Some("custom_type".to_string()), - type_: super::IdentifierType { - schema: Some("public".to_string()), - name: "custom_type".to_string(), - is_array: false, - }, - }, - super::TypedIdentifier { - path: "fn_name".to_string(), - name: Some("another".to_string()), - type_: super::IdentifierType { - schema: None, - name: "numeric".to_string(), - is_array: false, - }, - }, - super::TypedIdentifier { - path: "fn_name".to_string(), - name: Some("custom_type".to_string()), - type_: super::IdentifierType { - schema: Some("public".to_string()), - name: "custom_type".to_string(), - is_array: false, - }, - }, - super::TypedIdentifier { - path: "fn_name".to_string(), - name: Some("v_test2".to_string()), - type_: super::IdentifierType { - schema: None, - name: "int4".to_string(), - is_array: false, - }, - }, - super::TypedIdentifier { - path: "fn_name".to_string(), - name: Some("enum_type".to_string()), - type_: super::IdentifierType { - schema: Some("public".to_string()), - name: "enum_type".to_string(), - is_array: false, - }, - }, - ]; - - let test_db = get_new_test_db().await; - - let setup = r#" - CREATE TYPE "public"."custom_type" AS ( - v_test2 integer, - v_test3 integer - ); - - CREATE TYPE "public"."enum_type" AS ENUM ( - 'critical', - 'high', - 'default', - 'low', - 'very_low' - ); - "#; - - test_db - .execute(setup) - .await - .expect("Failed to setup test database"); - - let mut parser = tree_sitter::Parser::new(); - parser - .set_language(tree_sitter_sql::language()) - .expect("Error loading sql language"); - - let schema_cache = pgt_schema_cache::SchemaCache::load(&test_db) - .await - .expect("Failed to load Schema Cache"); - - let tree = parser.parse(input, None).unwrap(); - - let (sql_out, valid_pos) = - super::apply_identifiers(identifiers, &schema_cache, &tree, input); - - assert!(valid_pos); - assert_eq!( - sql_out, - // the numeric parameters are filled with 0; - // all values of the enums are longer than `NULL`, so we use `NULL` instead - "select 0 + 0 + 0 + 0 + 0 + NULL " - ); - } -} diff --git a/crates/pgt_typecheck/tests/diagnostics.rs b/crates/pgt_typecheck/tests/diagnostics.rs deleted file mode 100644 index 9628962d..00000000 --- a/crates/pgt_typecheck/tests/diagnostics.rs +++ /dev/null @@ -1,78 +0,0 @@ -use pgt_console::{ - fmt::{Formatter, HTML}, - markup, -}; -use pgt_diagnostics::PrintDiagnostic; -use pgt_test_utils::test_database::get_new_test_db; -use pgt_typecheck::{TypecheckParams, check_sql}; -use sqlx::Executor; - -async fn test(name: &str, query: &str, setup: Option<&str>) { - let test_db = get_new_test_db().await; - - if let Some(setup) = setup { - test_db - .execute(setup) - .await - .expect("Failed to setup test database"); - } - - let mut 
parser = tree_sitter::Parser::new(); - parser - .set_language(tree_sitter_sql::language()) - .expect("Error loading sql language"); - - let schema_cache = pgt_schema_cache::SchemaCache::load(&test_db) - .await - .expect("Failed to load Schema Cache"); - - let root = pgt_query_ext::parse(query).unwrap(); - let tree = parser.parse(query, None).unwrap(); - - let conn = &test_db; - let result = check_sql(TypecheckParams { - conn, - sql: query, - ast: &root, - tree: &tree, - schema_cache: &schema_cache, - identifiers: vec![], - }) - .await; - - let mut content = vec![]; - let mut writer = HTML::new(&mut content); - - Formatter::new(&mut writer) - .write_markup(markup! { - {PrintDiagnostic::simple(&result.unwrap().unwrap())} - }) - .unwrap(); - - let content = String::from_utf8(content).unwrap(); - - insta::with_settings!({ - prepend_module_to_snapshot => false, - }, { - insta::assert_snapshot!(name, content); - }); -} - -#[tokio::test] -async fn invalid_column() { - test( - "invalid_column", - "select id, unknown from contacts;", - Some( - r#" - create table public.contacts ( - id serial primary key, - name varchar(255) not null, - is_vegetarian bool default false, - middle_name varchar(255) - ); - "#, - ), - ) - .await; -} diff --git a/crates/pgt_typecheck/tests/snapshots/invalid_column.snap b/crates/pgt_typecheck/tests/snapshots/invalid_column.snap deleted file mode 100644 index aa29a529..00000000 --- a/crates/pgt_typecheck/tests/snapshots/invalid_column.snap +++ /dev/null @@ -1,10 +0,0 @@ ---- -source: crates/pgt_typecheck/tests/diagnostics.rs -expression: normalized -snapshot_kind: text ---- -typecheck ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ - - column "unknown" does not exist - - Error Code: 42703 diff --git a/crates/pgt_workspace/Cargo.toml b/crates/pgt_workspace/Cargo.toml deleted file mode 100644 index 5f598b2d..00000000 --- a/crates/pgt_workspace/Cargo.toml +++ /dev/null @@ -1,66 +0,0 @@ -[package] -authors.workspace = true -categories.workspace = true -description = "" -edition.workspace = true -homepage.workspace = true -keywords.workspace = true -license.workspace = true -name = "pgt_workspace" -repository.workspace = true -version = "0.0.0" - - -[dependencies] -biome_deserialize = "0.6.0" -dashmap = "5.5.3" -futures = "0.3.31" -globset = "0.4.16" - -ignore = { workspace = true } -pgt_analyse = { workspace = true, features = ["serde"] } -pgt_analyser = { workspace = true } -pgt_completions = { workspace = true } -pgt_configuration = { workspace = true } -pgt_console = { workspace = true } -pgt_diagnostics = { workspace = true } -pgt_fs = { workspace = true, features = ["serde"] } -pgt_lexer = { workspace = true } -pgt_query_ext = { workspace = true } -pgt_schema_cache = { workspace = true } -pgt_statement_splitter = { workspace = true } -pgt_text_size.workspace = true -pgt_typecheck = { workspace = true } -rustc-hash = { workspace = true } -schemars = { workspace = true, optional = true } -serde = { workspace = true, features = ["derive"] } -serde_json = { workspace = true, features = ["raw_value"] } -sqlx.workspace = true -strum = { workspace = true } -tokio = { workspace = true, features = ["rt", "rt-multi-thread"] } -tracing = { workspace = true, features = ["attributes", "log"] } -tree-sitter.workspace = true -tree_sitter_sql.workspace = true - -biome_js_factory = { workspace = true, optional = true } -biome_js_syntax = { workspace = true, optional = true } -biome_rowan = { workspace = true, optional = true } - -[features] 
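The `schema` feature below exists so JSON-schema generation stays optional; in source this typically surfaces as feature-gated derives, sketched here with an invented struct since the crate's exact attributes are not shown in this diff:

```rust
// Sketch of a "schema"-gated derive. `ExampleSettings` is hypothetical;
// pgt_workspace's real types and attributes may differ.
#[derive(serde::Serialize, serde::Deserialize)]
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
pub struct ExampleSettings {
    pub enabled: bool,
}
```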
-schema = [
-  "dep:schemars",
-  "dep:biome_rowan",
-  "dep:biome_js_syntax",
-  "dep:biome_js_factory",
-  "pgt_configuration/schema",
-  "pgt_diagnostics/schema",
-  "pgt_fs/schema",
-  "pgt_analyse/schema",
-  "pgt_completions/schema",
-]
-
-[dev-dependencies]
-tempfile = "3.15.0"
-
-[lib]
-doctest = false
diff --git a/crates/pgt_workspace/src/configuration.rs b/crates/pgt_workspace/src/configuration.rs
deleted file mode 100644
index 88c04eec..00000000
--- a/crates/pgt_workspace/src/configuration.rs
+++ /dev/null
@@ -1,373 +0,0 @@
-use std::{
-    io::ErrorKind,
-    ops::Deref,
-    path::{Path, PathBuf},
-};
-
-use pgt_analyse::AnalyserRules;
-use pgt_configuration::{
-    ConfigurationDiagnostic, ConfigurationPathHint, ConfigurationPayload, PartialConfiguration,
-    VERSION, push_to_analyser_rules,
-};
-use pgt_fs::{AutoSearchResult, ConfigName, FileSystem, OpenOptions};
-
-use crate::{DynRef, WorkspaceError, settings::Settings};
-
-/// Information regarding the configuration that was found.
-///
-/// This contains the expanded configuration including default values where no
-/// configuration was present.
-#[derive(Default, Debug)]
-pub struct LoadedConfiguration {
-    /// If present, the path of the directory where it was found
-    pub directory_path: Option<PathBuf>,
-    /// If present, the path of the file where it was found
-    pub file_path: Option<PathBuf>,
-    /// The deserialized configuration
-    pub configuration: PartialConfiguration,
-}
-
-impl LoadedConfiguration {
-    /// Return the path of the **directory** where the configuration is
-    pub fn directory_path(&self) -> Option<&Path> {
-        self.directory_path.as_deref()
-    }
-
-    /// Return the path of the **file** where the configuration is
-    pub fn file_path(&self) -> Option<&Path> {
-        self.file_path.as_deref()
-    }
-}
-
-impl From<Option<ConfigurationPayload>> for LoadedConfiguration {
-    fn from(value: Option<ConfigurationPayload>) -> Self {
-        let Some(value) = value else {
-            return LoadedConfiguration::default();
-        };
-
-        let ConfigurationPayload {
-            configuration_file_path,
-            deserialized: partial_configuration,
-            ..
-        } = value;
-
-        LoadedConfiguration {
-            configuration: partial_configuration,
-            directory_path: configuration_file_path.parent().map(PathBuf::from),
-            file_path: Some(configuration_file_path),
-        }
-    }
-}
-
-/// Load the partial configuration for this session of the CLI.
-pub fn load_configuration(
-    fs: &DynRef<'_, dyn FileSystem>,
-    config_path: ConfigurationPathHint,
-) -> Result<LoadedConfiguration, WorkspaceError> {
-    let config = load_config(fs, config_path)?;
-    Ok(LoadedConfiguration::from(config))
-}
-
-/// - [Result]: if an error occurred while loading the configuration file.
-/// - [Option]: sometimes not having a configuration file should not be an error, so we need this type.
-/// - [ConfigurationPayload]: the result of the operation
-type LoadConfig = Result<Option<ConfigurationPayload>, WorkspaceError>;
-
-/// Load the configuration from the file system.
-///
-/// The configuration file will be read from the `file_system`. A [path hint](ConfigurationPathHint) should be provided.
-fn load_config(
-    file_system: &DynRef<'_, dyn FileSystem>,
-    base_path: ConfigurationPathHint,
-) -> LoadConfig {
-    // This path is used for configuration resolution from external packages.
-    let external_resolution_base_path = match base_path {
-        // Path hint from LSP is always the workspace root
-        // we use it as the resolution base path.
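-        // In short:
-        //   FromLsp(root) | FromWorkspace(root) -> root (both already carry
-        //                                          the workspace root)
-        //   FromUser(_)   | None                -> the working directory, or
-        //                                          an empty PathBuf if unknown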
- ConfigurationPathHint::FromLsp(ref path) => path.clone(), - ConfigurationPathHint::FromWorkspace(ref path) => path.clone(), - // Path hint from user means the command is invoked from the CLI - // So we use the working directory (CWD) as the resolution base path - ConfigurationPathHint::FromUser(_) | ConfigurationPathHint::None => file_system - .working_directory() - .map_or(PathBuf::new(), |working_directory| working_directory), - }; - - // If the configuration path hint is from user and is a file path, - // we'll load it directly - if let ConfigurationPathHint::FromUser(ref config_file_path) = base_path { - if file_system.path_is_file(config_file_path) { - let content = strip_jsonc_comments(&file_system.read_file_from_path(config_file_path)?); - - let deserialized = serde_json::from_str::(&content) - .map_err(ConfigurationDiagnostic::new_deserialization_error)?; - - return Ok(Some(ConfigurationPayload { - deserialized, - configuration_file_path: PathBuf::from(config_file_path), - external_resolution_base_path, - })); - } - } - - // If the configuration path hint is not a file path - // we'll auto search for the configuration file - let should_error = base_path.is_from_user(); - let configuration_directory = match base_path { - ConfigurationPathHint::FromLsp(path) => path, - ConfigurationPathHint::FromUser(path) => path, - ConfigurationPathHint::FromWorkspace(path) => path, - ConfigurationPathHint::None => file_system.working_directory().unwrap_or_default(), - }; - - // We first search for `postgrestools.jsonc` - if let Some(auto_search_result) = file_system.auto_search( - &configuration_directory, - ConfigName::file_names().as_slice(), - should_error, - )? { - let AutoSearchResult { content, file_path } = auto_search_result; - - let deserialized = - serde_json::from_str::(&strip_jsonc_comments(&content)) - .map_err(ConfigurationDiagnostic::new_deserialization_error)?; - - Ok(Some(ConfigurationPayload { - deserialized, - configuration_file_path: file_path, - external_resolution_base_path, - })) - } else { - Ok(None) - } -} - -/// Creates a new configuration on file system -/// -/// ## Errors -/// -/// It fails if: -/// - the configuration file already exists -/// - the program doesn't have the write rights -pub fn create_config( - fs: &mut DynRef, - configuration: &mut PartialConfiguration, -) -> Result<(), WorkspaceError> { - let path = PathBuf::from(ConfigName::pgt_jsonc()); - - if fs.path_exists(&path) { - return Err(ConfigurationDiagnostic::new_already_exists().into()); - } - - let options = OpenOptions::default().write(true).create_new(true); - - let mut config_file = fs.open_with_options(&path, options).map_err(|err| { - if err.kind() == ErrorKind::AlreadyExists { - ConfigurationDiagnostic::new_already_exists().into() - } else { - WorkspaceError::cant_read_file(format!("{}", path.display())) - } - })?; - - // we now check if postgrestools is installed inside `node_modules` and if so, we use the schema from there - let node_schema_path = Path::new("./node_modules/@postgrestools/postgrestools/schema.json"); - let options = OpenOptions::default().read(true); - if fs.open_with_options(node_schema_path, options).is_ok() { - configuration.schema = node_schema_path.to_str().map(String::from); - } else if VERSION == "0.0.0" { - // VERSION is 0.0.0 if it has not been explicitly set (e.g local dev, as fallback) - configuration.schema = Some("https://pgtools.dev/schemas/latest/schema.json".to_string()); - } else { - configuration.schema = 
Some(format!("https://pgtools.dev/schemas/{VERSION}/schema.json")); - } - - let contents = serde_json::to_string_pretty(&configuration) - .map_err(|_| ConfigurationDiagnostic::new_serialization_error())?; - - config_file - .set_content(contents.as_bytes()) - .map_err(|_| WorkspaceError::cant_read_file(format!("{}", path.display())))?; - - Ok(()) -} - -/// Returns the rules applied to a specific [Path], given the [Settings] -pub fn to_analyser_rules(settings: &Settings) -> AnalyserRules { - let mut analyser_rules = AnalyserRules::default(); - if let Some(rules) = settings.linter.rules.as_ref() { - push_to_analyser_rules(rules, pgt_analyser::METADATA.deref(), &mut analyser_rules); - } - analyser_rules -} - -/// Takes a string of jsonc content and returns a comment free version -/// which should parse fine as regular json. -/// Nested block comments are supported. -pub fn strip_jsonc_comments(jsonc_input: &str) -> String { - let mut json_output = String::new(); - - let mut block_comment_depth: u8 = 0; - let mut is_in_string: bool = false; // Comments cannot be in strings - - for line in jsonc_input.split('\n') { - let mut last_char: Option = None; - for cur_char in line.chars() { - // Check whether we're in a string - if block_comment_depth == 0 && last_char != Some('\\') && cur_char == '"' { - is_in_string = !is_in_string; - } - - // Check for line comment start - if !is_in_string && last_char == Some('/') && cur_char == '/' { - last_char = None; - json_output.push_str(" "); - break; // Stop outputting or parsing this line - } - // Check for block comment start - if !is_in_string && last_char == Some('/') && cur_char == '*' { - block_comment_depth += 1; - last_char = None; - json_output.push_str(" "); - // Check for block comment end - } else if !is_in_string && last_char == Some('*') && cur_char == '/' { - block_comment_depth = block_comment_depth.saturating_sub(1); - last_char = None; - json_output.push_str(" "); - // Output last char if not in any block comment - } else { - if block_comment_depth == 0 { - if let Some(last_char) = last_char { - json_output.push(last_char); - } - } else { - json_output.push(' '); - } - last_char = Some(cur_char); - } - } - - // Add last char and newline if not in any block comment - if let Some(last_char) = last_char { - if block_comment_depth == 0 { - json_output.push(last_char); - } else { - json_output.push(' '); - } - } - - // Remove trailing whitespace from line - while json_output.ends_with(' ') { - json_output.pop(); - } - json_output.push('\n'); - } - - json_output -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_strip_jsonc_comments_line_comments() { - let input = r#"{ - "name": "test", // This is a line comment - "value": 42 // Another comment -}"#; - - let expected = r#"{ - "name": "test", - "value": 42 -} -"#; - - assert_eq!(strip_jsonc_comments(input), expected); - } - - #[test] - fn test_strip_jsonc_comments_block_comments() { - let input = r#"{ - /* This is a block comment */ - "name": "test", - "value": /* inline comment */ 42 -}"#; - - let expected = r#"{ - - "name": "test", - "value": 42 -} -"#; - - assert_eq!(strip_jsonc_comments(input), expected); - } - - #[test] - fn test_strip_jsonc_comments_nested_block_comments() { - let input = r#"{ - /* Outer comment /* Nested comment */ still outer */ - "name": "test" -}"#; - - let expected = r#"{ - - "name": "test" -} -"#; - - assert_eq!(strip_jsonc_comments(input), expected); - } - - #[test] - fn test_strip_jsonc_comments_in_strings() { - let input = r#"{ - "comment_like": 
"This is not a // comment", - "another": "This is not a /* block comment */ either" -}"#; - - let expected = r#"{ - "comment_like": "This is not a // comment", - "another": "This is not a /* block comment */ either" -} -"#; - - assert_eq!(strip_jsonc_comments(input), expected); - } - - #[test] - fn test_strip_jsonc_comments_escaped_quotes() { - let input = r#"{ - "escaped\": \"quote": "value", // Comment after escaped quotes - "normal": "value" // Normal comment -}"#; - - let expected = r#"{ - "escaped\": \"quote": "value", - "normal": "value" -} -"#; - - assert_eq!(strip_jsonc_comments(input), expected); - } - - #[test] - fn test_strip_jsonc_comments_multiline_block() { - let input = r#"{ - /* This is a - multiline block - comment */ - "name": "test" -}"#; - - let expected = r#"{ - - - - "name": "test" -} -"#; - - assert_eq!(strip_jsonc_comments(input), expected); - } -} diff --git a/crates/pgt_workspace/src/diagnostics.rs b/crates/pgt_workspace/src/diagnostics.rs deleted file mode 100644 index 9ba02a1a..00000000 --- a/crates/pgt_workspace/src/diagnostics.rs +++ /dev/null @@ -1,356 +0,0 @@ -use pgt_configuration::ConfigurationDiagnostic; -use pgt_console::fmt::Bytes; -use pgt_console::markup; -use pgt_diagnostics::{ - Advices, Category, Diagnostic, DiagnosticTags, LogCategory, Severity, Visit, category, -}; -use pgt_fs::FileSystemDiagnostic; -use serde::{Deserialize, Serialize}; -use std::error::Error; -use std::fmt; -use std::fmt::{Debug, Display, Formatter}; -use std::process::{ExitCode, Termination}; -use tokio::task::JoinError; - -/// Generic errors thrown during operations -#[derive(Deserialize, Diagnostic, Serialize)] -pub enum WorkspaceError { - /// Error thrown when validating the configuration. Once deserialized, further checks have to be done. 
- Configuration(ConfigurationDiagnostic), - /// Error when trying to access the database - DatabaseConnectionError(DatabaseConnectionError), - /// Diagnostics emitted when querying the file system - FileSystem(FileSystemDiagnostic), - /// Thrown when we can't read a generic directory - CantReadDirectory(CantReadDirectory), - /// Thrown when we can't read a generic file - CantReadFile(CantReadFile), - /// The file does not exist in the [crate::Workspace] - NotFound(NotFound), - /// Error emitted by the underlying transport layer for a remote Workspace - TransportError(TransportError), - /// Emitted when the file is ignored and should not be processed - FileIgnored(FileIgnored), - /// Emitted when a file could not be parsed because it's larger than the size limit - FileTooLarge(FileTooLarge), - /// Diagnostic raised when a file is protected - ProtectedFile(ProtectedFile), - /// Raised when there's an issue around the VCS integration - Vcs(VcsDiagnostic), - /// Error in the async runtime - RuntimeError(RuntimeError), -} - -impl WorkspaceError { - pub fn cant_read_file(path: String) -> Self { - Self::CantReadFile(CantReadFile { path }) - } - - pub fn not_found() -> Self { - Self::NotFound(NotFound) - } - - pub fn protected_file(file_path: impl Into) -> Self { - Self::ProtectedFile(ProtectedFile { - file_path: file_path.into(), - verbose_advice: ProtectedFileAdvice, - }) - } - - pub fn vcs_disabled() -> Self { - Self::Vcs(VcsDiagnostic::DisabledVcs(DisabledVcs {})) - } - - pub fn runtime(msg: &str) -> Self { - Self::RuntimeError(RuntimeError { - message: msg.into(), - }) - } -} - -impl Error for WorkspaceError {} - -impl Debug for WorkspaceError { - fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { - std::fmt::Display::fmt(self, f) - } -} - -impl Display for WorkspaceError { - fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { - Diagnostic::description(self, f) - } -} - -impl From for WorkspaceError { - fn from(err: TransportError) -> Self { - Self::TransportError(err) - } -} - -impl Termination for WorkspaceError { - fn report(self) -> ExitCode { - ExitCode::FAILURE - } -} - -impl From for WorkspaceError { - fn from(err: FileSystemDiagnostic) -> Self { - Self::FileSystem(err) - } -} - -impl From for WorkspaceError { - fn from(err: ConfigurationDiagnostic) -> Self { - Self::Configuration(err) - } -} - -#[derive(Debug, Serialize, Deserialize)] -/// Error emitted by the underlying transport layer for a remote Workspace -pub enum TransportError { - /// Error emitted by the transport layer if the connection was lost due to an I/O error - ChannelClosed, - /// Error emitted by the transport layer if a request timed out - Timeout, - /// Error caused by a serialization or deserialization issue - SerdeError(String), - /// Generic error type for RPC errors that can't be deserialized into RomeError - RPCError(String), -} - -impl Display for TransportError { - fn fmt(&self, fmt: &mut Formatter) -> fmt::Result { - self.description(fmt) - } -} - -impl Diagnostic for TransportError { - fn category(&self) -> Option<&'static Category> { - Some(category!("internalError/io")) - } - - fn severity(&self) -> Severity { - Severity::Error - } - - fn description(&self, fmt: &mut Formatter<'_>) -> fmt::Result { - match self { - TransportError::SerdeError(err) => write!(fmt, "serialization error: {err}"), - TransportError::ChannelClosed => fmt.write_str( - "a request to the remote workspace failed because the connection was interrupted", - ), - TransportError::Timeout => { - fmt.write_str("the request to the 
remote workspace timed out") - } - TransportError::RPCError(err) => fmt.write_str(err), - } - } - - fn message(&self, fmt: &mut pgt_console::fmt::Formatter<'_>) -> std::io::Result<()> { - match self { - TransportError::SerdeError(err) => write!(fmt, "serialization error: {err}"), - TransportError::ChannelClosed => fmt.write_str( - "a request to the remote workspace failed because the connection was interrupted", - ), - TransportError::Timeout => { - fmt.write_str("the request to the remote workspace timed out") - } - TransportError::RPCError(err) => fmt.write_str(err), - } - } - fn tags(&self) -> DiagnosticTags { - DiagnosticTags::INTERNAL - } -} - -#[derive(Debug, Deserialize, Diagnostic, Serialize)] -pub enum VcsDiagnostic { - /// When the VCS folder couldn't be found - NoVcsFolderFound(NoVcsFolderFound), - /// VCS is disabled - DisabledVcs(DisabledVcs), -} - -#[derive(Debug, Diagnostic, Serialize, Deserialize)] -#[diagnostic( - category = "internalError/fs", - severity = Warning, - message = "Couldn't determine a directory for the VCS integration. VCS integration will be disabled." -)] -pub struct DisabledVcs {} - -#[derive(Debug, Diagnostic, Serialize, Deserialize)] -#[diagnostic( - category = "internalError/runtime", - severity = Error, - message = "An error occurred in the async runtime." -)] -pub struct RuntimeError { - message: String, -} - -impl From for WorkspaceError { - fn from(err: JoinError) -> Self { - Self::RuntimeError(RuntimeError { - message: err.to_string(), - }) - } -} - -#[derive(Debug, Diagnostic, Serialize, Deserialize)] -#[diagnostic( - category = "internalError/fs", - severity = Error, - message( - description = "Couldn't find the VCS folder at the following path: {path}", - message("Couldn't find the VCS folder at the following path: "{self.path}), - ) -)] -pub struct NoVcsFolderFound { - #[location(resource)] - pub path: String, -} - -impl From for WorkspaceError { - fn from(value: VcsDiagnostic) -> Self { - Self::Vcs(value) - } -} - -#[derive(Debug, Serialize, Deserialize, Diagnostic)] -#[diagnostic( - category = "database/connection", - message = "Database error: {message}" -)] -pub struct DatabaseConnectionError { - message: String, - code: Option, -} - -impl From for WorkspaceError { - fn from(err: sqlx::Error) -> Self { - let db_err = err.as_database_error(); - if let Some(db_err) = db_err { - Self::DatabaseConnectionError(DatabaseConnectionError { - message: db_err.message().to_string(), - code: db_err.code().map(|c| c.to_string()), - }) - } else { - Self::DatabaseConnectionError(DatabaseConnectionError { - message: err.to_string(), - code: None, - }) - } - } -} - -#[derive(Debug, Serialize, Deserialize, Diagnostic)] -#[diagnostic( - category = "internalError/fs", - message = "The file does not exist in the workspace.", - tags(INTERNAL) -)] -pub struct NotFound; - -#[derive(Debug, Serialize, Deserialize, Diagnostic)] -#[diagnostic( - category = "project", - severity = Information, - message( - message("The file "{self.file_path}" is protected because is handled by another tool. We won't process it."), - description = "The file {file_path} is protected because is handled by another tool. 
We won't process it.", - ), - tags(VERBOSE) -)] -pub struct ProtectedFile { - #[location(resource)] - pub file_path: String, - - #[verbose_advice] - pub verbose_advice: ProtectedFileAdvice, -} - -#[derive(Debug, Serialize, Deserialize)] -pub struct ProtectedFileAdvice; - -impl Advices for ProtectedFileAdvice { - fn record(&self, visitor: &mut dyn Visit) -> std::io::Result<()> { - visitor.record_log(LogCategory::Info, &markup! { "You can hide this diagnostic by using ""--diagnostic-level=warn"" to increase the diagnostic level shown by CLI." }) - } -} - -#[derive(Debug, Serialize, Deserialize, Diagnostic)] -#[diagnostic( - category = "internalError/fs", - message( - message("We couldn't read the following directory, maybe for permissions reasons or it doesn't exist: "{self.path}), - description = "We couldn't read the following directory, maybe for permissions reasons or it doesn't exist: {path}" - ) -)] -pub struct CantReadDirectory { - #[location(resource)] - path: String, -} - -#[derive(Debug, Serialize, Deserialize, Diagnostic)] -#[diagnostic( - category = "internalError/fs", - message( - message("We couldn't read the following file, maybe for permissions reasons or it doesn't exist: "{self.path}), - description = "We couldn't read the following file, maybe for permissions reasons or it doesn't exist: {path}" - ) -)] -pub struct CantReadFile { - #[location(resource)] - path: String, -} - -#[derive(Debug, Serialize, Deserialize, Diagnostic)] -#[diagnostic( - category = "internalError/fs", - message( - message("The file "{self.path}" was ignored."), - description = "The file {path} was ignored." - ), - severity = Warning, -)] -pub struct FileIgnored { - #[location(resource)] - path: String, -} - -#[derive(Debug, Serialize, Deserialize)] -pub struct FileTooLarge { - path: String, - size: usize, - limit: usize, -} - -impl Diagnostic for FileTooLarge { - fn category(&self) -> Option<&'static Category> { - Some(category!("internalError/fs")) - } - - fn message(&self, fmt: &mut pgt_console::fmt::Formatter<'_>) -> std::io::Result<()> { - fmt.write_markup( - markup!{ - "Size of "{self.path}" is "{Bytes(self.size)}" which exceeds configured maximum of "{Bytes(self.limit)}" for this project. - The file size limit exists to prevent us inadvertently slowing down and loading large files that we shouldn't. - Use the `files.maxSize` configuration to change the maximum size of files processed." - } - ) - } - - fn description(&self, fmt: &mut Formatter<'_>) -> fmt::Result { - write!( - fmt, - "Size of {} is {} which exceeds configured maximum of {} for this project.\n\ - The file size limit exists to prevent us inadvertently slowing down and loading large files that we shouldn't.\n\ - Use the `files.maxSize` configuration to change the maximum size of files processed.", - self.path, - Bytes(self.size), - Bytes(self.limit) - ) - } -} diff --git a/crates/pgt_workspace/src/dome.rs b/crates/pgt_workspace/src/dome.rs deleted file mode 100644 index 8fa89c8b..00000000 --- a/crates/pgt_workspace/src/dome.rs +++ /dev/null @@ -1,72 +0,0 @@ -use pgt_fs::PgTPath; -use std::collections::BTreeSet; -use std::collections::btree_set::Iter; -use std::iter::{FusedIterator, Peekable}; - -/// A type that holds the evaluated paths, and provides an iterator to extract -/// specific paths like configuration files, manifests and more. 
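-// A rough usage sketch (assuming `PgTPath`'s `Ord` sorts configuration and
-// ignore files ahead of ordinary paths, which is what the peeking methods on
-// `DomeIterator` below rely on):
-//
-//     let dome = Dome::new(paths);
-//     let mut iter = dome.iter();
-//     while let Some(config) = iter.next_config() { /* configs first */ }
-//     while let Some(ignore) = iter.next_ignore() { /* then ignore files */ }
-//     for path in iter { /* everything else, in sorted order */ }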
-#[derive(Debug, Default)]
-pub struct Dome {
-    paths: BTreeSet<PgTPath>,
-}
-
-impl Dome {
-    pub fn with_path(mut self, path: impl Into<PgTPath>) -> Self {
-        self.paths.insert(path.into());
-        self
-    }
-
-    pub fn new(paths: BTreeSet<PgTPath>) -> Self {
-        Self { paths }
-    }
-
-    pub fn iter(&self) -> DomeIterator {
-        DomeIterator {
-            iter: self.paths.iter().peekable(),
-        }
-    }
-
-    pub fn to_paths(self) -> BTreeSet<PgTPath> {
-        self.paths
-    }
-}
-
-pub struct DomeIterator<'a> {
-    iter: Peekable<Iter<'a, PgTPath>>,
-}
-
-impl<'a> DomeIterator<'a> {
-    pub fn next_config(&mut self) -> Option<&'a PgTPath> {
-        if let Some(path) = self.iter.peek() {
-            if path.is_config() {
-                self.iter.next()
-            } else {
-                None
-            }
-        } else {
-            None
-        }
-    }
-
-    pub fn next_ignore(&mut self) -> Option<&'a PgTPath> {
-        if let Some(path) = self.iter.peek() {
-            if path.is_ignore() {
-                self.iter.next()
-            } else {
-                None
-            }
-        } else {
-            None
-        }
-    }
-}
-
-impl<'a> Iterator for DomeIterator<'a> {
-    type Item = &'a PgTPath;
-
-    fn next(&mut self) -> Option<Self::Item> {
-        self.iter.next()
-    }
-}
-
-impl FusedIterator for DomeIterator<'_> {}
diff --git a/crates/pgt_workspace/src/features/code_actions.rs b/crates/pgt_workspace/src/features/code_actions.rs
deleted file mode 100644
index 22223dd3..00000000
--- a/crates/pgt_workspace/src/features/code_actions.rs
+++ /dev/null
@@ -1,64 +0,0 @@
-use crate::workspace::StatementId;
-use pgt_configuration::RuleSelector;
-use pgt_fs::PgTPath;
-use pgt_text_size::TextSize;
-
-#[derive(Debug, serde::Serialize, serde::Deserialize)]
-#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
-pub struct CodeActionsParams {
-    pub path: PgTPath,
-    pub cursor_position: TextSize,
-    pub only: Vec<RuleSelector>,
-    pub skip: Vec<RuleSelector>,
-}
-
-#[derive(Debug, serde::Serialize, serde::Deserialize)]
-#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
-pub struct CodeActionsResult {
-    pub actions: Vec<CodeAction>,
-}
-
-#[derive(Debug, serde::Serialize, serde::Deserialize)]
-#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
-pub struct CodeAction {
-    pub title: String,
-    pub kind: CodeActionKind,
-    pub disabled_reason: Option<String>,
-}
-
-#[derive(Debug, serde::Serialize, serde::Deserialize)]
-#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
-pub enum CodeActionKind {
-    Edit(EditAction),
-    Command(CommandAction),
-    EditAndCommand(EditAction, CommandAction),
-}
-
-#[derive(Debug, serde::Serialize, serde::Deserialize)]
-#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
-pub struct EditAction {}
-
-#[derive(Debug, serde::Serialize, serde::Deserialize)]
-#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
-pub struct CommandAction {
-    pub category: CommandActionCategory,
-}
-
-#[derive(Debug, serde::Serialize, serde::Deserialize, strum::EnumIter)]
-#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
-pub enum CommandActionCategory {
-    ExecuteStatement(StatementId),
-}
-
-#[derive(Debug, serde::Serialize, serde::Deserialize)]
-#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
-pub struct ExecuteStatementParams {
-    pub statement_id: StatementId,
-    pub path: PgTPath,
-}
-
-#[derive(Debug, serde::Serialize, serde::Deserialize)]
-#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
-pub struct ExecuteStatementResult {
-    pub message: String,
-}
diff --git a/crates/pgt_workspace/src/features/completions.rs b/crates/pgt_workspace/src/features/completions.rs
deleted file mode 100644
index 85342183..00000000
--- a/crates/pgt_workspace/src/features/completions.rs
+++ /dev/null
@@ -1,182 +0,0 @@
-use
pgt_completions::CompletionItem; -use pgt_fs::PgTPath; -use pgt_text_size::{TextRange, TextSize}; - -use crate::workspace::{GetCompletionsFilter, GetCompletionsMapper, ParsedDocument, StatementId}; - -#[derive(Debug, serde::Serialize, serde::Deserialize)] -#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] -pub struct GetCompletionsParams { - /// The File for which a completion is requested. - pub path: PgTPath, - /// The Cursor position in the file for which a completion is requested. - pub position: TextSize, -} - -#[derive(Debug, serde::Serialize, serde::Deserialize, Default)] -#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] -pub struct CompletionsResult { - pub(crate) items: Vec, -} - -impl IntoIterator for CompletionsResult { - type Item = CompletionItem; - type IntoIter = as IntoIterator>::IntoIter; - fn into_iter(self) -> Self::IntoIter { - self.items.into_iter() - } -} - -pub(crate) fn get_statement_for_completions( - doc: &ParsedDocument, - position: TextSize, -) -> Option<(StatementId, TextRange, String, Arc)> { - let count = doc.count(); - // no arms no cookies - if count == 0 { - return None; - } - - let mut eligible_statements = doc.iter_with_filter( - GetCompletionsMapper, - GetCompletionsFilter { - cursor_position: position, - }, - ); - - if count == 1 { - eligible_statements.next() - } else { - let mut prev_stmt = None; - - for current_stmt in eligible_statements { - /* - * If we have multiple statements, we want to make sure that we do not overlap - * with the next one. - * - * select 1 |select 1; - */ - if prev_stmt.is_some_and(|_| current_stmt.1.contains(position)) { - return None; - } - prev_stmt = Some(current_stmt) - } - - prev_stmt - } -} - -#[cfg(test)] -mod tests { - use pgt_fs::PgTPath; - use pgt_text_size::TextSize; - - use crate::workspace::ParsedDocument; - - use super::get_statement_for_completions; - - static CURSOR_POSITION: &str = "€"; - - fn get_doc_and_pos(sql: &str) -> (ParsedDocument, TextSize) { - let pos = sql - .find(CURSOR_POSITION) - .expect("Please add cursor position to test sql"); - - let pos: u32 = pos.try_into().unwrap(); - - ( - ParsedDocument::new( - PgTPath::new("test.sql"), - sql.replace(CURSOR_POSITION, ""), - 5, - ), - TextSize::new(pos), - ) - } - - #[test] - fn finds_matching_statement() { - let sql = format!( - r#" - select * from users; - - update {}users set email = 'myemail@com'; - - select 1; - "#, - CURSOR_POSITION - ); - - let (doc, position) = get_doc_and_pos(sql.as_str()); - - let (_, _, text, _) = - get_statement_for_completions(&doc, position).expect("Expected Statement"); - - assert_eq!(text, "update users set email = 'myemail@com';") - } - - #[test] - fn does_not_break_when_no_statements_exist() { - let sql = CURSOR_POSITION.to_string(); - - let (doc, position) = get_doc_and_pos(sql.as_str()); - - assert!(get_statement_for_completions(&doc, position).is_none()); - } - - #[test] - fn does_not_return_overlapping_statements_if_too_close() { - let sql = format!("select * from {}select 1;", CURSOR_POSITION); - - let (doc, position) = get_doc_and_pos(sql.as_str()); - - // make sure these are parsed as two - assert_eq!(doc.count(), 2); - - assert!(get_statement_for_completions(&doc, position).is_none()); - } - - #[test] - fn is_fine_with_spaces() { - let sql = format!("select * from {} ;", CURSOR_POSITION); - - let (doc, position) = get_doc_and_pos(sql.as_str()); - - let (_, _, text, _) = - get_statement_for_completions(&doc, position).expect("Expected Statement"); - - assert_eq!(text, "select * from ;") 
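-        // To make the rule concrete: a statement is only returned when the cursor
-        // does not touch the start of the *next* statement, so
-        // "select * from |select 1;" yields None (tested above), while trailing
-        // whitespace before the terminating semicolon still resolves to the
-        // statement being typed.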
- } - - #[test] - fn considers_offset() { - let sql = format!("select * from {}", CURSOR_POSITION); - - let (doc, position) = get_doc_and_pos(sql.as_str()); - - let (_, _, text, _) = - get_statement_for_completions(&doc, position).expect("Expected Statement"); - - assert_eq!(text, "select * from") - } - - #[test] - fn does_not_consider_too_far_offset() { - let sql = format!("select * from {}", CURSOR_POSITION); - - let (doc, position) = get_doc_and_pos(sql.as_str()); - - assert!(get_statement_for_completions(&doc, position).is_none()); - } - - #[test] - fn does_not_consider_offset_if_statement_terminated_by_semi() { - let sql = format!("select * from users;{}", CURSOR_POSITION); - - let (doc, position) = get_doc_and_pos(sql.as_str()); - - assert!(get_statement_for_completions(&doc, position).is_none()); - } -} diff --git a/crates/pgt_workspace/src/features/diagnostics.rs b/crates/pgt_workspace/src/features/diagnostics.rs deleted file mode 100644 index ff60e142..00000000 --- a/crates/pgt_workspace/src/features/diagnostics.rs +++ /dev/null @@ -1,21 +0,0 @@ -use pgt_analyse::RuleCategories; -use pgt_configuration::RuleSelector; -use pgt_fs::PgTPath; - -#[derive(Debug, serde::Serialize, serde::Deserialize)] -#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] -pub struct PullDiagnosticsParams { - pub path: PgTPath, - pub categories: RuleCategories, - pub max_diagnostics: u64, - pub only: Vec, - pub skip: Vec, -} - -#[derive(Debug, serde::Serialize, serde::Deserialize)] -#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] -pub struct PullDiagnosticsResult { - pub diagnostics: Vec, - pub errors: usize, - pub skipped_diagnostics: u64, -} diff --git a/crates/pgt_workspace/src/features/mod.rs b/crates/pgt_workspace/src/features/mod.rs deleted file mode 100644 index 31013f36..00000000 --- a/crates/pgt_workspace/src/features/mod.rs +++ /dev/null @@ -1,3 +0,0 @@ -pub mod code_actions; -pub mod completions; -pub mod diagnostics; diff --git a/crates/pgt_workspace/src/lib.rs b/crates/pgt_workspace/src/lib.rs deleted file mode 100644 index 99fe063f..00000000 --- a/crates/pgt_workspace/src/lib.rs +++ /dev/null @@ -1,102 +0,0 @@ -use std::ops::{Deref, DerefMut}; - -use pgt_console::Console; -use pgt_fs::{FileSystem, OsFileSystem}; - -pub mod configuration; -pub mod diagnostics; -pub mod dome; -pub mod features; -pub mod matcher; -pub mod settings; -pub mod workspace; - -#[cfg(feature = "schema")] -pub mod workspace_types; - -pub use crate::diagnostics::{TransportError, WorkspaceError}; -pub use crate::workspace::Workspace; - -/// This is the main entrypoint of the application. 
-pub struct App<'app> {
-    /// A reference to the internal virtual file system
-    pub fs: DynRef<'app, dyn FileSystem>,
-    /// A reference to the internal workspace
-    pub workspace: WorkspaceRef<'app>,
-    /// A reference to the internal console, where its buffer will be used to write messages and
-    /// errors
-    pub console: &'app mut dyn Console,
-}
-
-impl<'app> App<'app> {
-    pub fn with_console(console: &'app mut dyn Console) -> Self {
-        Self::with_filesystem_and_console(DynRef::Owned(Box::<OsFileSystem>::default()), console)
-    }
-
-    /// Create a new instance of the app using the specified [FileSystem] and [Console] implementation
-    pub fn with_filesystem_and_console(
-        fs: DynRef<'app, dyn FileSystem>,
-        console: &'app mut dyn Console,
-    ) -> Self {
-        Self::new(fs, console, WorkspaceRef::Owned(workspace::server()))
-    }
-
-    /// Create a new instance of the app using the specified [FileSystem], [Console] and [Workspace] implementation
-    pub fn new(
-        fs: DynRef<'app, dyn FileSystem>,
-        console: &'app mut dyn Console,
-        workspace: WorkspaceRef<'app>,
-    ) -> Self {
-        Self {
-            fs,
-            console,
-            workspace,
-        }
-    }
-}
-
-pub enum WorkspaceRef<'app> {
-    Owned(Box<dyn Workspace>),
-    Borrowed(&'app dyn Workspace),
-}
-
-impl<'app> Deref for WorkspaceRef<'app> {
-    type Target = dyn Workspace + 'app;
-
-    // False positive
-    #[allow(clippy::explicit_auto_deref)]
-    fn deref(&self) -> &Self::Target {
-        match self {
-            WorkspaceRef::Owned(inner) => &**inner,
-            WorkspaceRef::Borrowed(inner) => *inner,
-        }
-    }
-}
-
-/// Clone of [std::borrow::Cow] specialized for storing a trait object and
-/// holding a mutable reference in the `Borrowed` variant instead of requiring
-/// the inner type to implement [std::borrow::ToOwned]
-pub enum DynRef<'app, T: ?Sized + 'app> {
-    Owned(Box<T>),
-    Borrowed(&'app mut T),
-}
-
-impl<'app, T: ?Sized + 'app> Deref for DynRef<'app, T> {
-    type Target = T;
-
-    fn deref(&self) -> &Self::Target {
-        match self {
-            DynRef::Owned(inner) => inner,
-            DynRef::Borrowed(inner) => inner,
-        }
-    }
-}
-
-impl<'app, T: ?Sized + 'app> DerefMut for DynRef<'app, T> {
-    fn deref_mut(&mut self) -> &mut Self::Target {
-        match self {
-            DynRef::Owned(inner) => inner,
-            DynRef::Borrowed(inner) => inner,
-        }
-    }
-}
diff --git a/crates/pgt_workspace/src/matcher/LICENCE-APACHE b/crates/pgt_workspace/src/matcher/LICENCE-APACHE
deleted file mode 100644
index 4aca254d..00000000
--- a/crates/pgt_workspace/src/matcher/LICENCE-APACHE
+++ /dev/null
@@ -1,202 +0,0 @@
-                                 Apache License
-                           Version 2.0, January 2004
-                        http://www.apache.org/licenses/
-
-TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-1. Definitions.
-
-   "License" shall mean the terms and conditions for use, reproduction,
-   and distribution as defined by Sections 1 through 9 of this document.
-
-   "Licensor" shall mean the copyright owner or entity authorized by
-   the copyright owner that is granting the License.
-
-   "Legal Entity" shall mean the union of the acting entity and all
-   other entities that control, are controlled by, or are under common
-   control with that entity. For the purposes of this definition,
-   "control" means (i) the power, direct or indirect, to cause the
-   direction or management of such entity, whether by contract or
-   otherwise, or (ii) ownership of fifty percent (50%) or more of the
-   outstanding shares, or (iii) beneficial ownership of such entity.
-
-   "You" (or "Your") shall mean an individual or Legal Entity
-   exercising permissions granted by this License.
- - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - -2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - -3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - -4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - -5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - -6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - -7. Disclaimer of Warranty. 
Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - -8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - -9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - -END OF TERMS AND CONDITIONS - -APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - -Copyright (c) 2023 Biome Developers and Contributors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
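Ahead of the matcher sources below, a minimal sketch of how the `Matcher` facade defined in `matcher/mod.rs` is meant to be driven (the paths here are hypothetical; `Matcher`, `MatchOptions`, `add_pattern`, `matches`, and `matches_path` are the items that follow):

    use crate::matcher::{MatchOptions, Matcher};
    use std::path::Path;

    let mut matcher = Matcher::new(MatchOptions::default());
    matcher.add_pattern("**/*.rs").unwrap();

    // Both entry points share the per-string result cache guarded by the
    // internal RwLock, so repeated queries for the same source are cheap.
    assert!(matcher.matches("/repo/src/workspace.rs"));
    assert!(!matcher.matches_path(Path::new("/repo/README.md")));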
- diff --git a/crates/pgt_workspace/src/matcher/LICENSE-MIT b/crates/pgt_workspace/src/matcher/LICENSE-MIT deleted file mode 100644 index 17eebcc2..00000000 --- a/crates/pgt_workspace/src/matcher/LICENSE-MIT +++ /dev/null @@ -1,26 +0,0 @@ -Copyright (c) 2014 The Rust Project Developers - -Permission is hereby granted, free of charge, to any -person obtaining a copy of this software and associated -documentation files (the "Software"), to deal in the -Software without restriction, including without -limitation the rights to use, copy, modify, merge, -publish, distribute, sublicense, and/or sell copies of -the Software, and to permit persons to whom the Software -is furnished to do so, subject to the following -conditions: - -The above copyright notice and this permission notice -shall be included in all copies or substantial portions -of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -DEALINGS IN THE SOFTWARE. - diff --git a/crates/pgt_workspace/src/matcher/mod.rs b/crates/pgt_workspace/src/matcher/mod.rs deleted file mode 100644 index 1afc4166..00000000 --- a/crates/pgt_workspace/src/matcher/mod.rs +++ /dev/null @@ -1,199 +0,0 @@ -pub mod pattern; - -pub use pattern::{MatchOptions, Pattern, PatternError}; -use pgt_console::markup; -use pgt_diagnostics::Diagnostic; -use std::collections::HashMap; -use std::path::{Path, PathBuf}; -use std::sync::RwLock; - -/// A data structure to use when there's need to match a string or a path a against -/// a unix shell style patterns -#[derive(Debug, Default)] -pub struct Matcher { - root: Option, - patterns: Vec, - options: MatchOptions, - /// Whether the string was already checked - already_checked: RwLock>, -} - -impl Matcher { - /// Creates a new Matcher with given options. - /// - /// Check [glob website](https://docs.rs/glob/latest/glob/struct.MatchOptions.html) for [MatchOptions] - pub fn new(options: MatchOptions) -> Self { - Self { - root: None, - patterns: Vec::new(), - options, - already_checked: RwLock::new(HashMap::default()), - } - } - - pub fn empty() -> Self { - Self { - root: None, - patterns: Vec::new(), - options: MatchOptions::default(), - already_checked: RwLock::new(HashMap::default()), - } - } - - pub fn set_root(&mut self, root: PathBuf) { - self.root = Some(root); - } - - /// It adds a unix shell style pattern - pub fn add_pattern(&mut self, pattern: &str) -> Result<(), PatternError> { - let pattern = Pattern::new(pattern)?; - self.patterns.push(pattern); - Ok(()) - } - - /// It matches the given string against the stored patterns. 
- /// - /// It returns [true] if there's at least a match - pub fn matches(&self, source: &str) -> bool { - let mut already_ignored = self.already_checked.write().unwrap(); - if let Some(matches) = already_ignored.get(source) { - return *matches; - } - for pattern in &self.patterns { - if pattern.matches_with(source, self.options) || source.contains(pattern.as_str()) { - already_ignored.insert(source.to_string(), true); - return true; - } - } - already_ignored.insert(source.to_string(), false); - false - } - - pub fn is_empty(&self) -> bool { - self.patterns.is_empty() - } - - /// It matches the given path against the stored patterns - /// - /// It returns [true] if there's at least one match - pub fn matches_path(&self, source: &Path) -> bool { - if self.is_empty() { - return false; - } - let mut already_checked = self.already_checked.write().unwrap(); - let source_as_string = source.to_str(); - if let Some(source_as_string) = source_as_string { - if let Some(matches) = already_checked.get(source_as_string) { - return *matches; - } - } - let matches = self.run_match(source); - - if let Some(source_as_string) = source_as_string { - already_checked.insert(source_as_string.to_string(), matches); - } - - matches - } - - fn run_match(&self, source: &Path) -> bool { - for pattern in &self.patterns { - let matches = if pattern.matches_path_with(source, self.options) { - true - } else { - // Here we cover cases where the user specifies single files inside the patterns. - // The pattern library doesn't support single files, we here we just do a check - // on contains - // - // Given the pattern `out`: - // - `out/index.html` -> matches - // - `out/` -> matches - // - `layout.tsx` -> does not match - // - `routes/foo.ts` -> does not match - source - .ancestors() - .any(|ancestor| ancestor.ends_with(pattern.as_str())) - }; - - if matches { - return true; - } - } - false - } -} - -impl Diagnostic for PatternError { - fn description(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(fmt, "{}", self.msg) - } - - fn message(&self, fmt: &mut pgt_console::fmt::Formatter<'_>) -> std::io::Result<()> { - fmt.write_markup(markup!({ self.msg })) - } -} - -#[cfg(test)] -mod test { - use crate::matcher::Matcher; - use crate::matcher::pattern::MatchOptions; - use std::env; - - #[test] - fn matches() { - let current = env::current_dir().unwrap(); - let dir = format!("{}/**/*.rs", current.display()); - let mut ignore = Matcher::new(MatchOptions::default()); - ignore.add_pattern(&dir).unwrap(); - let path = env::current_dir().unwrap().join("src/workspace.rs"); - let result = ignore.matches(path.to_str().unwrap()); - - assert!(result); - } - - #[test] - fn matches_path() { - let current = env::current_dir().unwrap(); - let dir = format!("{}/**/*.rs", current.display()); - let mut ignore = Matcher::new(MatchOptions::default()); - ignore.add_pattern(&dir).unwrap(); - let path = env::current_dir().unwrap().join("src/workspace.rs"); - let result = ignore.matches_path(path.as_path()); - - assert!(result); - } - - #[test] - fn matches_path_for_single_file_or_directory_name() { - let dir = "inv"; - let valid_test_dir = "valid/"; - let mut ignore = Matcher::new(MatchOptions::default()); - ignore.add_pattern(dir).unwrap(); - ignore.add_pattern(valid_test_dir).unwrap(); - - let path = env::current_dir().unwrap().join("tests").join("invalid"); - let result = ignore.matches_path(path.as_path()); - - assert!(!result); - - let path = env::current_dir().unwrap().join("tests").join("valid"); - let result = 
ignore.matches_path(path.as_path()); - - assert!(result); - } - - #[test] - fn matches_single_path() { - let dir = "workspace.rs"; - let mut ignore = Matcher::new(MatchOptions { - require_literal_separator: true, - case_sensitive: true, - require_literal_leading_dot: true, - }); - ignore.add_pattern(dir).unwrap(); - let path = env::current_dir().unwrap().join("src/workspace.rs"); - let result = ignore.matches(path.to_str().unwrap()); - - assert!(result); - } -} diff --git a/crates/pgt_workspace/src/matcher/pattern.rs b/crates/pgt_workspace/src/matcher/pattern.rs deleted file mode 100644 index aa38979e..00000000 --- a/crates/pgt_workspace/src/matcher/pattern.rs +++ /dev/null @@ -1,1100 +0,0 @@ -use crate::matcher::pattern::CharSpecifier::{CharRange, SingleChar}; -use crate::matcher::pattern::MatchResult::{ - EntirePatternDoesntMatch, Match, SubPatternDoesntMatch, -}; -use crate::matcher::pattern::PatternToken::{ - AnyChar, AnyExcept, AnyPattern, AnyRecursiveSequence, AnySequence, AnyWithin, Char, -}; -use std::error::Error; -use std::path::Path; -use std::str::FromStr; -use std::{fmt, path}; - -/// A pattern parsing error. -#[derive(Debug)] -#[allow(missing_copy_implementations)] -pub struct PatternError { - /// The approximate character index of where the error occurred. - pub pos: usize, - - /// A message describing the error. - pub msg: &'static str, -} - -impl Error for PatternError { - fn description(&self) -> &str { - self.msg - } -} - -impl fmt::Display for PatternError { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!( - f, - "Pattern syntax error near position {}: {}", - self.pos, self.msg - ) - } -} - -/// A compiled Unix shell style pattern. -/// -/// - `?` matches any single character. -/// -/// - `*` matches any (possibly empty) sequence of characters. -/// -/// - `**` matches the current directory and arbitrary subdirectories. This -/// sequence **must** form a single path component, so both `**a` and `b**` -/// are invalid and will result in an error. A sequence of more than two -/// consecutive `*` characters is also invalid. -/// -/// - `[...]` matches any character inside the brackets. Character sequences -/// can also specify ranges of characters, as ordered by Unicode, so e.g. -/// `[0-9]` specifies any character between 0 and 9 inclusive. An unclosed -/// bracket is invalid. -/// -/// - `[!...]` is the negation of `[...]`, i.e. it matches any characters -/// **not** in the brackets. -/// -/// - The metacharacters `?`, `*`, `[`, `]` can be matched by using brackets -/// (e.g. `[?]`). When a `]` occurs immediately following `[` or `[!` then it -/// is interpreted as being part of, rather then ending, the character set, so -/// `]` and NOT `]` can be matched by `[]]` and `[!]]` respectively. The `-` -/// character can be specified inside a character sequence pattern by placing -/// it at the start or the end, e.g. `[abc-]`. -/// -/// - `{...}` can be used to specify multiple patterns separated by commas. For -/// example, `a/{b,c}/d` will match `a/b/d` and `a/c/d`. -#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Default, Debug)] -pub struct Pattern { - /// The original glob pattern that was parsed to create this `Pattern`. - original: String, - tokens: Vec, - is_recursive: bool, - /// Did this pattern come from an `.editorconfig` file? - /// - /// TODO: Remove this flag and support `{a,b}` globs in Biome 2.0 - is_editorconfig: bool, -} - -/// Show the original glob pattern. 
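-// A few concrete cases for the grammar described above, consistent with the
-// parser that follows:
-//
-//     assert!(Pattern::new("c?t").unwrap().matches("cat"));
-//     assert!(Pattern::new("[0-9]").unwrap().matches("7"));
-//     assert!(Pattern::new("[!abc]").unwrap().matches("d"));
-//     assert!(Pattern::new("**a").is_err()); // `**` must be a whole component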
-impl fmt::Display for Pattern {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        self.original.fmt(f)
-    }
-}
-
-impl FromStr for Pattern {
-    type Err = PatternError;
-
-    fn from_str(s: &str) -> Result<Self, PatternError> {
-        Self::new(s)
-    }
-}
-
-#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
-enum PatternToken {
-    Char(char),
-    AnyChar,
-    AnySequence,
-    AnyRecursiveSequence,
-    AnyWithin(Vec<CharSpecifier>),
-    AnyExcept(Vec<CharSpecifier>),
-    /// A set of patterns that at least one of them must match
-    AnyPattern(Vec<Pattern>),
-}
-
-#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
-enum CharSpecifier {
-    SingleChar(char),
-    CharRange(char, char),
-}
-
-#[derive(Copy, Clone, PartialEq)]
-enum MatchResult {
-    Match,
-    SubPatternDoesntMatch,
-    EntirePatternDoesntMatch,
-}
-
-const ERROR_WILDCARDS: &str = "wildcards are either regular `*` or recursive `**`";
-const ERROR_RECURSIVE_WILDCARDS: &str = "recursive wildcards must form a single path \
-                                         component";
-const ERROR_INVALID_RANGE: &str = "invalid range pattern";
-
-impl Pattern {
-    /// This function compiles Unix shell style patterns.
-    ///
-    /// An invalid glob pattern will yield a `PatternError`.
-    pub fn new(pattern: &str) -> Result<Self, PatternError> {
-        Self::parse(pattern, false)
-    }
-
-    /// This function compiles Unix shell style patterns.
-    ///
-    /// An invalid glob pattern will yield a `PatternError`.
-    pub fn parse(pattern: &str, is_editorconfig: bool) -> Result<Self, PatternError> {
-        let chars = pattern.chars().collect::<Vec<_>>();
-        let mut tokens = Vec::new();
-        let mut is_recursive = false;
-        let mut i = 0;
-
-        // A pattern is relative if it starts with "." followed by a separator,
-        // eg. "./test" or ".\test"
-        let is_relative = matches!(chars.get(..2), Some(['.', sep]) if path::is_separator(*sep));
-        if is_relative {
-            // If a pattern starts with a relative prefix, strip it from the
-            // pattern and replace it with a "**" sequence
-            i += 2;
-            tokens.push(AnyRecursiveSequence);
-        } else {
-            // A pattern is absolute if it starts with a path separator, eg. "/home" or "\\?\C:\Users"
-            let mut is_absolute = chars.first().is_some_and(|c| path::is_separator(*c));
-
-            // On windows a pattern may also be absolute if it starts with a
-            // drive letter, a colon and a separator, eg. "c:/Users" or "G:\Users"
-            if cfg!(windows) && !is_absolute {
-                is_absolute = matches!(chars.get(..3), Some(['a'..='z' | 'A'..='Z', ':', sep]) if path::is_separator(*sep));
-            }
-
-            // If a pattern is not absolute, insert a "**" sequence in front
-            if !is_absolute {
-                tokens.push(AnyRecursiveSequence);
-            }
-        }
-
-        while i < chars.len() {
-            match chars[i] {
-                '?' => {
-                    tokens.push(AnyChar);
-                    i += 1;
-                }
-                '*' => {
-                    let old = i;
-
-                    while i < chars.len() && chars[i] == '*' {
-                        i += 1;
-                    }
-
-                    let count = i - old;
-
                    match count {
-                        count if count > 2 => {
-                            return Err(PatternError {
-                                pos: old + 2,
-                                msg: ERROR_WILDCARDS,
-                            });
-                        }
-                        count if count == 2 => {
-                            // ** can only be an entire path component
-                            // i.e.
a/**/b is valid, but a**/b or a/**b is not - // invalid matches are treated literally - let is_valid = if i == 2 || path::is_separator(chars[i - count - 1]) { - // it ends in a '/' - if i < chars.len() && path::is_separator(chars[i]) { - i += 1; - true - // or the pattern ends here - // this enables the existing globbing mechanism - } else if i == chars.len() { - true - // `**` ends in non-separator - } else { - return Err(PatternError { - pos: i, - msg: ERROR_RECURSIVE_WILDCARDS, - }); - } - // `**` begins with non-separator - } else { - return Err(PatternError { - pos: old - 1, - msg: ERROR_RECURSIVE_WILDCARDS, - }); - }; - - if is_valid { - // collapse consecutive AnyRecursiveSequence to a - // single one - - let tokens_len = tokens.len(); - - if !(tokens_len > 1 - && tokens[tokens_len - 1] == AnyRecursiveSequence) - { - is_recursive = true; - tokens.push(AnyRecursiveSequence); - } - } - } - _ => { - tokens.push(AnySequence); - } - } - } - '[' => { - if i + 4 <= chars.len() && chars[i + 1] == '!' { - match chars[i + 3..].iter().position(|x| *x == ']') { - None => (), - Some(j) => { - let chars = &chars[i + 2..i + 3 + j]; - let cs = parse_char_specifiers(chars); - tokens.push(AnyExcept(cs)); - i += j + 4; - continue; - } - } - } else if i + 3 <= chars.len() && chars[i + 1] != '!' { - match chars[i + 2..].iter().position(|x| *x == ']') { - None => (), - Some(j) => { - let cs = parse_char_specifiers(&chars[i + 1..i + 2 + j]); - tokens.push(AnyWithin(cs)); - i += j + 3; - continue; - } - } - } - - // if we get here then this is not a valid range pattern - return Err(PatternError { - pos: i, - msg: ERROR_INVALID_RANGE, - }); - } - '{' if is_editorconfig => { - let mut depth = 1; - let mut j = i + 1; - while j < chars.len() { - match chars[j] { - '{' => depth += 1, - '}' => depth -= 1, - _ => (), - } - if depth > 1 { - return Err(PatternError { - pos: j, - msg: "nested '{' in '{...}' is not allowed", - }); - } - if depth == 0 { - break; - } - j += 1; - } - - if depth != 0 { - return Err(PatternError { - pos: i, - msg: "unmatched '{'", - }); - } - - let mut subpatterns = Vec::new(); - for subpattern in pattern[i + 1..j].split(',') { - let mut pattern = Pattern::new(subpattern)?; - // HACK: remove the leading '**' if it exists - if pattern.tokens.first() == Some(&PatternToken::AnyRecursiveSequence) { - pattern.tokens.remove(0); - } - subpatterns.push(pattern); - } - tokens.push(AnyPattern(subpatterns)); - i = j + 1; - } - c => { - tokens.push(Char(c)); - i += 1; - } - } - } - - Ok(Self { - tokens, - original: pattern.to_string(), - is_recursive, - is_editorconfig, - }) - } - - fn from_tokens(tokens: Vec, original: String, is_recursive: bool) -> Self { - Self { - tokens, - original, - is_recursive, - is_editorconfig: false, - } - } - - /// Escape metacharacters within the given string by surrounding them in - /// brackets. The resulting string will, when compiled into a `Pattern`, - /// match the input string and nothing else. - pub fn escape(s: &str) -> String { - let mut escaped = String::new(); - for c in s.chars() { - match c { - // note that ! does not need escaping because it is only special - // inside brackets - '?' | '*' | '[' | ']' => { - escaped.push('['); - escaped.push(c); - escaped.push(']'); - } - c => { - escaped.push(c); - } - } - } - escaped - } - - /// Return if the given `str` matches this `Pattern` using the default - /// match options (i.e. `MatchOptions::new()`). 
- /// - /// # Examples - /// - /// ```rust,ignore - /// use crate::Pattern; - /// - /// assert!(Pattern::new("c?t").unwrap().matches("cat")); - /// assert!(Pattern::new("k[!e]tteh").unwrap().matches("kitteh")); - /// assert!(Pattern::new("d*g").unwrap().matches("doog")); - /// ``` - pub fn matches(&self, str: &str) -> bool { - self.matches_with(str, MatchOptions::new()) - } - - /// Return if the given `Path`, when converted to a `str`, matches this - /// `Pattern` using the default match options (i.e. `MatchOptions::new()`). - pub fn matches_path(&self, path: &Path) -> bool { - // FIXME (#9639): This needs to handle non-utf8 paths - path.to_str().is_some_and(|s| self.matches(s)) - } - - /// Return if the given `str` matches this `Pattern` using the specified - /// match options. - pub fn matches_with(&self, str: &str, options: MatchOptions) -> bool { - self.matches_from(true, str.chars(), 0, options) == Match - } - - /// Return if the given `Path`, when converted to a `str`, matches this - /// `Pattern` using the specified match options. - pub fn matches_path_with(&self, path: &Path, options: MatchOptions) -> bool { - // FIXME (#9639): This needs to handle non-utf8 paths - path.to_str().is_some_and(|s| self.matches_with(s, options)) - } - - /// Access the original glob pattern. - pub fn as_str(&self) -> &str { - &self.original - } - - fn matches_from( - &self, - mut follows_separator: bool, - mut file: std::str::Chars, - i: usize, - options: MatchOptions, - ) -> MatchResult { - for (ti, token) in self.tokens[i..].iter().enumerate() { - match token { - AnySequence | AnyRecursiveSequence => { - // ** must be at the start. - debug_assert!(match *token { - AnyRecursiveSequence => follows_separator, - _ => true, - }); - - // Empty match - match self.matches_from(follows_separator, file.clone(), i + ti + 1, options) { - SubPatternDoesntMatch => (), // keep trying - m => return m, - }; - - while let Some(c) = file.next() { - if follows_separator && options.require_literal_leading_dot && c == '.' { - return SubPatternDoesntMatch; - } - follows_separator = path::is_separator(c); - match *token { - AnyRecursiveSequence if !follows_separator => continue, - AnySequence - if options.require_literal_separator && follows_separator => - { - return SubPatternDoesntMatch; - } - _ => (), - } - match self.matches_from( - follows_separator, - file.clone(), - i + ti + 1, - options, - ) { - SubPatternDoesntMatch => (), // keep trying - m => return m, - } - } - } - AnyPattern(patterns) => { - for pattern in patterns.iter() { - let mut tokens = pattern.tokens.clone(); - tokens.extend_from_slice(&self.tokens[(i + ti + 1)..]); - let new_pattern = Pattern::from_tokens( - tokens, - pattern.original.clone(), - pattern.is_recursive, - ); - if new_pattern.matches_from(follows_separator, file.clone(), 0, options) - == Match - { - return Match; - } - } - return SubPatternDoesntMatch; - } - _ => { - let c = match file.next() { - Some(c) => c, - None => return EntirePatternDoesntMatch, - }; - - let is_sep = path::is_separator(c); - - if !match *token { - AnyChar | AnyWithin(..) | AnyExcept(..) 
- if (options.require_literal_separator && is_sep) - || (follows_separator - && options.require_literal_leading_dot - && c == '.') => - { - false - } - AnyChar => true, - AnyWithin(ref specifiers) => in_char_specifiers(specifiers, c, options), - AnyExcept(ref specifiers) => !in_char_specifiers(specifiers, c, options), - Char(c2) => chars_eq(c, c2, options.case_sensitive), - AnySequence | AnyRecursiveSequence | AnyPattern(_) => unreachable!(), - } { - return SubPatternDoesntMatch; - } - follows_separator = is_sep; - } - } - } - - // Iter is fused. - if file.next().is_none() { - Match - } else { - SubPatternDoesntMatch - } - } -} - -fn parse_char_specifiers(s: &[char]) -> Vec { - let mut cs = Vec::new(); - let mut i = 0; - while i < s.len() { - if i + 3 <= s.len() && s[i + 1] == '-' { - cs.push(CharRange(s[i], s[i + 2])); - i += 3; - } else { - cs.push(SingleChar(s[i])); - i += 1; - } - } - cs -} - -fn in_char_specifiers(specifiers: &[CharSpecifier], c: char, options: MatchOptions) -> bool { - for &specifier in specifiers.iter() { - match specifier { - SingleChar(sc) => { - if chars_eq(c, sc, options.case_sensitive) { - return true; - } - } - CharRange(start, end) => { - // FIXME: work with non-ascii chars properly (issue #1347) - if !options.case_sensitive && c.is_ascii() && start.is_ascii() && end.is_ascii() { - let start = start.to_ascii_lowercase(); - let end = end.to_ascii_lowercase(); - - let start_up = start.to_uppercase().next().unwrap(); - let end_up = end.to_uppercase().next().unwrap(); - - // only allow case insensitive matching when - // both start and end are within a-z or A-Z - if start != start_up && end != end_up { - let c = c.to_ascii_lowercase(); - if c >= start && c <= end { - return true; - } - } - } - - if c >= start && c <= end { - return true; - } - } - } - } - - false -} - -/// A helper function to determine if two chars are (possibly case-insensitively) equal. -fn chars_eq(a: char, b: char, case_sensitive: bool) -> bool { - if cfg!(windows) && path::is_separator(a) && path::is_separator(b) { - true - } else if !case_sensitive && a.is_ascii() && b.is_ascii() { - // FIXME: work with non-ascii chars properly (issue #9084) - a.eq_ignore_ascii_case(&b) - } else { - a == b - } -} - -/// Configuration options to modify the behaviour of `Pattern::matches_with(..)`. -#[allow(missing_copy_implementations)] -#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] -pub struct MatchOptions { - /// Whether or not patterns should be matched in a case-sensitive manner. - /// This currently only considers upper/lower case relationships between - /// ASCII characters, but in future this might be extended to work with - /// Unicode. - pub case_sensitive: bool, - - /// Whether or not path-component separator characters (e.g. `/` on - /// Posix) must be matched by a literal `/`, rather than by `*` or `?` or - /// `[...]`. - pub require_literal_separator: bool, - - /// Whether or not paths that contain components that start with a `.` - /// will require that `.` appears literally in the pattern; `*`, `?`, `**`, - /// or `[...]` will not match. This is useful because such files are - /// conventionally considered hidden on Unix systems and it might be - /// desirable to skip them when listing files. - pub require_literal_leading_dot: bool, -} - -impl MatchOptions { - /// Constructs a new `MatchOptions` with default field values. This is used - /// when calling functions that do not take an explicit `MatchOptions` - /// parameter. 
- /// - /// This function always returns this value: - /// - /// ```rust,ignore - /// MatchOptions { - /// case_sensitive: true, - /// require_literal_separator: false, - /// require_literal_leading_dot: false - /// } - /// ``` - pub fn new() -> Self { - Self { - case_sensitive: true, - require_literal_separator: false, - require_literal_leading_dot: false, - } - } -} - -#[cfg(test)] -mod test { - use super::{MatchOptions, Pattern}; - use std::path::Path; - - #[test] - fn test_pattern_from_str() { - assert!("a*b".parse::().unwrap().matches("a_b")); - assert!("a/**b".parse::().unwrap_err().pos == 4); - } - - #[test] - fn test_wildcard_errors() { - assert!(Pattern::new("a/**b").unwrap_err().pos == 4); - assert!(Pattern::new("a/bc**").unwrap_err().pos == 3); - assert!(Pattern::new("a/*****").unwrap_err().pos == 4); - assert!(Pattern::new("a/b**c**d").unwrap_err().pos == 2); - assert!(Pattern::new("a**b").unwrap_err().pos == 0); - } - - #[test] - fn test_unclosed_bracket_errors() { - assert!(Pattern::new("abc[def").unwrap_err().pos == 3); - assert!(Pattern::new("abc[!def").unwrap_err().pos == 3); - assert!(Pattern::new("abc[").unwrap_err().pos == 3); - assert!(Pattern::new("abc[!").unwrap_err().pos == 3); - assert!(Pattern::new("abc[d").unwrap_err().pos == 3); - assert!(Pattern::new("abc[!d").unwrap_err().pos == 3); - assert!(Pattern::new("abc[]").unwrap_err().pos == 3); - assert!(Pattern::new("abc[!]").unwrap_err().pos == 3); - } - - #[test] - fn test_wildcards() { - assert!(Pattern::new("a*b").unwrap().matches("a_b")); - assert!(Pattern::new("a*b*c").unwrap().matches("abc")); - assert!(!Pattern::new("a*b*c").unwrap().matches("abcd")); - assert!(Pattern::new("a*b*c").unwrap().matches("a_b_c")); - assert!(Pattern::new("a*b*c").unwrap().matches("a___b___c")); - assert!( - Pattern::new("abc*abc*abc") - .unwrap() - .matches("abcabcabcabcabcabcabc") - ); - assert!( - !Pattern::new("abc*abc*abc") - .unwrap() - .matches("abcabcabcabcabcabcabca") - ); - assert!( - Pattern::new("a*a*a*a*a*a*a*a*a") - .unwrap() - .matches("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa") - ); - assert!(Pattern::new("a*b[xyz]c*d").unwrap().matches("abxcdbxcddd")); - } - - #[test] - fn test_recursive_wildcards() { - let pat = Pattern::new("some/**/needle.txt").unwrap(); - assert!(pat.matches("some/needle.txt")); - assert!(pat.matches("some/one/needle.txt")); - assert!(pat.matches("some/one/two/needle.txt")); - assert!(pat.matches("some/other/needle.txt")); - assert!(!pat.matches("some/other/notthis.txt")); - - // a single ** should be valid, for globs - // Should accept anything - let pat = Pattern::new("**").unwrap(); - assert!(pat.is_recursive); - assert!(pat.matches("abcde")); - assert!(pat.matches("")); - assert!(pat.matches(".asdf")); - assert!(pat.matches("/x/.asdf")); - - // collapse consecutive wildcards - let pat = Pattern::new("some/**/**/needle.txt").unwrap(); - assert!(pat.matches("some/needle.txt")); - assert!(pat.matches("some/one/needle.txt")); - assert!(pat.matches("some/one/two/needle.txt")); - assert!(pat.matches("some/other/needle.txt")); - assert!(!pat.matches("some/other/notthis.txt")); - - // ** can begin the pattern - let pat = Pattern::new("**/test").unwrap(); - assert!(pat.matches("one/two/test")); - assert!(pat.matches("one/test")); - assert!(pat.matches("test")); - - // /** can begin the pattern - let pat = Pattern::new("/**/test").unwrap(); - assert!(pat.matches("/one/two/test")); - assert!(pat.matches("/one/test")); - assert!(pat.matches("/test")); - assert!(!pat.matches("/one/notthis")); - 
assert!(!pat.matches("/notthis")); - - // Only start sub-patterns on start of path segment. - let pat = Pattern::new("**/.*").unwrap(); - assert!(pat.matches(".abc")); - assert!(pat.matches("abc/.abc")); - assert!(!pat.matches("ab.c")); - assert!(!pat.matches("abc/ab.c")); - } - - #[test] - fn test_range_pattern() { - let pat = Pattern::new("a[0-9]b").unwrap(); - for i in 0..10 { - assert!(pat.matches(&format!("a{i}b"))); - } - assert!(!pat.matches("a_b")); - - let pat = Pattern::new("a[!0-9]b").unwrap(); - for i in 0..10 { - assert!(!pat.matches(&format!("a{i}b"))); - } - assert!(pat.matches("a_b")); - - let pats = ["[a-z123]", "[1a-z23]", "[123a-z]"]; - for &p in pats.iter() { - let pat = Pattern::new(p).unwrap(); - for c in "abcdefghijklmnopqrstuvwxyz".chars() { - assert!(pat.matches(&c.to_string())); - } - for c in "ABCDEFGHIJKLMNOPQRSTUVWXYZ".chars() { - let options = MatchOptions { - case_sensitive: false, - ..MatchOptions::new() - }; - assert!(pat.matches_with(&c.to_string(), options)); - } - assert!(pat.matches("1")); - assert!(pat.matches("2")); - assert!(pat.matches("3")); - } - - let pats = ["[abc-]", "[-abc]", "[a-c-]"]; - for &p in pats.iter() { - let pat = Pattern::new(p).unwrap(); - assert!(pat.matches("a")); - assert!(pat.matches("b")); - assert!(pat.matches("c")); - assert!(pat.matches("-")); - assert!(!pat.matches("d")); - } - - let pat = Pattern::new("[2-1]").unwrap(); - assert!(!pat.matches("1")); - assert!(!pat.matches("2")); - - assert!(Pattern::new("[-]").unwrap().matches("-")); - assert!(!Pattern::new("[!-]").unwrap().matches("-")); - } - - #[test] - fn test_pattern_matches() { - let txt_pat = Pattern::new("*hello.txt").unwrap(); - assert!(txt_pat.matches("hello.txt")); - assert!(txt_pat.matches("gareth_says_hello.txt")); - assert!(txt_pat.matches("some/path/to/hello.txt")); - assert!(txt_pat.matches("some\\path\\to\\hello.txt")); - assert!(txt_pat.matches("/an/absolute/path/to/hello.txt")); - assert!(!txt_pat.matches("hello.txt-and-then-some")); - assert!(!txt_pat.matches("goodbye.txt")); - - let dir_pat = Pattern::new("*some/path/to/hello.txt").unwrap(); - assert!(dir_pat.matches("some/path/to/hello.txt")); - assert!(dir_pat.matches("a/bigger/some/path/to/hello.txt")); - assert!(!dir_pat.matches("some/path/to/hello.txt-and-then-some")); - assert!(!dir_pat.matches("some/other/path/to/hello.txt")); - } - - #[test] - fn test_pattern_escape() { - let s = "_[_]_?_*_!_"; - assert_eq!(Pattern::escape(s), "_[[]_[]]_[?]_[*]_!_".to_string()); - assert!(Pattern::new(&Pattern::escape(s)).unwrap().matches(s)); - } - - #[test] - fn test_pattern_matches_case_insensitive() { - let pat = Pattern::new("aBcDeFg").unwrap(); - let options = MatchOptions { - case_sensitive: false, - require_literal_separator: false, - require_literal_leading_dot: false, - }; - - assert!(pat.matches_with("aBcDeFg", options)); - assert!(pat.matches_with("abcdefg", options)); - assert!(pat.matches_with("ABCDEFG", options)); - assert!(pat.matches_with("AbCdEfG", options)); - } - - #[test] - fn test_pattern_matches_case_insensitive_range() { - let pat_within = Pattern::new("[a]").unwrap(); - let pat_except = Pattern::new("[!a]").unwrap(); - - let options_case_insensitive = MatchOptions { - case_sensitive: false, - require_literal_separator: false, - require_literal_leading_dot: false, - }; - let options_case_sensitive = MatchOptions { - case_sensitive: true, - require_literal_separator: false, - require_literal_leading_dot: false, - }; - - assert!(pat_within.matches_with("a", options_case_insensitive)); - 
assert!(pat_within.matches_with("A", options_case_insensitive)); - assert!(!pat_within.matches_with("A", options_case_sensitive)); - - assert!(!pat_except.matches_with("a", options_case_insensitive)); - assert!(!pat_except.matches_with("A", options_case_insensitive)); - assert!(pat_except.matches_with("A", options_case_sensitive)); - } - - #[test] - fn test_pattern_matches_require_literal_separator() { - let options_require_literal = MatchOptions { - case_sensitive: true, - require_literal_separator: true, - require_literal_leading_dot: false, - }; - let options_not_require_literal = MatchOptions { - case_sensitive: true, - require_literal_separator: false, - require_literal_leading_dot: false, - }; - - assert!( - Pattern::new("abc/def") - .unwrap() - .matches_with("abc/def", options_require_literal) - ); - assert!( - !Pattern::new("abc?def") - .unwrap() - .matches_with("abc/def", options_require_literal) - ); - assert!( - !Pattern::new("abc*def") - .unwrap() - .matches_with("abc/def", options_require_literal) - ); - assert!( - !Pattern::new("abc[/]def") - .unwrap() - .matches_with("abc/def", options_require_literal) - ); - - assert!( - Pattern::new("abc/def") - .unwrap() - .matches_with("abc/def", options_not_require_literal) - ); - assert!( - Pattern::new("abc?def") - .unwrap() - .matches_with("abc/def", options_not_require_literal) - ); - assert!( - Pattern::new("abc*def") - .unwrap() - .matches_with("abc/def", options_not_require_literal) - ); - assert!( - Pattern::new("abc[/]def") - .unwrap() - .matches_with("abc/def", options_not_require_literal) - ); - } - - #[test] - fn test_pattern_matches_require_literal_leading_dot() { - let options_require_literal_leading_dot = MatchOptions { - case_sensitive: true, - require_literal_separator: false, - require_literal_leading_dot: true, - }; - let options_not_require_literal_leading_dot = MatchOptions { - case_sensitive: true, - require_literal_separator: false, - require_literal_leading_dot: false, - }; - - let f = |options| { - Pattern::new("*.txt") - .unwrap() - .matches_with(".hello.txt", options) - }; - assert!(f(options_not_require_literal_leading_dot)); - assert!(!f(options_require_literal_leading_dot)); - - let f = |options| { - Pattern::new(".*.*") - .unwrap() - .matches_with(".hello.txt", options) - }; - assert!(f(options_not_require_literal_leading_dot)); - assert!(f(options_require_literal_leading_dot)); - - let f = |options| { - Pattern::new("aaa/bbb/*") - .unwrap() - .matches_with("aaa/bbb/.ccc", options) - }; - assert!(f(options_not_require_literal_leading_dot)); - assert!(!f(options_require_literal_leading_dot)); - - let f = |options| { - Pattern::new("aaa/bbb/*") - .unwrap() - .matches_with("aaa/bbb/c.c.c.", options) - }; - assert!(f(options_not_require_literal_leading_dot)); - assert!(f(options_require_literal_leading_dot)); - - let f = |options| { - Pattern::new("aaa/bbb/.*") - .unwrap() - .matches_with("aaa/bbb/.ccc", options) - }; - assert!(f(options_not_require_literal_leading_dot)); - assert!(f(options_require_literal_leading_dot)); - - let f = |options| { - Pattern::new("aaa/?bbb") - .unwrap() - .matches_with("aaa/.bbb", options) - }; - assert!(f(options_not_require_literal_leading_dot)); - assert!(!f(options_require_literal_leading_dot)); - - let f = |options| { - Pattern::new("aaa/[.]bbb") - .unwrap() - .matches_with("aaa/.bbb", options) - }; - assert!(f(options_not_require_literal_leading_dot)); - assert!(!f(options_require_literal_leading_dot)); - - let f = |options| 
Pattern::new("**/*").unwrap().matches_with(".bbb", options); - assert!(f(options_not_require_literal_leading_dot)); - assert!(!f(options_require_literal_leading_dot)); - } - - #[test] - fn test_matches_path() { - // on windows, (Path::new("a/b").as_str().unwrap() == "a\\b"), so this - // tests that / and \ are considered equivalent on windows - assert!(Pattern::new("a/b").unwrap().matches_path(Path::new("a/b"))); - } - - #[test] - fn test_path_join() { - let pattern = Path::new("one").join(Path::new("**/*.rs")); - assert!(Pattern::new(pattern.to_str().unwrap()).is_ok()); - } - - #[test] - fn test_pattern_relative() { - assert!(Pattern::new("./b").unwrap().matches_path(Path::new("a/b"))); - assert!(Pattern::new("b").unwrap().matches_path(Path::new("a/b"))); - - if cfg!(windows) { - assert!( - Pattern::new(".\\b") - .unwrap() - .matches_path(Path::new("a\\b")) - ); - assert!(Pattern::new("b").unwrap().matches_path(Path::new("a\\b"))); - } - } - - #[test] - fn test_pattern_absolute() { - assert!( - Pattern::new("/a/b") - .unwrap() - .matches_path(Path::new("/a/b")) - ); - - if cfg!(windows) { - assert!( - Pattern::new("c:/a/b") - .unwrap() - .matches_path(Path::new("c:/a/b")) - ); - assert!( - Pattern::new("C:\\a\\b") - .unwrap() - .matches_path(Path::new("C:\\a\\b")) - ); - - assert!( - Pattern::new("\\\\?\\c:\\a\\b") - .unwrap() - .matches_path(Path::new("\\\\?\\c:\\a\\b")) - ); - assert!( - Pattern::new("\\\\?\\C:/a/b") - .unwrap() - .matches_path(Path::new("\\\\?\\C:/a/b")) - ); - } - } - - #[test] - fn test_pattern_glob() { - assert!( - Pattern::new("*.js") - .unwrap() - .matches_path(Path::new("b/c.js")) - ); - - assert!( - Pattern::new("**/*.js") - .unwrap() - .matches_path(Path::new("b/c.js")) - ); - - assert!( - Pattern::new("*.js") - .unwrap() - .matches_path(Path::new("/a/b/c.js")) - ); - - assert!( - Pattern::new("**/*.js") - .unwrap() - .matches_path(Path::new("/a/b/c.js")) - ); - - if cfg!(windows) { - assert!( - Pattern::new("*.js") - .unwrap() - .matches_path(Path::new("C:\\a\\b\\c.js")) - ); - - assert!( - Pattern::new("**/*.js") - .unwrap() - .matches_path(Path::new("\\\\?\\C:\\a\\b\\c.js")) - ); - } - } - - #[test] - fn test_pattern_glob_brackets() { - let pattern = Pattern::parse("{foo.js,bar.js}", true).unwrap(); - assert!(pattern.matches_path(Path::new("foo.js"))); - assert!(pattern.matches_path(Path::new("bar.js"))); - assert!(!pattern.matches_path(Path::new("baz.js"))); - - let pattern = Pattern::parse("{foo,bar}.js", true).unwrap(); - assert!(pattern.matches_path(Path::new("foo.js"))); - assert!(pattern.matches_path(Path::new("bar.js"))); - assert!(!pattern.matches_path(Path::new("baz.js"))); - - assert!( - Pattern::parse("**/{foo,bar}.js", true) - .unwrap() - .matches_path(Path::new("a/b/foo.js")) - ); - - let pattern = Pattern::parse("src/{a/foo,bar}.js", true).unwrap(); - assert!(pattern.matches_path(Path::new("src/a/foo.js"))); - assert!(pattern.matches_path(Path::new("src/bar.js"))); - assert!(!pattern.matches_path(Path::new("src/a/b/foo.js"))); - assert!(!pattern.matches_path(Path::new("src/a/bar.js"))); - - let pattern = Pattern::parse("src/{a,b}/{c,d}/foo.js", true).unwrap(); - assert!(pattern.matches_path(Path::new("src/a/c/foo.js"))); - assert!(pattern.matches_path(Path::new("src/a/d/foo.js"))); - assert!(pattern.matches_path(Path::new("src/b/c/foo.js"))); - assert!(pattern.matches_path(Path::new("src/b/d/foo.js"))); - assert!(!pattern.matches_path(Path::new("src/bar/foo.js"))); - - let _ = Pattern::parse("{{foo,bar},baz}", true) - .expect_err("should not 
allow curly brackets more than 1 level deep"); - } - - #[test] - fn test_pattern_glob_brackets_not_available_by_default() { - // RODO: Remove this test when we make brackets available by default - let pattern = Pattern::parse("{foo.js,bar.js}", false).unwrap(); - assert!(!pattern.matches_path(Path::new("foo.js"))); - assert!(!pattern.matches_path(Path::new("bar.js"))); - assert!(!pattern.matches_path(Path::new("baz.js"))); - } -} diff --git a/crates/pgt_workspace/src/settings.rs b/crates/pgt_workspace/src/settings.rs deleted file mode 100644 index f9275aa9..00000000 --- a/crates/pgt_workspace/src/settings.rs +++ /dev/null @@ -1,491 +0,0 @@ -use biome_deserialize::StringSet; -use globset::Glob; -use pgt_diagnostics::Category; -use std::{ - borrow::Cow, - num::NonZeroU64, - path::{Path, PathBuf}, - sync::{RwLock, RwLockReadGuard, RwLockWriteGuard}, - time::Duration, -}; - -use ignore::gitignore::{Gitignore, GitignoreBuilder}; -use pgt_configuration::{ - ConfigurationDiagnostic, LinterConfiguration, PartialConfiguration, - database::PartialDatabaseConfiguration, - diagnostics::InvalidIgnorePattern, - files::FilesConfiguration, - migrations::{MigrationsConfiguration, PartialMigrationsConfiguration}, -}; -use pgt_fs::FileSystem; - -use crate::{DynRef, WorkspaceError, matcher::Matcher}; - -/// Global settings for the entire workspace -#[derive(Debug, Default)] -pub struct Settings { - /// Filesystem settings for the workspace - pub files: FilesSettings, - - /// Database settings for the workspace - pub db: DatabaseSettings, - - /// Linter settings applied to all files in the workspace - pub linter: LinterSettings, - - /// Migrations settings - pub migrations: Option, -} - -#[derive(Debug)] -pub struct SettingsHandleMut<'a> { - inner: RwLockWriteGuard<'a, Settings>, -} - -/// Handle object holding a temporary lock on the settings -#[derive(Debug)] -pub struct SettingsHandle<'a> { - inner: RwLockReadGuard<'a, Settings>, -} - -impl<'a> SettingsHandle<'a> { - pub(crate) fn new(settings: &'a RwLock) -> Self { - Self { - inner: settings.read().unwrap(), - } - } -} - -impl AsRef for SettingsHandle<'_> { - fn as_ref(&self) -> &Settings { - &self.inner - } -} - -impl<'a> SettingsHandleMut<'a> { - pub(crate) fn new(settings: &'a RwLock) -> Self { - Self { - inner: settings.write().unwrap(), - } - } -} - -impl AsMut for SettingsHandleMut<'_> { - fn as_mut(&mut self) -> &mut Settings { - &mut self.inner - } -} - -impl Settings { - /// The [PartialConfiguration] is merged into the workspace - #[tracing::instrument(level = "trace", skip(self), err)] - pub fn merge_with_configuration( - &mut self, - configuration: PartialConfiguration, - working_directory: Option, - vcs_path: Option, - gitignore_matches: &[String], - ) -> Result<(), WorkspaceError> { - // Filesystem settings - if let Some(files) = to_file_settings( - working_directory.clone(), - configuration.files.map(FilesConfiguration::from), - vcs_path, - gitignore_matches, - )? 
{ - self.files = files; - } - - // db settings - if let Some(db) = configuration.db { - self.db = db.into() - } - - // linter part - if let Some(linter) = configuration.linter { - self.linter = - to_linter_settings(working_directory.clone(), LinterConfiguration::from(linter))?; - } - - // Migrations settings - if let Some(migrations) = configuration.migrations { - self.migrations = to_migration_settings( - working_directory.clone(), - MigrationsConfiguration::from(migrations), - ); - } - - Ok(()) - } - - /// Retrieves the settings of the linter - pub fn linter(&self) -> &LinterSettings { - &self.linter - } - - /// Returns linter rules. - pub fn as_linter_rules(&self) -> Option> { - self.linter.rules.as_ref().map(Cow::Borrowed) - } - - /// It retrieves the severity based on the `code` of the rule and the current configuration. - /// - /// The code of the has the following pattern: `{group}/{rule_name}`. - /// - /// It returns [None] if the `code` doesn't match any rule. - pub fn get_severity_from_rule_code( - &self, - code: &Category, - ) -> Option { - let rules = self.linter.rules.as_ref(); - if let Some(rules) = rules { - rules.get_severity_from_code(code) - } else { - None - } - } -} - -fn to_linter_settings( - working_directory: Option, - conf: LinterConfiguration, -) -> Result { - Ok(LinterSettings { - enabled: conf.enabled, - rules: Some(conf.rules), - ignored_files: to_matcher(working_directory.clone(), Some(&conf.ignore))?, - included_files: to_matcher(working_directory.clone(), Some(&conf.include))?, - }) -} - -fn to_file_settings( - working_directory: Option, - config: Option, - vcs_config_path: Option, - gitignore_matches: &[String], -) -> Result, WorkspaceError> { - let config = match config { - Some(config) => Some(config), - _ => { - if vcs_config_path.is_some() { - Some(FilesConfiguration::default()) - } else { - None - } - } - }; - let git_ignore = if let Some(vcs_config_path) = vcs_config_path { - Some(to_git_ignore(vcs_config_path, gitignore_matches)?) 
- } else { - None - }; - Ok(match config { - Some(config) => Some(FilesSettings { - max_size: config.max_size, - git_ignore, - ignored_files: to_matcher(working_directory.clone(), Some(&config.ignore))?, - included_files: to_matcher(working_directory, Some(&config.include))?, - }), - _ => None, - }) -} - -fn to_git_ignore(path: PathBuf, matches: &[String]) -> Result { - let mut gitignore_builder = GitignoreBuilder::new(path.clone()); - - for the_match in matches { - gitignore_builder - .add_line(Some(path.clone()), the_match) - .map_err(|err| { - ConfigurationDiagnostic::InvalidIgnorePattern(InvalidIgnorePattern { - message: err.to_string(), - file_path: path.to_str().map(|s| s.to_string()), - }) - })?; - } - let gitignore = gitignore_builder.build().map_err(|err| { - ConfigurationDiagnostic::InvalidIgnorePattern(InvalidIgnorePattern { - message: err.to_string(), - file_path: path.to_str().map(|s| s.to_string()), - }) - })?; - Ok(gitignore) -} - -/// Creates a [Matcher] from a [StringSet] -/// -/// ## Errors -/// -/// It can raise an error if the patterns aren't valid -pub fn to_matcher( - working_directory: Option, - string_set: Option<&StringSet>, -) -> Result { - let mut matcher = Matcher::empty(); - if let Some(working_directory) = working_directory { - matcher.set_root(working_directory) - } - if let Some(string_set) = string_set { - for pattern in string_set.iter() { - matcher.add_pattern(pattern).map_err(|err| { - ConfigurationDiagnostic::new_invalid_ignore_pattern( - pattern.to_string(), - err.msg.to_string(), - ) - })?; - } - } - Ok(matcher) -} - -/// Linter settings for the entire workspace -#[derive(Debug)] -pub struct LinterSettings { - /// Enabled by default - pub enabled: bool, - - /// List of rules - pub rules: Option, - - /// List of ignored paths/files to match - pub ignored_files: Matcher, - - /// List of included paths/files to match - pub included_files: Matcher, -} - -impl Default for LinterSettings { - fn default() -> Self { - Self { - enabled: true, - rules: Some(pgt_configuration::analyser::linter::Rules::default()), - ignored_files: Matcher::empty(), - included_files: Matcher::empty(), - } - } -} - -/// Database settings for the entire workspace -#[derive(Debug)] -pub struct DatabaseSettings { - pub enable_connection: bool, - pub host: String, - pub port: u16, - pub username: String, - pub password: String, - pub database: String, - pub conn_timeout_secs: Duration, - pub allow_statement_executions: bool, -} - -impl Default for DatabaseSettings { - fn default() -> Self { - Self { - enable_connection: false, - host: "127.0.0.1".to_string(), - port: 5432, - username: "postgres".to_string(), - password: "postgres".to_string(), - database: "postgres".to_string(), - conn_timeout_secs: Duration::from_secs(10), - allow_statement_executions: true, - } - } -} - -impl From for DatabaseSettings { - fn from(value: PartialDatabaseConfiguration) -> Self { - let d = DatabaseSettings::default(); - - // "host" is the minimum required setting for database features - // to be enabled. 
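-        // The `allow_statement_executions_against` globs checked below are
-        // matched against "host/database": e.g. "localhost/*" allows statement
-        // execution on every database of a local connection (see the tests at
-        // the bottom of this file).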
- let enable_connection = value - .host - .as_ref() - .is_some_and(|_| value.disable_connection.is_none_or(|disabled| !disabled)); - - let database = value.database.unwrap_or(d.database); - let host = value.host.unwrap_or(d.host); - - let allow_statement_executions = value - .allow_statement_executions_against - .map(|stringset| { - stringset.iter().any(|pattern| { - let glob = Glob::new(pattern) - .unwrap_or_else(|_| panic!("Invalid pattern: {}", pattern)) - .compile_matcher(); - - glob.is_match(format!("{}/{}", host, database)) - }) - }) - .unwrap_or(false); - - Self { - enable_connection, - - port: value.port.unwrap_or(d.port), - username: value.username.unwrap_or(d.username), - password: value.password.unwrap_or(d.password), - database, - host, - - conn_timeout_secs: value - .conn_timeout_secs - .map(|s| Duration::from_secs(s.into())) - .unwrap_or(d.conn_timeout_secs), - - allow_statement_executions, - } - } -} - -/// Filesystem settings for the entire workspace -#[derive(Debug)] -pub struct FilesSettings { - /// File size limit in bytes - pub max_size: NonZeroU64, - - /// List of paths/files to matcher - pub ignored_files: Matcher, - - /// List of paths/files to matcher - pub included_files: Matcher, - - /// gitignore file patterns - pub git_ignore: Option, -} - -/// Migration settings -#[derive(Debug, Default)] -pub struct MigrationSettings { - pub path: Option, - pub after: Option, -} - -impl From for MigrationSettings { - fn from(value: PartialMigrationsConfiguration) -> Self { - Self { - path: value.migrations_dir.map(PathBuf::from), - after: value.after, - } - } -} - -fn to_migration_settings( - working_directory: Option, - conf: MigrationsConfiguration, -) -> Option { - working_directory.map(|working_directory| MigrationSettings { - path: Some(working_directory.join(conf.migrations_dir)), - after: Some(conf.after), - }) -} - -/// Limit the size of files to 1.0 MiB by default -pub(crate) const DEFAULT_FILE_SIZE_LIMIT: NonZeroU64 = - // SAFETY: This constant is initialized with a non-zero value - NonZeroU64::new(1024 * 1024).unwrap(); - -impl Default for FilesSettings { - fn default() -> Self { - Self { - max_size: DEFAULT_FILE_SIZE_LIMIT, - ignored_files: Matcher::empty(), - included_files: Matcher::empty(), - git_ignore: None, - } - } -} - -pub trait PartialConfigurationExt { - fn retrieve_gitignore_matches( - &self, - file_system: &DynRef<'_, dyn FileSystem>, - vcs_base_path: Option<&Path>, - ) -> Result<(Option, Vec), WorkspaceError>; -} - -impl PartialConfigurationExt for PartialConfiguration { - /// This function checks if the VCS integration is enabled, and if so, it will attempts to resolve the - /// VCS root directory and the `.gitignore` file. 
- /// - /// ## Returns - /// - /// A tuple with VCS root folder and the contents of the `.gitignore` file - fn retrieve_gitignore_matches( - &self, - file_system: &DynRef<'_, dyn FileSystem>, - vcs_base_path: Option<&Path>, - ) -> Result<(Option, Vec), WorkspaceError> { - let Some(vcs) = &self.vcs else { - return Ok((None, vec![])); - }; - if vcs.is_enabled() { - let vcs_base_path = match (vcs_base_path, &vcs.root) { - (Some(vcs_base_path), Some(root)) => vcs_base_path.join(root), - (None, Some(root)) => PathBuf::from(root), - (Some(vcs_base_path), None) => PathBuf::from(vcs_base_path), - (None, None) => return Err(WorkspaceError::vcs_disabled()), - }; - if let Some(client_kind) = &vcs.client_kind { - if !vcs.ignore_file_disabled() { - let result = file_system - .auto_search(&vcs_base_path, &[client_kind.ignore_file()], false) - .map_err(WorkspaceError::from)?; - - if let Some(result) = result { - return Ok(( - result.file_path.parent().map(PathBuf::from), - result - .content - .lines() - .map(String::from) - .collect::>(), - )); - } - } - } - } - Ok((None, vec![])) - } -} - -#[cfg(test)] -mod tests { - use biome_deserialize::StringSet; - use pgt_configuration::database::PartialDatabaseConfiguration; - - use super::DatabaseSettings; - - #[test] - fn should_identify_allowed_statement_executions() { - let partial_config = PartialDatabaseConfiguration { - allow_statement_executions_against: Some(StringSet::from_iter(vec![String::from( - "localhost/*", - )])), - host: Some("localhost".into()), - database: Some("test-db".into()), - ..Default::default() - }; - - let config = DatabaseSettings::from(partial_config); - - assert!(config.allow_statement_executions) - } - - #[test] - fn should_identify_not_allowed_statement_executions() { - let partial_config = PartialDatabaseConfiguration { - allow_statement_executions_against: Some(StringSet::from_iter(vec![String::from( - "localhost/*", - )])), - host: Some("production".into()), - database: Some("test-db".into()), - ..Default::default() - }; - - let config = DatabaseSettings::from(partial_config); - - assert!(!config.allow_statement_executions) - } -} diff --git a/crates/pgt_workspace/src/workspace.rs b/crates/pgt_workspace/src/workspace.rs deleted file mode 100644 index 873dd83e..00000000 --- a/crates/pgt_workspace/src/workspace.rs +++ /dev/null @@ -1,224 +0,0 @@ -use std::{panic::RefUnwindSafe, path::PathBuf, sync::Arc}; - -pub use self::client::{TransportRequest, WorkspaceClient, WorkspaceTransport}; -use pgt_analyse::RuleCategories; -use pgt_configuration::{PartialConfiguration, RuleSelector}; -use pgt_fs::PgTPath; -use pgt_text_size::TextRange; -use serde::{Deserialize, Serialize}; - -use crate::{ - WorkspaceError, - features::{ - code_actions::{ - CodeActionsParams, CodeActionsResult, ExecuteStatementParams, ExecuteStatementResult, - }, - completions::{CompletionsResult, GetCompletionsParams}, - diagnostics::{PullDiagnosticsParams, PullDiagnosticsResult}, - }, -}; - -mod client; -mod server; - -pub use server::StatementId; -pub(crate) use server::parsed_document::*; - -#[derive(Debug, serde::Serialize, serde::Deserialize)] -#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] -pub struct OpenFileParams { - pub path: PgTPath, - pub content: String, - pub version: i32, -} - -#[derive(Debug, serde::Serialize, serde::Deserialize)] -#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] -pub struct CloseFileParams { - pub path: PgTPath, -} - -#[derive(Debug, serde::Serialize, serde::Deserialize)] -#[cfg_attr(feature = "schema", 
derive(schemars::JsonSchema))] -pub struct ChangeFileParams { - pub path: PgTPath, - pub version: i32, - pub changes: Vec, -} - -#[derive(Debug, serde::Serialize, serde::Deserialize)] -#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] -pub struct ChangeParams { - /// The range of the file that changed. If `None`, the whole file changed. - pub range: Option, - pub text: String, -} - -impl ChangeParams { - pub fn overwrite(text: String) -> Self { - Self { range: None, text } - } -} - -#[derive(Debug, serde::Serialize, serde::Deserialize)] -#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] -pub struct IsPathIgnoredParams { - pub pgt_path: PgTPath, -} - -#[derive(Debug, serde::Serialize, serde::Deserialize)] -#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] -pub struct UpdateSettingsParams { - pub configuration: PartialConfiguration, - pub vcs_base_path: Option, - pub gitignore_matches: Vec, - pub workspace_directory: Option, -} - -#[derive(Debug, serde::Serialize, serde::Deserialize)] -#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] -pub struct GetFileContentParams { - pub path: PgTPath, -} - -#[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] -#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] -pub struct ServerInfo { - /// The name of the server as defined by the server. - pub name: String, - - /// The server's version as defined by the server. - #[serde(skip_serializing_if = "Option::is_none")] - pub version: Option, -} - -pub trait Workspace: Send + Sync + RefUnwindSafe { - /// Retrieves the list of diagnostics associated to a file - fn pull_diagnostics( - &self, - params: PullDiagnosticsParams, - ) -> Result; - - /// Retrieves a list of available code_actions for a file/cursor_position - fn pull_code_actions( - &self, - params: CodeActionsParams, - ) -> Result; - - fn get_completions( - &self, - params: GetCompletionsParams, - ) -> Result; - - /// Update the global settings for this workspace - fn update_settings(&self, params: UpdateSettingsParams) -> Result<(), WorkspaceError>; - - /// Add a new file to the workspace - fn open_file(&self, params: OpenFileParams) -> Result<(), WorkspaceError>; - - /// Remove a file from the workspace - fn close_file(&self, params: CloseFileParams) -> Result<(), WorkspaceError>; - - /// Change the content of an open file - fn change_file(&self, params: ChangeFileParams) -> Result<(), WorkspaceError>; - - /// Returns information about the server this workspace is connected to or `None` if the workspace isn't connected to a server. - fn server_info(&self) -> Option<&ServerInfo>; - - /// Return the content of a file - fn get_file_content(&self, params: GetFileContentParams) -> Result; - - /// Checks if the current path is ignored by the workspace. - /// - /// Takes as input the path of the file that workspace is currently processing and - /// a list of paths to match against. - /// - /// If the file path matches, then `true` is returned, and it should be considered ignored. 
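To sketch how a caller is expected to drive this trait, the example below opens a document, pulls diagnostics once, and lets the `FileGuard` defined further down close the file on drop. It assumes a `PgTPath::new` constructor and a `RuleCategories::all()` helper, both hypothetical here:

```rust
use pgt_analyse::RuleCategories;
use pgt_fs::PgTPath;

fn check_one_file(workspace: &dyn Workspace) -> Result<(), WorkspaceError> {
    // RAII guard: `close_file` runs even if we bail out early.
    let file = FileGuard::open(
        workspace,
        OpenFileParams {
            path: PgTPath::new("queries.sql"), // hypothetical constructor
            content: "select 1;".to_string(),
            version: 0,
        },
    )?;

    // All rule categories, up to 100 diagnostics, no `only`/`skip` selectors.
    let result = file.pull_diagnostics(RuleCategories::all(), 100, vec![], vec![])?;
    println!("{} diagnostic(s)", result.diagnostics.len());

    Ok(())
} // `file` dropped here; the guard closes it in the workspace.
```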
- fn is_path_ignored(&self, params: IsPathIgnoredParams) -> Result; - - fn execute_statement( - &self, - params: ExecuteStatementParams, - ) -> Result; -} - -/// Convenience function for constructing a server instance of [Workspace] -pub fn server() -> Box { - Box::new(server::WorkspaceServer::new()) -} - -/// Convenience function for constructing a server instance of [Workspace] -pub fn server_sync() -> Arc { - Arc::new(server::WorkspaceServer::new()) -} - -// Convenience function for constructing a client instance of [Workspace] -pub fn client(transport: T) -> Result, WorkspaceError> -where - T: WorkspaceTransport + RefUnwindSafe + Send + Sync + 'static, -{ - Ok(Box::new(client::WorkspaceClient::new(transport)?)) -} - -/// [RAII](https://en.wikipedia.org/wiki/Resource_acquisition_is_initialization) -/// guard for an open file in a workspace, takes care of closing the file -/// automatically on drop -pub struct FileGuard<'app, W: Workspace + ?Sized> { - workspace: &'app W, - path: PgTPath, -} - -impl<'app, W: Workspace + ?Sized> FileGuard<'app, W> { - pub fn open(workspace: &'app W, params: OpenFileParams) -> Result { - let path = params.path.clone(); - workspace.open_file(params)?; - Ok(Self { workspace, path }) - } - - pub fn change_file( - &self, - version: i32, - changes: Vec, - ) -> Result<(), WorkspaceError> { - self.workspace.change_file(ChangeFileParams { - path: self.path.clone(), - version, - changes, - }) - } - - pub fn get_file_content(&self) -> Result { - self.workspace.get_file_content(GetFileContentParams { - path: self.path.clone(), - }) - } - - pub fn pull_diagnostics( - &self, - categories: RuleCategories, - max_diagnostics: u32, - only: Vec, - skip: Vec, - ) -> Result { - self.workspace.pull_diagnostics(PullDiagnosticsParams { - path: self.path.clone(), - categories, - max_diagnostics: max_diagnostics.into(), - only, - skip, - }) - } -} - -impl Drop for FileGuard<'_, W> { - fn drop(&mut self) { - self.workspace - .close_file(CloseFileParams { - path: self.path.clone(), - }) - // `close_file` can only error if the file was already closed, in - // this case it's generally better to silently matcher the error - // than panic (especially in a drop handler) - .ok(); - } -} diff --git a/crates/pgt_workspace/src/workspace/client.rs b/crates/pgt_workspace/src/workspace/client.rs deleted file mode 100644 index d727fff6..00000000 --- a/crates/pgt_workspace/src/workspace/client.rs +++ /dev/null @@ -1,147 +0,0 @@ -use crate::workspace::ServerInfo; -use crate::{TransportError, Workspace, WorkspaceError}; -use serde::{Deserialize, Serialize, de::DeserializeOwned}; -use serde_json::json; -use std::{ - panic::RefUnwindSafe, - sync::atomic::{AtomicU64, Ordering}, -}; - -use super::{CloseFileParams, GetFileContentParams, IsPathIgnoredParams, OpenFileParams}; - -pub struct WorkspaceClient { - transport: T, - request_id: AtomicU64, - server_info: Option, -} - -pub trait WorkspaceTransport { - fn request(&self, request: TransportRequest
<P>) -> Result<R, TransportError>
-    where
-        P: Serialize,
-        R: DeserializeOwned;
-}
-
-#[derive(Debug)]
-pub struct TransportRequest<P>
{ - pub id: u64, - pub method: &'static str, - pub params: P, -} - -#[derive(Debug, PartialEq, Eq, Clone, Default, Deserialize, Serialize)] -pub struct InitializeResult { - /// Information about the server. - #[serde(skip_serializing_if = "Option::is_none")] - pub server_info: Option, -} - -impl WorkspaceClient -where - T: WorkspaceTransport + RefUnwindSafe + Send + Sync, -{ - pub fn new(transport: T) -> Result { - let mut client = Self { - transport, - request_id: AtomicU64::new(0), - server_info: None, - }; - - // TODO: The current implementation of the JSON-RPC protocol in - // tower_lsp doesn't allow any request to be sent before a call to - // initialize, this is something we could be able to lift by using our - // own RPC protocol implementation - let value: InitializeResult = client.request( - "initialize", - json!({ - "capabilities": {}, - "clientInfo": { - "name": env!("CARGO_PKG_NAME"), - "version": pgt_configuration::VERSION - }, - }), - )?; - - client.server_info = value.server_info; - - Ok(client) - } - - fn request(&self, method: &'static str, params: P) -> Result - where - P: Serialize, - R: DeserializeOwned, - { - let id = self.request_id.fetch_add(1, Ordering::Relaxed); - let request = TransportRequest { id, method, params }; - - let response = self.transport.request(request)?; - - Ok(response) - } - - pub fn shutdown(self) -> Result<(), WorkspaceError> { - self.request("pgt/shutdown", ()) - } -} - -impl Workspace for WorkspaceClient -where - T: WorkspaceTransport + RefUnwindSafe + Send + Sync, -{ - fn pull_code_actions( - &self, - params: crate::features::code_actions::CodeActionsParams, - ) -> Result { - self.request("pgt/code_actions", params) - } - - fn execute_statement( - &self, - params: crate::features::code_actions::ExecuteStatementParams, - ) -> Result { - self.request("pgt/execute_statement", params) - } - - fn open_file(&self, params: OpenFileParams) -> Result<(), WorkspaceError> { - self.request("pgt/open_file", params) - } - - fn close_file(&self, params: CloseFileParams) -> Result<(), WorkspaceError> { - self.request("pgt/close_file", params) - } - - fn change_file(&self, params: super::ChangeFileParams) -> Result<(), WorkspaceError> { - self.request("pgt/change_file", params) - } - - fn update_settings(&self, params: super::UpdateSettingsParams) -> Result<(), WorkspaceError> { - self.request("pgt/update_settings", params) - } - - fn is_path_ignored(&self, params: IsPathIgnoredParams) -> Result { - self.request("pgt/is_path_ignored", params) - } - - fn server_info(&self) -> Option<&ServerInfo> { - self.server_info.as_ref() - } - - fn get_file_content(&self, params: GetFileContentParams) -> Result { - self.request("pgt/get_file_content", params) - } - - fn pull_diagnostics( - &self, - params: crate::features::diagnostics::PullDiagnosticsParams, - ) -> Result { - self.request("pgt/pull_diagnostics", params) - } - - fn get_completions( - &self, - params: super::GetCompletionsParams, - ) -> Result { - self.request("pgt/get_completions", params) - } -} diff --git a/crates/pgt_workspace/src/workspace/server.rs b/crates/pgt_workspace/src/workspace/server.rs deleted file mode 100644 index 82e79e10..00000000 --- a/crates/pgt_workspace/src/workspace/server.rs +++ /dev/null @@ -1,547 +0,0 @@ -use std::{ - fs, - panic::RefUnwindSafe, - path::Path, - sync::{Arc, RwLock}, -}; - -use analyser::AnalyserVisitorBuilder; -use async_helper::run_async; -use dashmap::DashMap; -use db_connection::DbConnection; -use document::Document; -use futures::{StreamExt, stream}; -use 
parsed_document::{
-    AsyncDiagnosticsMapper, CursorPositionFilter, DefaultMapper, ExecuteStatementMapper,
-    ParsedDocument, SyncDiagnosticsMapper,
-};
-use pgt_analyse::{AnalyserOptions, AnalysisFilter};
-use pgt_analyser::{Analyser, AnalyserConfig, AnalyserContext};
-use pgt_diagnostics::{
-    Diagnostic, DiagnosticExt, Error, Severity, serde::Diagnostic as SDiagnostic,
-};
-use pgt_fs::{ConfigName, PgTPath};
-use pgt_typecheck::{IdentifierType, TypecheckParams, TypedIdentifier};
-use schema_cache_manager::SchemaCacheManager;
-use sqlx::Executor;
-use tracing::info;
-
-use crate::{
-    WorkspaceError,
-    configuration::to_analyser_rules,
-    features::{
-        code_actions::{
-            self, CodeAction, CodeActionKind, CodeActionsResult, CommandAction,
-            CommandActionCategory, ExecuteStatementParams, ExecuteStatementResult,
-        },
-        completions::{CompletionsResult, GetCompletionsParams, get_statement_for_completions},
-        diagnostics::{PullDiagnosticsParams, PullDiagnosticsResult},
-    },
-    settings::{Settings, SettingsHandle, SettingsHandleMut},
-};
-
-use super::{
-    GetFileContentParams, IsPathIgnoredParams, OpenFileParams, ServerInfo, UpdateSettingsParams,
-    Workspace,
-};
-
-pub use statement_identifier::StatementId;
-
-mod analyser;
-mod annotation;
-mod async_helper;
-mod change;
-mod db_connection;
-pub(crate) mod document;
-mod migration;
-pub(crate) mod parsed_document;
-mod pg_query;
-mod schema_cache_manager;
-mod sql_function;
-mod statement_identifier;
-mod tree_sitter;
-
-pub(super) struct WorkspaceServer {
-    /// Global settings object for this workspace
-    settings: RwLock<Settings>,
-
-    /// Stores the schema cache for this workspace
-    schema_cache: SchemaCacheManager,
-
-    parsed_documents: DashMap<PgTPath, ParsedDocument>,
-
-    connection: RwLock<DbConnection>,
-}
-
-/// The `Workspace` object is long-lived, so we want it to be able to cross
-/// unwind boundaries.
-/// In return, we have to make sure operations on the workspace either do not
-/// panic, or that panicking will not result in any broken invariant (it would
-/// not result in any undefined behavior, as catching an unwind is safe, but it
-/// could lead to hard-to-debug issues)
-impl RefUnwindSafe for WorkspaceServer {}
-
-impl WorkspaceServer {
-    /// Create a new [Workspace]
-    ///
-    /// This is implemented as a crate-private method instead of using
-    /// [Default] to disallow instances of [Workspace] from being created
-    /// outside a [crate::App]
-    pub(crate) fn new() -> Self {
-        Self {
-            settings: RwLock::default(),
-            parsed_documents: DashMap::default(),
-            schema_cache: SchemaCacheManager::default(),
-            connection: RwLock::default(),
-        }
-    }
-
-    /// Provides a reference to the current settings
-    fn settings(&self) -> SettingsHandle {
-        SettingsHandle::new(&self.settings)
-    }
-
-    fn settings_mut(&self) -> SettingsHandleMut {
-        SettingsHandleMut::new(&self.settings)
-    }
-
-    fn is_ignored_by_migration_config(&self, path: &Path) -> bool {
-        let set = self.settings();
-        set.as_ref()
-            .migrations
-            .as_ref()
-            .and_then(|migration_settings| {
-                let ignore_before = migration_settings.after.as_ref()?;
-                let migrations_dir = migration_settings.path.as_ref()?;
-                let migration = migration::get_migration(path, migrations_dir)?;
-
-                Some(&migration.sequence_number <= ignore_before)
-            })
-            .unwrap_or(false)
-    }
-
-    /// Check whether a file is ignored in the top-level config `files.ignore`/`files.include`
-    fn is_ignored(&self, path: &Path) -> bool {
-        let file_name = path.file_name().and_then(|s| s.to_str());
-        // Never ignore Postgres Tools's config file, regardless of `include`/`ignore`
-        (file_name != Some(ConfigName::pgt_jsonc())) &&
-        // Apply top-level `include`/`ignore`
-        (self.is_ignored_by_top_level_config(path) || self.is_ignored_by_migration_config(path))
-    }
-
-    /// Check whether a file is ignored in the top-level config `files.ignore`/`files.include`
-    fn is_ignored_by_top_level_config(&self, path: &Path) -> bool {
-        let set = self.settings();
-        let settings = set.as_ref();
-        let is_included = settings.files.included_files.is_empty()
-            || is_dir(path)
-            || settings.files.included_files.matches_path(path);
-        !is_included
-            || settings.files.ignored_files.matches_path(path)
-            || settings.files.git_ignore.as_ref().is_some_and(|ignore| {
-                // `matched_path_or_any_parents` panics if `source` is not under the gitignore root.
-                // This check excludes absolute paths that are not a prefix of the base root.
-                if !path.has_root() || path.starts_with(ignore.path()) {
-                    // Because Postgres Tools passes a list of paths,
-                    // we use `matched_path_or_any_parents` instead of `matched`.
- ignore - .matched_path_or_any_parents(path, path.is_dir()) - .is_ignore() - } else { - false - } - }) - } -} - -impl Workspace for WorkspaceServer { - /// Update the global settings for this workspace - /// - /// ## Panics - /// This function may panic if the internal settings mutex has been poisoned - /// by another thread having previously panicked while holding the lock - #[tracing::instrument(level = "trace", skip(self), err)] - fn update_settings(&self, params: UpdateSettingsParams) -> Result<(), WorkspaceError> { - tracing::info!("Updating settings in workspace"); - - self.settings_mut().as_mut().merge_with_configuration( - params.configuration, - params.workspace_directory, - params.vcs_base_path, - params.gitignore_matches.as_slice(), - )?; - - tracing::info!("Updated settings in workspace"); - tracing::debug!("Updated settings are {:#?}", self.settings()); - - self.connection - .write() - .unwrap() - .set_conn_settings(&self.settings().as_ref().db); - - tracing::info!("Updated Db connection settings"); - - Ok(()) - } - - /// Add a new file to the workspace - #[tracing::instrument(level = "info", skip_all, fields(path = params.path.as_path().as_os_str().to_str()), err)] - fn open_file(&self, params: OpenFileParams) -> Result<(), WorkspaceError> { - self.parsed_documents - .entry(params.path.clone()) - .or_insert_with(|| { - ParsedDocument::new(params.path.clone(), params.content, params.version) - }); - - Ok(()) - } - - /// Remove a file from the workspace - fn close_file(&self, params: super::CloseFileParams) -> Result<(), WorkspaceError> { - self.parsed_documents - .remove(¶ms.path) - .ok_or_else(WorkspaceError::not_found)?; - - Ok(()) - } - - /// Change the content of an open file - #[tracing::instrument(level = "debug", skip_all, fields( - path = params.path.as_os_str().to_str(), - version = params.version - ), err)] - fn change_file(&self, params: super::ChangeFileParams) -> Result<(), WorkspaceError> { - let mut parser = - self.parsed_documents - .entry(params.path.clone()) - .or_insert(ParsedDocument::new( - params.path.clone(), - "".to_string(), - params.version, - )); - - parser.apply_change(params); - - Ok(()) - } - - fn server_info(&self) -> Option<&ServerInfo> { - None - } - - fn get_file_content(&self, params: GetFileContentParams) -> Result { - let document = self - .parsed_documents - .get(¶ms.path) - .ok_or(WorkspaceError::not_found())?; - Ok(document.get_document_content().to_string()) - } - - fn is_path_ignored(&self, params: IsPathIgnoredParams) -> Result { - Ok(self.is_ignored(params.pgt_path.as_path())) - } - - fn pull_code_actions( - &self, - params: code_actions::CodeActionsParams, - ) -> Result { - let parser = self - .parsed_documents - .get(¶ms.path) - .ok_or(WorkspaceError::not_found())?; - - let settings = self - .settings - .read() - .expect("Unable to read settings for Code Actions"); - - let disabled_reason: Option = if settings.db.allow_statement_executions { - None - } else { - Some("Statement execution not allowed against database.".into()) - }; - - let actions = parser - .iter_with_filter( - DefaultMapper, - CursorPositionFilter::new(params.cursor_position), - ) - .map(|(stmt, _, txt)| { - let title = format!( - "Execute Statement: {}...", - txt.chars().take(50).collect::() - ); - - CodeAction { - title, - kind: CodeActionKind::Command(CommandAction { - category: CommandActionCategory::ExecuteStatement(stmt), - }), - disabled_reason: disabled_reason.clone(), - } - }) - .collect(); - - Ok(CodeActionsResult { actions }) - } - - fn 
execute_statement( - &self, - params: ExecuteStatementParams, - ) -> Result { - let parser = self - .parsed_documents - .get(¶ms.path) - .ok_or(WorkspaceError::not_found())?; - - let stmt = parser.find(params.statement_id, ExecuteStatementMapper); - - if stmt.is_none() { - return Ok(ExecuteStatementResult { - message: "Statement was not found in document.".into(), - }); - }; - - let (_id, _range, content, ast) = stmt.unwrap(); - - if ast.is_none() { - return Ok(ExecuteStatementResult { - message: "Statement is invalid.".into(), - }); - }; - - let conn = self.connection.read().unwrap(); - let pool = match conn.get_pool() { - Some(p) => p, - None => { - return Ok(ExecuteStatementResult { - message: "Not connected to database.".into(), - }); - } - }; - - let result = run_async(async move { pool.execute(sqlx::query(&content)).await })??; - - Ok(ExecuteStatementResult { - message: format!( - "Successfully executed statement. Rows affected: {}", - result.rows_affected() - ), - }) - } - - fn pull_diagnostics( - &self, - params: PullDiagnosticsParams, - ) -> Result { - let settings = self.settings(); - - // create analyser for this run - // first, collect enabled and disabled rules from the workspace settings - let (enabled_rules, disabled_rules) = AnalyserVisitorBuilder::new(settings.as_ref()) - .with_linter_rules(¶ms.only, ¶ms.skip) - .finish(); - // then, build a map that contains all options - let options = AnalyserOptions { - rules: to_analyser_rules(settings.as_ref()), - }; - // next, build the analysis filter which will be used to match rules - let filter = AnalysisFilter { - categories: params.categories, - enabled_rules: Some(enabled_rules.as_slice()), - disabled_rules: &disabled_rules, - }; - // finally, create the analyser that will be used during this run - let analyser = Analyser::new(AnalyserConfig { - options: &options, - filter, - }); - - let parser = self - .parsed_documents - .get(¶ms.path) - .ok_or(WorkspaceError::not_found())?; - - let mut diagnostics: Vec = parser.document_diagnostics().to_vec(); - - if let Some(pool) = self - .connection - .read() - .expect("DbConnection RwLock panicked") - .get_pool() - { - let path_clone = params.path.clone(); - let schema_cache = self.schema_cache.load(pool.clone())?; - let schema_cache_arc = schema_cache.get_arc(); - let input = parser.iter(AsyncDiagnosticsMapper).collect::>(); - // sorry for the ugly code :( - let async_results = run_async(async move { - stream::iter(input) - .map(|(_id, range, content, ast, cst, sign)| { - let pool = pool.clone(); - let path = path_clone.clone(); - let schema_cache = Arc::clone(&schema_cache_arc); - async move { - if let Some(ast) = ast { - pgt_typecheck::check_sql(TypecheckParams { - conn: &pool, - sql: &content, - ast: &ast, - tree: &cst, - schema_cache: schema_cache.as_ref(), - identifiers: sign - .map(|s| { - s.args - .iter() - .map(|a| TypedIdentifier { - path: s.name.clone(), - name: a.name.clone(), - type_: IdentifierType { - schema: a.type_.schema.clone(), - name: a.type_.name.clone(), - is_array: a.type_.is_array, - }, - }) - .collect::>() - }) - .unwrap_or_default(), - }) - .await - .map(|d| { - d.map(|d| { - let r = d.location().span.map(|span| span + range.start()); - - d.with_file_path(path.as_path().display().to_string()) - .with_file_span(r.unwrap_or(range)) - }) - }) - } else { - Ok(None) - } - } - }) - .buffer_unordered(10) - .collect::>() - .await - })?; - - for result in async_results.into_iter() { - let result = result?; - if let Some(diag) = result { - 
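-                    // Each async result is a typecheck diagnostic that was already
-                    // rebased onto document coordinates above (statement-relative
-                    // span + `range.start()`), so it can be pushed as-is.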
diagnostics.push(SDiagnostic::new(diag)); - } - } - } - - diagnostics.extend(parser.iter(SyncDiagnosticsMapper).flat_map( - |(_id, range, ast, diag)| { - let mut errors: Vec<Error> = vec![]; - - if let Some(diag) = diag { - errors.push(diag.into()); - } - - if let Some(ast) = ast { - errors.extend( - analyser - .run(AnalyserContext { root: &ast }) - .into_iter() - .map(Error::from) - .collect::<Vec<_>>(), - ); - } - - errors - .into_iter() - .map(|d| { - let severity = d - .category() - .filter(|category| category.name().starts_with("lint/")) - .map_or_else( - || d.severity(), - |category| { - settings - .as_ref() - .get_severity_from_rule_code(category) - .unwrap_or(Severity::Warning) - }, - ); - - SDiagnostic::new( - d.with_file_path(params.path.as_path().display().to_string()) - .with_file_span(range) - .with_severity(severity), - ) - }) - .collect::<Vec<_>>() - }, - )); - - let errors = diagnostics - .iter() - .filter(|d| d.severity() == Severity::Error || d.severity() == Severity::Fatal) - .count(); - - info!("Pulled {:?} diagnostic(s)", diagnostics.len()); - Ok(PullDiagnosticsResult { - diagnostics, - errors, - skipped_diagnostics: 0, - }) - } - - #[tracing::instrument(level = "debug", skip_all, fields( - path = params.path.as_os_str().to_str(), - position = params.position.to_string() - ), err)] - fn get_completions( - &self, - params: GetCompletionsParams, - ) -> Result<CompletionsResult, WorkspaceError> { - let parsed_doc = self - .parsed_documents - .get(&params.path) - .ok_or(WorkspaceError::not_found())?; - - let pool = match self.connection.read().unwrap().get_pool() { - Some(pool) => pool, - None => { - tracing::debug!("No connection to database. Skipping completions."); - return Ok(CompletionsResult::default()); - } - }; - - let schema_cache = self.schema_cache.load(pool)?; - - match get_statement_for_completions(&parsed_doc, params.position) { - None => { - tracing::debug!("No statement found."); - Ok(CompletionsResult::default()) - } - Some((id, range, content, cst)) => { - let position = params.position - range.start(); - - let items = pgt_completions::complete(pgt_completions::CompletionParams { - position, - schema: schema_cache.as_ref(), - tree: &cst, - text: content, - }); - - tracing::debug!( - "Found {} completion items for statement with id {}", - items.len(), - id.raw() - ); - - Ok(CompletionsResult { items }) - } - } - } -}
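The completion flow above turns the document-wide cursor offset into a statement-local one before calling `pgt_completions::complete`: the engine only ever sees the text of a single statement, so the cursor is re-based with `params.position - range.start()`. A toy sketch of that mapping, with a hypothetical helper and plain integers standing in for `TextSize`:

```rust
/// Re-base a document-wide cursor offset onto a statement that starts at
/// `stmt_start`. `None` means the cursor sits before the statement.
fn to_statement_relative(cursor: u32, stmt_start: u32) -> Option<u32> {
    cursor.checked_sub(stmt_start)
}

fn main() {
    // cursor at byte 24 of the file, statement starts at byte 10:
    // the completion engine must be asked about offset 14.
    assert_eq!(to_statement_relative(24, 10), Some(14));
    assert_eq!(to_statement_relative(4, 10), None);
}
```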
- -/// Returns `true` if `path` is a directory or -/// if it is a symlink that resolves to a directory. -fn is_dir(path: &Path) -> bool { - path.is_dir() || (path.is_symlink() && fs::read_link(path).is_ok_and(|path| path.is_dir())) -} diff --git a/crates/pgt_workspace/src/workspace/server/analyser.rs b/crates/pgt_workspace/src/workspace/server/analyser.rs deleted file mode 100644 index d4b08ba1..00000000 --- a/crates/pgt_workspace/src/workspace/server/analyser.rs +++ /dev/null @@ -1,129 +0,0 @@ -use pgt_analyse::{GroupCategory, RegistryVisitor, Rule, RuleCategory, RuleFilter, RuleGroup}; -use pgt_configuration::RuleSelector; -use rustc_hash::FxHashSet; - -use crate::settings::Settings; - -pub(crate) struct AnalyserVisitorBuilder<'a, 'b> { - lint: Option<LintVisitor<'a, 'b>>, - settings: &'b Settings, -} - -impl<'a, 'b> AnalyserVisitorBuilder<'a, 'b> { - pub(crate) fn new(settings: &'b Settings) -> Self { - Self { - settings, - lint: None, - } - } - #[must_use] - pub(crate) fn with_linter_rules( - mut self, - only: &'b [RuleSelector], - skip: &'b [RuleSelector], - ) -> Self { - self.lint = Some(LintVisitor::new(only, skip, self.settings)); - self - } - - #[must_use] - pub(crate) fn finish(self) -> (Vec<RuleFilter<'a>>, Vec<RuleFilter<'a>>) { - let mut disabled_rules = vec![]; - let mut enabled_rules = vec![]; - if let Some(mut lint) = self.lint { - pgt_analyser::visit_registry(&mut lint); - let (linter_enabled_rules, linter_disabled_rules) = lint.finish(); - enabled_rules.extend(linter_enabled_rules); - disabled_rules.extend(linter_disabled_rules); - } - - (enabled_rules, disabled_rules) - } -} - -/// Type meant to register all the lint rules -#[derive(Debug)] -struct LintVisitor<'a, 'b> { - pub(crate) enabled_rules: FxHashSet<RuleFilter<'a>>, - pub(crate) disabled_rules: FxHashSet<RuleFilter<'a>>, - only: &'b [RuleSelector], - skip: &'b [RuleSelector], - settings: &'b Settings, -} - -impl<'a, 'b> LintVisitor<'a, 'b> { - pub(crate) fn new( - only: &'b [RuleSelector], - skip: &'b [RuleSelector], - settings: &'b Settings, - ) -> Self { - Self { - enabled_rules: Default::default(), - disabled_rules: Default::default(), - only, - skip, - settings, - } - } - - fn finish(mut self) -> (FxHashSet<RuleFilter<'a>>, FxHashSet<RuleFilter<'a>>) { - let has_only_filter = !self.only.is_empty(); - if !has_only_filter { - let enabled_rules = self - .settings - .as_linter_rules() - .map(|rules| rules.as_enabled_rules()) - .unwrap_or_default(); - self.enabled_rules.extend(enabled_rules); - } - (self.enabled_rules, self.disabled_rules) - } - - fn push_rule<R>(&mut self) - where - R: Rule + 'static, - { - // Do not report unused suppression comment diagnostics if a single rule is run. 
- for selector in self.only { - let filter = RuleFilter::from(selector); - if filter.match_rule::<R>() { - self.enabled_rules.insert(filter); - } - } - for selector in self.skip { - let filter = RuleFilter::from(selector); - if filter.match_rule::<R>() { - self.disabled_rules.insert(filter); - } - } - } -} - -impl RegistryVisitor for LintVisitor<'_, '_> { - fn record_category<C: GroupCategory>(&mut self) { - if C::CATEGORY == RuleCategory::Lint { - C::record_groups(self) - } - } - - fn record_group<G: RuleGroup>(&mut self) { - for selector in self.only { - if RuleFilter::from(selector).match_group::<G>() { - G::record_rules(self) - } - } - - for selector in self.skip { - if RuleFilter::from(selector).match_group::<G>() { - G::record_rules(self) - } - } - } - - fn record_rule<R>(&mut self) - where - R: Rule + 'static, - { - self.push_rule::<R>() - } -}
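`LintVisitor` resolves rules with a simple precedence: without `only` selectors, the settings-derived defaults are enabled; otherwise `only` narrows the run, and `skip` is collected separately for the caller to subtract. A self-contained sketch of that precedence, using ordinary `HashSet`s and hypothetical rule names in place of `RuleFilter`:

```rust
use std::collections::HashSet;

/// Toy version of LintVisitor::finish(): `only` overrides the defaults,
/// and `skip` is returned separately as the disabled set.
fn resolve<'a>(
    defaults: &[&'a str],
    only: &[&'a str],
    skip: &[&'a str],
) -> (HashSet<&'a str>, HashSet<&'a str>) {
    let enabled = if only.is_empty() {
        defaults.iter().copied().collect()
    } else {
        only.iter().copied().collect()
    };
    (enabled, skip.iter().copied().collect())
}

fn main() {
    let (enabled, disabled) = resolve(&["banDropColumn"], &[], &["banDropTable"]);
    assert!(enabled.contains("banDropColumn"));
    assert!(disabled.contains("banDropTable"));
}
```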
diff --git a/crates/pgt_workspace/src/workspace/server/annotation.rs b/crates/pgt_workspace/src/workspace/server/annotation.rs deleted file mode 100644 index 321dd3ac..00000000 --- a/crates/pgt_workspace/src/workspace/server/annotation.rs +++ /dev/null @@ -1,87 +0,0 @@ -use std::sync::Arc; - -use dashmap::DashMap; -use pgt_lexer::{SyntaxKind, WHITESPACE_TOKENS}; - -use super::statement_identifier::StatementId; - -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct StatementAnnotations { - ends_with_semicolon: bool, -} - -pub struct AnnotationStore { - db: DashMap<StatementId, Option<Arc<StatementAnnotations>>>, -} - -impl AnnotationStore { - pub fn new() -> AnnotationStore { - AnnotationStore { db: DashMap::new() } - } - - #[allow(unused)] - pub fn get_annotations( - &self, - statement: &StatementId, - content: &str, - ) -> Option<Arc<StatementAnnotations>> { - if let Some(existing) = self.db.get(statement).map(|x| x.clone()) { - return existing; - } - - // we swallow the error here because the lexing within the document would have already - // thrown and we won't even get here if that happened. - let annotations = pgt_lexer::lex(content).ok().map(|tokens| { - let ends_with_semicolon = tokens - .iter() - .rev() - .find(|token| !WHITESPACE_TOKENS.contains(&token.kind)) - .is_some_and(|token| token.kind == SyntaxKind::Ascii59); - - Arc::new(StatementAnnotations { - ends_with_semicolon, - }) - }); - - // cache the computed annotations so the next lookup hits the fast path - self.db.insert(statement.clone(), annotations.clone()); - annotations - } - - pub fn clear_statement(&self, id: &StatementId) { - self.db.remove(id); - - if let Some(child_id) = id.get_child_id() { - self.db.remove(&child_id); - } - } -} - -#[cfg(test)] -mod tests { - use crate::workspace::StatementId; - - use super::AnnotationStore; - - #[test] - fn annotates_correctly() { - let store = AnnotationStore::new(); - - let test_cases = [ - ("SELECT * FROM foo", false), - ("SELECT * FROM foo;", true), - ("SELECT * FROM foo ;", true), - ("SELECT * FROM foo ; ", true), - ("SELECT * FROM foo ;\n", true), - ("SELECT * FROM foo\n", false), - ]; - - for (idx, (content, expected)) in test_cases.iter().enumerate() { - let statement_id = StatementId::Root(idx.into()); - - let annotations = store.get_annotations(&statement_id, content); - - assert!(annotations.is_some()); - assert_eq!(annotations.unwrap().ends_with_semicolon, *expected); - } - } -} diff --git a/crates/pgt_workspace/src/workspace/server/async_helper.rs b/crates/pgt_workspace/src/workspace/server/async_helper.rs deleted file mode 100644 index 896a63a4..00000000 --- a/crates/pgt_workspace/src/workspace/server/async_helper.rs +++ /dev/null @@ -1,21 +0,0 @@ -use std::{future::Future, sync::LazyLock}; - -use tokio::runtime::Runtime; - -use crate::WorkspaceError; - -// Global Tokio Runtime -static RUNTIME: LazyLock<Runtime> = - LazyLock::new(|| Runtime::new().expect("Failed to create Tokio runtime")); - -/// Use this function to run async functions in the workspace, which is a sync trait called from an -/// async context. -/// -/// Check out https://greptime.com/blogs/2023-03-09-bridging-async-and-sync-rust for details. -pub fn run_async<F, R>(future: F) -> Result<R, WorkspaceError> -where - F: Future<Output = R> + Send + 'static, - R: Send + 'static, -{ - futures::executor::block_on(async { RUNTIME.spawn(future).await.map_err(|e| e.into()) }) -}
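When the future passed to `run_async` itself returns a `Result`, the caller ends up with two nested layers: the outer `Result` covers spawn/join failures, the inner one is the future's own. That is exactly why `execute_statement` earlier in this diff unwraps with `??`. A hypothetical caller of the helper above, sketched under the assumption that a `PgPool` is at hand:

```rust
use sqlx::{PgPool, postgres::PgQueryResult};

// Hypothetical synchronous caller. The outer Result comes from run_async
// (a failed spawn/join becomes a WorkspaceError), the inner one from sqlx.
fn ping(pool: PgPool) -> Result<Result<PgQueryResult, sqlx::Error>, WorkspaceError> {
    run_async(async move { sqlx::query("select 1").execute(&pool).await })
}
```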
diff --git a/crates/pgt_workspace/src/workspace/server/change.rs b/crates/pgt_workspace/src/workspace/server/change.rs deleted file mode 100644 index c8799922..00000000 --- a/crates/pgt_workspace/src/workspace/server/change.rs +++ /dev/null @@ -1,1763 +0,0 @@ -use pgt_text_size::{TextLen, TextRange, TextSize}; -use std::ops::{Add, Sub}; - -use crate::workspace::{ChangeFileParams, ChangeParams}; - -use super::{Document, document, statement_identifier::StatementId}; - -#[derive(Debug, PartialEq, Eq)] -pub enum StatementChange { - Added(AddedStatement), - Deleted(StatementId), - Modified(ModifiedStatement), -} - -#[derive(Debug, PartialEq, Eq)] -pub struct AddedStatement { - pub stmt: StatementId, - pub text: String, -} - -#[derive(Debug, PartialEq, Eq)] -pub struct ModifiedStatement { - pub old_stmt: StatementId, - pub old_stmt_text: String, - - pub new_stmt: StatementId, - pub new_stmt_text: String, - - pub change_range: TextRange, - pub change_text: String, -} - -impl StatementChange { - #[allow(dead_code)] - pub fn statement(&self) -> &StatementId { - match self { - StatementChange::Added(stmt) => &stmt.stmt, - StatementChange::Deleted(stmt) => stmt, - StatementChange::Modified(changed) => &changed.new_stmt, - } - } -} - -/// All relevant details about a change and its effects on the current state of the document. -struct Affected { - /// Full range of the change, including the range of all statements that intersect with the change - affected_range: TextRange, - /// All indices of affected statement positions - affected_indices: Vec<usize>, - /// The index of the first statement position before the change, if any - prev_index: Option<usize>, - /// The index of the first statement position after the change, if any - next_index: Option<usize>, - /// the full affected range including the prev and next statement - full_affected_range: TextRange, -} - -impl Document { - /// Applies a file change to the document and returns the affected statements - pub fn apply_file_change(&mut self, change: &ChangeFileParams) -> Vec<StatementChange> { - // cleanup all diagnostics with every change because we cannot guarantee that they are still valid - // this is because we know their ranges only by finding slices within the content which is - // very much not guaranteed to result in correct ranges - self.diagnostics.clear(); - - // when we receive more than one change, we need to push back the changes based on the - // total range of the previous ones. This is because the ranges always relate to the original state. - let mut changes = Vec::new(); - - let mut offset: i64 = 0; - - for change in &change.changes { - let adjusted_change = if offset != 0 && change.range.is_some() { - &ChangeParams { - text: change.text.clone(), - range: change.range.map(|range| { - let start = u32::from(range.start()); - let end = u32::from(range.end()); - TextRange::new( - TextSize::from((start as i64 + offset).try_into().unwrap_or(0)), - TextSize::from((end as i64 + offset).try_into().unwrap_or(0)), - ) - }), - } - } else { - change - }; - - changes.extend(self.apply_change(adjusted_change)); - - offset += change.change_size(); - } - - self.version = change.version; - - changes - } - - /// Helper method to drain all positions and return them as deleted statements - fn drain_positions(&mut self) -> Vec<StatementChange> { - self.positions - .drain(..) - .map(|(id, _)| StatementChange::Deleted(id)) - .collect() - } - - /// Applies a change to the document and returns the affected statements - /// - /// Will always assume it's a full change and reparse the whole document - fn apply_full_change(&mut self, change: &ChangeParams) -> Vec<StatementChange> { - let mut changes = Vec::new(); - - changes.extend(self.drain_positions()); - - self.content = change.apply_to_text(&self.content); - - let (ranges, diagnostics) = document::split_with_diagnostics(&self.content, None); - - self.diagnostics = diagnostics; - - // Do not add any statements if there is a fatal error - if self.has_fatal_error() { - return changes; - } - - changes.extend(ranges.into_iter().map(|range| { - let id = self.id_generator.next(); - let text = self.content[range].to_string(); - self.positions.push((id.clone(), range)); - - StatementChange::Added(AddedStatement { stmt: id, text }) - })); - - changes - } - - fn insert_statement(&mut self, range: TextRange) -> StatementId { - let pos = self - .positions - .binary_search_by(|(_, r)| r.start().cmp(&range.start())) - .unwrap_err(); - - let new_id = self.id_generator.next(); - self.positions.insert(pos, (new_id.clone(), range)); - - new_id - }
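`apply_file_change` deals with one subtlety worth spelling out: every range in a multi-change message is expressed against the document as it was before the message, so each applied change shifts the coordinates of all later ones by its `change_size()`. A standalone toy run of that bookkeeping, insertions only and inputs hypothetical:

```rust
fn main() {
    // Two insertions into "ab", both given in original coordinates:
    // "x" at offset 1 and "y" at offset 2.
    let mut text = String::from("ab");
    let mut offset: i64 = 0;
    for (start, insert) in [(1usize, "x"), (2usize, "y")] {
        // shift the original position by the net growth so far
        let at = (start as i64 + offset) as usize;
        text.insert_str(at, insert);
        offset += insert.len() as i64; // change_size() of a pure insertion
    }
    // without the shift, "y" would land between 'x' and 'b'
    assert_eq!(text, "axby");
}
```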
- - /// Returns all relevant details about the change and its effects on the current state of the document. - /// - The affected range is the full range of the change, including the range of all statements that intersect with the change - /// - All indices of affected statement positions - /// - The index of the first statement position before the change, if any - /// - The index of the first statement position after the change, if any - /// - the full affected range including the prev and next statement - fn get_affected( - &self, - change_range: TextRange, - content_size: TextSize, - diff_size: TextSize, - is_addition: bool, - ) -> Affected { - let mut start = change_range.start(); - let mut end = change_range.end().min(content_size); - - let is_trim = change_range.start() >= content_size; - - let mut affected_indices = Vec::new(); - let mut prev_index = None; - let mut next_index = None; - - for (index, (_, pos_range)) in self.positions.iter().enumerate() { - if pos_range.intersect(change_range).is_some() { - affected_indices.push(index); - start = start.min(pos_range.start()); - end = end.max(pos_range.end()); - } else if pos_range.end() <= change_range.start() { - prev_index = Some(index); - } else if pos_range.start() >= change_range.end() && next_index.is_none() { - next_index = Some(index); - break; - } - } - - if affected_indices.is_empty() && prev_index.is_none() { - // if there is no prev_index and no intersection -> use 0 - start = 0.into(); - } - - if affected_indices.is_empty() && next_index.is_none() { - // if there is no next_index and no intersection -> use content_size - end = content_size; - } - - let first_affected_stmt_start = prev_index - .map(|i| self.positions[i].1.start()) - .unwrap_or(start); - - let mut last_affected_stmt_end = next_index - .map(|i| self.positions[i].1.end()) - .unwrap_or_else(|| end); - - if is_addition { - end = end.add(diff_size); - last_affected_stmt_end = last_affected_stmt_end.add(diff_size); - } else if !is_trim { - end = end.sub(diff_size); - last_affected_stmt_end = last_affected_stmt_end.sub(diff_size) - }; - - Affected { - affected_range: { - let end = end.min(content_size); - TextRange::new(start.min(end), end) - }, - affected_indices, - prev_index, - next_index, - full_affected_range: TextRange::new( - first_affected_stmt_start, - last_affected_stmt_end - .min(content_size) - .max(first_affected_stmt_start), - ), - } - } - - fn move_ranges(&mut self, offset: TextSize, diff_size: TextSize, is_addition: bool) { - self.positions - .iter_mut() - .skip_while(|(_, r)| offset > r.start()) - .for_each(|(_, range)| { - let new_range = if is_addition { - range.add(diff_size) - } else { - range.sub(diff_size) - }; - - *range = new_range; - }); - } - - /// Applies a single change to the document and returns the affected statements - fn apply_change(&mut self, change: &ChangeParams) -> Vec<StatementChange> { - // if range is none, we have a full change - if change.range.is_none() { - return self.apply_full_change(change); - } - - // I spent a relatively large amount of time thinking about how to handle range changes - // properly. There are quite a few edge cases to consider. I eventually skipped most of - // them, because the complexity is not worth the return for now. We might want to revisit - // this later though. 
- - let mut changed: Vec<StatementChange> = Vec::with_capacity(self.positions.len()); - - let change_range = change.range.unwrap(); - let previous_content = self.content.clone(); - let new_content = change.apply_to_text(&self.content); - - // we first need to determine the affected range and all affected statements, as well as - // the index of the prev and the next statement, if any. The full affected range is the - // affected range expanded to the start of the previous statement and the end of the next - let Affected { - affected_range, - affected_indices, - prev_index, - next_index, - full_affected_range, - } = self.get_affected( - change_range, - new_content.text_len(), - change.diff_size(), - change.is_addition(), - ); - - // if within a statement, we can modify it if the change results in also a single statement - if affected_indices.len() == 1 { - let changed_content = get_affected(&new_content, affected_range); - - let (new_ranges, diags) = - document::split_with_diagnostics(changed_content, Some(affected_range.start())); - - self.diagnostics = diags; - - if self.has_fatal_error() { - // cleanup all positions if there is a fatal error - changed.extend(self.drain_positions()); - // still process text change - self.content = new_content; - return changed; - } - - if new_ranges.len() == 1 { - let affected_idx = affected_indices[0]; - let new_range = new_ranges[0].add(affected_range.start()); - let (old_id, old_range) = self.positions[affected_idx].clone(); - - // move all statements after the affected range - self.move_ranges(old_range.end(), change.diff_size(), change.is_addition()); - - let new_id = self.id_generator.next(); - self.positions[affected_idx] = (new_id.clone(), new_range); - - changed.push(StatementChange::Modified(ModifiedStatement { - old_stmt: old_id.clone(), - old_stmt_text: previous_content[old_range].to_string(), - - new_stmt: new_id, - new_stmt_text: changed_content[new_ranges[0]].to_string(), - // change must be relative to the statement - change_text: change.text.clone(), - // make sure we always have a valid range >= 0 - change_range: change_range - .checked_sub(old_range.start()) - .unwrap_or(change_range.sub(change_range.start())), - })); - - self.content = new_content; - - return changed; - } - } - - // in any other case, parse the full affected range - let changed_content = get_affected(&new_content, full_affected_range); - - let (new_ranges, diags) = - document::split_with_diagnostics(changed_content, Some(full_affected_range.start())); - - self.diagnostics = diags; - - if self.has_fatal_error() { - // cleanup all positions if there is a fatal error - changed.extend(self.drain_positions()); - // still process text change - self.content = new_content; - return changed; - } - - // delete and add new ones - if let Some(next_index) = next_index { - changed.push(StatementChange::Deleted( - self.positions[next_index].0.clone(), - )); - self.positions.remove(next_index); - } - for idx in affected_indices.iter().rev() { - changed.push(StatementChange::Deleted(self.positions[*idx].0.clone())); - self.positions.remove(*idx); - } - if let Some(prev_index) = prev_index { - changed.push(StatementChange::Deleted( - self.positions[prev_index].0.clone(), - )); - self.positions.remove(prev_index); - } - - new_ranges.iter().for_each(|range| { - let actual_range = range.add(full_affected_range.start()); - let new_id = self.insert_statement(actual_range); - changed.push(StatementChange::Added(AddedStatement { - stmt: new_id, - text: new_content[actual_range].to_string(), - })); - });
- - // move all statements after the affected range - self.move_ranges( - full_affected_range.end(), - change.diff_size(), - change.is_addition(), - ); - - self.content = new_content; - - changed - } -} - -impl ChangeParams { - /// For lack of a better name, this returns the change in size of the text compared to the range - pub fn change_size(&self) -> i64 { - match self.range { - Some(range) => { - let range_length: usize = range.len().into(); - let text_length = self.text.chars().count(); - text_length as i64 - range_length as i64 - } - None => i64::try_from(self.text.chars().count()).unwrap(), - } - } - - pub fn diff_size(&self) -> TextSize { - match self.range { - Some(range) => { - let range_length: usize = range.len().into(); - let text_length = self.text.chars().count(); - let diff = (text_length as i64 - range_length as i64).abs(); - TextSize::from(u32::try_from(diff).unwrap()) - } - None => TextSize::from(u32::try_from(self.text.chars().count()).unwrap()), - } - } - - pub fn is_addition(&self) -> bool { - self.range.is_some() && self.text.len() > self.range.unwrap().len().into() - } - - pub fn is_deletion(&self) -> bool { - self.range.is_some() && self.text.len() < self.range.unwrap().len().into() - } - - pub fn apply_to_text(&self, text: &str) -> String { - if self.range.is_none() { - return self.text.clone(); - } - - let range = self.range.unwrap(); - let start = usize::from(range.start()); - let end = usize::from(range.end()); - - let mut new_text = String::new(); - new_text.push_str(&text[..start]); - new_text.push_str(&self.text); - if end < text.len() { - new_text.push_str(&text[end..]); - } - - new_text - } -} - -fn get_affected(content: &str, range: TextRange) -> &str { - let start_byte = content - .char_indices() - .nth(usize::from(range.start())) - .map(|(i, _)| i) - .unwrap_or(content.len()); - - let end_byte = content - .char_indices() - .nth(usize::from(range.end())) - .map(|(i, _)| i) - .unwrap_or(content.len()); - - &content[start_byte..end_byte] -} - -#[cfg(test)] -mod tests { - - use super::*; - use pgt_diagnostics::Diagnostic; - use pgt_text_size::TextRange; - - use crate::workspace::{ChangeFileParams, ChangeParams}; - - use pgt_fs::PgTPath; - - impl Document { - pub fn get_text(&self, idx: usize) -> String { - self.content[self.positions[idx].1.start().into()..self.positions[idx].1.end().into()] - .to_string() - } - } - - fn assert_document_integrity(d: &Document) { - let ranges = pgt_statement_splitter::split(&d.content) - .expect("Unexpected scan error") - .ranges; - - assert!( - ranges.len() == d.positions.len(), - "should have the correct amount of positions" - ); - - assert!( - ranges - .iter() - .all(|r| { d.positions.iter().any(|(_, stmt_range)| stmt_range == r) }), - "all ranges should be in positions" - ); - } - - #[test] - fn open_doc_with_scan_error() { - let input = "select id from users;\n\n\n\nselect 1443ddwwd33djwdkjw13331333333333;"; - - let d = Document::new(input.to_string(), 0); - - assert_eq!(d.positions.len(), 0); - assert!(d.has_fatal_error()); - } - - #[test] - fn comments_at_begin() { - let path = PgTPath::new("test.sql"); - let input = "\nselect id from users;\n"; - - let mut d = Document::new(input.to_string(), 0); - - let change1 = ChangeFileParams { - path: path.clone(), - version: 1, - changes: vec![ChangeParams { - text: "-".to_string(), - range: Some(TextRange::new(0.into(), 0.into())), - }], - }; - - let _changed1 = d.apply_file_change(&change1); - - assert_eq!(d.content, "-\nselect id from users;\n"); - assert_eq!(d.positions.len(), 
2); - - let change2 = ChangeFileParams { - path: path.clone(), - version: 2, - changes: vec![ChangeParams { - text: "-".to_string(), - range: Some(TextRange::new(1.into(), 1.into())), - }], - }; - - let _changed2 = d.apply_file_change(&change2); - - assert_eq!(d.content, "--\nselect id from users;\n"); - assert_eq!(d.positions.len(), 1); - - let change3 = ChangeFileParams { - path: path.clone(), - version: 3, - changes: vec![ChangeParams { - text: " ".to_string(), - range: Some(TextRange::new(2.into(), 2.into())), - }], - }; - - let _changed3 = d.apply_file_change(&change3); - - assert_eq!(d.content, "-- \nselect id from users;\n"); - assert_eq!(d.positions.len(), 1); - - let change4 = ChangeFileParams { - path: path.clone(), - version: 3, - changes: vec![ChangeParams { - text: "t".to_string(), - range: Some(TextRange::new(3.into(), 3.into())), - }], - }; - - let _changed4 = d.apply_file_change(&change4); - - assert_eq!(d.content, "-- t\nselect id from users;\n"); - assert_eq!(d.positions.len(), 1); - - assert_document_integrity(&d); - } - - #[test] - fn typing_comments() { - let path = PgTPath::new("test.sql"); - let input = "select id from users;\n"; - - let mut d = Document::new(input.to_string(), 0); - - let change1 = ChangeFileParams { - path: path.clone(), - version: 1, - changes: vec![ChangeParams { - text: "-".to_string(), - range: Some(TextRange::new(22.into(), 23.into())), - }], - }; - - let _changed1 = d.apply_file_change(&change1); - - assert_eq!(d.content, "select id from users;\n-"); - assert_eq!(d.positions.len(), 2); - - let change2 = ChangeFileParams { - path: path.clone(), - version: 2, - changes: vec![ChangeParams { - text: "-".to_string(), - range: Some(TextRange::new(23.into(), 24.into())), - }], - }; - - let _changed2 = d.apply_file_change(&change2); - - assert_eq!(d.content, "select id from users;\n--"); - assert_eq!(d.positions.len(), 1); - - let change3 = ChangeFileParams { - path: path.clone(), - version: 3, - changes: vec![ChangeParams { - text: " ".to_string(), - range: Some(TextRange::new(24.into(), 25.into())), - }], - }; - - let _changed3 = d.apply_file_change(&change3); - - assert_eq!(d.content, "select id from users;\n-- "); - assert_eq!(d.positions.len(), 1); - - let change4 = ChangeFileParams { - path: path.clone(), - version: 3, - changes: vec![ChangeParams { - text: "t".to_string(), - range: Some(TextRange::new(25.into(), 26.into())), - }], - }; - - let _changed4 = d.apply_file_change(&change4); - - assert_eq!(d.content, "select id from users;\n-- t"); - assert_eq!(d.positions.len(), 1); - - assert_document_integrity(&d); - } - - #[test] - fn change_into_scan_error_within_statement() { - let path = PgTPath::new("test.sql"); - let input = "select id from users;\n\n\n\nselect 1;"; - - let mut d = Document::new(input.to_string(), 0); - - assert_eq!(d.positions.len(), 2); - assert!(!d.has_fatal_error()); - - let change = ChangeFileParams { - path: path.clone(), - version: 1, - changes: vec![ChangeParams { - text: "d".to_string(), - range: Some(TextRange::new(33.into(), 33.into())), - }], - }; - - let changed = d.apply_file_change(&change); - - assert_eq!(d.content, "select id from users;\n\n\n\nselect 1d;"); - assert!( - changed - .iter() - .all(|c| matches!(c, StatementChange::Deleted(_))), - "should delete all statements" - ); - assert!(d.positions.is_empty(), "should clear all positions"); - assert_eq!(d.diagnostics.len(), 1, "should return a scan error"); - assert_eq!( - d.diagnostics[0].location().span, - Some(TextRange::new(32.into(), 34.into())), - 
"should have correct span" - ); - assert!(d.has_fatal_error()); - } - - #[test] - fn change_into_scan_error_across_statements() { - let path = PgTPath::new("test.sql"); - let input = "select id from users;\n\n\n\nselect 1;"; - - let mut d = Document::new(input.to_string(), 0); - - assert_eq!(d.positions.len(), 2); - assert!(!d.has_fatal_error()); - - let change = ChangeFileParams { - path: path.clone(), - version: 1, - changes: vec![ChangeParams { - text: "1d".to_string(), - range: Some(TextRange::new(7.into(), 33.into())), - }], - }; - - let changed = d.apply_file_change(&change); - - assert_eq!(d.content, "select 1d;"); - assert!( - changed - .iter() - .all(|c| matches!(c, StatementChange::Deleted(_))), - "should delete all statements" - ); - assert!(d.positions.is_empty(), "should clear all positions"); - assert_eq!(d.diagnostics.len(), 1, "should return a scan error"); - assert_eq!( - d.diagnostics[0].location().span, - Some(TextRange::new(7.into(), 9.into())), - "should have correct span" - ); - assert!(d.has_fatal_error()); - } - - #[test] - fn change_from_invalid_to_invalid() { - let path = PgTPath::new("test.sql"); - let input = "select 1d;"; - - let mut d = Document::new(input.to_string(), 0); - - assert_eq!(d.positions.len(), 0); - assert!(d.has_fatal_error()); - assert_eq!(d.diagnostics.len(), 1); - - let change = ChangeFileParams { - path: path.clone(), - version: 1, - changes: vec![ChangeParams { - text: "2e".to_string(), - range: Some(TextRange::new(7.into(), 9.into())), - }], - }; - - let changed = d.apply_file_change(&change); - - assert_eq!(d.content, "select 2e;"); - assert!(changed.is_empty(), "should not emit any changes"); - assert!(d.positions.is_empty(), "should keep positions empty"); - assert_eq!(d.diagnostics.len(), 1, "should still have a scan error"); - assert_eq!( - d.diagnostics[0].location().span, - Some(TextRange::new(7.into(), 9.into())), - "should have updated span" - ); - assert!(d.has_fatal_error()); - } - - #[test] - fn change_from_invalid_to_valid() { - let path = PgTPath::new("test.sql"); - let input = "select 1d;"; - - let mut d = Document::new(input.to_string(), 0); - - assert_eq!(d.positions.len(), 0); - assert!(d.has_fatal_error()); - assert_eq!(d.diagnostics.len(), 1); - - let change = ChangeFileParams { - path: path.clone(), - version: 1, - changes: vec![ChangeParams { - text: "1".to_string(), - range: Some(TextRange::new(7.into(), 9.into())), - }], - }; - - let changed = d.apply_file_change(&change); - - assert_eq!(d.content, "select 1;"); - assert_eq!(changed.len(), 1, "should emit one change"); - assert!(matches!( - changed[0], - StatementChange::Added(AddedStatement { .. 
}) - )); - assert_eq!(d.positions.len(), 1, "should have one position"); - assert!(d.diagnostics.is_empty(), "should have no diagnostics"); - assert!(!d.has_fatal_error()); - } - - #[test] - fn within_statements() { - let path = PgTPath::new("test.sql"); - let input = "select id from users;\n\n\n\nselect * from contacts;"; - - let mut d = Document::new(input.to_string(), 0); - - assert_eq!(d.positions.len(), 2); - - let change = ChangeFileParams { - path: path.clone(), - version: 1, - changes: vec![ChangeParams { - text: "select 1;".to_string(), - range: Some(TextRange::new(23.into(), 23.into())), - }], - }; - - let changed = d.apply_file_change(&change); - - assert_eq!(changed.len(), 5); - assert_eq!( - changed - .iter() - .filter(|c| matches!(c, StatementChange::Deleted(_))) - .count(), - 2 - ); - assert_eq!( - changed - .iter() - .filter(|c| matches!(c, StatementChange::Added(_))) - .count(), - 3 - ); - - assert_document_integrity(&d); - } - - #[test] - fn within_statements_2() { - let path = PgTPath::new("test.sql"); - let input = "alter table deal alter column value drop not null;\n"; - let mut d = Document::new(input.to_string(), 0); - - assert_eq!(d.positions.len(), 1); - - let change1 = ChangeFileParams { - path: path.clone(), - version: 1, - changes: vec![ChangeParams { - text: " ".to_string(), - range: Some(TextRange::new(17.into(), 17.into())), - }], - }; - - let changed1 = d.apply_file_change(&change1); - assert_eq!(changed1.len(), 1); - assert_eq!( - d.content, - "alter table deal alter column value drop not null;\n" - ); - assert_document_integrity(&d); - - let change2 = ChangeFileParams { - path: path.clone(), - version: 2, - changes: vec![ChangeParams { - text: " ".to_string(), - range: Some(TextRange::new(18.into(), 18.into())), - }], - }; - - let changed2 = d.apply_file_change(&change2); - assert_eq!(changed2.len(), 1); - assert_eq!( - d.content, - "alter table deal alter column value drop not null;\n" - ); - assert_document_integrity(&d); - - let change3 = ChangeFileParams { - path: path.clone(), - version: 3, - changes: vec![ChangeParams { - text: " ".to_string(), - range: Some(TextRange::new(19.into(), 19.into())), - }], - }; - - let changed3 = d.apply_file_change(&change3); - assert_eq!(changed3.len(), 1); - assert_eq!( - d.content, - "alter table deal alter column value drop not null;\n" - ); - assert_document_integrity(&d); - - let change4 = ChangeFileParams { - path: path.clone(), - version: 4, - changes: vec![ChangeParams { - text: " ".to_string(), - range: Some(TextRange::new(20.into(), 20.into())), - }], - }; - - let changed4 = d.apply_file_change(&change4); - assert_eq!(changed4.len(), 1); - assert_eq!( - d.content, - "alter table deal alter column value drop not null;\n" - ); - assert_document_integrity(&d); - } - - #[test] - fn julians_sample() { - let path = PgTPath::new("test.sql"); - let input = "select\n *\nfrom\n test;\n\nselect\n\nalter table test\n\ndrop column id;"; - let mut d = Document::new(input.to_string(), 0); - - assert_eq!(d.positions.len(), 4); - - let change1 = ChangeFileParams { - path: path.clone(), - version: 1, - changes: vec![ChangeParams { - text: " ".to_string(), - range: Some(TextRange::new(31.into(), 31.into())), - }], - }; - - let changed1 = d.apply_file_change(&change1); - assert_eq!(changed1.len(), 1); - assert_eq!( - d.content, - "select\n *\nfrom\n test;\n\nselect \n\nalter table test\n\ndrop column id;" - ); - assert_document_integrity(&d); - - // problem: this creates a new statement - let change2 = ChangeFileParams { - path: 
path.clone(), - version: 2, - changes: vec![ChangeParams { - text: ";".to_string(), - range: Some(TextRange::new(32.into(), 32.into())), - }], - }; - - let changed2 = d.apply_file_change(&change2); - assert_eq!(changed2.len(), 4); - assert_eq!( - changed2 - .iter() - .filter(|c| matches!(c, StatementChange::Deleted(_))) - .count(), - 2 - ); - assert_eq!( - changed2 - .iter() - .filter(|c| matches!(c, StatementChange::Added(_))) - .count(), - 2 - ); - assert_document_integrity(&d); - - let change3 = ChangeFileParams { - path: path.clone(), - version: 3, - changes: vec![ChangeParams { - text: "".to_string(), - range: Some(TextRange::new(32.into(), 33.into())), - }], - }; - - let changed3 = d.apply_file_change(&change3); - assert_eq!(changed3.len(), 1); - assert!(matches!(&changed3[0], StatementChange::Modified(_))); - assert_eq!( - d.content, - "select\n *\nfrom\n test;\n\nselect \n\nalter table test\n\ndrop column id;" - ); - match &changed3[0] { - StatementChange::Modified(changed) => { - assert_eq!(changed.old_stmt_text, "select ;"); - assert_eq!(changed.new_stmt_text, "select"); - assert_eq!(changed.change_text, ""); - assert_eq!(changed.change_range, TextRange::new(7.into(), 8.into())); - } - _ => panic!("expected modified statement"), - } - assert_document_integrity(&d); - } - - #[test] - fn across_statements() { - let path = PgTPath::new("test.sql"); - let input = "select id from users;\nselect * from contacts;"; - - let mut d = Document::new(input.to_string(), 0); - - assert_eq!(d.positions.len(), 2); - - let change = ChangeFileParams { - path: path.clone(), - version: 1, - changes: vec![ChangeParams { - text: ",test from users;\nselect 1;".to_string(), - range: Some(TextRange::new(9.into(), 45.into())), - }], - }; - - let changed = d.apply_file_change(&change); - - assert_eq!(changed.len(), 4); - assert!(matches!(changed[0], StatementChange::Deleted(_))); - assert_eq!(changed[0].statement().raw(), 1); - assert!(matches!( - changed[1], - StatementChange::Deleted(StatementId::Root(_)) - )); - assert_eq!(changed[1].statement().raw(), 0); - assert!( - matches!(&changed[2], StatementChange::Added(AddedStatement { stmt: _, text }) if text == "select id,test from users;") - ); - assert!( - matches!(&changed[3], StatementChange::Added(AddedStatement { stmt: _, text }) if text == "select 1;") - ); - - assert_document_integrity(&d); - } - - #[test] - fn append_whitespace_to_statement() { - let path = PgTPath::new("test.sql"); - let input = "select id"; - - let mut d = Document::new(input.to_string(), 0); - - assert_eq!(d.positions.len(), 1); - - let change = ChangeFileParams { - path: path.clone(), - version: 1, - changes: vec![ChangeParams { - text: " ".to_string(), - range: Some(TextRange::new(9.into(), 10.into())), - }], - }; - - let changed = d.apply_file_change(&change); - - assert_eq!(changed.len(), 1); - - assert_document_integrity(&d); - } - - #[test] - fn apply_changes() { - let path = PgTPath::new("test.sql"); - let input = "select id from users;\nselect * from contacts;"; - - let mut d = Document::new(input.to_string(), 0); - - assert_eq!(d.positions.len(), 2); - - let change = ChangeFileParams { - path: path.clone(), - version: 1, - changes: vec![ChangeParams { - text: ",test from users\nselect 1;".to_string(), - range: Some(TextRange::new(9.into(), 45.into())), - }], - }; - - let changed = d.apply_file_change(&change); - - assert_eq!(changed.len(), 4); - - assert!(matches!( - changed[0], - StatementChange::Deleted(StatementId::Root(_)) - )); - 
assert_eq!(changed[0].statement().raw(), 1); - assert!(matches!( - changed[1], - StatementChange::Deleted(StatementId::Root(_)) - )); - assert_eq!(changed[1].statement().raw(), 0); - assert_eq!( - changed[2], - StatementChange::Added(AddedStatement { - stmt: StatementId::Root(2.into()), - text: "select id,test from users".to_string() - }) - ); - assert_eq!( - changed[3], - StatementChange::Added(AddedStatement { - stmt: StatementId::Root(3.into()), - text: "select 1;".to_string() - }) - ); - - assert_eq!("select id,test from users\nselect 1;", d.content); - - assert_document_integrity(&d); - } - - #[test] - fn removing_newline_at_the_beginning() { - let path = PgTPath::new("test.sql"); - let input = "\n"; - - let mut d = Document::new(input.to_string(), 1); - - assert_eq!(d.positions.len(), 0); - - let change = ChangeFileParams { - path: path.clone(), - version: 2, - changes: vec![ChangeParams { - text: "\nbegin;\n\nselect 1\n\nrollback;\n".to_string(), - range: Some(TextRange::new(0.into(), 1.into())), - }], - }; - - let changes = d.apply_file_change(&change); - - assert_eq!(changes.len(), 3); - - assert_document_integrity(&d); - - let change2 = ChangeFileParams { - path: path.clone(), - version: 3, - changes: vec![ChangeParams { - text: "".to_string(), - range: Some(TextRange::new(0.into(), 1.into())), - }], - }; - - let changes2 = d.apply_file_change(&change2); - - assert_eq!(changes2.len(), 1); - - assert_document_integrity(&d); - } - - #[test] - fn apply_changes_at_end_of_statement() { - let path = PgTPath::new("test.sql"); - let input = "select id from\nselect * from contacts;"; - - let mut d = Document::new(input.to_string(), 1); - - assert_eq!(d.positions.len(), 2); - - let change = ChangeFileParams { - path: path.clone(), - version: 2, - changes: vec![ChangeParams { - text: " contacts;".to_string(), - range: Some(TextRange::new(14.into(), 14.into())), - }], - }; - - let changes = d.apply_file_change(&change); - - assert_eq!(changes.len(), 1); - - assert!(matches!(changes[0], StatementChange::Modified(_))); - - assert_eq!( - "select id from contacts;\nselect * from contacts;", - d.content - ); - - assert_document_integrity(&d); - } - - #[test] - fn apply_changes_replacement() { - let path = PgTPath::new("test.sql"); - - let mut doc = Document::new("".to_string(), 0); - - let change = ChangeFileParams { - path: path.clone(), - version: 1, - changes: vec![ChangeParams { - text: "select 1;\nselect 2;".to_string(), - range: None, - }], - }; - - doc.apply_file_change(&change); - - assert_eq!(doc.get_text(0), "select 1;".to_string()); - assert_eq!(doc.get_text(1), "select 2;".to_string()); - assert_eq!( - doc.positions[0].1, - TextRange::new(TextSize::new(0), TextSize::new(9)) - ); - assert_eq!( - doc.positions[1].1, - TextRange::new(TextSize::new(10), TextSize::new(19)) - ); - - let change_2 = ChangeFileParams { - path: path.clone(), - version: 2, - changes: vec![ChangeParams { - text: "".to_string(), - range: Some(TextRange::new(7.into(), 8.into())), - }], - }; - - doc.apply_file_change(&change_2); - - assert_eq!(doc.content, "select ;\nselect 2;"); - assert_eq!(doc.positions.len(), 2); - assert_eq!(doc.get_text(0), "select ;".to_string()); - assert_eq!(doc.get_text(1), "select 2;".to_string()); - assert_eq!( - doc.positions[0].1, - TextRange::new(TextSize::new(0), TextSize::new(8)) - ); - assert_eq!( - doc.positions[1].1, - TextRange::new(TextSize::new(9), TextSize::new(18)) - ); - - let change_3 = ChangeFileParams { - path: path.clone(), - version: 3, - changes: vec![ChangeParams { - 
text: "!".to_string(), - range: Some(TextRange::new(7.into(), 7.into())), - }], - }; - - doc.apply_file_change(&change_3); - - assert_eq!(doc.content, "select !;\nselect 2;"); - assert_eq!(doc.positions.len(), 2); - assert_eq!( - doc.positions[0].1, - TextRange::new(TextSize::new(0), TextSize::new(9)) - ); - assert_eq!( - doc.positions[1].1, - TextRange::new(TextSize::new(10), TextSize::new(19)) - ); - - let change_4 = ChangeFileParams { - path: path.clone(), - version: 4, - changes: vec![ChangeParams { - text: "".to_string(), - range: Some(TextRange::new(7.into(), 8.into())), - }], - }; - - doc.apply_file_change(&change_4); - - assert_eq!(doc.content, "select ;\nselect 2;"); - assert_eq!(doc.positions.len(), 2); - assert_eq!( - doc.positions[0].1, - TextRange::new(TextSize::new(0), TextSize::new(8)) - ); - assert_eq!( - doc.positions[1].1, - TextRange::new(TextSize::new(9), TextSize::new(18)) - ); - - let change_5 = ChangeFileParams { - path: path.clone(), - version: 5, - changes: vec![ChangeParams { - text: "1".to_string(), - range: Some(TextRange::new(7.into(), 7.into())), - }], - }; - - doc.apply_file_change(&change_5); - - assert_eq!(doc.content, "select 1;\nselect 2;"); - assert_eq!(doc.positions.len(), 2); - assert_eq!( - doc.positions[0].1, - TextRange::new(TextSize::new(0), TextSize::new(9)) - ); - assert_eq!( - doc.positions[1].1, - TextRange::new(TextSize::new(10), TextSize::new(19)) - ); - - assert_document_integrity(&doc); - } - - #[test] - fn comment_at_begin() { - let path = PgTPath::new("test.sql"); - - let mut doc = Document::new( - "-- Add new schema named \"private\"\nCREATE SCHEMA \"private\";".to_string(), - 0, - ); - - let change = ChangeFileParams { - path: path.clone(), - version: 1, - changes: vec![ChangeParams { - text: "".to_string(), - range: Some(TextRange::new(0.into(), 1.into())), - }], - }; - - let changed = doc.apply_file_change(&change); - - assert_eq!( - doc.content, - "- Add new schema named \"private\"\nCREATE SCHEMA \"private\";" - ); - assert_eq!(changed.len(), 3); - assert!(matches!(&changed[0], StatementChange::Deleted(_))); - assert!(matches!( - changed[1], - StatementChange::Added(AddedStatement { .. }) - )); - assert!(matches!( - changed[2], - StatementChange::Added(AddedStatement { .. }) - )); - - let change_2 = ChangeFileParams { - path: path.clone(), - version: 2, - changes: vec![ChangeParams { - text: "-".to_string(), - range: Some(TextRange::new(0.into(), 0.into())), - }], - }; - - let changed_2 = doc.apply_file_change(&change_2); - - assert_eq!( - doc.content, - "-- Add new schema named \"private\"\nCREATE SCHEMA \"private\";" - ); - - assert_eq!(changed_2.len(), 3); - assert!(matches!( - changed_2[0], - StatementChange::Deleted(StatementId::Root(_)) - )); - assert!(matches!( - changed_2[1], - StatementChange::Deleted(StatementId::Root(_)) - )); - assert!(matches!( - changed_2[2], - StatementChange::Added(AddedStatement { .. 
}) - )); - - assert_document_integrity(&doc); - } - - #[test] - fn apply_changes_within_statement() { - let input = "select id from users;\nselect * from contacts;"; - let path = PgTPath::new("test.sql"); - - let mut doc = Document::new(input.to_string(), 0); - - assert_eq!(doc.positions.len(), 2); - - let stmt_1_range = doc.positions[0].clone(); - let stmt_2_range = doc.positions[1].clone(); - - let update_text = ",test"; - - let update_range = TextRange::new(9.into(), 10.into()); - - let update_text_len = u32::try_from(update_text.chars().count()).unwrap(); - let update_addition = update_text_len - u32::from(update_range.len()); - - let change = ChangeFileParams { - path: path.clone(), - version: 1, - changes: vec![ChangeParams { - text: update_text.to_string(), - range: Some(update_range), - }], - }; - - doc.apply_file_change(&change); - - assert_eq!( - "select id,test from users;\nselect * from contacts;", - doc.content - ); - assert_eq!(doc.positions.len(), 2); - assert_eq!(doc.positions[0].1.start(), stmt_1_range.1.start()); - assert_eq!( - u32::from(doc.positions[0].1.end()), - u32::from(stmt_1_range.1.end()) + update_addition - ); - assert_eq!( - u32::from(doc.positions[1].1.start()), - u32::from(stmt_2_range.1.start()) + update_addition - ); - assert_eq!( - u32::from(doc.positions[1].1.end()), - u32::from(stmt_2_range.1.end()) + update_addition - ); - - assert_document_integrity(&doc); - } - - #[test] - fn remove_outside_of_content() { - let path = PgTPath::new("test.sql"); - let input = "select id from contacts;\n\nselect * from contacts;"; - - let mut d = Document::new(input.to_string(), 1); - - assert_eq!(d.positions.len(), 2); - - let change1 = ChangeFileParams { - path: path.clone(), - version: 2, - changes: vec![ChangeParams { - text: "\n".to_string(), - range: Some(TextRange::new(49.into(), 49.into())), - }], - }; - - d.apply_file_change(&change1); - - assert_eq!( - d.content, - "select id from contacts;\n\nselect * from contacts;\n" - ); - - let change2 = ChangeFileParams { - path: path.clone(), - version: 3, - changes: vec![ChangeParams { - text: "\n".to_string(), - range: Some(TextRange::new(50.into(), 50.into())), - }], - }; - - d.apply_file_change(&change2); - - assert_eq!( - d.content, - "select id from contacts;\n\nselect * from contacts;\n\n" - ); - - let change5 = ChangeFileParams { - path: path.clone(), - version: 6, - changes: vec![ChangeParams { - text: "".to_string(), - range: Some(TextRange::new(51.into(), 52.into())), - }], - }; - - let changes = d.apply_file_change(&change5); - - assert!(matches!( - changes[0], - StatementChange::Deleted(StatementId::Root(_)) - )); - - assert!(matches!( - changes[1], - StatementChange::Added(AddedStatement { .. }) - )); - - assert_eq!(changes.len(), 2); - - assert_eq!( - d.content, - "select id from contacts;\n\nselect * from contacts;\n\n" - ); - - assert_document_integrity(&d); - } - - #[test] - fn remove_trailing_whitespace() { - let path = PgTPath::new("test.sql"); - - let mut doc = Document::new("select * from ".to_string(), 0); - - let change = ChangeFileParams { - path: path.clone(), - version: 1, - changes: vec![ChangeParams { - text: "".to_string(), - range: Some(TextRange::new(13.into(), 14.into())), - }], - }; - - let changed = doc.apply_file_change(&change); - - assert_eq!(doc.content, "select * from"); - - assert_eq!(changed.len(), 1); - - match &changed[0] { - StatementChange::Modified(stmt) => { - let ModifiedStatement { - change_range, - change_text, - new_stmt_text, - old_stmt_text, - .. 
- } = stmt; - - assert_eq!(change_range, &TextRange::new(13.into(), 14.into())); - assert_eq!(change_text, ""); - assert_eq!(new_stmt_text, "select * from"); - - // the whitespace was not considered - // to be a part of the statement - assert_eq!(old_stmt_text, "select * from"); - } - - _ => unreachable!("Did not yield a modified statement."), - } - - assert_document_integrity(&doc); - } - - #[test] - fn remove_trailing_whitespace_and_last_char() { - let path = PgTPath::new("test.sql"); - - let mut doc = Document::new("select * from ".to_string(), 0); - - let change = ChangeFileParams { - path: path.clone(), - version: 1, - changes: vec![ChangeParams { - text: "".to_string(), - range: Some(TextRange::new(12.into(), 14.into())), - }], - }; - - let changed = doc.apply_file_change(&change); - - assert_eq!(doc.content, "select * fro"); - - assert_eq!(changed.len(), 1); - - match &changed[0] { - StatementChange::Modified(stmt) => { - let ModifiedStatement { - change_range, - change_text, - new_stmt_text, - old_stmt_text, - .. - } = stmt; - - assert_eq!(change_range, &TextRange::new(12.into(), 14.into())); - assert_eq!(change_text, ""); - assert_eq!(new_stmt_text, "select * fro"); - - // the whitespace was not considered - // to be a part of the statement - assert_eq!(old_stmt_text, "select * from"); - } - - _ => unreachable!("Did not yield a modified statement."), - } - - assert_document_integrity(&doc); - } - - #[test] - fn multiple_deletions_at_once() { - let path = PgTPath::new("test.sql"); - - let mut doc = Document::new("\n\n\n\nALTER TABLE ONLY \"public\".\"sendout\"\n ADD CONSTRAINT \"sendout_organisation_id_fkey\" FOREIGN -KEY (\"organisation_id\") REFERENCES \"public\".\"organisation\"(\"id\") ON UPDATE RESTRICT ON DELETE CASCADE;\n".to_string(), 0); - - let change = ChangeFileParams { - path: path.clone(), - version: 1, - changes: vec![ - ChangeParams { - range: Some(TextRange::new(31.into(), 38.into())), - text: "te".to_string(), - }, - ChangeParams { - range: Some(TextRange::new(60.into(), 67.into())), - text: "te".to_string(), - }, - ], - }; - - let changed = doc.apply_file_change(&change); - - assert_eq!(doc.content, "\n\n\n\nALTER TABLE ONLY \"public\".\"te\"\n ADD CONSTRAINT \"te_organisation_id_fkey\" FOREIGN -KEY (\"organisation_id\") REFERENCES \"public\".\"organisation\"(\"id\") ON UPDATE RESTRICT ON DELETE CASCADE;\n"); - - assert_eq!(changed.len(), 2); - - assert_document_integrity(&doc); - } - - #[test] - fn multiple_additions_at_once() { - let path = PgTPath::new("test.sql"); - - let mut doc = Document::new("\n\n\n\nALTER TABLE ONLY \"public\".\"sendout\"\n ADD CONSTRAINT \"sendout_organisation_id_fkey\" FOREIGN -KEY (\"organisation_id\") REFERENCES \"public\".\"organisation\"(\"id\") ON UPDATE RESTRICT ON DELETE CASCADE;\n".to_string(), 0); - - let change = ChangeFileParams { - path: path.clone(), - version: 1, - changes: vec![ - ChangeParams { - range: Some(TextRange::new(31.into(), 38.into())), - text: "omni_channel_message".to_string(), - }, - ChangeParams { - range: Some(TextRange::new(60.into(), 67.into())), - text: "omni_channel_message".to_string(), - }, - ], - }; - - let changed = doc.apply_file_change(&change); - - assert_eq!(doc.content, "\n\n\n\nALTER TABLE ONLY \"public\".\"omni_channel_message\"\n ADD CONSTRAINT \"omni_channel_message_organisation_id_fkey\" FOREIGN -KEY (\"organisation_id\") REFERENCES \"public\".\"organisation\"(\"id\") ON UPDATE RESTRICT ON DELETE CASCADE;\n"); - - assert_eq!(changed.len(), 2); - - assert_document_integrity(&doc); - } - - 
#[test] - fn remove_inbetween_whitespace() { - let path = PgTPath::new("test.sql"); - - let mut doc = Document::new("select * from users".to_string(), 0); - - let change = ChangeFileParams { - path: path.clone(), - version: 1, - changes: vec![ChangeParams { - text: "".to_string(), - range: Some(TextRange::new(9.into(), 11.into())), - }], - }; - - let changed = doc.apply_file_change(&change); - - assert_eq!(doc.content, "select * from users"); - - assert_eq!(changed.len(), 1); - - match &changed[0] { - StatementChange::Modified(stmt) => { - let ModifiedStatement { - change_range, - change_text, - new_stmt_text, - old_stmt_text, - .. - } = stmt; - - assert_eq!(change_range, &TextRange::new(9.into(), 11.into())); - assert_eq!(change_text, ""); - assert_eq!(old_stmt_text, "select * from users"); - assert_eq!(new_stmt_text, "select * from users"); - } - - _ => unreachable!("Did not yield a modified statement."), - } - - assert_document_integrity(&doc); - } - - #[test] - fn test_comments_only() { - let path = PgTPath::new("test.sql"); - let initial_content = "-- atlas:import async_trigger/setup.sql\n-- atlas:import public/setup.sql\n-- atlas:import private/setup.sql\n-- atlas:import api/setup.sql\n-- atlas:import async_trigger/index.sql\n-- atlas:import public/enums/index.sql\n-- atlas:import public/types/index.sql\n-- atlas:import private/enums/index.sql\n-- atlas:import private/functions/index.sql\n-- atlas:import public/tables/index.sql\n-- atlas:import public/index.sql\n-- atlas:import private/index.sql\n-- atlas:import api/index.sql\n\n\n\n"; - - // Create a new document - let mut doc = Document::new(initial_content.to_string(), 0); - - // First change: Delete some text at line 2, character 24-29 - let change1 = ChangeFileParams { - path: path.clone(), - version: 3, - changes: vec![ChangeParams { - text: "".to_string(), - range: Some(TextRange::new( - // Calculate the correct position based on the content - // Line 2, character 24 - 98.into(), - // Line 2, character 29 - 103.into(), - )), - }], - }; - - let _changes1 = doc.apply_file_change(&change1); - - // Second change: Add 't' at line 2, character 24 - let change2 = ChangeFileParams { - path: path.clone(), - version: 4, - changes: vec![ChangeParams { - text: "t".to_string(), - range: Some(TextRange::new(98.into(), 98.into())), - }], - }; - - let _changes2 = doc.apply_file_change(&change2); - - assert_eq!( - doc.positions.len(), - 0, - "Document should have no statement after adding 't'" - ); - - // Third change: Add 'e' at line 2, character 25 - let change3 = ChangeFileParams { - path: path.clone(), - version: 5, - changes: vec![ChangeParams { - text: "e".to_string(), - range: Some(TextRange::new(99.into(), 99.into())), - }], - }; - - let _changes3 = doc.apply_file_change(&change3); - assert_eq!( - doc.positions.len(), - 0, - "Document should still have no statement" - ); - - // Fourth change: Add 's' at line 2, character 26 - let change4 = ChangeFileParams { - path: path.clone(), - version: 6, - changes: vec![ChangeParams { - text: "s".to_string(), - range: Some(TextRange::new(100.into(), 100.into())), - }], - }; - - let _changes4 = doc.apply_file_change(&change4); - assert_eq!( - doc.positions.len(), - 0, - "Document should still have no statement" - ); - - // Fifth change: Add 't' at line 2, character 27 - let change5 = ChangeFileParams { - path: path.clone(), - version: 7, - changes: vec![ChangeParams { - text: "t".to_string(), - range: Some(TextRange::new(101.into(), 101.into())), - }], - }; - - let _changes5 = 
doc.apply_file_change(&change5); - assert_eq!( - doc.positions.len(), - 0, - "Document should still have no statement" - ); - - assert_document_integrity(&doc); - } -} diff --git a/crates/pgt_workspace/src/workspace/server/db_connection.rs b/crates/pgt_workspace/src/workspace/server/db_connection.rs deleted file mode 100644 index d002c0a2..00000000 --- a/crates/pgt_workspace/src/workspace/server/db_connection.rs +++ /dev/null @@ -1,40 +0,0 @@ -use std::time::Duration; - -use sqlx::{PgPool, Postgres, pool::PoolOptions, postgres::PgConnectOptions}; - -use crate::settings::DatabaseSettings; - -#[derive(Default)] -pub struct DbConnection { - pool: Option<PgPool>, -} - -impl DbConnection { - /// There might be no pool available if the user decides to skip db checks. - pub(crate) fn get_pool(&self) -> Option<PgPool> { - self.pool.clone() - } - - pub(crate) fn set_conn_settings(&mut self, settings: &DatabaseSettings) { - if !settings.enable_connection { - tracing::info!("Database connection disabled."); - return; - } - - let config = PgConnectOptions::new() - .host(&settings.host) - .port(settings.port) - .username(&settings.username) - .password(&settings.password) - .database(&settings.database); - - let timeout = settings.conn_timeout_secs; - - let pool = PoolOptions::<Postgres>::new() - .acquire_timeout(timeout) - .acquire_slow_threshold(Duration::from_secs(2)) - .connect_lazy_with(config); - - self.pool = Some(pool); - } -}
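Because `set_conn_settings` builds the pool with `connect_lazy_with`, constructing it never blocks and never fails; a wrong host or password only surfaces once the first query tries to acquire a connection. A minimal standalone sketch of the same construction, with hypothetical local credentials:

```rust
use std::time::Duration;

use sqlx::{Postgres, pool::PoolOptions, postgres::PgConnectOptions};

fn main() {
    let config = PgConnectOptions::new()
        .host("127.0.0.1")
        .port(5432)
        .username("postgres")
        .password("postgres")
        .database("postgres");

    // No I/O happens here; connections are opened on first acquire.
    let _pool = PoolOptions::<Postgres>::new()
        .acquire_timeout(Duration::from_secs(10))
        .connect_lazy_with(config);
}
```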
- .with_file_span(err.location().span.map(|r| r + o)), - ) - }) - .collect(), - ), - Err(errs) => ( - vec![], - errs.into_iter() - .map(|err| { - SDiagnostic::new( - err.clone() - .with_file_span(err.location().span.map(|r| r + o)), - ) - }) - .collect(), - ), - } -} - -pub struct StatementIterator<'a> { - document: &'a Document, - positions: std::slice::Iter<'a, StatementPos>, -} - -impl<'a> StatementIterator<'a> { - pub fn new(document: &'a Document) -> Self { - Self { - document, - positions: document.positions.iter(), - } - } -} - -impl<'a> Iterator for StatementIterator<'a> { - type Item = (StatementId, TextRange, &'a str); - - fn next(&mut self) -> Option<Self::Item> { - self.positions.next().map(|(id, range)| { - let range = *range; - let doc = self.document; - let id = id.clone(); - (id, range, &doc.content[range]) - }) - } -} diff --git a/crates/pgt_workspace/src/workspace/server/migration.rs b/crates/pgt_workspace/src/workspace/server/migration.rs deleted file mode 100644 index d8853727..00000000 --- a/crates/pgt_workspace/src/workspace/server/migration.rs +++ /dev/null @@ -1,142 +0,0 @@ -use std::path::Path; - -#[derive(Debug)] -pub(crate) struct Migration { - pub(crate) sequence_number: u64, - #[allow(unused)] - pub(crate) name: String, -} - -/// Get the migration associated with a path, if it is a migration file -pub(crate) fn get_migration(path: &Path, migrations_dir: &Path) -> Option<Migration> { - // Check if path is a child of the migration directory - let is_child = path - .canonicalize() - .ok() - .and_then(|canonical_child| { - migrations_dir - .canonicalize() - .ok() - .map(|canonical_dir| canonical_child.starts_with(&canonical_dir)) - }) - .unwrap_or(false); - - if !is_child { - return None; - } - - // we are trying to match patterns used by popular migration tools - - // in the "root" pattern, all files are directly within the migrations directory - // and their names follow <timestamp>_<name>.sql.
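- // (illustrative example, not in the original file: "20230115120000_create_users.sql" - // would parse to sequence_number 20230115120000 and name "create_users")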
- // this is used by supabase - let root_migration = path - .file_name() - .and_then(|os_str| os_str.to_str()) - .and_then(parse_migration_name); - - if root_migration.is_some() { - return root_migration; - } - - // in the "subdirectory" pattern, each migration is in a subdirectory named <timestamp>_<name> - // this is used by prisma and drizzle - path.parent() - .and_then(|parent| parent.file_name()) - .and_then(|os_str| os_str.to_str()) - .and_then(parse_migration_name) -} - -fn parse_migration_name(name: &str) -> Option<Migration> { - let mut parts = name.splitn(2, '_'); - // remove leading zeros to support numeric - let sequence_number: u64 = parts.next()?.trim_start_matches('0').parse().ok()?; - let full_name = parts.next()?; - let name = full_name - .strip_suffix(".sql") - .unwrap_or(full_name) - .to_string(); - Some(Migration { - sequence_number, - name, - }) -} - -#[cfg(test)] -mod tests { - use super::*; - use std::fs; - use std::path::PathBuf; - use tempfile::TempDir; - - fn setup() -> TempDir { - TempDir::new().expect("Failed to create temp dir") - } - - #[test] - fn test_get_migration_root_pattern() { - let temp_dir = setup(); - let migrations_dir = temp_dir.path().to_path_buf(); - let path = migrations_dir.join("1234567890_create_users.sql"); - fs::write(&path, "").unwrap(); - - let migration = get_migration(&path, &migrations_dir); - - assert!(migration.is_some()); - let migration = migration.unwrap(); - assert_eq!(migration.sequence_number, 1234567890); - assert_eq!(migration.name, "create_users"); - } - - #[test] - fn test_get_migration_subdirectory_pattern() { - let temp_dir = setup(); - let migrations_dir = temp_dir.path().to_path_buf(); - let subdir = migrations_dir.join("1234567890_create_users"); - fs::create_dir(&subdir).unwrap(); - let path = subdir.join("up.sql"); - fs::write(&path, "").unwrap(); - - let migration = get_migration(&path, &migrations_dir); - - assert!(migration.is_some()); - let migration = migration.unwrap(); - assert_eq!(migration.sequence_number, 1234567890); - assert_eq!(migration.name, "create_users"); - } - - #[test] - fn test_get_migration_prefix_number() { - let temp_dir = setup(); - let migrations_dir = temp_dir.path().to_path_buf(); - let path = migrations_dir.join("000201_a_migration.sql"); - fs::write(&path, "").unwrap(); - - let migration = get_migration(&path, &migrations_dir); - - assert!(migration.is_some()); - let migration = migration.unwrap(); - assert_eq!(migration.sequence_number, 201); - assert_eq!(migration.name, "a_migration"); - } - - #[test] - fn test_get_migration_not_timestamp_in_filename() { - let migrations_dir = PathBuf::from("/tmp/migrations"); - let path = migrations_dir.join("not_a_migration.sql"); - - let migration = get_migration(&path, &migrations_dir); - - assert!(migration.is_none()); - } - - #[test] - fn test_get_migration_outside_migrations_dir() { - let migrations_dir = PathBuf::from("/tmp/migrations"); - let path = PathBuf::from("/tmp/other/1234567890_create_users.sql"); - - let migration = get_migration(&path, &migrations_dir); - - assert!(migration.is_none()); - } -} diff --git a/crates/pgt_workspace/src/workspace/server/parsed_document.rs b/crates/pgt_workspace/src/workspace/server/parsed_document.rs deleted file mode 100644 index 2b81faba..00000000 --- a/crates/pgt_workspace/src/workspace/server/parsed_document.rs +++ /dev/null @@ -1,442 +0,0 @@ -use std::sync::Arc; - -use pgt_diagnostics::serde::Diagnostic as SDiagnostic; -use pgt_fs::PgTPath; -use pgt_query_ext::diagnostics::SyntaxDiagnostic; -use pgt_text_size::{TextRange,
TextSize}; - -use crate::workspace::ChangeFileParams; - -use super::{ - annotation::AnnotationStore, - change::StatementChange, - document::{Document, StatementIterator}, - pg_query::PgQueryStore, - sql_function::{SQLFunctionSignature, get_sql_fn_body, get_sql_fn_signature}, - statement_identifier::StatementId, - tree_sitter::TreeSitterStore, -}; - -pub struct ParsedDocument { - #[allow(dead_code)] - path: PgTPath, - - doc: Document, - ast_db: PgQueryStore, - cst_db: TreeSitterStore, - annotation_db: AnnotationStore, -} - -impl ParsedDocument { - pub fn new(path: PgTPath, content: String, version: i32) -> ParsedDocument { - let doc = Document::new(content, version); - - let cst_db = TreeSitterStore::new(); - let ast_db = PgQueryStore::new(); - let annotation_db = AnnotationStore::new(); - - doc.iter().for_each(|(stmt, _, content)| { - cst_db.add_statement(&stmt, content); - }); - - ParsedDocument { - path, - doc, - ast_db, - cst_db, - annotation_db, - } - } - - /// Applies a change to the document and updates the CST and AST databases accordingly. - /// - /// Note that only tree-sitter cares about statement modifications vs remove + add. - /// Hence, we just clear the AST for the old statements and lazily load them when requested. - /// - /// * `params`: ChangeFileParams - The parameters for the change to be applied. - pub fn apply_change(&mut self, params: ChangeFileParams) { - for c in &self.doc.apply_file_change(&params) { - match c { - StatementChange::Added(added) => { - tracing::debug!( - "Adding statement: id:{:?}, text:{:?}", - added.stmt, - added.text - ); - self.cst_db.add_statement(&added.stmt, &added.text); - } - StatementChange::Deleted(s) => { - tracing::debug!("Deleting statement: id {:?}", s,); - self.cst_db.remove_statement(s); - self.ast_db.clear_statement(s); - self.annotation_db.clear_statement(s); - } - StatementChange::Modified(s) => { - tracing::debug!( - "Modifying statement with id {:?} (new id {:?}).
Range {:?}, Changed from '{:?}' to '{:?}', changed text: {:?}", - s.old_stmt, - s.new_stmt, - s.change_range, - s.old_stmt_text, - s.new_stmt_text, - s.change_text - ); - - self.cst_db.modify_statement(s); - self.ast_db.clear_statement(&s.old_stmt); - self.annotation_db.clear_statement(&s.old_stmt); - } - } - } - } - - pub fn get_document_content(&self) -> &str { - &self.doc.content - } - - pub fn document_diagnostics(&self) -> &Vec<SDiagnostic> { - &self.doc.diagnostics - } - - pub fn find<'a, M>(&'a self, id: StatementId, mapper: M) -> Option<M::Output> - where - M: StatementMapper<'a>, - { - self.iter_with_filter(mapper, IdFilter::new(id)).next() - } - - pub fn iter<'a, M>(&'a self, mapper: M) -> ParseIterator<'a, M, NoFilter> - where - M: StatementMapper<'a>, - { - self.iter_with_filter(mapper, NoFilter) - } - - pub fn iter_with_filter<'a, M, F>(&'a self, mapper: M, filter: F) -> ParseIterator<'a, M, F> - where - M: StatementMapper<'a>, - F: StatementFilter<'a>, - { - ParseIterator::new(self, mapper, filter) - } - - #[allow(dead_code)] - pub fn count(&self) -> usize { - self.iter(DefaultMapper).count() - } -} - -pub trait StatementMapper<'a> { - type Output; - - fn map( - &self, - parsed: &'a ParsedDocument, - id: StatementId, - range: TextRange, - content: &str, - ) -> Self::Output; -} - -pub trait StatementFilter<'a> { - fn predicate(&self, id: &StatementId, range: &TextRange, content: &str) -> bool; -} - -pub struct ParseIterator<'a, M, F> { - parser: &'a ParsedDocument, - statements: StatementIterator<'a>, - mapper: M, - filter: F, - pending_sub_statements: Vec<(StatementId, TextRange, String)>, -} - -impl<'a, M, F> ParseIterator<'a, M, F> { - pub fn new(parser: &'a ParsedDocument, mapper: M, filter: F) -> Self { - Self { - parser, - statements: parser.doc.iter(), - mapper, - filter, - pending_sub_statements: Vec::new(), - } - } -} - -impl<'a, M, F> Iterator for ParseIterator<'a, M, F> -where - M: StatementMapper<'a>, - F: StatementFilter<'a>, -{ - type Item = M::Output; - - fn next(&mut self) -> Option<Self::Item> { - // First check if we have any pending sub-statements to process - if let Some((id, range, content)) = self.pending_sub_statements.pop() { - if self.filter.predicate(&id, &range, content.as_str()) { - return Some(self.mapper.map(self.parser, id, range, &content)); - } - // If the sub-statement doesn't pass the filter, continue to the next item - return self.next(); - } - - // Process the next top-level statement - let next_statement = self.statements.next(); - - if let Some((root_id, range, content)) = next_statement { - // If we should include sub-statements and this statement has an AST - let content_owned = content.to_string(); - if let Ok(ast) = self - .parser - .ast_db - .get_or_cache_ast(&root_id, &content_owned) - .as_ref() - { - // Check if this is a SQL function definition with a body - if let Some(sub_statement) = get_sql_fn_body(ast, &content_owned) { - // Add sub-statements to our pending queue - self.pending_sub_statements.push(( - root_id.create_child(), - // adjust range to document - sub_statement.range + range.start(), - sub_statement.body.clone(), - )); - } - } - - // Return the current statement if it passes the filter - if self.filter.predicate(&root_id, &range, content) { - return Some(self.mapper.map(self.parser, root_id, range, content)); - } - - // If the current statement doesn't pass the filter, try the next one - return self.next(); - } - - None - } -} - -pub struct DefaultMapper; -impl<'a> StatementMapper<'a> for DefaultMapper { - type Output = (StatementId, TextRange,
String); - - fn map( - &self, - _parser: &'a ParsedDocument, - id: StatementId, - range: TextRange, - content: &str, - ) -> Self::Output { - (id, range, content.to_string()) - } -} - -pub struct ExecuteStatementMapper; -impl<'a> StatementMapper<'a> for ExecuteStatementMapper { - type Output = ( - StatementId, - TextRange, - String, - Option<pgt_query_ext::NodeEnum>, - ); - - fn map( - &self, - parser: &'a ParsedDocument, - id: StatementId, - range: TextRange, - content: &str, - ) -> Self::Output { - let ast_result = parser.ast_db.get_or_cache_ast(&id, content); - let ast_option = match &*ast_result { - Ok(node) => Some(node.clone()), - Err(_) => None, - }; - - (id, range, content.to_string(), ast_option) - } -} - -pub struct AsyncDiagnosticsMapper; -impl<'a> StatementMapper<'a> for AsyncDiagnosticsMapper { - type Output = ( - StatementId, - TextRange, - String, - Option<pgt_query_ext::NodeEnum>, - Arc<tree_sitter::Tree>, - Option<SQLFunctionSignature>, - ); - - fn map( - &self, - parser: &'a ParsedDocument, - id: StatementId, - range: TextRange, - content: &str, - ) -> Self::Output { - let content_owned = content.to_string(); - let ast_result = parser.ast_db.get_or_cache_ast(&id, &content_owned); - - let ast_option = match &*ast_result { - Ok(node) => Some(node.clone()), - Err(_) => None, - }; - - let cst_result = parser.cst_db.get_or_cache_tree(&id, &content_owned); - - let sql_fn_sig = id - .parent() - .and_then(|root| { - let c = parser.doc.statement_content(&root)?; - Some((root, c)) - }) - .and_then(|(root, c)| { - let ast_option = parser - .ast_db - .get_or_cache_ast(&root, c) - .as_ref() - .clone() - .ok(); - - let ast_option = ast_option.as_ref()?; - - get_sql_fn_signature(ast_option) - }); - - (id, range, content_owned, ast_option, cst_result, sql_fn_sig) - } -} - -pub struct SyncDiagnosticsMapper; -impl<'a> StatementMapper<'a> for SyncDiagnosticsMapper { - type Output = ( - StatementId, - TextRange, - Option<pgt_query_ext::NodeEnum>, - Option<SyntaxDiagnostic>, - ); - - fn map( - &self, - parser: &'a ParsedDocument, - id: StatementId, - range: TextRange, - content: &str, - ) -> Self::Output { - let ast_result = parser.ast_db.get_or_cache_ast(&id, content); - - let (ast_option, diagnostics) = match &*ast_result { - Ok(node) => (Some(node.clone()), None), - Err(diag) => (None, Some(diag.clone())), - }; - - (id, range, ast_option, diagnostics) - } -} - -pub struct GetCompletionsMapper; -impl<'a> StatementMapper<'a> for GetCompletionsMapper { - type Output = (StatementId, TextRange, String, Arc<tree_sitter::Tree>); - - fn map( - &self, - parser: &'a ParsedDocument, - id: StatementId, - range: TextRange, - content: &str, - ) -> Self::Output { - let tree = parser.cst_db.get_or_cache_tree(&id, content); - (id, range, content.into(), tree) - } -} - -/* - * We allow an offset of two for the statement: - * - * select * from | <-- we want to suggest items for the next token. - * - * However, if the current statement is terminated by a semicolon, we don't apply any - * offset. - * - * select * from users; | <-- no autocompletions here.
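 * * Illustrative numbers (not in the original comment): "select * from" has range 0..13, so the * measured range becomes 0..15 and cursors at offsets 13 and 14 still receive completions; * "select * from users;" keeps its raw range, so a cursor after the semicolon matches nothing.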
- */ -pub struct GetCompletionsFilter { - pub cursor_position: TextSize, -} -impl StatementFilter<'_> for GetCompletionsFilter { - fn predicate(&self, _id: &StatementId, range: &TextRange, content: &str) -> bool { - let is_terminated_by_semi = content.chars().last().is_some_and(|c| c == ';'); - - let measuring_range = if is_terminated_by_semi { - *range - } else { - range.checked_expand_end(2.into()).unwrap_or(*range) - }; - measuring_range.contains(self.cursor_position) - } -} - -pub struct NoFilter; -impl StatementFilter<'_> for NoFilter { - fn predicate(&self, _id: &StatementId, _range: &TextRange, _content: &str) -> bool { - true - } -} - -pub struct CursorPositionFilter { - pos: TextSize, -} - -impl CursorPositionFilter { - pub fn new(pos: TextSize) -> Self { - Self { pos } - } -} - -impl StatementFilter<'_> for CursorPositionFilter { - fn predicate(&self, _id: &StatementId, range: &TextRange, _content: &str) -> bool { - range.contains(self.pos) - } -} - -pub struct IdFilter { - id: StatementId, -} - -impl IdFilter { - pub fn new(id: StatementId) -> Self { - Self { id } - } -} - -impl StatementFilter<'_> for IdFilter { - fn predicate(&self, id: &StatementId, _range: &TextRange, _content: &str) -> bool { - *id == self.id - } -} - -#[cfg(test)] -mod tests { - use super::*; - - use pgt_fs::PgTPath; - - #[test] - fn sql_function_body() { - let input = "CREATE FUNCTION add(test0 integer, test1 integer) RETURNS integer - AS 'select $1 + $2;' - LANGUAGE SQL - IMMUTABLE - RETURNS NULL ON NULL INPUT;"; - - let path = PgTPath::new("test.sql"); - - let d = ParsedDocument::new(path, input.to_string(), 0); - - let stmts = d.iter(DefaultMapper).collect::<Vec<_>>(); - - assert_eq!(stmts.len(), 2); - assert_eq!(stmts[1].2, "select $1 + $2;"); - } -} diff --git a/crates/pgt_workspace/src/workspace/server/pg_query.rs b/crates/pgt_workspace/src/workspace/server/pg_query.rs deleted file mode 100644 index e5c0cac8..00000000 --- a/crates/pgt_workspace/src/workspace/server/pg_query.rs +++ /dev/null @@ -1,38 +0,0 @@ -use std::sync::Arc; - -use dashmap::DashMap; -use pgt_query_ext::diagnostics::*; - -use super::statement_identifier::StatementId; - -pub struct PgQueryStore { - db: DashMap<StatementId, Arc<Result<pgt_query_ext::NodeEnum, SyntaxDiagnostic>>>, -} - -impl PgQueryStore { - pub fn new() -> PgQueryStore { - PgQueryStore { db: DashMap::new() } - } - - pub fn get_or_cache_ast( - &self, - statement: &StatementId, - content: &str, - ) -> Arc<Result<pgt_query_ext::NodeEnum, SyntaxDiagnostic>> { - if let Some(existing) = self.db.get(statement).map(|x| x.clone()) { - return existing; - } - - let r = Arc::new(pgt_query_ext::parse(content).map_err(SyntaxDiagnostic::from)); - self.db.insert(statement.clone(), r.clone()); - r - } - - pub fn clear_statement(&self, id: &StatementId) { - self.db.remove(id); - - if let Some(child_id) = id.get_child_id() { - self.db.remove(&child_id); - } - } -} diff --git a/crates/pgt_workspace/src/workspace/server/schema_cache_manager.rs b/crates/pgt_workspace/src/workspace/server/schema_cache_manager.rs deleted file mode 100644 index 03cd6ded..00000000 --- a/crates/pgt_workspace/src/workspace/server/schema_cache_manager.rs +++ /dev/null @@ -1,97 +0,0 @@ -use std::sync::{Arc, RwLock, RwLockReadGuard}; - -use pgt_schema_cache::SchemaCache; -use sqlx::PgPool; - -use crate::WorkspaceError; - -use super::async_helper::run_async; - -pub(crate) struct SchemaCacheHandle<'a> { - inner: RwLockReadGuard<'a, SchemaCacheManagerInner>, -} - -impl<'a> SchemaCacheHandle<'a> { - pub(crate) fn new(cache: &'a RwLock<SchemaCacheManagerInner>) -> Self { - Self { - inner: cache.read().unwrap(), - } - } - - pub(crate) fn wrap(inner:
RwLockReadGuard<'a, SchemaCacheManagerInner>) -> Self { - Self { inner } - } - - pub fn get_arc(&self) -> Arc<SchemaCache> { - Arc::clone(&self.inner.cache) - } -} - -impl AsRef<SchemaCache> for SchemaCacheHandle<'_> { - fn as_ref(&self) -> &SchemaCache { - &self.inner.cache - } -} - -#[derive(Default)] -pub(crate) struct SchemaCacheManagerInner { - cache: Arc<SchemaCache>, - conn_str: String, -} - -#[derive(Default)] -pub struct SchemaCacheManager { - inner: RwLock<SchemaCacheManagerInner>, -} - -impl SchemaCacheManager { - pub fn load(&self, pool: PgPool) -> Result<SchemaCacheHandle, WorkspaceError> { - let new_conn_str = pool_to_conn_str(&pool); - - { - // return early if the connection string is the same - let inner = self.inner.read().unwrap(); - if new_conn_str == inner.conn_str { - tracing::info!("Same connection string, no updates."); - return Ok(SchemaCacheHandle::wrap(inner)); - } - } - - let maybe_refreshed = run_async(async move { SchemaCache::load(&pool).await })?; - let refreshed = maybe_refreshed?; - - { - // write lock must be dropped before we return the reference below, hence the block - let mut inner = self.inner.write().unwrap(); - - // Double-check that we still need to refresh (another thread might have done it) - if new_conn_str != inner.conn_str { - inner.cache = Arc::new(refreshed); - inner.conn_str = new_conn_str; - tracing::info!("Refreshed connection."); - } - } - - Ok(SchemaCacheHandle::new(&self.inner)) - } -} - -fn pool_to_conn_str(pool: &PgPool) -> String { - let conn = pool.connect_options(); - - match conn.get_database() { - None => format!( - "postgres://{}:@{}:{}", - conn.get_username(), - conn.get_host(), - conn.get_port() - ), - Some(db) => format!( - "postgres://{}:@{}:{}/{}", - conn.get_username(), - conn.get_host(), - conn.get_port(), - db - ), - } -} diff --git a/crates/pgt_workspace/src/workspace/server/sql_function.rs b/crates/pgt_workspace/src/workspace/server/sql_function.rs deleted file mode 100644 index bc2c6c3b..00000000 --- a/crates/pgt_workspace/src/workspace/server/sql_function.rs +++ /dev/null @@ -1,224 +0,0 @@ -use pgt_text_size::TextRange; - -#[derive(Debug, Clone)] -pub struct ArgType { - pub schema: Option<String>, - pub name: String, - pub is_array: bool, -} - -#[derive(Debug, Clone)] -pub struct SQLFunctionArg { - pub name: Option<String>, - pub type_: ArgType, -} - -#[derive(Debug, Clone)] -pub struct SQLFunctionSignature { - #[allow(dead_code)] - pub schema: Option<String>, - pub name: String, - pub args: Vec<SQLFunctionArg>, -} - -#[derive(Debug, Clone)] -pub struct SQLFunctionBody { - pub range: TextRange, - pub body: String, -} - -/// Extracts the function signature from a SQL function definition -pub fn get_sql_fn_signature(ast: &pgt_query_ext::NodeEnum) -> Option<SQLFunctionSignature> { - let create_fn = match ast { - pgt_query_ext::NodeEnum::CreateFunctionStmt(cf) => cf, - _ => return None, - }; - - // Extract language from function options - let language = find_option_value(create_fn, "language")?; - - // Only process SQL functions - if language != "sql" { - return None; - } - - let fn_name = parse_name(&create_fn.funcname)?; - - // we return None if anything is not expected - let mut fn_args = Vec::new(); - for arg in &create_fn.parameters { - if let Some(pgt_query_ext::NodeEnum::FunctionParameter(node)) = &arg.node { - let arg_name = (!node.name.is_empty()).then_some(node.name.clone()); - - let arg_type = node.arg_type.as_ref()?; - let type_name = parse_name(&arg_type.names)?; - fn_args.push(SQLFunctionArg { - name: arg_name, - type_: ArgType { - schema: type_name.0, - name: type_name.1, - is_array: node - .arg_type - .as_ref() - .map(|t| !t.array_bounds.is_empty()) -
.unwrap_or(false), - }, - }); - } else { - return None; - } - } - - Some(SQLFunctionSignature { - schema: fn_name.0, - name: fn_name.1, - args: fn_args, - }) -} - -/// Extracts the SQL body from a function definition -pub fn get_sql_fn_body(ast: &pgt_query_ext::NodeEnum, content: &str) -> Option<SQLFunctionBody> { - let create_fn = match ast { - pgt_query_ext::NodeEnum::CreateFunctionStmt(cf) => cf, - _ => return None, - }; - - // Extract language from function options - let language = find_option_value(create_fn, "language")?; - - // Only process SQL functions - if language != "sql" { - return None; - } - - // Extract SQL body from function options - let sql_body = find_option_value(create_fn, "as")?; - - // Find the range of the SQL body in the content - let start = content.find(&sql_body)?; - let end = start + sql_body.len(); - - let range = TextRange::new(start.try_into().unwrap(), end.try_into().unwrap()); - - Some(SQLFunctionBody { - range, - body: sql_body.clone(), - }) -} - -/// Helper function to find a specific option value from function options -fn find_option_value( - create_fn: &pgt_query_ext::protobuf::CreateFunctionStmt, - option_name: &str, -) -> Option<String> { - create_fn - .options - .iter() - .filter_map(|opt_wrapper| opt_wrapper.node.as_ref()) - .find_map(|opt| { - if let pgt_query_ext::NodeEnum::DefElem(def_elem) = opt { - if def_elem.defname == option_name { - def_elem - .arg - .iter() - .filter_map(|arg_wrapper| arg_wrapper.node.as_ref()) - .find_map(|arg| { - if let pgt_query_ext::NodeEnum::String(s) = arg { - Some(s.sval.clone()) - } else if let pgt_query_ext::NodeEnum::List(l) = arg { - l.items.iter().find_map(|item_wrapper| { - if let Some(pgt_query_ext::NodeEnum::String(s)) = - item_wrapper.node.as_ref() - { - Some(s.sval.clone()) - } else { - None - } - }) - } else { - None - } - }) - } else { - None - } - } else { - None - } - }) -} - -fn parse_name(nodes: &[pgt_query_ext::protobuf::Node]) -> Option<(Option<String>, String)> { - let names = nodes - .iter() - .map(|n| match &n.node { - Some(pgt_query_ext::NodeEnum::String(s)) => Some(s.sval.clone()), - _ => None, - }) - .collect::<Vec<_>>(); - - match names.as_slice() { - [Some(schema), Some(name)] => Some((Some(schema.clone()), name.clone())), - [Some(name)] => Some((None, name.clone())), - _ => None, - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn sql_function_signature() { - let input = "CREATE FUNCTION add(test0 integer, test1 integer) RETURNS integer - AS 'select $1 + $2;' - LANGUAGE SQL - IMMUTABLE - RETURNS NULL ON NULL INPUT;"; - - let ast = pgt_query_ext::parse(input).unwrap(); - - let sig = get_sql_fn_signature(&ast); - - assert!(sig.is_some()); - - let sig = sig.unwrap(); - - let arg1 = sig.args.first().unwrap(); - - assert_eq!(arg1.name, Some("test0".to_string())); - assert_eq!(arg1.type_.name, "int4"); - - let arg2 = sig.args.get(1).unwrap(); - assert_eq!(arg2.name, Some("test1".to_string())); - assert_eq!(arg2.type_.name, "int4"); - } - - #[test] - fn array_type() { - let input = "CREATE FUNCTION add(test0 integer[], test1 integer) RETURNS integer - AS 'select $1 + $2;' - LANGUAGE SQL - IMMUTABLE - RETURNS NULL ON NULL INPUT;"; - - let ast = pgt_query_ext::parse(input).unwrap(); - - let sig = get_sql_fn_signature(&ast); - - assert!(sig.is_some()); - - let sig = sig.unwrap(); - - assert!( - sig.args - .iter() - .find(|arg| arg.type_.is_array) - .map(|arg| { - assert_eq!(arg.type_.name, "int4"); - assert!(arg.type_.is_array); - }) - .is_some() - ); - } -} diff --git
a/crates/pgt_workspace/src/workspace/server/statement_identifier.rs b/crates/pgt_workspace/src/workspace/server/statement_identifier.rs deleted file mode 100644 index 7c7d76f0..00000000 --- a/crates/pgt_workspace/src/workspace/server/statement_identifier.rs +++ /dev/null @@ -1,114 +0,0 @@ -use serde::{Deserialize, Serialize}; - -#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)] -#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] -pub struct RootId { - inner: usize, -} - -#[cfg(test)] -impl From<RootId> for usize { - fn from(val: RootId) -> Self { - val.inner - } -} - -#[cfg(test)] -impl From<usize> for RootId { - fn from(inner: usize) -> Self { - RootId { inner } - } -} - -#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)] -#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] -/// `StatementId` can represent IDs for nested statements. -/// -/// For example, an SQL function really consists of two statements: the function creation -/// and the body: -/// -/// ```sql -/// create or replace function get_product_name(product_id INT) -- the root statement -/// returns varchar as $$ -/// select * from … -- the child statement -/// $$ LANGUAGE plpgsql; -/// ``` -/// -/// For now, we only support SQL functions – no complex, nested statements. -/// -/// An SQL function only ever has ONE child, that's why the inner `RootId` of a `Root` -/// is the same as the one of its `Child`. -pub enum StatementId { - Root(RootId), - // StatementId is the same as the root id since we can only have a single sql function body per Root - Child(RootId), -} - -impl Default for StatementId { - fn default() -> Self { - StatementId::Root(RootId { inner: 0 }) - } -} - -impl StatementId { - pub fn raw(&self) -> usize { - match self { - StatementId::Root(s) => s.inner, - StatementId::Child(s) => s.inner, - } - } - - pub fn is_root(&self) -> bool { - matches!(self, StatementId::Root(_)) - } - - pub fn is_child(&self) -> bool { - matches!(self, StatementId::Child(_)) - } - - pub fn parent(&self) -> Option<StatementId> { - match self { - StatementId::Root(_) => None, - StatementId::Child(id) => Some(StatementId::Root(id.clone())), - } - } -} - -/// Helper struct to generate unique statement ids -pub struct StatementIdGenerator { - next_id: usize, -} - -impl StatementIdGenerator { - pub fn new() -> Self { - Self { next_id: 0 } - } - - pub fn next(&mut self) -> StatementId { - let id = self.next_id; - self.next_id += 1; - StatementId::Root(RootId { inner: id }) - } -} - -impl StatementId { - /// Use this to get the matching `StatementId::Child` for - /// a `StatementId::Root`. - /// If the `StatementId` was already a `Child`, this will return `None`. - /// It is not guaranteed that the `Root` actually has a `Child` statement in the workspace. - pub fn get_child_id(&self) -> Option<StatementId> { - match self { - StatementId::Root(id) => Some(StatementId::Child(RootId { inner: id.inner })), - StatementId::Child(_) => None, - } - } - - /// Use this if you need to create a matching `StatementId::Child` for `Root`. - /// You cannot create a `Child` of a `Child`.
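/// /// Illustrative sketch (not in the original source): /// ```ignore /// let root = StatementId::Root(RootId { inner: 3 }); /// let child = root.create_child(); /// assert!(child.is_child()); /// assert_eq!(child.raw(), 3); /// ```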
- pub fn create_child(&self) -> StatementId { - match self { - StatementId::Root(id) => StatementId::Child(RootId { inner: id.inner }), - StatementId::Child(_) => panic!("Cannot create child from a child statement id"), - } - } -} diff --git a/crates/pgt_workspace/src/workspace/server/tree_sitter.rs b/crates/pgt_workspace/src/workspace/server/tree_sitter.rs deleted file mode 100644 index a8932535..00000000 --- a/crates/pgt_workspace/src/workspace/server/tree_sitter.rs +++ /dev/null @@ -1,161 +0,0 @@ -use std::sync::{Arc, Mutex}; - -use dashmap::DashMap; -use tree_sitter::InputEdit; - -use super::{change::ModifiedStatement, statement_identifier::StatementId}; - -pub struct TreeSitterStore { - db: DashMap<StatementId, Arc<tree_sitter::Tree>>, - parser: Mutex<tree_sitter::Parser>, -} - -impl TreeSitterStore { - pub fn new() -> TreeSitterStore { - let mut parser = tree_sitter::Parser::new(); - parser - .set_language(tree_sitter_sql::language()) - .expect("Error loading sql language"); - - TreeSitterStore { - db: DashMap::new(), - parser: Mutex::new(parser), - } - } - - pub fn get_or_cache_tree( - &self, - statement: &StatementId, - content: &str, - ) -> Arc<tree_sitter::Tree> { - if let Some(existing) = self.db.get(statement).map(|x| x.clone()) { - return existing; - } - - let mut parser = self.parser.lock().expect("Failed to lock parser"); - let tree = Arc::new(parser.parse(content, None).unwrap()); - self.db.insert(statement.clone(), tree.clone()); - - tree - } - - pub fn add_statement(&self, statement: &StatementId, content: &str) { - let mut parser = self.parser.lock().expect("Failed to lock parser"); - let tree = parser.parse(content, None).unwrap(); - self.db.insert(statement.clone(), Arc::new(tree)); - } - - pub fn remove_statement(&self, id: &StatementId) { - self.db.remove(id); - - if let Some(child_id) = id.get_child_id() { - self.db.remove(&child_id); - } - } - - pub fn modify_statement(&self, change: &ModifiedStatement) { - let old = self.db.remove(&change.old_stmt); - - if old.is_none() { - self.add_statement(&change.new_stmt, &change.change_text); - return; - } - - // we clone the tree for now, let's see if that is sufficient or if we need to mutate the - // original tree instead but that will require some kind of locking - let mut tree = old.unwrap().1.as_ref().clone(); - - let edit = edit_from_change( - change.old_stmt_text.as_str(), - usize::from(change.change_range.start()), - usize::from(change.change_range.end()), - change.change_text.as_str(), - ); - - tree.edit(&edit); - - let mut parser = self.parser.lock().expect("Failed to lock parser"); - // todo handle error - self.db.insert( - change.new_stmt.clone(), - Arc::new(parser.parse(&change.new_stmt_text, Some(&tree)).unwrap()), - ); - } -} - -// Converts character positions and replacement text into a tree-sitter InputEdit -pub(crate) fn edit_from_change( - text: &str, - start_char: usize, - end_char: usize, - replacement_text: &str, -) -> InputEdit { - let mut start_byte = 0; - let mut end_byte = 0; - let mut chars_counted = 0; - - let mut line = 0; - let mut current_line_char_start = 0; // Track start of the current line in characters - let mut column_start = 0; - let mut column_end = 0; - - // Find the byte positions corresponding to the character positions - for (idx, c) in text.char_indices() { - if chars_counted == start_char { - start_byte = idx; - column_start = chars_counted - current_line_char_start; - } - if chars_counted == end_char { - end_byte = idx; - column_end = chars_counted - current_line_char_start; - break; // Found both start and end - } - if c == '\n' { - line += 1; -
current_line_char_start = chars_counted + 1; // Next character starts a new line - } - chars_counted += 1; - } - - // Handle case where end_char is at the end of the text - if end_char == chars_counted && end_byte == 0 { - end_byte = text.len(); - column_end = chars_counted - current_line_char_start; - } - - let start_point = tree_sitter::Point::new(line, column_start); - let old_end_point = tree_sitter::Point::new(line, column_end); - - // Calculate the new end byte after the edit - let new_end_byte = start_byte + replacement_text.len(); - - // Calculate the new end position - let new_lines = replacement_text.matches('\n').count(); - let last_line_length = if new_lines > 0 { - replacement_text - .split('\n') - .next_back() - .unwrap_or("") - .chars() - .count() - } else { - replacement_text.chars().count() - }; - - let new_end_position = if new_lines > 0 { - // If there are new lines, the row is offset by the number of new lines, and the column is the length of the last line - tree_sitter::Point::new(start_point.row + new_lines, last_line_length) - } else { - // If there are no new lines, the row remains the same, and the column is offset by the length of the insertion - tree_sitter::Point::new(start_point.row, start_point.column + last_line_length) - }; - - InputEdit { - start_byte, - old_end_byte: end_byte, - new_end_byte, - start_position: start_point, - old_end_position: old_end_point, - new_end_position, - } -} diff --git a/crates/pgt_workspace/src/workspace_types.rs b/crates/pgt_workspace/src/workspace_types.rs deleted file mode 100644 index 02215e79..00000000 --- a/crates/pgt_workspace/src/workspace_types.rs +++ /dev/null @@ -1,471 +0,0 @@ -//! Utility functions to help with generating bindings for the [Workspace] API - -use std::collections::VecDeque; - -use biome_js_syntax::{AnyJsDeclaration, AnyTsTupleTypeElement}; -use rustc_hash::FxHashSet; -use schemars::{ - JsonSchema, - r#gen::{SchemaGenerator, SchemaSettings}, - schema::{InstanceType, RootSchema, Schema, SchemaObject, SingleOrVec}, -}; -use serde_json::Value; - -use crate::{WorkspaceError, workspace::*}; -use biome_js_factory::{ - make, - syntax::{AnyJsObjectMemberName, AnyTsName, AnyTsType, AnyTsTypeMember, T}, -}; -use biome_rowan::AstSeparatedList; - -/// Manages a queue of type definitions that need to be generated -#[derive(Default)] -pub struct ModuleQueue<'a> { - /// Set of type names that have already been emitted - visited: FxHashSet<&'a str>, - /// Queue of type names and definitions that need to be generated - queue: VecDeque<(&'a str, &'a SchemaObject)>, -} - -impl<'a> ModuleQueue<'a> { - /// Add a type definition to the queue if it hasn't been emitted already - fn push_back(&mut self, item: (&'a str, &'a SchemaObject)) { - if self.visited.insert(item.0) { - self.queue.push_back(item); - } - } - - /// Pull a type name and definition from the queue - fn pop_front(&mut self) -> Option<(&'a str, &'a SchemaObject)> { - self.queue.pop_front() - } - - pub fn visited(&self) -> &FxHashSet<&'a str> { - &self.visited - } -} - -/// Generate a [TsType] node from the `instance_type` of a [SchemaObject] -fn instance_type<'a>( - queue: &mut ModuleQueue<'a>, - root_schema: &'a RootSchema, - schema: &'a SchemaObject, - ty: InstanceType, -) -> AnyTsType { - match ty { - // If the instance type is an object, generate a TS object type with the corresponding properties - InstanceType::Object => { - let object = schema.object.as_deref().unwrap(); - AnyTsType::from(make::ts_object_type( - make::token(T!['{']), - 
make::ts_type_member_list(object.properties.iter().map(|(property, schema)| { - let (ts_type, optional, description) = schema_type(queue, root_schema, schema); - assert!(!optional, "optional nested types are not supported"); - - let mut property = make::ident(property); - if let Some(description) = description { - let comment = format!("/**\n\t* {description} \n\t */"); - let trivia = vec![ - (biome_js_syntax::TriviaPieceKind::Newline, "\n"), - ( - biome_js_syntax::TriviaPieceKind::MultiLineComment, - comment.as_str(), - ), - (biome_js_syntax::TriviaPieceKind::Newline, "\n"), - ]; - property = property.with_leading_trivia(trivia); - } - - AnyTsTypeMember::from( - make::ts_property_signature_type_member(AnyJsObjectMemberName::from( - make::js_literal_member_name(property), - )) - .with_type_annotation(make::ts_type_annotation(make::token(T![:]), ts_type)) - .build(), - ) - })), - make::token(T!['}']), - )) - } - // If the instance type is an array, generate a TS array type with the corresponding item type - InstanceType::Array => { - let array = schema.array.as_deref().unwrap(); - let items = array.items.as_ref().unwrap(); - match items { - SingleOrVec::Single(schema) => { - let (ts_type, optional, _) = schema_type(queue, root_schema, schema); - assert!(!optional, "optional nested types are not supported"); - - AnyTsType::from(make::ts_array_type( - ts_type, - make::token(T!['[']), - make::token(T![']']), - )) - } - SingleOrVec::Vec(items) => AnyTsType::from(make::ts_tuple_type( - make::token(T!['[']), - make::ts_tuple_type_element_list( - items.iter().map(|schema| { - let (ts_type, optional, _) = schema_type(queue, root_schema, schema); - assert!(!optional, "optional nested types are not supported"); - AnyTsTupleTypeElement::AnyTsType(ts_type) - }), - items.iter().map(|_| make::token(T![,])), - ), - make::token(T![']']), - )), - } - } - - // Map native types to the corresponding TS type - InstanceType::Null => AnyTsType::from(make::ts_null_literal_type(make::token(T![null]))), - InstanceType::Boolean => AnyTsType::from(make::ts_boolean_type(make::token(T![boolean]))), - InstanceType::String => AnyTsType::from(make::ts_string_type(make::token(T![string]))), - InstanceType::Number | InstanceType::Integer => { - AnyTsType::from(make::ts_number_type(make::token(T![number]))) - } - } -} - -/// Generate a literal [TsType] from a `serde_json` [Value] -fn value_type(value: &Value) -> AnyTsType { - match value { - Value::Null => AnyTsType::from(make::ts_null_literal_type(make::token(T![null]))), - Value::Bool(true) => AnyTsType::from(make::ts_boolean_literal_type(make::token(T![true]))), - Value::Bool(false) => { - AnyTsType::from(make::ts_boolean_literal_type(make::token(T![false]))) - } - Value::Number(value) => AnyTsType::from( - make::ts_number_literal_type(make::js_number_literal(value.as_f64().unwrap())).build(), - ), - Value::String(value) => { - AnyTsType::from(make::ts_string_literal_type(make::js_string_literal(value))) - } - Value::Array(_) => unimplemented!(), - Value::Object(_) => unimplemented!(), - } -} - -/// Generate a union [TsType] node from a list of [TsType]s, -/// flattening any nested union type the iterator may emit -fn make_union_type(items: impl IntoIterator<Item = AnyTsType>) -> AnyTsType { - let mut result = Vec::new(); - - for item in items { - if let AnyTsType::TsUnionType(union_type) = item { - for item in union_type.types().iter() { - result.push(item.unwrap()); - } - } else { - result.push(item); - } - } - - let separators = (0..result.len().saturating_sub(1)).map(|_|
make::token(T![|])); - AnyTsType::from( - make::ts_union_type(make::ts_union_type_variant_list(result, separators)).build(), - ) -} - -/// Generate a [TsType] node from a [SchemaObject], returning the generated -/// TypeScript type along with a boolean flag indicating whether the type is -/// considered "optional" in the schema -fn schema_object_type<'a>( - queue: &mut ModuleQueue<'a>, - root_schema: &'a RootSchema, - schema: &'a SchemaObject, -) -> (AnyTsType, bool, Option<&'a String>) { - // Start by detecting enum types by inspecting the `enum_values` field, if - // the field is set return a union type generated from the literal enum values - let description = schema - .metadata - .as_ref() - .and_then(|s| s.description.as_ref()); - let ts_type = schema - .enum_values - .as_deref() - .map(|enum_values| make_union_type(enum_values.iter().map(value_type))) - // If the type isn't an enum, inspect its `instance_type` field, if the - // field is set return a type annotation for the corresponding type - .or_else(|| { - Some(match schema.instance_type.as_ref()? { - SingleOrVec::Single(ty) => instance_type(queue, root_schema, schema, **ty), - SingleOrVec::Vec(types) => make_union_type( - types - .iter() - .map(|ty| instance_type(queue, root_schema, schema, *ty)), - ), - }) - }) - // Otherwise inspect the `reference` field of the schema, if its set return - // a TS reference type and add the corresponding type to the queue - .or_else(|| { - let reference = schema.reference.as_deref()?; - let key = reference.trim_start_matches("#/components/schemas/"); - match root_schema.definitions.get(key) { - Some(Schema::Bool(_)) => unimplemented!(), - Some(Schema::Object(schema)) => queue.push_back((key, schema)), - None => panic!("definition for type {key:?} not found"), - } - - Some(AnyTsType::from( - make::ts_reference_type(AnyTsName::from(make::js_reference_identifier( - make::ident(key), - ))) - .build(), - )) - }) - // Finally try to inspect the subschemas for this type - .or_else(|| { - let subschemas = schema.subschemas.as_deref()?; - // First try to inspect the `all_of` list of subschemas, if it's - // set generate an intersection type from it - subschemas - .all_of - .as_deref() - .map(|all_of| { - AnyTsType::from( - make::ts_intersection_type(make::ts_intersection_type_element_list( - all_of.iter().map(|ty| { - let (ts_type, optional, _) = schema_type(queue, root_schema, ty); - assert!(!optional, "optional nested types are not supported"); - ts_type - }), - (0..all_of.len().saturating_sub(1)).map(|_| make::token(T![&])), - )) - .build(), - ) - }) - // Otherwise try to inspect the `any_of` list of subschemas, and - // generate the corresponding union type for it - .or_else(|| { - let any_of = subschemas - .any_of - .as_deref() - .or(subschemas.one_of.as_deref())?; - - Some(make_union_type(any_of.iter().map(|ty| { - let (ts_type, optional, _) = schema_type(queue, root_schema, ty); - assert!(!optional, "optional nested types are not supported"); - ts_type - }))) - }) - }) - .unwrap_or_else(|| { - // this is a temporary workaround to fix the `options` field, which is not used at the moment - AnyTsType::from(make::ts_any_type(make::token(T![any]))) - }); - - // Types are considered "optional" in the serialization protocol if they - // have the `nullable` OpenAPI extension property, or if they have a default value - let is_nullable = matches!(schema.extensions.get("nullable"), Some(Value::Bool(true))); - let has_defaults = schema - .metadata - .as_ref() - .is_some_and(|metadata|
metadata.default.is_some()); - - (ts_type, is_nullable || has_defaults, description) -} - -/// Generate a [TsType] node from a [Schema], returning the generated type -/// along with a boolean flag indicating whether the type is considered -/// "optional" in the schema -fn schema_type<'a>( - queue: &mut ModuleQueue<'a>, - root_schema: &'a RootSchema, - schema: &'a Schema, -) -> (AnyTsType, bool, Option<&'a String>) { - match schema { - // Types defined as `true` in the schema always pass validation, - // map them to the `any` type - Schema::Bool(true) => ( - AnyTsType::from(make::ts_any_type(make::token(T![any]))), - true, - None, - ), - // Types defined as `false` in the schema never pass validation, - // map them to the `never` type - Schema::Bool(false) => ( - AnyTsType::from(make::ts_never_type(make::token(T![never]))), - false, - None, - ), - Schema::Object(schema_object) => schema_object_type(queue, root_schema, schema_object), - } -} - -/// Generate and emit all the types defined in `root_schema` into the `module` -pub fn generate_type<'a>( - module: &mut Vec<(AnyJsDeclaration, Option<&'a String>)>, - queue: &mut ModuleQueue<'a>, - root_schema: &'a RootSchema, -) -> AnyTsType { - // Read the root type of the schema and push it to the queue - let root_name = root_schema - .schema - .metadata - .as_deref() - .and_then(|metadata| metadata.title.as_deref()) - .unwrap(); - - match root_name { - "Null" => return AnyTsType::TsVoidType(make::ts_void_type(make::token(T![void]))), - "Boolean" => { - return AnyTsType::TsBooleanType(make::ts_boolean_type(make::token(T![boolean]))); - } - "String" => return AnyTsType::TsStringType(make::ts_string_type(make::token(T![string]))), - _ => {} - } - - queue.push_back((root_name, &root_schema.schema)); - - while let Some((name, schema)) = queue.pop_front() { - // Detect if the type being emitted is an object, emit it as an - // interface definition if that's the case - let is_interface = schema.instance_type.as_ref().map_or_else( - || schema.object.is_some(), - |instance_type| { - if let SingleOrVec::Single(instance_type) = instance_type { - matches!(**instance_type, InstanceType::Object) - } else { - false - } - }, - ); - - if is_interface { - let mut members = Vec::new(); - - // Create a property signature member in the interface for each - // property of the corresponding schema object - let object = schema.object.as_deref().unwrap(); - for (property, schema) in &object.properties { - let (ts_type, optional, description) = schema_type(queue, root_schema, schema); - - let mut property = make::ident(property); - if let Some(description) = description { - let comment = format!("/**\n\t* {description} \n\t */"); - let trivia = vec![ - (biome_js_syntax::TriviaPieceKind::Newline, "\n"), - ( - biome_js_syntax::TriviaPieceKind::MultiLineComment, - comment.as_str(), - ), - (biome_js_syntax::TriviaPieceKind::Newline, "\n"), - ]; - property = property.with_leading_trivia(trivia); - } - - let mut builder = make::ts_property_signature_type_member( - AnyJsObjectMemberName::from(make::js_literal_member_name(property)), - ) - .with_type_annotation(make::ts_type_annotation(make::token(T![:]), ts_type)); - - if optional { - builder = builder.with_optional_token(make::token(T![?])); - } - - members.push(AnyTsTypeMember::from(builder.build())); - } - - let description = schema - .metadata - .as_ref() - .and_then(|s| s.description.as_ref()); - let current_module = AnyJsDeclaration::from( - make::ts_interface_declaration( - make::token(T![interface]), - 
make::ts_identifier_binding(make::ident(name)), - make::token(T!['{']), - make::ts_type_member_list(members), - make::token(T!['}']), - ) - .build(), - ); - module.push((current_module, description)); - } else { - // If the schema for this type is not an object, emit it as a type alias - let (ts_type, optional, description) = schema_object_type(queue, root_schema, schema); - - assert!(!optional, "optional nested types are not supported"); - - let current_module = AnyJsDeclaration::from( - make::ts_type_alias_declaration( - make::token(T![type]), - make::ts_identifier_binding(make::ident(name)), - make::token(T![=]), - ts_type, - ) - .build(), - ); - module.push((current_module, description)); - } - } - - AnyTsType::TsReferenceType( - make::ts_reference_type(AnyTsName::JsReferenceIdentifier( - make::js_reference_identifier(make::ident(root_name)), - )) - .build(), - ) -} - -/// Signature metadata for a [Workspace] method -pub struct WorkspaceMethod { - /// Name of the method - pub name: &'static str, - /// Schema for the parameters object of the method - pub params: RootSchema, - /// Schema for the result object of the method - pub result: RootSchema, -} - -impl WorkspaceMethod { - /// Construct a [WorkspaceMethod] from a name, a parameter type and a result type - fn of<P, R>(name: &'static str) -> Self - where - P: JsonSchema, - R: JsonSchema, - { - let params = SchemaGenerator::from(SchemaSettings::openapi3()).root_schema_for::<P>(); - let result = SchemaGenerator::from(SchemaSettings::openapi3()).root_schema_for::<R>(); - Self { - name, - params, - result, - } - } - - /// Construct a [WorkspaceMethod] from a name and a function pointer - fn from_method<T, P, R>( - name: &'static str, - _func: fn(T, P) -> Result<R, WorkspaceError>, - ) -> Self - where - P: JsonSchema, - R: JsonSchema, - { - Self::of::<P, R>(name) - } -} - -/// Helper macro for generating an OpenAPI schema for a type implementing JsonSchema -macro_rules! workspace_method { - ($name:ident) => { - WorkspaceMethod::from_method(stringify!($name), <dyn Workspace>::$name) - }; -} - -/// Returns a list of signatures for all the methods in the [Workspace] trait -pub fn methods() -> [WorkspaceMethod; 8] { - [ - workspace_method!(is_path_ignored), - workspace_method!(get_file_content), - workspace_method!(pull_diagnostics), - workspace_method!(get_completions), - workspace_method!(update_settings), - workspace_method!(open_file), - workspace_method!(change_file), - workspace_method!(close_file), - ] -} diff --git a/css/fonts/Roboto-Slab-Bold.woff b/css/fonts/Roboto-Slab-Bold.woff new file mode 100644 index 00000000..6cb60000 Binary files /dev/null and b/css/fonts/Roboto-Slab-Bold.woff differ diff --git a/css/fonts/Roboto-Slab-Bold.woff2 b/css/fonts/Roboto-Slab-Bold.woff2 new file mode 100644 index 00000000..7059e231 Binary files /dev/null and b/css/fonts/Roboto-Slab-Bold.woff2 differ diff --git a/css/fonts/Roboto-Slab-Regular.woff b/css/fonts/Roboto-Slab-Regular.woff new file mode 100644 index 00000000..f815f63f Binary files /dev/null and b/css/fonts/Roboto-Slab-Regular.woff differ diff --git a/css/fonts/Roboto-Slab-Regular.woff2 b/css/fonts/Roboto-Slab-Regular.woff2 new file mode 100644 index 00000000..f2c76e5b Binary files /dev/null and b/css/fonts/Roboto-Slab-Regular.woff2 differ diff --git a/css/fonts/fontawesome-webfont.eot b/css/fonts/fontawesome-webfont.eot new file mode 100644 index 00000000..e9f60ca9 Binary files /dev/null and b/css/fonts/fontawesome-webfont.eot differ diff --git a/css/fonts/fontawesome-webfont.svg b/css/fonts/fontawesome-webfont.svg new file mode 100644 index 00000000..855c845e --- /dev/null +++ b/css/fonts/fontawesome-webfont.svg @@ -0,0 +1,2671 @@ + + + + +Created by FontForge 20120731 at Mon Oct 24 17:37:40 2016 + By ,,, +Copyright Dave Gandy 2016. All rights reserved.
+ [fontawesome-webfont.svg: the remaining ~2,650 added lines of SVG <font-face> and <glyph> markup were stripped during extraction and are not recoverable; only the FontForge metadata above survives] diff --git a/css/fonts/fontawesome-webfont.ttf b/css/fonts/fontawesome-webfont.ttf new file mode 100644 index 00000000..35acda2f Binary files /dev/null and b/css/fonts/fontawesome-webfont.ttf differ diff --git a/css/fonts/fontawesome-webfont.woff b/css/fonts/fontawesome-webfont.woff new file mode 100644 index 00000000..400014a4 Binary files /dev/null and b/css/fonts/fontawesome-webfont.woff differ diff --git a/css/fonts/fontawesome-webfont.woff2 b/css/fonts/fontawesome-webfont.woff2 new file mode 100644 index 00000000..4d13fc60 Binary files /dev/null and b/css/fonts/fontawesome-webfont.woff2 differ diff --git a/css/fonts/lato-bold-italic.woff b/css/fonts/lato-bold-italic.woff new file mode 100644 index 00000000..88ad05b9 Binary files /dev/null and b/css/fonts/lato-bold-italic.woff differ diff --git a/css/fonts/lato-bold-italic.woff2 b/css/fonts/lato-bold-italic.woff2 new file mode 100644 index 00000000..c4e3d804 Binary files /dev/null and b/css/fonts/lato-bold-italic.woff2 differ diff --git a/css/fonts/lato-bold.woff b/css/fonts/lato-bold.woff new file mode 100644 index 00000000..c6dff51f Binary files /dev/null and b/css/fonts/lato-bold.woff differ diff --git a/css/fonts/lato-bold.woff2 b/css/fonts/lato-bold.woff2 new file mode 100644 index 00000000..bb195043 Binary files /dev/null and b/css/fonts/lato-bold.woff2 differ diff --git a/css/fonts/lato-normal-italic.woff b/css/fonts/lato-normal-italic.woff new file mode 100644 index 00000000..76114bc0 Binary files /dev/null and b/css/fonts/lato-normal-italic.woff differ diff --git a/css/fonts/lato-normal-italic.woff2 b/css/fonts/lato-normal-italic.woff2 new file mode 100644 index 00000000..3404f37e Binary files /dev/null and b/css/fonts/lato-normal-italic.woff2 differ diff --git a/css/fonts/lato-normal.woff b/css/fonts/lato-normal.woff new file mode 100644 index 00000000..ae1307ff Binary files /dev/null and b/css/fonts/lato-normal.woff differ diff --git a/css/fonts/lato-normal.woff2 b/css/fonts/lato-normal.woff2 new file mode 100644 index 00000000..3bf98433 Binary files /dev/null and b/css/fonts/lato-normal.woff2 differ diff --git
a/css/theme.css b/css/theme.css new file mode 100644 index 00000000..ad773009 --- /dev/null +++ b/css/theme.css @@ -0,0 +1,13 @@ +/* + * This file is copied from the upstream ReadTheDocs Sphinx + * theme. To aid upgradability this file should *not* be edited. + * modifications we need should be included in theme_extra.css. + * + * https://github.com/readthedocs/sphinx_rtd_theme + */ + + /* sphinx_rtd_theme version 1.2.0 | MIT license */ +html{box-sizing:border-box}*,:after,:before{box-sizing:inherit}article,aside,details,figcaption,figure,footer,header,hgroup,nav,section{display:block}audio,canvas,video{display:inline-block;*display:inline;*zoom:1}[hidden],audio:not([controls]){display:none}*{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}html{font-size:100%;-webkit-text-size-adjust:100%;-ms-text-size-adjust:100%}body{margin:0}a:active,a:hover{outline:0}abbr[title]{border-bottom:1px dotted}b,strong{font-weight:700}blockquote{margin:0}dfn{font-style:italic}ins{background:#ff9;text-decoration:none}ins,mark{color:#000}mark{background:#ff0;font-style:italic;font-weight:700}.rst-content code,.rst-content tt,code,kbd,pre,samp{font-family:monospace,serif;_font-family:courier new,monospace;font-size:1em}pre{white-space:pre}q{quotes:none}q:after,q:before{content:"";content:none}small{font-size:85%}sub,sup{font-size:75%;line-height:0;position:relative;vertical-align:baseline}sup{top:-.5em}sub{bottom:-.25em}dl,ol,ul{margin:0;padding:0;list-style:none;list-style-image:none}li{list-style:none}dd{margin:0}img{border:0;-ms-interpolation-mode:bicubic;vertical-align:middle;max-width:100%}svg:not(:root){overflow:hidden}figure,form{margin:0}label{cursor:pointer}button,input,select,textarea{font-size:100%;margin:0;vertical-align:baseline;*vertical-align:middle}button,input{line-height:normal}button,input[type=button],input[type=reset],input[type=submit]{cursor:pointer;-webkit-appearance:button;*overflow:visible}button[disabled],input[disabled]{cursor:default}input[type=search]{-webkit-appearance:textfield;-moz-box-sizing:content-box;-webkit-box-sizing:content-box;box-sizing:content-box}textarea{resize:vertical}table{border-collapse:collapse;border-spacing:0}td{vertical-align:top}.chromeframe{margin:.2em 0;background:#ccc;color:#000;padding:.2em 0}.ir{display:block;border:0;text-indent:-999em;overflow:hidden;background-color:transparent;background-repeat:no-repeat;text-align:left;direction:ltr;*line-height:0}.ir br{display:none}.hidden{display:none!important;visibility:hidden}.visuallyhidden{border:0;clip:rect(0 0 0 0);height:1px;margin:-1px;overflow:hidden;padding:0;position:absolute;width:1px}.visuallyhidden.focusable:active,.visuallyhidden.focusable:focus{clip:auto;height:auto;margin:0;overflow:visible;position:static;width:auto}.invisible{visibility:hidden}.relative{position:relative}big,small{font-size:100%}@media print{body,html,section{background:none!important}*{box-shadow:none!important;text-shadow:none!important;filter:none!important;-ms-filter:none!important}a,a:visited{text-decoration:underline}.ir a:after,a[href^="#"]:after,a[href^="javascript:"]:after{content:""}blockquote,pre{page-break-inside:avoid}thead{display:table-header-group}img,tr{page-break-inside:avoid}img{max-width:100%!important}@page{margin:.5cm}.rst-content .toctree-wrapper>p.caption,h2,h3,p{orphans:3;widows:3}.rst-content .toctree-wrapper>p.caption,h2,h3{page-break-after:avoid}}.btn,.fa:before,.icon:before,.rst-content .admonition,.rst-content .admonition-title:before,.rst-content 
.admonition-todo,.rst-content .attention,.rst-content .caution,.rst-content .code-block-caption .headerlink:before,.rst-content .danger,.rst-content .eqno .headerlink:before,.rst-content .error,.rst-content .hint,.rst-content .important,.rst-content .note,.rst-content .seealso,.rst-content .tip,.rst-content .warning,.rst-content code.download span:first-child:before,.rst-content dl dt .headerlink:before,.rst-content h1 .headerlink:before,.rst-content h2 .headerlink:before,.rst-content h3 .headerlink:before,.rst-content h4 .headerlink:before,.rst-content h5 .headerlink:before,.rst-content h6 .headerlink:before,.rst-content p.caption .headerlink:before,.rst-content p .headerlink:before,.rst-content table>caption .headerlink:before,.rst-content tt.download span:first-child:before,.wy-alert,.wy-dropdown .caret:before,.wy-inline-validate.wy-inline-validate-danger .wy-input-context:before,.wy-inline-validate.wy-inline-validate-info .wy-input-context:before,.wy-inline-validate.wy-inline-validate-success .wy-input-context:before,.wy-inline-validate.wy-inline-validate-warning .wy-input-context:before,.wy-menu-vertical li.current>a button.toctree-expand:before,.wy-menu-vertical li.on a button.toctree-expand:before,.wy-menu-vertical li button.toctree-expand:before,input[type=color],input[type=date],input[type=datetime-local],input[type=datetime],input[type=email],input[type=month],input[type=number],input[type=password],input[type=search],input[type=tel],input[type=text],input[type=time],input[type=url],input[type=week],select,textarea{-webkit-font-smoothing:antialiased}.clearfix{*zoom:1}.clearfix:after,.clearfix:before{display:table;content:""}.clearfix:after{clear:both}/*! + * Font Awesome 4.7.0 by @davegandy - http://fontawesome.io - @fontawesome + * License - http://fontawesome.io/license (Font: SIL OFL 1.1, CSS: MIT License) + */@font-face{font-family:FontAwesome;src:url(fonts/fontawesome-webfont.eot?674f50d287a8c48dc19ba404d20fe713);src:url(fonts/fontawesome-webfont.eot?674f50d287a8c48dc19ba404d20fe713?#iefix&v=4.7.0) format("embedded-opentype"),url(fonts/fontawesome-webfont.woff2?af7ae505a9eed503f8b8e6982036873e) format("woff2"),url(fonts/fontawesome-webfont.woff?fee66e712a8a08eef5805a46892932ad) format("woff"),url(fonts/fontawesome-webfont.ttf?b06871f281fee6b241d60582ae9369b9) format("truetype"),url(fonts/fontawesome-webfont.svg?912ec66d7572ff821749319396470bde#fontawesomeregular) format("svg");font-weight:400;font-style:normal}.fa,.icon,.rst-content .admonition-title,.rst-content .code-block-caption .headerlink,.rst-content .eqno .headerlink,.rst-content code.download span:first-child,.rst-content dl dt .headerlink,.rst-content h1 .headerlink,.rst-content h2 .headerlink,.rst-content h3 .headerlink,.rst-content h4 .headerlink,.rst-content h5 .headerlink,.rst-content h6 .headerlink,.rst-content p.caption .headerlink,.rst-content p .headerlink,.rst-content table>caption .headerlink,.rst-content tt.download span:first-child,.wy-menu-vertical li.current>a button.toctree-expand,.wy-menu-vertical li.on a button.toctree-expand,.wy-menu-vertical li button.toctree-expand{display:inline-block;font:normal normal normal 14px/1 
FontAwesome;font-size:inherit;text-rendering:auto;-webkit-font-smoothing:antialiased;-moz-osx-font-smoothing:grayscale}.fa-lg{font-size:1.33333em;line-height:.75em;vertical-align:-15%}.fa-2x{font-size:2em}.fa-3x{font-size:3em}.fa-4x{font-size:4em}.fa-5x{font-size:5em}.fa-fw{width:1.28571em;text-align:center}.fa-ul{padding-left:0;margin-left:2.14286em;list-style-type:none}.fa-ul>li{position:relative}.fa-li{position:absolute;left:-2.14286em;width:2.14286em;top:.14286em;text-align:center}.fa-li.fa-lg{left:-1.85714em}.fa-border{padding:.2em .25em .15em;border:.08em solid #eee;border-radius:.1em}.fa-pull-left{float:left}.fa-pull-right{float:right}.fa-pull-left.icon,.fa.fa-pull-left,.rst-content .code-block-caption .fa-pull-left.headerlink,.rst-content .eqno .fa-pull-left.headerlink,.rst-content .fa-pull-left.admonition-title,.rst-content code.download span.fa-pull-left:first-child,.rst-content dl dt .fa-pull-left.headerlink,.rst-content h1 .fa-pull-left.headerlink,.rst-content h2 .fa-pull-left.headerlink,.rst-content h3 .fa-pull-left.headerlink,.rst-content h4 .fa-pull-left.headerlink,.rst-content h5 .fa-pull-left.headerlink,.rst-content h6 .fa-pull-left.headerlink,.rst-content p .fa-pull-left.headerlink,.rst-content table>caption .fa-pull-left.headerlink,.rst-content tt.download span.fa-pull-left:first-child,.wy-menu-vertical li.current>a button.fa-pull-left.toctree-expand,.wy-menu-vertical li.on a button.fa-pull-left.toctree-expand,.wy-menu-vertical li button.fa-pull-left.toctree-expand{margin-right:.3em}.fa-pull-right.icon,.fa.fa-pull-right,.rst-content .code-block-caption .fa-pull-right.headerlink,.rst-content .eqno .fa-pull-right.headerlink,.rst-content .fa-pull-right.admonition-title,.rst-content code.download span.fa-pull-right:first-child,.rst-content dl dt .fa-pull-right.headerlink,.rst-content h1 .fa-pull-right.headerlink,.rst-content h2 .fa-pull-right.headerlink,.rst-content h3 .fa-pull-right.headerlink,.rst-content h4 .fa-pull-right.headerlink,.rst-content h5 .fa-pull-right.headerlink,.rst-content h6 .fa-pull-right.headerlink,.rst-content p .fa-pull-right.headerlink,.rst-content table>caption .fa-pull-right.headerlink,.rst-content tt.download span.fa-pull-right:first-child,.wy-menu-vertical li.current>a button.fa-pull-right.toctree-expand,.wy-menu-vertical li.on a button.fa-pull-right.toctree-expand,.wy-menu-vertical li button.fa-pull-right.toctree-expand{margin-left:.3em}.pull-right{float:right}.pull-left{float:left}.fa.pull-left,.pull-left.icon,.rst-content .code-block-caption .pull-left.headerlink,.rst-content .eqno .pull-left.headerlink,.rst-content .pull-left.admonition-title,.rst-content code.download span.pull-left:first-child,.rst-content dl dt .pull-left.headerlink,.rst-content h1 .pull-left.headerlink,.rst-content h2 .pull-left.headerlink,.rst-content h3 .pull-left.headerlink,.rst-content h4 .pull-left.headerlink,.rst-content h5 .pull-left.headerlink,.rst-content h6 .pull-left.headerlink,.rst-content p .pull-left.headerlink,.rst-content table>caption .pull-left.headerlink,.rst-content tt.download span.pull-left:first-child,.wy-menu-vertical li.current>a button.pull-left.toctree-expand,.wy-menu-vertical li.on a button.pull-left.toctree-expand,.wy-menu-vertical li button.pull-left.toctree-expand{margin-right:.3em}.fa.pull-right,.pull-right.icon,.rst-content .code-block-caption .pull-right.headerlink,.rst-content .eqno .pull-right.headerlink,.rst-content .pull-right.admonition-title,.rst-content code.download span.pull-right:first-child,.rst-content dl dt 
.pull-right.headerlink,.rst-content h1 .pull-right.headerlink,.rst-content h2 .pull-right.headerlink,.rst-content h3 .pull-right.headerlink,.rst-content h4 .pull-right.headerlink,.rst-content h5 .pull-right.headerlink,.rst-content h6 .pull-right.headerlink,.rst-content p .pull-right.headerlink,.rst-content table>caption .pull-right.headerlink,.rst-content tt.download span.pull-right:first-child,.wy-menu-vertical li.current>a button.pull-right.toctree-expand,.wy-menu-vertical li.on a button.pull-right.toctree-expand,.wy-menu-vertical li button.pull-right.toctree-expand{margin-left:.3em}.fa-spin{-webkit-animation:fa-spin 2s linear infinite;animation:fa-spin 2s linear infinite}.fa-pulse{-webkit-animation:fa-spin 1s steps(8) infinite;animation:fa-spin 1s steps(8) infinite}@-webkit-keyframes fa-spin{0%{-webkit-transform:rotate(0deg);transform:rotate(0deg)}to{-webkit-transform:rotate(359deg);transform:rotate(359deg)}}@keyframes fa-spin{0%{-webkit-transform:rotate(0deg);transform:rotate(0deg)}to{-webkit-transform:rotate(359deg);transform:rotate(359deg)}}.fa-rotate-90{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=1)";-webkit-transform:rotate(90deg);-ms-transform:rotate(90deg);transform:rotate(90deg)}.fa-rotate-180{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=2)";-webkit-transform:rotate(180deg);-ms-transform:rotate(180deg);transform:rotate(180deg)}.fa-rotate-270{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=3)";-webkit-transform:rotate(270deg);-ms-transform:rotate(270deg);transform:rotate(270deg)}.fa-flip-horizontal{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=0, mirror=1)";-webkit-transform:scaleX(-1);-ms-transform:scaleX(-1);transform:scaleX(-1)}.fa-flip-vertical{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=2, mirror=1)";-webkit-transform:scaleY(-1);-ms-transform:scaleY(-1);transform:scaleY(-1)}:root .fa-flip-horizontal,:root .fa-flip-vertical,:root .fa-rotate-90,:root .fa-rotate-180,:root .fa-rotate-270{filter:none}.fa-stack{position:relative;display:inline-block;width:2em;height:2em;line-height:2em;vertical-align:middle}.fa-stack-1x,.fa-stack-2x{position:absolute;left:0;width:100%;text-align:center}.fa-stack-1x{line-height:inherit}.fa-stack-2x{font-size:2em}.fa-inverse{color:#fff}.fa-glass:before{content:""}.fa-music:before{content:""}.fa-search:before,.icon-search:before{content:""}.fa-envelope-o:before{content:""}.fa-heart:before{content:""}.fa-star:before{content:""}.fa-star-o:before{content:""}.fa-user:before{content:""}.fa-film:before{content:""}.fa-th-large:before{content:""}.fa-th:before{content:""}.fa-th-list:before{content:""}.fa-check:before{content:""}.fa-close:before,.fa-remove:before,.fa-times:before{content:""}.fa-search-plus:before{content:""}.fa-search-minus:before{content:""}.fa-power-off:before{content:""}.fa-signal:before{content:""}.fa-cog:before,.fa-gear:before{content:""}.fa-trash-o:before{content:""}.fa-home:before,.icon-home:before{content:""}.fa-file-o:before{content:""}.fa-clock-o:before{content:""}.fa-road:before{content:""}.fa-download:before,.rst-content code.download span:first-child:before,.rst-content tt.download 
span:first-child:before{content:""}.fa-arrow-circle-o-down:before{content:""}.fa-arrow-circle-o-up:before{content:""}.fa-inbox:before{content:""}.fa-play-circle-o:before{content:""}.fa-repeat:before,.fa-rotate-right:before{content:""}.fa-refresh:before{content:""}.fa-list-alt:before{content:""}.fa-lock:before{content:""}.fa-flag:before{content:""}.fa-headphones:before{content:""}.fa-volume-off:before{content:""}.fa-volume-down:before{content:""}.fa-volume-up:before{content:""}.fa-qrcode:before{content:""}.fa-barcode:before{content:""}.fa-tag:before{content:""}.fa-tags:before{content:""}.fa-book:before,.icon-book:before{content:""}.fa-bookmark:before{content:""}.fa-print:before{content:""}.fa-camera:before{content:""}.fa-font:before{content:""}.fa-bold:before{content:""}.fa-italic:before{content:""}.fa-text-height:before{content:""}.fa-text-width:before{content:""}.fa-align-left:before{content:""}.fa-align-center:before{content:""}.fa-align-right:before{content:""}.fa-align-justify:before{content:""}.fa-list:before{content:""}.fa-dedent:before,.fa-outdent:before{content:""}.fa-indent:before{content:""}.fa-video-camera:before{content:""}.fa-image:before,.fa-photo:before,.fa-picture-o:before{content:""}.fa-pencil:before{content:""}.fa-map-marker:before{content:""}.fa-adjust:before{content:""}.fa-tint:before{content:""}.fa-edit:before,.fa-pencil-square-o:before{content:""}.fa-share-square-o:before{content:""}.fa-check-square-o:before{content:""}.fa-arrows:before{content:""}.fa-step-backward:before{content:""}.fa-fast-backward:before{content:""}.fa-backward:before{content:""}.fa-play:before{content:""}.fa-pause:before{content:""}.fa-stop:before{content:""}.fa-forward:before{content:""}.fa-fast-forward:before{content:""}.fa-step-forward:before{content:""}.fa-eject:before{content:""}.fa-chevron-left:before{content:""}.fa-chevron-right:before{content:""}.fa-plus-circle:before{content:""}.fa-minus-circle:before{content:""}.fa-times-circle:before,.wy-inline-validate.wy-inline-validate-danger .wy-input-context:before{content:""}.fa-check-circle:before,.wy-inline-validate.wy-inline-validate-success .wy-input-context:before{content:""}.fa-question-circle:before{content:""}.fa-info-circle:before{content:""}.fa-crosshairs:before{content:""}.fa-times-circle-o:before{content:""}.fa-check-circle-o:before{content:""}.fa-ban:before{content:""}.fa-arrow-left:before{content:""}.fa-arrow-right:before{content:""}.fa-arrow-up:before{content:""}.fa-arrow-down:before{content:""}.fa-mail-forward:before,.fa-share:before{content:""}.fa-expand:before{content:""}.fa-compress:before{content:""}.fa-plus:before{content:""}.fa-minus:before{content:""}.fa-asterisk:before{content:""}.fa-exclamation-circle:before,.rst-content .admonition-title:before,.wy-inline-validate.wy-inline-validate-info .wy-input-context:before,.wy-inline-validate.wy-inline-validate-warning 
.wy-input-context:before{content:""}.fa-gift:before{content:""}.fa-leaf:before{content:""}.fa-fire:before,.icon-fire:before{content:""}.fa-eye:before{content:""}.fa-eye-slash:before{content:""}.fa-exclamation-triangle:before,.fa-warning:before{content:""}.fa-plane:before{content:""}.fa-calendar:before{content:""}.fa-random:before{content:""}.fa-comment:before{content:""}.fa-magnet:before{content:""}.fa-chevron-up:before{content:""}.fa-chevron-down:before{content:""}.fa-retweet:before{content:""}.fa-shopping-cart:before{content:""}.fa-folder:before{content:""}.fa-folder-open:before{content:""}.fa-arrows-v:before{content:""}.fa-arrows-h:before{content:""}.fa-bar-chart-o:before,.fa-bar-chart:before{content:""}.fa-twitter-square:before{content:""}.fa-facebook-square:before{content:""}.fa-camera-retro:before{content:""}.fa-key:before{content:""}.fa-cogs:before,.fa-gears:before{content:""}.fa-comments:before{content:""}.fa-thumbs-o-up:before{content:""}.fa-thumbs-o-down:before{content:""}.fa-star-half:before{content:""}.fa-heart-o:before{content:""}.fa-sign-out:before{content:""}.fa-linkedin-square:before{content:""}.fa-thumb-tack:before{content:""}.fa-external-link:before{content:""}.fa-sign-in:before{content:""}.fa-trophy:before{content:""}.fa-github-square:before{content:""}.fa-upload:before{content:""}.fa-lemon-o:before{content:""}.fa-phone:before{content:""}.fa-square-o:before{content:""}.fa-bookmark-o:before{content:""}.fa-phone-square:before{content:""}.fa-twitter:before{content:""}.fa-facebook-f:before,.fa-facebook:before{content:""}.fa-github:before,.icon-github:before{content:""}.fa-unlock:before{content:""}.fa-credit-card:before{content:""}.fa-feed:before,.fa-rss:before{content:""}.fa-hdd-o:before{content:""}.fa-bullhorn:before{content:""}.fa-bell:before{content:""}.fa-certificate:before{content:""}.fa-hand-o-right:before{content:""}.fa-hand-o-left:before{content:""}.fa-hand-o-up:before{content:""}.fa-hand-o-down:before{content:""}.fa-arrow-circle-left:before,.icon-circle-arrow-left:before{content:""}.fa-arrow-circle-right:before,.icon-circle-arrow-right:before{content:""}.fa-arrow-circle-up:before{content:""}.fa-arrow-circle-down:before{content:""}.fa-globe:before{content:""}.fa-wrench:before{content:""}.fa-tasks:before{content:""}.fa-filter:before{content:""}.fa-briefcase:before{content:""}.fa-arrows-alt:before{content:""}.fa-group:before,.fa-users:before{content:""}.fa-chain:before,.fa-link:before,.icon-link:before{content:""}.fa-cloud:before{content:""}.fa-flask:before{content:""}.fa-cut:before,.fa-scissors:before{content:""}.fa-copy:before,.fa-files-o:before{content:""}.fa-paperclip:before{content:""}.fa-floppy-o:before,.fa-save:before{content:""}.fa-square:before{content:""}.fa-bars:before,.fa-navicon:before,.fa-reorder:before{content:""}.fa-list-ul:before{content:""}.fa-list-ol:before{content:""}.fa-strikethrough:before{content:""}.fa-underline:before{content:""}.fa-table:before{content:""}.fa-magic:before{content:""}.fa-truck:before{content:""}.fa-pinterest:before{content:""}.fa-pinterest-square:before{content:""}.fa-google-plus-square:before{content:""}.fa-google-plus:before{content:""}.fa-money:before{content:""}.fa-caret-down:before,.icon-caret-down:before,.wy-dropdown 
.caret:before{content:""}.fa-caret-up:before{content:""}.fa-caret-left:before{content:""}.fa-caret-right:before{content:""}.fa-columns:before{content:""}.fa-sort:before,.fa-unsorted:before{content:""}.fa-sort-desc:before,.fa-sort-down:before{content:""}.fa-sort-asc:before,.fa-sort-up:before{content:""}.fa-envelope:before{content:""}.fa-linkedin:before{content:""}.fa-rotate-left:before,.fa-undo:before{content:""}.fa-gavel:before,.fa-legal:before{content:""}.fa-dashboard:before,.fa-tachometer:before{content:""}.fa-comment-o:before{content:""}.fa-comments-o:before{content:""}.fa-bolt:before,.fa-flash:before{content:""}.fa-sitemap:before{content:""}.fa-umbrella:before{content:""}.fa-clipboard:before,.fa-paste:before{content:""}.fa-lightbulb-o:before{content:""}.fa-exchange:before{content:""}.fa-cloud-download:before{content:""}.fa-cloud-upload:before{content:""}.fa-user-md:before{content:""}.fa-stethoscope:before{content:""}.fa-suitcase:before{content:""}.fa-bell-o:before{content:""}.fa-coffee:before{content:""}.fa-cutlery:before{content:""}.fa-file-text-o:before{content:""}.fa-building-o:before{content:""}.fa-hospital-o:before{content:""}.fa-ambulance:before{content:""}.fa-medkit:before{content:""}.fa-fighter-jet:before{content:""}.fa-beer:before{content:""}.fa-h-square:before{content:""}.fa-plus-square:before{content:""}.fa-angle-double-left:before{content:""}.fa-angle-double-right:before{content:""}.fa-angle-double-up:before{content:""}.fa-angle-double-down:before{content:""}.fa-angle-left:before{content:""}.fa-angle-right:before{content:""}.fa-angle-up:before{content:""}.fa-angle-down:before{content:""}.fa-desktop:before{content:""}.fa-laptop:before{content:""}.fa-tablet:before{content:""}.fa-mobile-phone:before,.fa-mobile:before{content:""}.fa-circle-o:before{content:""}.fa-quote-left:before{content:""}.fa-quote-right:before{content:""}.fa-spinner:before{content:""}.fa-circle:before{content:""}.fa-mail-reply:before,.fa-reply:before{content:""}.fa-github-alt:before{content:""}.fa-folder-o:before{content:""}.fa-folder-open-o:before{content:""}.fa-smile-o:before{content:""}.fa-frown-o:before{content:""}.fa-meh-o:before{content:""}.fa-gamepad:before{content:""}.fa-keyboard-o:before{content:""}.fa-flag-o:before{content:""}.fa-flag-checkered:before{content:""}.fa-terminal:before{content:""}.fa-code:before{content:""}.fa-mail-reply-all:before,.fa-reply-all:before{content:""}.fa-star-half-empty:before,.fa-star-half-full:before,.fa-star-half-o:before{content:""}.fa-location-arrow:before{content:""}.fa-crop:before{content:""}.fa-code-fork:before{content:""}.fa-chain-broken:before,.fa-unlink:before{content:""}.fa-question:before{content:""}.fa-info:before{content:""}.fa-exclamation:before{content:""}.fa-superscript:before{content:""}.fa-subscript:before{content:""}.fa-eraser:before{content:""}.fa-puzzle-piece:before{content:""}.fa-microphone:before{content:""}.fa-microphone-slash:before{content:""}.fa-shield:before{content:""}.fa-calendar-o:before{content:""}.fa-fire-extinguisher:before{content:""}.fa-rocket:before{content:""}.fa-maxcdn:before{content:""}.fa-chevron-circle-left:before{content:""}.fa-chevron-circle-right:before{content:""}.fa-chevron-circle-up:before{content:""}.fa-chevron-circle-down:before{content:""}.fa-html5:before{content:""}.fa-css3:before{content:""}.fa-anchor:before{content:""}.fa-unlock-alt:before{content:""}.fa-bullseye:before{content:""}.fa-ellipsis-h:before{content:""}.fa-elli
psis-v:before{content:""}.fa-rss-square:before{content:""}.fa-play-circle:before{content:""}.fa-ticket:before{content:""}.fa-minus-square:before{content:""}.fa-minus-square-o:before,.wy-menu-vertical li.current>a button.toctree-expand:before,.wy-menu-vertical li.on a button.toctree-expand:before{content:""}.fa-level-up:before{content:""}.fa-level-down:before{content:""}.fa-check-square:before{content:""}.fa-pencil-square:before{content:""}.fa-external-link-square:before{content:""}.fa-share-square:before{content:""}.fa-compass:before{content:""}.fa-caret-square-o-down:before,.fa-toggle-down:before{content:""}.fa-caret-square-o-up:before,.fa-toggle-up:before{content:""}.fa-caret-square-o-right:before,.fa-toggle-right:before{content:""}.fa-eur:before,.fa-euro:before{content:""}.fa-gbp:before{content:""}.fa-dollar:before,.fa-usd:before{content:""}.fa-inr:before,.fa-rupee:before{content:""}.fa-cny:before,.fa-jpy:before,.fa-rmb:before,.fa-yen:before{content:""}.fa-rouble:before,.fa-rub:before,.fa-ruble:before{content:""}.fa-krw:before,.fa-won:before{content:""}.fa-bitcoin:before,.fa-btc:before{content:""}.fa-file:before{content:""}.fa-file-text:before{content:""}.fa-sort-alpha-asc:before{content:""}.fa-sort-alpha-desc:before{content:""}.fa-sort-amount-asc:before{content:""}.fa-sort-amount-desc:before{content:""}.fa-sort-numeric-asc:before{content:""}.fa-sort-numeric-desc:before{content:""}.fa-thumbs-up:before{content:""}.fa-thumbs-down:before{content:""}.fa-youtube-square:before{content:""}.fa-youtube:before{content:""}.fa-xing:before{content:""}.fa-xing-square:before{content:""}.fa-youtube-play:before{content:""}.fa-dropbox:before{content:""}.fa-stack-overflow:before{content:""}.fa-instagram:before{content:""}.fa-flickr:before{content:""}.fa-adn:before{content:""}.fa-bitbucket:before,.icon-bitbucket:before{content:""}.fa-bitbucket-square:before{content:""}.fa-tumblr:before{content:""}.fa-tumblr-square:before{content:""}.fa-long-arrow-down:before{content:""}.fa-long-arrow-up:before{content:""}.fa-long-arrow-left:before{content:""}.fa-long-arrow-right:before{content:""}.fa-apple:before{content:""}.fa-windows:before{content:""}.fa-android:before{content:""}.fa-linux:before{content:""}.fa-dribbble:before{content:""}.fa-skype:before{content:""}.fa-foursquare:before{content:""}.fa-trello:before{content:""}.fa-female:before{content:""}.fa-male:before{content:""}.fa-gittip:before,.fa-gratipay:before{content:""}.fa-sun-o:before{content:""}.fa-moon-o:before{content:""}.fa-archive:before{content:""}.fa-bug:before{content:""}.fa-vk:before{content:""}.fa-weibo:before{content:""}.fa-renren:before{content:""}.fa-pagelines:before{content:""}.fa-stack-exchange:before{content:""}.fa-arrow-circle-o-right:before{content:""}.fa-arrow-circle-o-left:before{content:""}.fa-caret-square-o-left:before,.fa-toggle-left:before{content:""}.fa-dot-circle-o:before{content:""}.fa-wheelchair:before{content:""}.fa-vimeo-square:before{content:""}.fa-try:before,.fa-turkish-lira:before{content:""}.fa-plus-square-o:before,.wy-menu-vertical li 
button.toctree-expand:before{content:""}.fa-space-shuttle:before{content:""}.fa-slack:before{content:""}.fa-envelope-square:before{content:""}.fa-wordpress:before{content:""}.fa-openid:before{content:""}.fa-bank:before,.fa-institution:before,.fa-university:before{content:""}.fa-graduation-cap:before,.fa-mortar-board:before{content:""}.fa-yahoo:before{content:""}.fa-google:before{content:""}.fa-reddit:before{content:""}.fa-reddit-square:before{content:""}.fa-stumbleupon-circle:before{content:""}.fa-stumbleupon:before{content:""}.fa-delicious:before{content:""}.fa-digg:before{content:""}.fa-pied-piper-pp:before{content:""}.fa-pied-piper-alt:before{content:""}.fa-drupal:before{content:""}.fa-joomla:before{content:""}.fa-language:before{content:""}.fa-fax:before{content:""}.fa-building:before{content:""}.fa-child:before{content:""}.fa-paw:before{content:""}.fa-spoon:before{content:""}.fa-cube:before{content:""}.fa-cubes:before{content:""}.fa-behance:before{content:""}.fa-behance-square:before{content:""}.fa-steam:before{content:""}.fa-steam-square:before{content:""}.fa-recycle:before{content:""}.fa-automobile:before,.fa-car:before{content:""}.fa-cab:before,.fa-taxi:before{content:""}.fa-tree:before{content:""}.fa-spotify:before{content:""}.fa-deviantart:before{content:""}.fa-soundcloud:before{content:""}.fa-database:before{content:""}.fa-file-pdf-o:before{content:""}.fa-file-word-o:before{content:""}.fa-file-excel-o:before{content:""}.fa-file-powerpoint-o:before{content:""}.fa-file-image-o:before,.fa-file-photo-o:before,.fa-file-picture-o:before{content:""}.fa-file-archive-o:before,.fa-file-zip-o:before{content:""}.fa-file-audio-o:before,.fa-file-sound-o:before{content:""}.fa-file-movie-o:before,.fa-file-video-o:before{content:""}.fa-file-code-o:before{content:""}.fa-vine:before{content:""}.fa-codepen:before{content:""}.fa-jsfiddle:before{content:""}.fa-life-bouy:before,.fa-life-buoy:before,.fa-life-ring:before,.fa-life-saver:before,.fa-support:before{content:""}.fa-circle-o-notch:before{content:""}.fa-ra:before,.fa-rebel:before,.fa-resistance:before{content:""}.fa-empire:before,.fa-ge:before{content:""}.fa-git-square:before{content:""}.fa-git:before{content:""}.fa-hacker-news:before,.fa-y-combinator-square:before,.fa-yc-square:before{content:""}.fa-tencent-weibo:before{content:""}.fa-qq:before{content:""}.fa-wechat:before,.fa-weixin:before{content:""}.fa-paper-plane:before,.fa-send:before{content:""}.fa-paper-plane-o:before,.fa-send-o:before{content:""}.fa-history:before{content:""}.fa-circle-thin:before{content:""}.fa-header:before{content:""}.fa-paragraph:before{content:""}.fa-sliders:before{content:""}.fa-share-alt:before{content:""}.fa-share-alt-square:before{content:""}.fa-bomb:before{content:""}.fa-futbol-o:before,.fa-soccer-ball-o:before{content:""}.fa-tty:before{content:""}.fa-binoculars:before{content:""}.fa-plug:before{content:""}.fa-slideshare:before{content:""}.fa-twitch:before{content:""}.fa-yelp:before{content:""}.fa-newspaper-o:before{content:""}.fa-wifi:before{content:""}.fa-calculator:before{content:""}.fa-paypal:before{content:""}.fa-google-wallet:before{content:""}.fa-cc-visa:before{content:""}.fa-cc-mastercard:before{content:""}.fa-cc-discover:before{content:""}.fa-cc-amex:before{content:""}.fa-cc-paypal:before{content:""}.fa-cc-stripe:before{content:""}.fa-bell-slash:before{content:""}.fa-bell-slash-o:before{content:""}.fa-trash:before{content:""}.fa-copyright:before{content:""}.f
a-at:before{content:""}.fa-eyedropper:before{content:""}.fa-paint-brush:before{content:""}.fa-birthday-cake:before{content:""}.fa-area-chart:before{content:""}.fa-pie-chart:before{content:""}.fa-line-chart:before{content:""}.fa-lastfm:before{content:""}.fa-lastfm-square:before{content:""}.fa-toggle-off:before{content:""}.fa-toggle-on:before{content:""}.fa-bicycle:before{content:""}.fa-bus:before{content:""}.fa-ioxhost:before{content:""}.fa-angellist:before{content:""}.fa-cc:before{content:""}.fa-ils:before,.fa-shekel:before,.fa-sheqel:before{content:""}.fa-meanpath:before{content:""}.fa-buysellads:before{content:""}.fa-connectdevelop:before{content:""}.fa-dashcube:before{content:""}.fa-forumbee:before{content:""}.fa-leanpub:before{content:""}.fa-sellsy:before{content:""}.fa-shirtsinbulk:before{content:""}.fa-simplybuilt:before{content:""}.fa-skyatlas:before{content:""}.fa-cart-plus:before{content:""}.fa-cart-arrow-down:before{content:""}.fa-diamond:before{content:""}.fa-ship:before{content:""}.fa-user-secret:before{content:""}.fa-motorcycle:before{content:""}.fa-street-view:before{content:""}.fa-heartbeat:before{content:""}.fa-venus:before{content:""}.fa-mars:before{content:""}.fa-mercury:before{content:""}.fa-intersex:before,.fa-transgender:before{content:""}.fa-transgender-alt:before{content:""}.fa-venus-double:before{content:""}.fa-mars-double:before{content:""}.fa-venus-mars:before{content:""}.fa-mars-stroke:before{content:""}.fa-mars-stroke-v:before{content:""}.fa-mars-stroke-h:before{content:""}.fa-neuter:before{content:""}.fa-genderless:before{content:""}.fa-facebook-official:before{content:""}.fa-pinterest-p:before{content:""}.fa-whatsapp:before{content:""}.fa-server:before{content:""}.fa-user-plus:before{content:""}.fa-user-times:before{content:""}.fa-bed:before,.fa-hotel:before{content:""}.fa-viacoin:before{content:""}.fa-train:before{content:""}.fa-subway:before{content:""}.fa-medium:before{content:""}.fa-y-combinator:before,.fa-yc:before{content:""}.fa-optin-monster:before{content:""}.fa-opencart:before{content:""}.fa-expeditedssl:before{content:""}.fa-battery-4:before,.fa-battery-full:before,.fa-battery:before{content:""}.fa-battery-3:before,.fa-battery-three-quarters:before{content:""}.fa-battery-2:before,.fa-battery-half:before{content:""}.fa-battery-1:before,.fa-battery-quarter:before{content:""}.fa-battery-0:before,.fa-battery-empty:before{content:""}.fa-mouse-pointer:before{content:""}.fa-i-cursor:before{content:""}.fa-object-group:before{content:""}.fa-object-ungroup:before{content:""}.fa-sticky-note:before{content:""}.fa-sticky-note-o:before{content:""}.fa-cc-jcb:before{content:""}.fa-cc-diners-club:before{content:""}.fa-clone:before{content:""}.fa-balance-scale:before{content:""}.fa-hourglass-o:before{content:""}.fa-hourglass-1:before,.fa-hourglass-start:before{content:""}.fa-hourglass-2:before,.fa-hourglass-half:before{content:""}.fa-hourglass-3:before,.fa-hourglass-end:before{content:""}.fa-hourglass:before{content:""}.fa-hand-grab-o:before,.fa-hand-rock-o:before{content:""}.fa-hand-paper-o:before,.fa-hand-stop-o:before{content:""}.fa-hand-scissors-o:before{content:""}.fa-hand-lizard-o:before{content:""}.fa-hand-spock-o:before{content:""}.fa-hand-pointer-o:before{content:""}.fa-hand-peace-o:before{content:""}.fa-trademark:before{content:""}.fa-registered:before{content:""}.fa-creative-commons:before{content:""}.fa-gg:before{content:""}.fa-gg-circle:before{content:""}.fa-trip
advisor:before{content:""}.fa-odnoklassniki:before{content:""}.fa-odnoklassniki-square:before{content:""}.fa-get-pocket:before{content:""}.fa-wikipedia-w:before{content:""}.fa-safari:before{content:""}.fa-chrome:before{content:""}.fa-firefox:before{content:""}.fa-opera:before{content:""}.fa-internet-explorer:before{content:""}.fa-television:before,.fa-tv:before{content:""}.fa-contao:before{content:""}.fa-500px:before{content:""}.fa-amazon:before{content:""}.fa-calendar-plus-o:before{content:""}.fa-calendar-minus-o:before{content:""}.fa-calendar-times-o:before{content:""}.fa-calendar-check-o:before{content:""}.fa-industry:before{content:""}.fa-map-pin:before{content:""}.fa-map-signs:before{content:""}.fa-map-o:before{content:""}.fa-map:before{content:""}.fa-commenting:before{content:""}.fa-commenting-o:before{content:""}.fa-houzz:before{content:""}.fa-vimeo:before{content:""}.fa-black-tie:before{content:""}.fa-fonticons:before{content:""}.fa-reddit-alien:before{content:""}.fa-edge:before{content:""}.fa-credit-card-alt:before{content:""}.fa-codiepie:before{content:""}.fa-modx:before{content:""}.fa-fort-awesome:before{content:""}.fa-usb:before{content:""}.fa-product-hunt:before{content:""}.fa-mixcloud:before{content:""}.fa-scribd:before{content:""}.fa-pause-circle:before{content:""}.fa-pause-circle-o:before{content:""}.fa-stop-circle:before{content:""}.fa-stop-circle-o:before{content:""}.fa-shopping-bag:before{content:""}.fa-shopping-basket:before{content:""}.fa-hashtag:before{content:""}.fa-bluetooth:before{content:""}.fa-bluetooth-b:before{content:""}.fa-percent:before{content:""}.fa-gitlab:before,.icon-gitlab:before{content:""}.fa-wpbeginner:before{content:""}.fa-wpforms:before{content:""}.fa-envira:before{content:""}.fa-universal-access:before{content:""}.fa-wheelchair-alt:before{content:""}.fa-question-circle-o:before{content:""}.fa-blind:before{content:""}.fa-audio-description:before{content:""}.fa-volume-control-phone:before{content:""}.fa-braille:before{content:""}.fa-assistive-listening-systems:before{content:""}.fa-american-sign-language-interpreting:before,.fa-asl-interpreting:before{content:""}.fa-deaf:before,.fa-deafness:before,.fa-hard-of-hearing:before{content:""}.fa-glide:before{content:""}.fa-glide-g:before{content:""}.fa-sign-language:before,.fa-signing:before{content:""}.fa-low-vision:before{content:""}.fa-viadeo:before{content:""}.fa-viadeo-square:before{content:""}.fa-snapchat:before{content:""}.fa-snapchat-ghost:before{content:""}.fa-snapchat-square:before{content:""}.fa-pied-piper:before{content:""}.fa-first-order:before{content:""}.fa-yoast:before{content:""}.fa-themeisle:before{content:""}.fa-google-plus-circle:before,.fa-google-plus-official:before{content:""}.fa-fa:before,.fa-font-awesome:before{content:""}.fa-handshake-o:before{content:""}.fa-envelope-open:before{content:""}.fa-envelope-open-o:before{content:""}.fa-linode:before{content:""}.fa-address-book:before{content:""}.fa-address-book-o:before{content:""}.fa-address-card:before,.fa-vcard:before{content:""}.fa-address-card-o:before,.fa-vcard-o:before{content:""}.fa-user-circle:before{content:""}.fa-user-circle-o:before{content:""}.fa-user-o:before{content:""}.fa-id-badge:before{content:""}.fa-drivers-license:before,.fa-id-card:before{content:""}.fa-drivers-license-o:before,.fa-id-card-o:before{content:""}.fa-quora:before{content:""}.fa-free-code-camp:before{content:""}.fa-telegram:before{content:""}.fa-thermometer-4:b
efore,.fa-thermometer-full:before,.fa-thermometer:before{content:""}.fa-thermometer-3:before,.fa-thermometer-three-quarters:before{content:""}.fa-thermometer-2:before,.fa-thermometer-half:before{content:""}.fa-thermometer-1:before,.fa-thermometer-quarter:before{content:""}.fa-thermometer-0:before,.fa-thermometer-empty:before{content:""}.fa-shower:before{content:""}.fa-bath:before,.fa-bathtub:before,.fa-s15:before{content:""}.fa-podcast:before{content:""}.fa-window-maximize:before{content:""}.fa-window-minimize:before{content:""}.fa-window-restore:before{content:""}.fa-times-rectangle:before,.fa-window-close:before{content:""}.fa-times-rectangle-o:before,.fa-window-close-o:before{content:""}.fa-bandcamp:before{content:""}.fa-grav:before{content:""}.fa-etsy:before{content:""}.fa-imdb:before{content:""}.fa-ravelry:before{content:""}.fa-eercast:before{content:""}.fa-microchip:before{content:""}.fa-snowflake-o:before{content:""}.fa-superpowers:before{content:""}.fa-wpexplorer:before{content:""}.fa-meetup:before{content:""}.sr-only{position:absolute;width:1px;height:1px;padding:0;margin:-1px;overflow:hidden;clip:rect(0,0,0,0);border:0}.sr-only-focusable:active,.sr-only-focusable:focus{position:static;width:auto;height:auto;margin:0;overflow:visible;clip:auto}.fa,.icon,.rst-content .admonition-title,.rst-content .code-block-caption .headerlink,.rst-content .eqno .headerlink,.rst-content code.download span:first-child,.rst-content dl dt .headerlink,.rst-content h1 .headerlink,.rst-content h2 .headerlink,.rst-content h3 .headerlink,.rst-content h4 .headerlink,.rst-content h5 .headerlink,.rst-content h6 .headerlink,.rst-content p.caption .headerlink,.rst-content p .headerlink,.rst-content table>caption .headerlink,.rst-content tt.download span:first-child,.wy-dropdown .caret,.wy-inline-validate.wy-inline-validate-danger .wy-input-context,.wy-inline-validate.wy-inline-validate-info .wy-input-context,.wy-inline-validate.wy-inline-validate-success .wy-input-context,.wy-inline-validate.wy-inline-validate-warning .wy-input-context,.wy-menu-vertical li.current>a button.toctree-expand,.wy-menu-vertical li.on a button.toctree-expand,.wy-menu-vertical li button.toctree-expand{font-family:inherit}.fa:before,.icon:before,.rst-content .admonition-title:before,.rst-content .code-block-caption .headerlink:before,.rst-content .eqno .headerlink:before,.rst-content code.download span:first-child:before,.rst-content dl dt .headerlink:before,.rst-content h1 .headerlink:before,.rst-content h2 .headerlink:before,.rst-content h3 .headerlink:before,.rst-content h4 .headerlink:before,.rst-content h5 .headerlink:before,.rst-content h6 .headerlink:before,.rst-content p.caption .headerlink:before,.rst-content p .headerlink:before,.rst-content table>caption .headerlink:before,.rst-content tt.download span:first-child:before,.wy-dropdown .caret:before,.wy-inline-validate.wy-inline-validate-danger .wy-input-context:before,.wy-inline-validate.wy-inline-validate-info .wy-input-context:before,.wy-inline-validate.wy-inline-validate-success .wy-input-context:before,.wy-inline-validate.wy-inline-validate-warning .wy-input-context:before,.wy-menu-vertical li.current>a button.toctree-expand:before,.wy-menu-vertical li.on a button.toctree-expand:before,.wy-menu-vertical li button.toctree-expand:before{font-family:FontAwesome;display:inline-block;font-style:normal;font-weight:400;line-height:1;text-decoration:inherit}.rst-content .code-block-caption a .headerlink,.rst-content .eqno a .headerlink,.rst-content a 
.admonition-title,.rst-content code.download a span:first-child,.rst-content dl dt a .headerlink,.rst-content h1 a .headerlink,.rst-content h2 a .headerlink,.rst-content h3 a .headerlink,.rst-content h4 a .headerlink,.rst-content h5 a .headerlink,.rst-content h6 a .headerlink,.rst-content p.caption a .headerlink,.rst-content p a .headerlink,.rst-content table>caption a .headerlink,.rst-content tt.download a span:first-child,.wy-menu-vertical li.current>a button.toctree-expand,.wy-menu-vertical li.on a button.toctree-expand,.wy-menu-vertical li a button.toctree-expand,a .fa,a .icon,a .rst-content .admonition-title,a .rst-content .code-block-caption .headerlink,a .rst-content .eqno .headerlink,a .rst-content code.download span:first-child,a .rst-content dl dt .headerlink,a .rst-content h1 .headerlink,a .rst-content h2 .headerlink,a .rst-content h3 .headerlink,a .rst-content h4 .headerlink,a .rst-content h5 .headerlink,a .rst-content h6 .headerlink,a .rst-content p.caption .headerlink,a .rst-content p .headerlink,a .rst-content table>caption .headerlink,a .rst-content tt.download span:first-child,a .wy-menu-vertical li button.toctree-expand{display:inline-block;text-decoration:inherit}.btn .fa,.btn .icon,.btn .rst-content .admonition-title,.btn .rst-content .code-block-caption .headerlink,.btn .rst-content .eqno .headerlink,.btn .rst-content code.download span:first-child,.btn .rst-content dl dt .headerlink,.btn .rst-content h1 .headerlink,.btn .rst-content h2 .headerlink,.btn .rst-content h3 .headerlink,.btn .rst-content h4 .headerlink,.btn .rst-content h5 .headerlink,.btn .rst-content h6 .headerlink,.btn .rst-content p .headerlink,.btn .rst-content table>caption .headerlink,.btn .rst-content tt.download span:first-child,.btn .wy-menu-vertical li.current>a button.toctree-expand,.btn .wy-menu-vertical li.on a button.toctree-expand,.btn .wy-menu-vertical li button.toctree-expand,.nav .fa,.nav .icon,.nav .rst-content .admonition-title,.nav .rst-content .code-block-caption .headerlink,.nav .rst-content .eqno .headerlink,.nav .rst-content code.download span:first-child,.nav .rst-content dl dt .headerlink,.nav .rst-content h1 .headerlink,.nav .rst-content h2 .headerlink,.nav .rst-content h3 .headerlink,.nav .rst-content h4 .headerlink,.nav .rst-content h5 .headerlink,.nav .rst-content h6 .headerlink,.nav .rst-content p .headerlink,.nav .rst-content table>caption .headerlink,.nav .rst-content tt.download span:first-child,.nav .wy-menu-vertical li.current>a button.toctree-expand,.nav .wy-menu-vertical li.on a button.toctree-expand,.nav .wy-menu-vertical li button.toctree-expand,.rst-content .btn .admonition-title,.rst-content .code-block-caption .btn .headerlink,.rst-content .code-block-caption .nav .headerlink,.rst-content .eqno .btn .headerlink,.rst-content .eqno .nav .headerlink,.rst-content .nav .admonition-title,.rst-content code.download .btn span:first-child,.rst-content code.download .nav span:first-child,.rst-content dl dt .btn .headerlink,.rst-content dl dt .nav .headerlink,.rst-content h1 .btn .headerlink,.rst-content h1 .nav .headerlink,.rst-content h2 .btn .headerlink,.rst-content h2 .nav .headerlink,.rst-content h3 .btn .headerlink,.rst-content h3 .nav .headerlink,.rst-content h4 .btn .headerlink,.rst-content h4 .nav .headerlink,.rst-content h5 .btn .headerlink,.rst-content h5 .nav .headerlink,.rst-content h6 .btn .headerlink,.rst-content h6 .nav .headerlink,.rst-content p .btn .headerlink,.rst-content p .nav .headerlink,.rst-content table>caption .btn .headerlink,.rst-content 
table>caption .nav .headerlink,.rst-content tt.download .btn span:first-child,.rst-content tt.download .nav span:first-child,.wy-menu-vertical li .btn button.toctree-expand,.wy-menu-vertical li.current>a .btn button.toctree-expand,.wy-menu-vertical li.current>a .nav button.toctree-expand,.wy-menu-vertical li .nav button.toctree-expand,.wy-menu-vertical li.on a .btn button.toctree-expand,.wy-menu-vertical li.on a .nav button.toctree-expand{display:inline}.btn .fa-large.icon,.btn .fa.fa-large,.btn .rst-content .code-block-caption .fa-large.headerlink,.btn .rst-content .eqno .fa-large.headerlink,.btn .rst-content .fa-large.admonition-title,.btn .rst-content code.download span.fa-large:first-child,.btn .rst-content dl dt .fa-large.headerlink,.btn .rst-content h1 .fa-large.headerlink,.btn .rst-content h2 .fa-large.headerlink,.btn .rst-content h3 .fa-large.headerlink,.btn .rst-content h4 .fa-large.headerlink,.btn .rst-content h5 .fa-large.headerlink,.btn .rst-content h6 .fa-large.headerlink,.btn .rst-content p .fa-large.headerlink,.btn .rst-content table>caption .fa-large.headerlink,.btn .rst-content tt.download span.fa-large:first-child,.btn .wy-menu-vertical li button.fa-large.toctree-expand,.nav .fa-large.icon,.nav .fa.fa-large,.nav .rst-content .code-block-caption .fa-large.headerlink,.nav .rst-content .eqno .fa-large.headerlink,.nav .rst-content .fa-large.admonition-title,.nav .rst-content code.download span.fa-large:first-child,.nav .rst-content dl dt .fa-large.headerlink,.nav .rst-content h1 .fa-large.headerlink,.nav .rst-content h2 .fa-large.headerlink,.nav .rst-content h3 .fa-large.headerlink,.nav .rst-content h4 .fa-large.headerlink,.nav .rst-content h5 .fa-large.headerlink,.nav .rst-content h6 .fa-large.headerlink,.nav .rst-content p .fa-large.headerlink,.nav .rst-content table>caption .fa-large.headerlink,.nav .rst-content tt.download span.fa-large:first-child,.nav .wy-menu-vertical li button.fa-large.toctree-expand,.rst-content .btn .fa-large.admonition-title,.rst-content .code-block-caption .btn .fa-large.headerlink,.rst-content .code-block-caption .nav .fa-large.headerlink,.rst-content .eqno .btn .fa-large.headerlink,.rst-content .eqno .nav .fa-large.headerlink,.rst-content .nav .fa-large.admonition-title,.rst-content code.download .btn span.fa-large:first-child,.rst-content code.download .nav span.fa-large:first-child,.rst-content dl dt .btn .fa-large.headerlink,.rst-content dl dt .nav .fa-large.headerlink,.rst-content h1 .btn .fa-large.headerlink,.rst-content h1 .nav .fa-large.headerlink,.rst-content h2 .btn .fa-large.headerlink,.rst-content h2 .nav .fa-large.headerlink,.rst-content h3 .btn .fa-large.headerlink,.rst-content h3 .nav .fa-large.headerlink,.rst-content h4 .btn .fa-large.headerlink,.rst-content h4 .nav .fa-large.headerlink,.rst-content h5 .btn .fa-large.headerlink,.rst-content h5 .nav .fa-large.headerlink,.rst-content h6 .btn .fa-large.headerlink,.rst-content h6 .nav .fa-large.headerlink,.rst-content p .btn .fa-large.headerlink,.rst-content p .nav .fa-large.headerlink,.rst-content table>caption .btn .fa-large.headerlink,.rst-content table>caption .nav .fa-large.headerlink,.rst-content tt.download .btn span.fa-large:first-child,.rst-content tt.download .nav span.fa-large:first-child,.wy-menu-vertical li .btn button.fa-large.toctree-expand,.wy-menu-vertical li .nav button.fa-large.toctree-expand{line-height:.9em}.btn .fa-spin.icon,.btn .fa.fa-spin,.btn .rst-content .code-block-caption .fa-spin.headerlink,.btn .rst-content .eqno .fa-spin.headerlink,.btn .rst-content 
.fa-spin.admonition-title,.btn .rst-content code.download span.fa-spin:first-child,.btn .rst-content dl dt .fa-spin.headerlink,.btn .rst-content h1 .fa-spin.headerlink,.btn .rst-content h2 .fa-spin.headerlink,.btn .rst-content h3 .fa-spin.headerlink,.btn .rst-content h4 .fa-spin.headerlink,.btn .rst-content h5 .fa-spin.headerlink,.btn .rst-content h6 .fa-spin.headerlink,.btn .rst-content p .fa-spin.headerlink,.btn .rst-content table>caption .fa-spin.headerlink,.btn .rst-content tt.download span.fa-spin:first-child,.btn .wy-menu-vertical li button.fa-spin.toctree-expand,.nav .fa-spin.icon,.nav .fa.fa-spin,.nav .rst-content .code-block-caption .fa-spin.headerlink,.nav .rst-content .eqno .fa-spin.headerlink,.nav .rst-content .fa-spin.admonition-title,.nav .rst-content code.download span.fa-spin:first-child,.nav .rst-content dl dt .fa-spin.headerlink,.nav .rst-content h1 .fa-spin.headerlink,.nav .rst-content h2 .fa-spin.headerlink,.nav .rst-content h3 .fa-spin.headerlink,.nav .rst-content h4 .fa-spin.headerlink,.nav .rst-content h5 .fa-spin.headerlink,.nav .rst-content h6 .fa-spin.headerlink,.nav .rst-content p .fa-spin.headerlink,.nav .rst-content table>caption .fa-spin.headerlink,.nav .rst-content tt.download span.fa-spin:first-child,.nav .wy-menu-vertical li button.fa-spin.toctree-expand,.rst-content .btn .fa-spin.admonition-title,.rst-content .code-block-caption .btn .fa-spin.headerlink,.rst-content .code-block-caption .nav .fa-spin.headerlink,.rst-content .eqno .btn .fa-spin.headerlink,.rst-content .eqno .nav .fa-spin.headerlink,.rst-content .nav .fa-spin.admonition-title,.rst-content code.download .btn span.fa-spin:first-child,.rst-content code.download .nav span.fa-spin:first-child,.rst-content dl dt .btn .fa-spin.headerlink,.rst-content dl dt .nav .fa-spin.headerlink,.rst-content h1 .btn .fa-spin.headerlink,.rst-content h1 .nav .fa-spin.headerlink,.rst-content h2 .btn .fa-spin.headerlink,.rst-content h2 .nav .fa-spin.headerlink,.rst-content h3 .btn .fa-spin.headerlink,.rst-content h3 .nav .fa-spin.headerlink,.rst-content h4 .btn .fa-spin.headerlink,.rst-content h4 .nav .fa-spin.headerlink,.rst-content h5 .btn .fa-spin.headerlink,.rst-content h5 .nav .fa-spin.headerlink,.rst-content h6 .btn .fa-spin.headerlink,.rst-content h6 .nav .fa-spin.headerlink,.rst-content p .btn .fa-spin.headerlink,.rst-content p .nav .fa-spin.headerlink,.rst-content table>caption .btn .fa-spin.headerlink,.rst-content table>caption .nav .fa-spin.headerlink,.rst-content tt.download .btn span.fa-spin:first-child,.rst-content tt.download .nav span.fa-spin:first-child,.wy-menu-vertical li .btn button.fa-spin.toctree-expand,.wy-menu-vertical li .nav button.fa-spin.toctree-expand{display:inline-block}.btn.fa:before,.btn.icon:before,.rst-content .btn.admonition-title:before,.rst-content .code-block-caption .btn.headerlink:before,.rst-content .eqno .btn.headerlink:before,.rst-content code.download span.btn:first-child:before,.rst-content dl dt .btn.headerlink:before,.rst-content h1 .btn.headerlink:before,.rst-content h2 .btn.headerlink:before,.rst-content h3 .btn.headerlink:before,.rst-content h4 .btn.headerlink:before,.rst-content h5 .btn.headerlink:before,.rst-content h6 .btn.headerlink:before,.rst-content p .btn.headerlink:before,.rst-content table>caption .btn.headerlink:before,.rst-content tt.download span.btn:first-child:before,.wy-menu-vertical li button.btn.toctree-expand:before{opacity:.5;-webkit-transition:opacity .05s ease-in;-moz-transition:opacity .05s ease-in;transition:opacity .05s 
ease-in}.btn.fa:hover:before,.btn.icon:hover:before,.rst-content .btn.admonition-title:hover:before,.rst-content .code-block-caption .btn.headerlink:hover:before,.rst-content .eqno .btn.headerlink:hover:before,.rst-content code.download span.btn:first-child:hover:before,.rst-content dl dt .btn.headerlink:hover:before,.rst-content h1 .btn.headerlink:hover:before,.rst-content h2 .btn.headerlink:hover:before,.rst-content h3 .btn.headerlink:hover:before,.rst-content h4 .btn.headerlink:hover:before,.rst-content h5 .btn.headerlink:hover:before,.rst-content h6 .btn.headerlink:hover:before,.rst-content p .btn.headerlink:hover:before,.rst-content table>caption .btn.headerlink:hover:before,.rst-content tt.download span.btn:first-child:hover:before,.wy-menu-vertical li button.btn.toctree-expand:hover:before{opacity:1}.btn-mini .fa:before,.btn-mini .icon:before,.btn-mini .rst-content .admonition-title:before,.btn-mini .rst-content .code-block-caption .headerlink:before,.btn-mini .rst-content .eqno .headerlink:before,.btn-mini .rst-content code.download span:first-child:before,.btn-mini .rst-content dl dt .headerlink:before,.btn-mini .rst-content h1 .headerlink:before,.btn-mini .rst-content h2 .headerlink:before,.btn-mini .rst-content h3 .headerlink:before,.btn-mini .rst-content h4 .headerlink:before,.btn-mini .rst-content h5 .headerlink:before,.btn-mini .rst-content h6 .headerlink:before,.btn-mini .rst-content p .headerlink:before,.btn-mini .rst-content table>caption .headerlink:before,.btn-mini .rst-content tt.download span:first-child:before,.btn-mini .wy-menu-vertical li button.toctree-expand:before,.rst-content .btn-mini .admonition-title:before,.rst-content .code-block-caption .btn-mini .headerlink:before,.rst-content .eqno .btn-mini .headerlink:before,.rst-content code.download .btn-mini span:first-child:before,.rst-content dl dt .btn-mini .headerlink:before,.rst-content h1 .btn-mini .headerlink:before,.rst-content h2 .btn-mini .headerlink:before,.rst-content h3 .btn-mini .headerlink:before,.rst-content h4 .btn-mini .headerlink:before,.rst-content h5 .btn-mini .headerlink:before,.rst-content h6 .btn-mini .headerlink:before,.rst-content p .btn-mini .headerlink:before,.rst-content table>caption .btn-mini .headerlink:before,.rst-content tt.download .btn-mini span:first-child:before,.wy-menu-vertical li .btn-mini button.toctree-expand:before{font-size:14px;vertical-align:-15%}.rst-content .admonition,.rst-content .admonition-todo,.rst-content .attention,.rst-content .caution,.rst-content .danger,.rst-content .error,.rst-content .hint,.rst-content .important,.rst-content .note,.rst-content .seealso,.rst-content .tip,.rst-content .warning,.wy-alert{padding:12px;line-height:24px;margin-bottom:24px;background:#e7f2fa}.rst-content .admonition-title,.wy-alert-title{font-weight:700;display:block;color:#fff;background:#6ab0de;padding:6px 12px;margin:-12px -12px 12px}.rst-content .danger,.rst-content .error,.rst-content .wy-alert-danger.admonition,.rst-content .wy-alert-danger.admonition-todo,.rst-content .wy-alert-danger.attention,.rst-content .wy-alert-danger.caution,.rst-content .wy-alert-danger.hint,.rst-content .wy-alert-danger.important,.rst-content .wy-alert-danger.note,.rst-content .wy-alert-danger.seealso,.rst-content .wy-alert-danger.tip,.rst-content .wy-alert-danger.warning,.wy-alert.wy-alert-danger{background:#fdf3f2}.rst-content .danger .admonition-title,.rst-content .danger .wy-alert-title,.rst-content .error .admonition-title,.rst-content .error .wy-alert-title,.rst-content 
.wy-alert-danger.admonition-todo .admonition-title,.rst-content .wy-alert-danger.admonition-todo .wy-alert-title,.rst-content .wy-alert-danger.admonition .admonition-title,.rst-content .wy-alert-danger.admonition .wy-alert-title,.rst-content .wy-alert-danger.attention .admonition-title,.rst-content .wy-alert-danger.attention .wy-alert-title,.rst-content .wy-alert-danger.caution .admonition-title,.rst-content .wy-alert-danger.caution .wy-alert-title,.rst-content .wy-alert-danger.hint .admonition-title,.rst-content .wy-alert-danger.hint .wy-alert-title,.rst-content .wy-alert-danger.important .admonition-title,.rst-content .wy-alert-danger.important .wy-alert-title,.rst-content .wy-alert-danger.note .admonition-title,.rst-content .wy-alert-danger.note .wy-alert-title,.rst-content .wy-alert-danger.seealso .admonition-title,.rst-content .wy-alert-danger.seealso .wy-alert-title,.rst-content .wy-alert-danger.tip .admonition-title,.rst-content .wy-alert-danger.tip .wy-alert-title,.rst-content .wy-alert-danger.warning .admonition-title,.rst-content .wy-alert-danger.warning .wy-alert-title,.rst-content .wy-alert.wy-alert-danger .admonition-title,.wy-alert.wy-alert-danger .rst-content .admonition-title,.wy-alert.wy-alert-danger .wy-alert-title{background:#f29f97}.rst-content .admonition-todo,.rst-content .attention,.rst-content .caution,.rst-content .warning,.rst-content .wy-alert-warning.admonition,.rst-content .wy-alert-warning.danger,.rst-content .wy-alert-warning.error,.rst-content .wy-alert-warning.hint,.rst-content .wy-alert-warning.important,.rst-content .wy-alert-warning.note,.rst-content .wy-alert-warning.seealso,.rst-content .wy-alert-warning.tip,.wy-alert.wy-alert-warning{background:#ffedcc}.rst-content .admonition-todo .admonition-title,.rst-content .admonition-todo .wy-alert-title,.rst-content .attention .admonition-title,.rst-content .attention .wy-alert-title,.rst-content .caution .admonition-title,.rst-content .caution .wy-alert-title,.rst-content .warning .admonition-title,.rst-content .warning .wy-alert-title,.rst-content .wy-alert-warning.admonition .admonition-title,.rst-content .wy-alert-warning.admonition .wy-alert-title,.rst-content .wy-alert-warning.danger .admonition-title,.rst-content .wy-alert-warning.danger .wy-alert-title,.rst-content .wy-alert-warning.error .admonition-title,.rst-content .wy-alert-warning.error .wy-alert-title,.rst-content .wy-alert-warning.hint .admonition-title,.rst-content .wy-alert-warning.hint .wy-alert-title,.rst-content .wy-alert-warning.important .admonition-title,.rst-content .wy-alert-warning.important .wy-alert-title,.rst-content .wy-alert-warning.note .admonition-title,.rst-content .wy-alert-warning.note .wy-alert-title,.rst-content .wy-alert-warning.seealso .admonition-title,.rst-content .wy-alert-warning.seealso .wy-alert-title,.rst-content .wy-alert-warning.tip .admonition-title,.rst-content .wy-alert-warning.tip .wy-alert-title,.rst-content .wy-alert.wy-alert-warning .admonition-title,.wy-alert.wy-alert-warning .rst-content .admonition-title,.wy-alert.wy-alert-warning .wy-alert-title{background:#f0b37e}.rst-content .note,.rst-content .seealso,.rst-content .wy-alert-info.admonition,.rst-content .wy-alert-info.admonition-todo,.rst-content .wy-alert-info.attention,.rst-content .wy-alert-info.caution,.rst-content .wy-alert-info.danger,.rst-content .wy-alert-info.error,.rst-content .wy-alert-info.hint,.rst-content .wy-alert-info.important,.rst-content .wy-alert-info.tip,.rst-content 
.wy-alert-info.warning,.wy-alert.wy-alert-info{background:#e7f2fa}.rst-content .note .admonition-title,.rst-content .note .wy-alert-title,.rst-content .seealso .admonition-title,.rst-content .seealso .wy-alert-title,.rst-content .wy-alert-info.admonition-todo .admonition-title,.rst-content .wy-alert-info.admonition-todo .wy-alert-title,.rst-content .wy-alert-info.admonition .admonition-title,.rst-content .wy-alert-info.admonition .wy-alert-title,.rst-content .wy-alert-info.attention .admonition-title,.rst-content .wy-alert-info.attention .wy-alert-title,.rst-content .wy-alert-info.caution .admonition-title,.rst-content .wy-alert-info.caution .wy-alert-title,.rst-content .wy-alert-info.danger .admonition-title,.rst-content .wy-alert-info.danger .wy-alert-title,.rst-content .wy-alert-info.error .admonition-title,.rst-content .wy-alert-info.error .wy-alert-title,.rst-content .wy-alert-info.hint .admonition-title,.rst-content .wy-alert-info.hint .wy-alert-title,.rst-content .wy-alert-info.important .admonition-title,.rst-content .wy-alert-info.important .wy-alert-title,.rst-content .wy-alert-info.tip .admonition-title,.rst-content .wy-alert-info.tip .wy-alert-title,.rst-content .wy-alert-info.warning .admonition-title,.rst-content .wy-alert-info.warning .wy-alert-title,.rst-content .wy-alert.wy-alert-info .admonition-title,.wy-alert.wy-alert-info .rst-content .admonition-title,.wy-alert.wy-alert-info .wy-alert-title{background:#6ab0de}.rst-content .hint,.rst-content .important,.rst-content .tip,.rst-content .wy-alert-success.admonition,.rst-content .wy-alert-success.admonition-todo,.rst-content .wy-alert-success.attention,.rst-content .wy-alert-success.caution,.rst-content .wy-alert-success.danger,.rst-content .wy-alert-success.error,.rst-content .wy-alert-success.note,.rst-content .wy-alert-success.seealso,.rst-content .wy-alert-success.warning,.wy-alert.wy-alert-success{background:#dbfaf4}.rst-content .hint .admonition-title,.rst-content .hint .wy-alert-title,.rst-content .important .admonition-title,.rst-content .important .wy-alert-title,.rst-content .tip .admonition-title,.rst-content .tip .wy-alert-title,.rst-content .wy-alert-success.admonition-todo .admonition-title,.rst-content .wy-alert-success.admonition-todo .wy-alert-title,.rst-content .wy-alert-success.admonition .admonition-title,.rst-content .wy-alert-success.admonition .wy-alert-title,.rst-content .wy-alert-success.attention .admonition-title,.rst-content .wy-alert-success.attention .wy-alert-title,.rst-content .wy-alert-success.caution .admonition-title,.rst-content .wy-alert-success.caution .wy-alert-title,.rst-content .wy-alert-success.danger .admonition-title,.rst-content .wy-alert-success.danger .wy-alert-title,.rst-content .wy-alert-success.error .admonition-title,.rst-content .wy-alert-success.error .wy-alert-title,.rst-content .wy-alert-success.note .admonition-title,.rst-content .wy-alert-success.note .wy-alert-title,.rst-content .wy-alert-success.seealso .admonition-title,.rst-content .wy-alert-success.seealso .wy-alert-title,.rst-content .wy-alert-success.warning .admonition-title,.rst-content .wy-alert-success.warning .wy-alert-title,.rst-content .wy-alert.wy-alert-success .admonition-title,.wy-alert.wy-alert-success .rst-content .admonition-title,.wy-alert.wy-alert-success .wy-alert-title{background:#1abc9c}.rst-content .wy-alert-neutral.admonition,.rst-content .wy-alert-neutral.admonition-todo,.rst-content .wy-alert-neutral.attention,.rst-content .wy-alert-neutral.caution,.rst-content 
.wy-alert-neutral.danger,.rst-content .wy-alert-neutral.error,.rst-content .wy-alert-neutral.hint,.rst-content .wy-alert-neutral.important,.rst-content .wy-alert-neutral.note,.rst-content .wy-alert-neutral.seealso,.rst-content .wy-alert-neutral.tip,.rst-content .wy-alert-neutral.warning,.wy-alert.wy-alert-neutral{background:#f3f6f6}.rst-content .wy-alert-neutral.admonition-todo .admonition-title,.rst-content .wy-alert-neutral.admonition-todo .wy-alert-title,.rst-content .wy-alert-neutral.admonition .admonition-title,.rst-content .wy-alert-neutral.admonition .wy-alert-title,.rst-content .wy-alert-neutral.attention .admonition-title,.rst-content .wy-alert-neutral.attention .wy-alert-title,.rst-content .wy-alert-neutral.caution .admonition-title,.rst-content .wy-alert-neutral.caution .wy-alert-title,.rst-content .wy-alert-neutral.danger .admonition-title,.rst-content .wy-alert-neutral.danger .wy-alert-title,.rst-content .wy-alert-neutral.error .admonition-title,.rst-content .wy-alert-neutral.error .wy-alert-title,.rst-content .wy-alert-neutral.hint .admonition-title,.rst-content .wy-alert-neutral.hint .wy-alert-title,.rst-content .wy-alert-neutral.important .admonition-title,.rst-content .wy-alert-neutral.important .wy-alert-title,.rst-content .wy-alert-neutral.note .admonition-title,.rst-content .wy-alert-neutral.note .wy-alert-title,.rst-content .wy-alert-neutral.seealso .admonition-title,.rst-content .wy-alert-neutral.seealso .wy-alert-title,.rst-content .wy-alert-neutral.tip .admonition-title,.rst-content .wy-alert-neutral.tip .wy-alert-title,.rst-content .wy-alert-neutral.warning .admonition-title,.rst-content .wy-alert-neutral.warning .wy-alert-title,.rst-content .wy-alert.wy-alert-neutral .admonition-title,.wy-alert.wy-alert-neutral .rst-content .admonition-title,.wy-alert.wy-alert-neutral .wy-alert-title{color:#404040;background:#e1e4e5}.rst-content .wy-alert-neutral.admonition-todo a,.rst-content .wy-alert-neutral.admonition a,.rst-content .wy-alert-neutral.attention a,.rst-content .wy-alert-neutral.caution a,.rst-content .wy-alert-neutral.danger a,.rst-content .wy-alert-neutral.error a,.rst-content .wy-alert-neutral.hint a,.rst-content .wy-alert-neutral.important a,.rst-content .wy-alert-neutral.note a,.rst-content .wy-alert-neutral.seealso a,.rst-content .wy-alert-neutral.tip a,.rst-content .wy-alert-neutral.warning a,.wy-alert.wy-alert-neutral a{color:#2980b9}.rst-content .admonition-todo p:last-child,.rst-content .admonition p:last-child,.rst-content .attention p:last-child,.rst-content .caution p:last-child,.rst-content .danger p:last-child,.rst-content .error p:last-child,.rst-content .hint p:last-child,.rst-content .important p:last-child,.rst-content .note p:last-child,.rst-content .seealso p:last-child,.rst-content .tip p:last-child,.rst-content .warning p:last-child,.wy-alert p:last-child{margin-bottom:0}.wy-tray-container{position:fixed;bottom:0;left:0;z-index:600}.wy-tray-container li{display:block;width:300px;background:transparent;color:#fff;text-align:center;box-shadow:0 5px 5px 0 rgba(0,0,0,.1);padding:0 24px;min-width:20%;opacity:0;height:0;line-height:56px;overflow:hidden;-webkit-transition:all .3s ease-in;-moz-transition:all .3s ease-in;transition:all .3s ease-in}.wy-tray-container li.wy-tray-item-success{background:#27ae60}.wy-tray-container li.wy-tray-item-info{background:#2980b9}.wy-tray-container li.wy-tray-item-warning{background:#e67e22}.wy-tray-container li.wy-tray-item-danger{background:#e74c3c}.wy-tray-container li.on{opacity:1;height:56px}@media screen 
and (max-width:768px){.wy-tray-container{bottom:auto;top:0;width:100%}.wy-tray-container li{width:100%}}button{font-size:100%;margin:0;vertical-align:baseline;*vertical-align:middle;cursor:pointer;line-height:normal;-webkit-appearance:button;*overflow:visible}button::-moz-focus-inner,input::-moz-focus-inner{border:0;padding:0}button[disabled]{cursor:default}.btn{display:inline-block;border-radius:2px;line-height:normal;white-space:nowrap;text-align:center;cursor:pointer;font-size:100%;padding:6px 12px 8px;color:#fff;border:1px solid rgba(0,0,0,.1);background-color:#27ae60;text-decoration:none;font-weight:400;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;box-shadow:inset 0 1px 2px -1px hsla(0,0%,100%,.5),inset 0 -2px 0 0 rgba(0,0,0,.1);outline-none:false;vertical-align:middle;*display:inline;zoom:1;-webkit-user-drag:none;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none;-webkit-transition:all .1s linear;-moz-transition:all .1s linear;transition:all .1s linear}.btn-hover{background:#2e8ece;color:#fff}.btn:hover{background:#2cc36b;color:#fff}.btn:focus{background:#2cc36b;outline:0}.btn:active{box-shadow:inset 0 -1px 0 0 rgba(0,0,0,.05),inset 0 2px 0 0 rgba(0,0,0,.1);padding:8px 12px 6px}.btn:visited{color:#fff}.btn-disabled,.btn-disabled:active,.btn-disabled:focus,.btn-disabled:hover,.btn:disabled{background-image:none;filter:progid:DXImageTransform.Microsoft.gradient(enabled = false);filter:alpha(opacity=40);opacity:.4;cursor:not-allowed;box-shadow:none}.btn::-moz-focus-inner{padding:0;border:0}.btn-small{font-size:80%}.btn-info{background-color:#2980b9!important}.btn-info:hover{background-color:#2e8ece!important}.btn-neutral{background-color:#f3f6f6!important;color:#404040!important}.btn-neutral:hover{background-color:#e5ebeb!important;color:#404040}.btn-neutral:visited{color:#404040!important}.btn-success{background-color:#27ae60!important}.btn-success:hover{background-color:#295!important}.btn-danger{background-color:#e74c3c!important}.btn-danger:hover{background-color:#ea6153!important}.btn-warning{background-color:#e67e22!important}.btn-warning:hover{background-color:#e98b39!important}.btn-invert{background-color:#222}.btn-invert:hover{background-color:#2f2f2f!important}.btn-link{background-color:transparent!important;color:#2980b9;box-shadow:none;border-color:transparent!important}.btn-link:active,.btn-link:hover{background-color:transparent!important;color:#409ad5!important;box-shadow:none}.btn-link:visited{color:#9b59b6}.wy-btn-group .btn,.wy-control .btn{vertical-align:middle}.wy-btn-group{margin-bottom:24px;*zoom:1}.wy-btn-group:after,.wy-btn-group:before{display:table;content:""}.wy-btn-group:after{clear:both}.wy-dropdown{position:relative;display:inline-block}.wy-dropdown-active .wy-dropdown-menu{display:block}.wy-dropdown-menu{position:absolute;left:0;display:none;float:left;top:100%;min-width:100%;background:#fcfcfc;z-index:100;border:1px solid #cfd7dd;box-shadow:0 2px 2px 0 rgba(0,0,0,.1);padding:12px}.wy-dropdown-menu>dd>a{display:block;clear:both;color:#404040;white-space:nowrap;font-size:90%;padding:0 12px;cursor:pointer}.wy-dropdown-menu>dd>a:hover{background:#2980b9;color:#fff}.wy-dropdown-menu>dd.divider{border-top:1px solid #cfd7dd;margin:6px 0}.wy-dropdown-menu>dd.search{padding-bottom:12px}.wy-dropdown-menu>dd.search 
input[type=search]{width:100%}.wy-dropdown-menu>dd.call-to-action{background:#e3e3e3;text-transform:uppercase;font-weight:500;font-size:80%}.wy-dropdown-menu>dd.call-to-action:hover{background:#e3e3e3}.wy-dropdown-menu>dd.call-to-action .btn{color:#fff}.wy-dropdown.wy-dropdown-up .wy-dropdown-menu{bottom:100%;top:auto;left:auto;right:0}.wy-dropdown.wy-dropdown-bubble .wy-dropdown-menu{background:#fcfcfc;margin-top:2px}.wy-dropdown.wy-dropdown-bubble .wy-dropdown-menu a{padding:6px 12px}.wy-dropdown.wy-dropdown-bubble .wy-dropdown-menu a:hover{background:#2980b9;color:#fff}.wy-dropdown.wy-dropdown-left .wy-dropdown-menu{right:0;left:auto;text-align:right}.wy-dropdown-arrow:before{content:" ";border-bottom:5px solid #f5f5f5;border-left:5px solid transparent;border-right:5px solid transparent;position:absolute;display:block;top:-4px;left:50%;margin-left:-3px}.wy-dropdown-arrow.wy-dropdown-arrow-left:before{left:11px}.wy-form-stacked select{display:block}.wy-form-aligned .wy-help-inline,.wy-form-aligned input,.wy-form-aligned label,.wy-form-aligned select,.wy-form-aligned textarea{display:inline-block;*display:inline;*zoom:1;vertical-align:middle}.wy-form-aligned .wy-control-group>label{display:inline-block;vertical-align:middle;width:10em;margin:6px 12px 0 0;float:left}.wy-form-aligned .wy-control{float:left}.wy-form-aligned .wy-control label{display:block}.wy-form-aligned .wy-control select{margin-top:6px}fieldset{margin:0}fieldset,legend{border:0;padding:0}legend{width:100%;white-space:normal;margin-bottom:24px;font-size:150%;*margin-left:-7px}label,legend{display:block}label{margin:0 0 .3125em;color:#333;font-size:90%}input,select,textarea{font-size:100%;margin:0;vertical-align:baseline;*vertical-align:middle}.wy-control-group{margin-bottom:24px;max-width:1200px;margin-left:auto;margin-right:auto;*zoom:1}.wy-control-group:after,.wy-control-group:before{display:table;content:""}.wy-control-group:after{clear:both}.wy-control-group.wy-control-group-required>label:after{content:" *";color:#e74c3c}.wy-control-group .wy-form-full,.wy-control-group .wy-form-halves,.wy-control-group .wy-form-thirds{padding-bottom:12px}.wy-control-group .wy-form-full input[type=color],.wy-control-group .wy-form-full input[type=date],.wy-control-group .wy-form-full input[type=datetime-local],.wy-control-group .wy-form-full input[type=datetime],.wy-control-group .wy-form-full input[type=email],.wy-control-group .wy-form-full input[type=month],.wy-control-group .wy-form-full input[type=number],.wy-control-group .wy-form-full input[type=password],.wy-control-group .wy-form-full input[type=search],.wy-control-group .wy-form-full input[type=tel],.wy-control-group .wy-form-full input[type=text],.wy-control-group .wy-form-full input[type=time],.wy-control-group .wy-form-full input[type=url],.wy-control-group .wy-form-full input[type=week],.wy-control-group .wy-form-full select,.wy-control-group .wy-form-halves input[type=color],.wy-control-group .wy-form-halves input[type=date],.wy-control-group .wy-form-halves input[type=datetime-local],.wy-control-group .wy-form-halves input[type=datetime],.wy-control-group .wy-form-halves input[type=email],.wy-control-group .wy-form-halves input[type=month],.wy-control-group .wy-form-halves input[type=number],.wy-control-group .wy-form-halves input[type=password],.wy-control-group .wy-form-halves input[type=search],.wy-control-group .wy-form-halves input[type=tel],.wy-control-group .wy-form-halves input[type=text],.wy-control-group .wy-form-halves input[type=time],.wy-control-group 
.wy-form-halves input[type=url],.wy-control-group .wy-form-halves input[type=week],.wy-control-group .wy-form-halves select,.wy-control-group .wy-form-thirds input[type=color],.wy-control-group .wy-form-thirds input[type=date],.wy-control-group .wy-form-thirds input[type=datetime-local],.wy-control-group .wy-form-thirds input[type=datetime],.wy-control-group .wy-form-thirds input[type=email],.wy-control-group .wy-form-thirds input[type=month],.wy-control-group .wy-form-thirds input[type=number],.wy-control-group .wy-form-thirds input[type=password],.wy-control-group .wy-form-thirds input[type=search],.wy-control-group .wy-form-thirds input[type=tel],.wy-control-group .wy-form-thirds input[type=text],.wy-control-group .wy-form-thirds input[type=time],.wy-control-group .wy-form-thirds input[type=url],.wy-control-group .wy-form-thirds input[type=week],.wy-control-group .wy-form-thirds select{width:100%}.wy-control-group .wy-form-full{float:left;display:block;width:100%;margin-right:0}.wy-control-group .wy-form-full:last-child{margin-right:0}.wy-control-group .wy-form-halves{float:left;display:block;margin-right:2.35765%;width:48.82117%}.wy-control-group .wy-form-halves:last-child,.wy-control-group .wy-form-halves:nth-of-type(2n){margin-right:0}.wy-control-group .wy-form-halves:nth-of-type(odd){clear:left}.wy-control-group .wy-form-thirds{float:left;display:block;margin-right:2.35765%;width:31.76157%}.wy-control-group .wy-form-thirds:last-child,.wy-control-group .wy-form-thirds:nth-of-type(3n){margin-right:0}.wy-control-group .wy-form-thirds:nth-of-type(3n+1){clear:left}.wy-control-group.wy-control-group-no-input .wy-control,.wy-control-no-input{margin:6px 0 0;font-size:90%}.wy-control-no-input{display:inline-block}.wy-control-group.fluid-input input[type=color],.wy-control-group.fluid-input input[type=date],.wy-control-group.fluid-input input[type=datetime-local],.wy-control-group.fluid-input input[type=datetime],.wy-control-group.fluid-input input[type=email],.wy-control-group.fluid-input input[type=month],.wy-control-group.fluid-input input[type=number],.wy-control-group.fluid-input input[type=password],.wy-control-group.fluid-input input[type=search],.wy-control-group.fluid-input input[type=tel],.wy-control-group.fluid-input input[type=text],.wy-control-group.fluid-input input[type=time],.wy-control-group.fluid-input input[type=url],.wy-control-group.fluid-input input[type=week]{width:100%}.wy-form-message-inline{padding-left:.3em;color:#666;font-size:90%}.wy-form-message{display:block;color:#999;font-size:70%;margin-top:.3125em;font-style:italic}.wy-form-message p{font-size:inherit;font-style:italic;margin-bottom:6px}.wy-form-message p:last-child{margin-bottom:0}input{line-height:normal}input[type=button],input[type=reset],input[type=submit]{-webkit-appearance:button;cursor:pointer;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;*overflow:visible}input[type=color],input[type=date],input[type=datetime-local],input[type=datetime],input[type=email],input[type=month],input[type=number],input[type=password],input[type=search],input[type=tel],input[type=text],input[type=time],input[type=url],input[type=week]{-webkit-appearance:none;padding:6px;display:inline-block;border:1px solid #ccc;font-size:80%;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;box-shadow:inset 0 1px 3px #ddd;border-radius:0;-webkit-transition:border .3s linear;-moz-transition:border .3s linear;transition:border .3s linear}input[type=datetime-local]{padding:.34375em 
.625em}input[disabled]{cursor:default}input[type=checkbox],input[type=radio]{padding:0;margin-right:.3125em;*height:13px;*width:13px}input[type=checkbox],input[type=radio],input[type=search]{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}input[type=search]::-webkit-search-cancel-button,input[type=search]::-webkit-search-decoration{-webkit-appearance:none}input[type=color]:focus,input[type=date]:focus,input[type=datetime-local]:focus,input[type=datetime]:focus,input[type=email]:focus,input[type=month]:focus,input[type=number]:focus,input[type=password]:focus,input[type=search]:focus,input[type=tel]:focus,input[type=text]:focus,input[type=time]:focus,input[type=url]:focus,input[type=week]:focus{outline:0;outline:thin dotted\9;border-color:#333}input.no-focus:focus{border-color:#ccc!important}input[type=checkbox]:focus,input[type=file]:focus,input[type=radio]:focus{outline:thin dotted #333;outline:1px auto #129fea}input[type=color][disabled],input[type=date][disabled],input[type=datetime-local][disabled],input[type=datetime][disabled],input[type=email][disabled],input[type=month][disabled],input[type=number][disabled],input[type=password][disabled],input[type=search][disabled],input[type=tel][disabled],input[type=text][disabled],input[type=time][disabled],input[type=url][disabled],input[type=week][disabled]{cursor:not-allowed;background-color:#fafafa}input:focus:invalid,select:focus:invalid,textarea:focus:invalid{color:#e74c3c;border:1px solid #e74c3c}input:focus:invalid:focus,select:focus:invalid:focus,textarea:focus:invalid:focus{border-color:#e74c3c}input[type=checkbox]:focus:invalid:focus,input[type=file]:focus:invalid:focus,input[type=radio]:focus:invalid:focus{outline-color:#e74c3c}input.wy-input-large{padding:12px;font-size:100%}textarea{overflow:auto;vertical-align:top;width:100%;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif}select,textarea{padding:.5em .625em;display:inline-block;border:1px solid #ccc;font-size:80%;box-shadow:inset 0 1px 3px #ddd;-webkit-transition:border .3s linear;-moz-transition:border .3s linear;transition:border .3s linear}select{border:1px solid #ccc;background-color:#fff}select[multiple]{height:auto}select:focus,textarea:focus{outline:0}input[readonly],select[disabled],select[readonly],textarea[disabled],textarea[readonly]{cursor:not-allowed;background-color:#fafafa}input[type=checkbox][disabled],input[type=radio][disabled]{cursor:not-allowed}.wy-checkbox,.wy-radio{margin:6px 0;color:#404040;display:block}.wy-checkbox input,.wy-radio input{vertical-align:baseline}.wy-form-message-inline{display:inline-block;*display:inline;*zoom:1;vertical-align:middle}.wy-input-prefix,.wy-input-suffix{white-space:nowrap;padding:6px}.wy-input-prefix .wy-input-context,.wy-input-suffix .wy-input-context{line-height:27px;padding:0 8px;display:inline-block;font-size:80%;background-color:#f3f6f6;border:1px solid #ccc;color:#999}.wy-input-suffix .wy-input-context{border-left:0}.wy-input-prefix .wy-input-context{border-right:0}.wy-switch{position:relative;display:block;height:24px;margin-top:12px;cursor:pointer}.wy-switch:before{left:0;top:0;width:36px;height:12px;background:#ccc}.wy-switch:after,.wy-switch:before{position:absolute;content:"";display:block;border-radius:4px;-webkit-transition:all .2s ease-in-out;-moz-transition:all .2s ease-in-out;transition:all .2s ease-in-out}.wy-switch:after{width:18px;height:18px;background:#999;left:-3px;top:-3px}.wy-switch 
span{position:absolute;left:48px;display:block;font-size:12px;color:#ccc;line-height:1}.wy-switch.active:before{background:#1e8449}.wy-switch.active:after{left:24px;background:#27ae60}.wy-switch.disabled{cursor:not-allowed;opacity:.8}.wy-control-group.wy-control-group-error .wy-form-message,.wy-control-group.wy-control-group-error>label{color:#e74c3c}.wy-control-group.wy-control-group-error input[type=color],.wy-control-group.wy-control-group-error input[type=date],.wy-control-group.wy-control-group-error input[type=datetime-local],.wy-control-group.wy-control-group-error input[type=datetime],.wy-control-group.wy-control-group-error input[type=email],.wy-control-group.wy-control-group-error input[type=month],.wy-control-group.wy-control-group-error input[type=number],.wy-control-group.wy-control-group-error input[type=password],.wy-control-group.wy-control-group-error input[type=search],.wy-control-group.wy-control-group-error input[type=tel],.wy-control-group.wy-control-group-error input[type=text],.wy-control-group.wy-control-group-error input[type=time],.wy-control-group.wy-control-group-error input[type=url],.wy-control-group.wy-control-group-error input[type=week],.wy-control-group.wy-control-group-error textarea{border:1px solid #e74c3c}.wy-inline-validate{white-space:nowrap}.wy-inline-validate .wy-input-context{padding:.5em .625em;display:inline-block;font-size:80%}.wy-inline-validate.wy-inline-validate-success .wy-input-context{color:#27ae60}.wy-inline-validate.wy-inline-validate-danger .wy-input-context{color:#e74c3c}.wy-inline-validate.wy-inline-validate-warning .wy-input-context{color:#e67e22}.wy-inline-validate.wy-inline-validate-info .wy-input-context{color:#2980b9}.rotate-90{-webkit-transform:rotate(90deg);-moz-transform:rotate(90deg);-ms-transform:rotate(90deg);-o-transform:rotate(90deg);transform:rotate(90deg)}.rotate-180{-webkit-transform:rotate(180deg);-moz-transform:rotate(180deg);-ms-transform:rotate(180deg);-o-transform:rotate(180deg);transform:rotate(180deg)}.rotate-270{-webkit-transform:rotate(270deg);-moz-transform:rotate(270deg);-ms-transform:rotate(270deg);-o-transform:rotate(270deg);transform:rotate(270deg)}.mirror{-webkit-transform:scaleX(-1);-moz-transform:scaleX(-1);-ms-transform:scaleX(-1);-o-transform:scaleX(-1);transform:scaleX(-1)}.mirror.rotate-90{-webkit-transform:scaleX(-1) rotate(90deg);-moz-transform:scaleX(-1) rotate(90deg);-ms-transform:scaleX(-1) rotate(90deg);-o-transform:scaleX(-1) rotate(90deg);transform:scaleX(-1) rotate(90deg)}.mirror.rotate-180{-webkit-transform:scaleX(-1) rotate(180deg);-moz-transform:scaleX(-1) rotate(180deg);-ms-transform:scaleX(-1) rotate(180deg);-o-transform:scaleX(-1) rotate(180deg);transform:scaleX(-1) rotate(180deg)}.mirror.rotate-270{-webkit-transform:scaleX(-1) rotate(270deg);-moz-transform:scaleX(-1) rotate(270deg);-ms-transform:scaleX(-1) rotate(270deg);-o-transform:scaleX(-1) rotate(270deg);transform:scaleX(-1) rotate(270deg)}@media only screen and (max-width:480px){.wy-form button[type=submit]{margin:.7em 0 0}.wy-form input[type=color],.wy-form input[type=date],.wy-form input[type=datetime-local],.wy-form input[type=datetime],.wy-form input[type=email],.wy-form input[type=month],.wy-form input[type=number],.wy-form input[type=password],.wy-form input[type=search],.wy-form input[type=tel],.wy-form input[type=text],.wy-form input[type=time],.wy-form input[type=url],.wy-form input[type=week],.wy-form label{margin-bottom:.3em;display:block}.wy-form input[type=color],.wy-form input[type=date],.wy-form 
input[type=datetime-local],.wy-form input[type=datetime],.wy-form input[type=email],.wy-form input[type=month],.wy-form input[type=number],.wy-form input[type=password],.wy-form input[type=search],.wy-form input[type=tel],.wy-form input[type=time],.wy-form input[type=url],.wy-form input[type=week]{margin-bottom:0}.wy-form-aligned .wy-control-group label{margin-bottom:.3em;text-align:left;display:block;width:100%}.wy-form-aligned .wy-control{margin:1.5em 0 0}.wy-form-message,.wy-form-message-inline,.wy-form .wy-help-inline{display:block;font-size:80%;padding:6px 0}}@media screen and (max-width:768px){.tablet-hide{display:none}}@media screen and (max-width:480px){.mobile-hide{display:none}}.float-left{float:left}.float-right{float:right}.full-width{width:100%}.rst-content table.docutils,.rst-content table.field-list,.wy-table{border-collapse:collapse;border-spacing:0;empty-cells:show;margin-bottom:24px}.rst-content table.docutils caption,.rst-content table.field-list caption,.wy-table caption{color:#000;font:italic 85%/1 arial,sans-serif;padding:1em 0;text-align:center}.rst-content table.docutils td,.rst-content table.docutils th,.rst-content table.field-list td,.rst-content table.field-list th,.wy-table td,.wy-table th{font-size:90%;margin:0;overflow:visible;padding:8px 16px}.rst-content table.docutils td:first-child,.rst-content table.docutils th:first-child,.rst-content table.field-list td:first-child,.rst-content table.field-list th:first-child,.wy-table td:first-child,.wy-table th:first-child{border-left-width:0}.rst-content table.docutils thead,.rst-content table.field-list thead,.wy-table thead{color:#000;text-align:left;vertical-align:bottom;white-space:nowrap}.rst-content table.docutils thead th,.rst-content table.field-list thead th,.wy-table thead th{font-weight:700;border-bottom:2px solid #e1e4e5}.rst-content table.docutils td,.rst-content table.field-list td,.wy-table td{background-color:transparent;vertical-align:middle}.rst-content table.docutils td p,.rst-content table.field-list td p,.wy-table td p{line-height:18px}.rst-content table.docutils td p:last-child,.rst-content table.field-list td p:last-child,.wy-table td p:last-child{margin-bottom:0}.rst-content table.docutils .wy-table-cell-min,.rst-content table.field-list .wy-table-cell-min,.wy-table .wy-table-cell-min{width:1%;padding-right:0}.rst-content table.docutils .wy-table-cell-min input[type=checkbox],.rst-content table.field-list .wy-table-cell-min input[type=checkbox],.wy-table .wy-table-cell-min input[type=checkbox]{margin:0}.wy-table-secondary{color:grey;font-size:90%}.wy-table-tertiary{color:grey;font-size:80%}.rst-content table.docutils:not(.field-list) tr:nth-child(2n-1) td,.wy-table-backed,.wy-table-odd td,.wy-table-striped tr:nth-child(2n-1) td{background-color:#f3f6f6}.rst-content table.docutils,.wy-table-bordered-all{border:1px solid #e1e4e5}.rst-content table.docutils td,.wy-table-bordered-all td{border-bottom:1px solid #e1e4e5;border-left:1px solid #e1e4e5}.rst-content table.docutils tbody>tr:last-child td,.wy-table-bordered-all tbody>tr:last-child td{border-bottom-width:0}.wy-table-bordered{border:1px solid #e1e4e5}.wy-table-bordered-rows td{border-bottom:1px solid #e1e4e5}.wy-table-bordered-rows tbody>tr:last-child td{border-bottom-width:0}.wy-table-horizontal td,.wy-table-horizontal th{border-width:0 0 1px;border-bottom:1px solid #e1e4e5}.wy-table-horizontal tbody>tr:last-child td{border-bottom-width:0}.wy-table-responsive{margin-bottom:24px;max-width:100%;overflow:auto}.wy-table-responsive 
table{margin-bottom:0!important}.wy-table-responsive table td,.wy-table-responsive table th{white-space:nowrap}a{color:#2980b9;text-decoration:none;cursor:pointer}a:hover{color:#3091d1}a:visited{color:#9b59b6}html{height:100%}body,html{overflow-x:hidden}body{font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;font-weight:400;color:#404040;min-height:100%;background:#edf0f2}.wy-text-left{text-align:left}.wy-text-center{text-align:center}.wy-text-right{text-align:right}.wy-text-large{font-size:120%}.wy-text-normal{font-size:100%}.wy-text-small,small{font-size:80%}.wy-text-strike{text-decoration:line-through}.wy-text-warning{color:#e67e22!important}a.wy-text-warning:hover{color:#eb9950!important}.wy-text-info{color:#2980b9!important}a.wy-text-info:hover{color:#409ad5!important}.wy-text-success{color:#27ae60!important}a.wy-text-success:hover{color:#36d278!important}.wy-text-danger{color:#e74c3c!important}a.wy-text-danger:hover{color:#ed7669!important}.wy-text-neutral{color:#404040!important}a.wy-text-neutral:hover{color:#595959!important}.rst-content .toctree-wrapper>p.caption,h1,h2,h3,h4,h5,h6,legend{margin-top:0;font-weight:700;font-family:Roboto Slab,ff-tisa-web-pro,Georgia,Arial,sans-serif}p{line-height:24px;font-size:16px;margin:0 0 24px}h1{font-size:175%}.rst-content .toctree-wrapper>p.caption,h2{font-size:150%}h3{font-size:125%}h4{font-size:115%}h5{font-size:110%}h6{font-size:100%}hr{display:block;height:1px;border:0;border-top:1px solid #e1e4e5;margin:24px 0;padding:0}.rst-content code,.rst-content tt,code{white-space:nowrap;max-width:100%;background:#fff;border:1px solid #e1e4e5;font-size:75%;padding:0 5px;font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;color:#e74c3c;overflow-x:auto}.rst-content tt.code-large,code.code-large{font-size:90%}.rst-content .section ul,.rst-content .toctree-wrapper ul,.rst-content section ul,.wy-plain-list-disc,article ul{list-style:disc;line-height:24px;margin-bottom:24px}.rst-content .section ul li,.rst-content .toctree-wrapper ul li,.rst-content section ul li,.wy-plain-list-disc li,article ul li{list-style:disc;margin-left:24px}.rst-content .section ul li p:last-child,.rst-content .section ul li ul,.rst-content .toctree-wrapper ul li p:last-child,.rst-content .toctree-wrapper ul li ul,.rst-content section ul li p:last-child,.rst-content section ul li ul,.wy-plain-list-disc li p:last-child,.wy-plain-list-disc li ul,article ul li p:last-child,article ul li ul{margin-bottom:0}.rst-content .section ul li li,.rst-content .toctree-wrapper ul li li,.rst-content section ul li li,.wy-plain-list-disc li li,article ul li li{list-style:circle}.rst-content .section ul li li li,.rst-content .toctree-wrapper ul li li li,.rst-content section ul li li li,.wy-plain-list-disc li li li,article ul li li li{list-style:square}.rst-content .section ul li ol li,.rst-content .toctree-wrapper ul li ol li,.rst-content section ul li ol li,.wy-plain-list-disc li ol li,article ul li ol li{list-style:decimal}.rst-content .section ol,.rst-content .section ol.arabic,.rst-content .toctree-wrapper ol,.rst-content .toctree-wrapper ol.arabic,.rst-content section ol,.rst-content section ol.arabic,.wy-plain-list-decimal,article ol{list-style:decimal;line-height:24px;margin-bottom:24px}.rst-content .section ol.arabic li,.rst-content .section ol li,.rst-content .toctree-wrapper ol.arabic li,.rst-content .toctree-wrapper ol li,.rst-content section ol.arabic li,.rst-content section ol li,.wy-plain-list-decimal li,article ol 
li{list-style:decimal;margin-left:24px}.rst-content .section ol.arabic li ul,.rst-content .section ol li p:last-child,.rst-content .section ol li ul,.rst-content .toctree-wrapper ol.arabic li ul,.rst-content .toctree-wrapper ol li p:last-child,.rst-content .toctree-wrapper ol li ul,.rst-content section ol.arabic li ul,.rst-content section ol li p:last-child,.rst-content section ol li ul,.wy-plain-list-decimal li p:last-child,.wy-plain-list-decimal li ul,article ol li p:last-child,article ol li ul{margin-bottom:0}.rst-content .section ol.arabic li ul li,.rst-content .section ol li ul li,.rst-content .toctree-wrapper ol.arabic li ul li,.rst-content .toctree-wrapper ol li ul li,.rst-content section ol.arabic li ul li,.rst-content section ol li ul li,.wy-plain-list-decimal li ul li,article ol li ul li{list-style:disc}.wy-breadcrumbs{*zoom:1}.wy-breadcrumbs:after,.wy-breadcrumbs:before{display:table;content:""}.wy-breadcrumbs:after{clear:both}.wy-breadcrumbs>li{display:inline-block;padding-top:5px}.wy-breadcrumbs>li.wy-breadcrumbs-aside{float:right}.rst-content .wy-breadcrumbs>li code,.rst-content .wy-breadcrumbs>li tt,.wy-breadcrumbs>li .rst-content tt,.wy-breadcrumbs>li code{all:inherit;color:inherit}.breadcrumb-item:before{content:"/";color:#bbb;font-size:13px;padding:0 6px 0 3px}.wy-breadcrumbs-extra{margin-bottom:0;color:#b3b3b3;font-size:80%;display:inline-block}@media screen and (max-width:480px){.wy-breadcrumbs-extra,.wy-breadcrumbs li.wy-breadcrumbs-aside{display:none}}@media print{.wy-breadcrumbs li.wy-breadcrumbs-aside{display:none}}html{font-size:16px}.wy-affix{position:fixed;top:1.618em}.wy-menu a:hover{text-decoration:none}.wy-menu-horiz{*zoom:1}.wy-menu-horiz:after,.wy-menu-horiz:before{display:table;content:""}.wy-menu-horiz:after{clear:both}.wy-menu-horiz li,.wy-menu-horiz ul{display:inline-block}.wy-menu-horiz li:hover{background:hsla(0,0%,100%,.1)}.wy-menu-horiz li.divide-left{border-left:1px solid #404040}.wy-menu-horiz li.divide-right{border-right:1px solid #404040}.wy-menu-horiz a{height:32px;display:inline-block;line-height:32px;padding:0 16px}.wy-menu-vertical{width:300px}.wy-menu-vertical header,.wy-menu-vertical p.caption{color:#55a5d9;height:32px;line-height:32px;padding:0 1.618em;margin:12px 0 0;display:block;font-weight:700;text-transform:uppercase;font-size:85%;white-space:nowrap}.wy-menu-vertical ul{margin-bottom:0}.wy-menu-vertical li.divide-top{border-top:1px solid #404040}.wy-menu-vertical li.divide-bottom{border-bottom:1px solid #404040}.wy-menu-vertical li.current{background:#e3e3e3}.wy-menu-vertical li.current a{color:grey;border-right:1px solid #c9c9c9;padding:.4045em 2.427em}.wy-menu-vertical li.current a:hover{background:#d6d6d6}.rst-content .wy-menu-vertical li tt,.wy-menu-vertical li .rst-content tt,.wy-menu-vertical li code{border:none;background:inherit;color:inherit;padding-left:0;padding-right:0}.wy-menu-vertical li button.toctree-expand{display:block;float:left;margin-left:-1.2em;line-height:18px;color:#4d4d4d;border:none;background:none;padding:0}.wy-menu-vertical li.current>a,.wy-menu-vertical li.on a{color:#404040;font-weight:700;position:relative;background:#fcfcfc;border:none;padding:.4045em 1.618em}.wy-menu-vertical li.current>a:hover,.wy-menu-vertical li.on a:hover{background:#fcfcfc}.wy-menu-vertical li.current>a:hover button.toctree-expand,.wy-menu-vertical li.on a:hover button.toctree-expand{color:grey}.wy-menu-vertical li.current>a button.toctree-expand,.wy-menu-vertical li.on a 
button.toctree-expand{display:block;line-height:18px;color:#333}.wy-menu-vertical li.toctree-l1.current>a{border-bottom:1px solid #c9c9c9;border-top:1px solid #c9c9c9}.wy-menu-vertical .toctree-l1.current .toctree-l2>ul,.wy-menu-vertical .toctree-l2.current .toctree-l3>ul,.wy-menu-vertical .toctree-l3.current .toctree-l4>ul,.wy-menu-vertical .toctree-l4.current .toctree-l5>ul,.wy-menu-vertical .toctree-l5.current .toctree-l6>ul,.wy-menu-vertical .toctree-l6.current .toctree-l7>ul,.wy-menu-vertical .toctree-l7.current .toctree-l8>ul,.wy-menu-vertical .toctree-l8.current .toctree-l9>ul,.wy-menu-vertical .toctree-l9.current .toctree-l10>ul,.wy-menu-vertical .toctree-l10.current .toctree-l11>ul{display:none}.wy-menu-vertical .toctree-l1.current .current.toctree-l2>ul,.wy-menu-vertical .toctree-l2.current .current.toctree-l3>ul,.wy-menu-vertical .toctree-l3.current .current.toctree-l4>ul,.wy-menu-vertical .toctree-l4.current .current.toctree-l5>ul,.wy-menu-vertical .toctree-l5.current .current.toctree-l6>ul,.wy-menu-vertical .toctree-l6.current .current.toctree-l7>ul,.wy-menu-vertical .toctree-l7.current .current.toctree-l8>ul,.wy-menu-vertical .toctree-l8.current .current.toctree-l9>ul,.wy-menu-vertical .toctree-l9.current .current.toctree-l10>ul,.wy-menu-vertical .toctree-l10.current .current.toctree-l11>ul{display:block}.wy-menu-vertical li.toctree-l3,.wy-menu-vertical li.toctree-l4{font-size:.9em}.wy-menu-vertical li.toctree-l2 a,.wy-menu-vertical li.toctree-l3 a,.wy-menu-vertical li.toctree-l4 a,.wy-menu-vertical li.toctree-l5 a,.wy-menu-vertical li.toctree-l6 a,.wy-menu-vertical li.toctree-l7 a,.wy-menu-vertical li.toctree-l8 a,.wy-menu-vertical li.toctree-l9 a,.wy-menu-vertical li.toctree-l10 a{color:#404040}.wy-menu-vertical li.toctree-l2 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l3 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l4 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l5 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l6 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l7 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l8 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l9 a:hover button.toctree-expand,.wy-menu-vertical li.toctree-l10 a:hover button.toctree-expand{color:grey}.wy-menu-vertical li.toctree-l2.current li.toctree-l3>a,.wy-menu-vertical li.toctree-l3.current li.toctree-l4>a,.wy-menu-vertical li.toctree-l4.current li.toctree-l5>a,.wy-menu-vertical li.toctree-l5.current li.toctree-l6>a,.wy-menu-vertical li.toctree-l6.current li.toctree-l7>a,.wy-menu-vertical li.toctree-l7.current li.toctree-l8>a,.wy-menu-vertical li.toctree-l8.current li.toctree-l9>a,.wy-menu-vertical li.toctree-l9.current li.toctree-l10>a,.wy-menu-vertical li.toctree-l10.current li.toctree-l11>a{display:block}.wy-menu-vertical li.toctree-l2.current>a{padding:.4045em 2.427em}.wy-menu-vertical li.toctree-l2.current li.toctree-l3>a{padding:.4045em 1.618em .4045em 4.045em}.wy-menu-vertical li.toctree-l3.current>a{padding:.4045em 4.045em}.wy-menu-vertical li.toctree-l3.current li.toctree-l4>a{padding:.4045em 1.618em .4045em 5.663em}.wy-menu-vertical li.toctree-l4.current>a{padding:.4045em 5.663em}.wy-menu-vertical li.toctree-l4.current li.toctree-l5>a{padding:.4045em 1.618em .4045em 7.281em}.wy-menu-vertical li.toctree-l5.current>a{padding:.4045em 7.281em}.wy-menu-vertical li.toctree-l5.current li.toctree-l6>a{padding:.4045em 1.618em .4045em 8.899em}.wy-menu-vertical li.toctree-l6.current>a{padding:.4045em 
8.899em}.wy-menu-vertical li.toctree-l6.current li.toctree-l7>a{padding:.4045em 1.618em .4045em 10.517em}.wy-menu-vertical li.toctree-l7.current>a{padding:.4045em 10.517em}.wy-menu-vertical li.toctree-l7.current li.toctree-l8>a{padding:.4045em 1.618em .4045em 12.135em}.wy-menu-vertical li.toctree-l8.current>a{padding:.4045em 12.135em}.wy-menu-vertical li.toctree-l8.current li.toctree-l9>a{padding:.4045em 1.618em .4045em 13.753em}.wy-menu-vertical li.toctree-l9.current>a{padding:.4045em 13.753em}.wy-menu-vertical li.toctree-l9.current li.toctree-l10>a{padding:.4045em 1.618em .4045em 15.371em}.wy-menu-vertical li.toctree-l10.current>a{padding:.4045em 15.371em}.wy-menu-vertical li.toctree-l10.current li.toctree-l11>a{padding:.4045em 1.618em .4045em 16.989em}.wy-menu-vertical li.toctree-l2.current>a,.wy-menu-vertical li.toctree-l2.current li.toctree-l3>a{background:#c9c9c9}.wy-menu-vertical li.toctree-l2 button.toctree-expand{color:#a3a3a3}.wy-menu-vertical li.toctree-l3.current>a,.wy-menu-vertical li.toctree-l3.current li.toctree-l4>a{background:#bdbdbd}.wy-menu-vertical li.toctree-l3 button.toctree-expand{color:#969696}.wy-menu-vertical li.current ul{display:block}.wy-menu-vertical li ul{margin-bottom:0;display:none}.wy-menu-vertical li ul li a{margin-bottom:0;color:#d9d9d9;font-weight:400}.wy-menu-vertical a{line-height:18px;padding:.4045em 1.618em;display:block;position:relative;font-size:90%;color:#d9d9d9}.wy-menu-vertical a:hover{background-color:#4e4a4a;cursor:pointer}.wy-menu-vertical a:hover button.toctree-expand{color:#d9d9d9}.wy-menu-vertical a:active{background-color:#2980b9;cursor:pointer;color:#fff}.wy-menu-vertical a:active button.toctree-expand{color:#fff}.wy-side-nav-search{display:block;width:300px;padding:.809em;margin-bottom:.809em;z-index:200;background-color:#2980b9;text-align:center;color:#fcfcfc}.wy-side-nav-search input[type=text]{width:100%;border-radius:50px;padding:6px 12px;border-color:#2472a4}.wy-side-nav-search img{display:block;margin:auto auto .809em;height:45px;width:45px;background-color:#2980b9;padding:5px;border-radius:100%}.wy-side-nav-search .wy-dropdown>a,.wy-side-nav-search>a{color:#fcfcfc;font-size:100%;font-weight:700;display:inline-block;padding:4px 6px;margin-bottom:.809em;max-width:100%}.wy-side-nav-search .wy-dropdown>a:hover,.wy-side-nav-search>a:hover{background:hsla(0,0%,100%,.1)}.wy-side-nav-search .wy-dropdown>a img.logo,.wy-side-nav-search>a img.logo{display:block;margin:0 auto;height:auto;width:auto;border-radius:0;max-width:100%;background:transparent}.wy-side-nav-search .wy-dropdown>a.icon img.logo,.wy-side-nav-search>a.icon img.logo{margin-top:.85em}.wy-side-nav-search>div.version{margin-top:-.4045em;margin-bottom:.809em;font-weight:400;color:hsla(0,0%,100%,.3)}.wy-nav .wy-menu-vertical header{color:#2980b9}.wy-nav .wy-menu-vertical a{color:#b3b3b3}.wy-nav .wy-menu-vertical a:hover{background-color:#2980b9;color:#fff}[data-menu-wrap]{-webkit-transition:all .2s ease-in;-moz-transition:all .2s ease-in;transition:all .2s 
ease-in;position:absolute;opacity:1;width:100%;opacity:0}[data-menu-wrap].move-center{left:0;right:auto;opacity:1}[data-menu-wrap].move-left{right:auto;left:-100%;opacity:0}[data-menu-wrap].move-right{right:-100%;left:auto;opacity:0}.wy-body-for-nav{background:#fcfcfc}.wy-grid-for-nav{position:absolute;width:100%;height:100%}.wy-nav-side{position:fixed;top:0;bottom:0;left:0;padding-bottom:2em;width:300px;overflow-x:hidden;overflow-y:hidden;min-height:100%;color:#9b9b9b;background:#343131;z-index:200}.wy-side-scroll{width:320px;position:relative;overflow-x:hidden;overflow-y:scroll;height:100%}.wy-nav-top{display:none;background:#2980b9;color:#fff;padding:.4045em .809em;position:relative;line-height:50px;text-align:center;font-size:100%;*zoom:1}.wy-nav-top:after,.wy-nav-top:before{display:table;content:""}.wy-nav-top:after{clear:both}.wy-nav-top a{color:#fff;font-weight:700}.wy-nav-top img{margin-right:12px;height:45px;width:45px;background-color:#2980b9;padding:5px;border-radius:100%}.wy-nav-top i{font-size:30px;float:left;cursor:pointer;padding-top:inherit}.wy-nav-content-wrap{margin-left:300px;background:#fcfcfc;min-height:100%}.wy-nav-content{padding:1.618em 3.236em;height:100%;max-width:800px;margin:auto}.wy-body-mask{position:fixed;width:100%;height:100%;background:rgba(0,0,0,.2);display:none;z-index:499}.wy-body-mask.on{display:block}footer{color:grey}footer p{margin-bottom:12px}.rst-content footer span.commit tt,footer span.commit .rst-content tt,footer span.commit code{padding:0;font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;font-size:1em;background:none;border:none;color:grey}.rst-footer-buttons{*zoom:1}.rst-footer-buttons:after,.rst-footer-buttons:before{width:100%;display:table;content:""}.rst-footer-buttons:after{clear:both}.rst-breadcrumbs-buttons{margin-top:12px;*zoom:1}.rst-breadcrumbs-buttons:after,.rst-breadcrumbs-buttons:before{display:table;content:""}.rst-breadcrumbs-buttons:after{clear:both}#search-results .search li{margin-bottom:24px;border-bottom:1px solid #e1e4e5;padding-bottom:24px}#search-results .search li:first-child{border-top:1px solid #e1e4e5;padding-top:24px}#search-results .search li a{font-size:120%;margin-bottom:12px;display:inline-block}#search-results .context{color:grey;font-size:90%}.genindextable li>ul{margin-left:24px}@media screen and (max-width:768px){.wy-body-for-nav{background:#fcfcfc}.wy-nav-top{display:block}.wy-nav-side{left:-300px}.wy-nav-side.shift{width:85%;left:0}.wy-menu.wy-menu-vertical,.wy-side-nav-search,.wy-side-scroll{width:auto}.wy-nav-content-wrap{margin-left:0}.wy-nav-content-wrap .wy-nav-content{padding:1.618em}.wy-nav-content-wrap.shift{position:fixed;min-width:100%;left:85%;top:0;height:100%;overflow:hidden}}@media screen and (min-width:1100px){.wy-nav-content-wrap{background:rgba(0,0,0,.05)}.wy-nav-content{margin:0;background:#fcfcfc}}@media print{.rst-versions,.wy-nav-side,footer{display:none}.wy-nav-content-wrap{margin-left:0}}.rst-versions{position:fixed;bottom:0;left:0;width:300px;color:#fcfcfc;background:#1f1d1d;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;z-index:400}.rst-versions a{color:#2980b9;text-decoration:none}.rst-versions .rst-badge-small{display:none}.rst-versions .rst-current-version{padding:12px;background-color:#272525;display:block;text-align:right;font-size:90%;cursor:pointer;color:#27ae60;*zoom:1}.rst-versions .rst-current-version:after,.rst-versions .rst-current-version:before{display:table;content:""}.rst-versions 
.rst-current-version:after{clear:both}.rst-content .code-block-caption .rst-versions .rst-current-version .headerlink,.rst-content .eqno .rst-versions .rst-current-version .headerlink,.rst-content .rst-versions .rst-current-version .admonition-title,.rst-content code.download .rst-versions .rst-current-version span:first-child,.rst-content dl dt .rst-versions .rst-current-version .headerlink,.rst-content h1 .rst-versions .rst-current-version .headerlink,.rst-content h2 .rst-versions .rst-current-version .headerlink,.rst-content h3 .rst-versions .rst-current-version .headerlink,.rst-content h4 .rst-versions .rst-current-version .headerlink,.rst-content h5 .rst-versions .rst-current-version .headerlink,.rst-content h6 .rst-versions .rst-current-version .headerlink,.rst-content p .rst-versions .rst-current-version .headerlink,.rst-content table>caption .rst-versions .rst-current-version .headerlink,.rst-content tt.download .rst-versions .rst-current-version span:first-child,.rst-versions .rst-current-version .fa,.rst-versions .rst-current-version .icon,.rst-versions .rst-current-version .rst-content .admonition-title,.rst-versions .rst-current-version .rst-content .code-block-caption .headerlink,.rst-versions .rst-current-version .rst-content .eqno .headerlink,.rst-versions .rst-current-version .rst-content code.download span:first-child,.rst-versions .rst-current-version .rst-content dl dt .headerlink,.rst-versions .rst-current-version .rst-content h1 .headerlink,.rst-versions .rst-current-version .rst-content h2 .headerlink,.rst-versions .rst-current-version .rst-content h3 .headerlink,.rst-versions .rst-current-version .rst-content h4 .headerlink,.rst-versions .rst-current-version .rst-content h5 .headerlink,.rst-versions .rst-current-version .rst-content h6 .headerlink,.rst-versions .rst-current-version .rst-content p .headerlink,.rst-versions .rst-current-version .rst-content table>caption .headerlink,.rst-versions .rst-current-version .rst-content tt.download span:first-child,.rst-versions .rst-current-version .wy-menu-vertical li button.toctree-expand,.wy-menu-vertical li .rst-versions .rst-current-version button.toctree-expand{color:#fcfcfc}.rst-versions .rst-current-version .fa-book,.rst-versions .rst-current-version .icon-book{float:left}.rst-versions .rst-current-version.rst-out-of-date{background-color:#e74c3c;color:#fff}.rst-versions .rst-current-version.rst-active-old-version{background-color:#f1c40f;color:#000}.rst-versions.shift-up{height:auto;max-height:100%;overflow-y:scroll}.rst-versions.shift-up .rst-other-versions{display:block}.rst-versions .rst-other-versions{font-size:90%;padding:12px;color:grey;display:none}.rst-versions .rst-other-versions hr{display:block;height:1px;border:0;margin:20px 0;padding:0;border-top:1px solid #413d3d}.rst-versions .rst-other-versions dd{display:inline-block;margin:0}.rst-versions .rst-other-versions dd a{display:inline-block;padding:6px;color:#fcfcfc}.rst-versions.rst-badge{width:auto;bottom:20px;right:20px;left:auto;border:none;max-width:300px;max-height:90%}.rst-versions.rst-badge .fa-book,.rst-versions.rst-badge .icon-book{float:none;line-height:30px}.rst-versions.rst-badge.shift-up .rst-current-version{text-align:right}.rst-versions.rst-badge.shift-up .rst-current-version .fa-book,.rst-versions.rst-badge.shift-up .rst-current-version .icon-book{float:left}.rst-versions.rst-badge>.rst-current-version{width:auto;height:30px;line-height:30px;padding:0 6px;display:block;text-align:center}@media screen and 
(max-width:768px){.rst-versions{width:85%;display:none}.rst-versions.shift{display:block}}.rst-content .toctree-wrapper>p.caption,.rst-content h1,.rst-content h2,.rst-content h3,.rst-content h4,.rst-content h5,.rst-content h6{margin-bottom:24px}.rst-content img{max-width:100%;height:auto}.rst-content div.figure,.rst-content figure{margin-bottom:24px}.rst-content div.figure .caption-text,.rst-content figure .caption-text{font-style:italic}.rst-content div.figure p:last-child.caption,.rst-content figure p:last-child.caption{margin-bottom:0}.rst-content div.figure.align-center,.rst-content figure.align-center{text-align:center}.rst-content .section>a>img,.rst-content .section>img,.rst-content section>a>img,.rst-content section>img{margin-bottom:24px}.rst-content abbr[title]{text-decoration:none}.rst-content.style-external-links a.reference.external:after{font-family:FontAwesome;content:"\f08e";color:#b3b3b3;vertical-align:super;font-size:60%;margin:0 .2em}.rst-content blockquote{margin-left:24px;line-height:24px;margin-bottom:24px}.rst-content pre.literal-block{white-space:pre;margin:0;padding:12px;font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;display:block;overflow:auto}.rst-content div[class^=highlight],.rst-content pre.literal-block{border:1px solid #e1e4e5;overflow-x:auto;margin:1px 0 24px}.rst-content div[class^=highlight] div[class^=highlight],.rst-content pre.literal-block div[class^=highlight]{padding:0;border:none;margin:0}.rst-content div[class^=highlight] td.code{width:100%}.rst-content .linenodiv pre{border-right:1px solid #e6e9ea;margin:0;padding:12px;font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;user-select:none;pointer-events:none}.rst-content div[class^=highlight] pre{white-space:pre;margin:0;padding:12px;display:block;overflow:auto}.rst-content div[class^=highlight] pre .hll{display:block;margin:0 -12px;padding:0 12px}.rst-content .linenodiv pre,.rst-content div[class^=highlight] pre,.rst-content pre.literal-block{font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;font-size:12px;line-height:1.4}.rst-content div.highlight .gp,.rst-content div.highlight span.linenos{user-select:none;pointer-events:none}.rst-content div.highlight span.linenos{display:inline-block;padding-left:0;padding-right:12px;margin-right:12px;border-right:1px solid #e6e9ea}.rst-content .code-block-caption{font-style:italic;font-size:85%;line-height:1;padding:1em 0;text-align:center}@media print{.rst-content .codeblock,.rst-content div[class^=highlight],.rst-content div[class^=highlight] pre{white-space:pre-wrap}}.rst-content .admonition,.rst-content .admonition-todo,.rst-content .attention,.rst-content .caution,.rst-content .danger,.rst-content .error,.rst-content .hint,.rst-content .important,.rst-content .note,.rst-content .seealso,.rst-content .tip,.rst-content .warning{clear:both}.rst-content .admonition-todo .last,.rst-content .admonition-todo>:last-child,.rst-content .admonition .last,.rst-content .admonition>:last-child,.rst-content .attention .last,.rst-content .attention>:last-child,.rst-content .caution .last,.rst-content .caution>:last-child,.rst-content .danger .last,.rst-content .danger>:last-child,.rst-content .error .last,.rst-content .error>:last-child,.rst-content .hint .last,.rst-content .hint>:last-child,.rst-content .important .last,.rst-content .important>:last-child,.rst-content .note .last,.rst-content .note>:last-child,.rst-content .seealso 
.last,.rst-content .seealso>:last-child,.rst-content .tip .last,.rst-content .tip>:last-child,.rst-content .warning .last,.rst-content .warning>:last-child{margin-bottom:0}.rst-content .admonition-title:before{margin-right:4px}.rst-content .admonition table{border-color:rgba(0,0,0,.1)}.rst-content .admonition table td,.rst-content .admonition table th{background:transparent!important;border-color:rgba(0,0,0,.1)!important}.rst-content .section ol.loweralpha,.rst-content .section ol.loweralpha>li,.rst-content .toctree-wrapper ol.loweralpha,.rst-content .toctree-wrapper ol.loweralpha>li,.rst-content section ol.loweralpha,.rst-content section ol.loweralpha>li{list-style:lower-alpha}.rst-content .section ol.upperalpha,.rst-content .section ol.upperalpha>li,.rst-content .toctree-wrapper ol.upperalpha,.rst-content .toctree-wrapper ol.upperalpha>li,.rst-content section ol.upperalpha,.rst-content section ol.upperalpha>li{list-style:upper-alpha}.rst-content .section ol li>*,.rst-content .section ul li>*,.rst-content .toctree-wrapper ol li>*,.rst-content .toctree-wrapper ul li>*,.rst-content section ol li>*,.rst-content section ul li>*{margin-top:12px;margin-bottom:12px}.rst-content .section ol li>:first-child,.rst-content .section ul li>:first-child,.rst-content .toctree-wrapper ol li>:first-child,.rst-content .toctree-wrapper ul li>:first-child,.rst-content section ol li>:first-child,.rst-content section ul li>:first-child{margin-top:0}.rst-content .section ol li>p,.rst-content .section ol li>p:last-child,.rst-content .section ul li>p,.rst-content .section ul li>p:last-child,.rst-content .toctree-wrapper ol li>p,.rst-content .toctree-wrapper ol li>p:last-child,.rst-content .toctree-wrapper ul li>p,.rst-content .toctree-wrapper ul li>p:last-child,.rst-content section ol li>p,.rst-content section ol li>p:last-child,.rst-content section ul li>p,.rst-content section ul li>p:last-child{margin-bottom:12px}.rst-content .section ol li>p:only-child,.rst-content .section ol li>p:only-child:last-child,.rst-content .section ul li>p:only-child,.rst-content .section ul li>p:only-child:last-child,.rst-content .toctree-wrapper ol li>p:only-child,.rst-content .toctree-wrapper ol li>p:only-child:last-child,.rst-content .toctree-wrapper ul li>p:only-child,.rst-content .toctree-wrapper ul li>p:only-child:last-child,.rst-content section ol li>p:only-child,.rst-content section ol li>p:only-child:last-child,.rst-content section ul li>p:only-child,.rst-content section ul li>p:only-child:last-child{margin-bottom:0}.rst-content .section ol li>ol,.rst-content .section ol li>ul,.rst-content .section ul li>ol,.rst-content .section ul li>ul,.rst-content .toctree-wrapper ol li>ol,.rst-content .toctree-wrapper ol li>ul,.rst-content .toctree-wrapper ul li>ol,.rst-content .toctree-wrapper ul li>ul,.rst-content section ol li>ol,.rst-content section ol li>ul,.rst-content section ul li>ol,.rst-content section ul li>ul{margin-bottom:12px}.rst-content .section ol.simple li>*,.rst-content .section ol.simple li ol,.rst-content .section ol.simple li ul,.rst-content .section ul.simple li>*,.rst-content .section ul.simple li ol,.rst-content .section ul.simple li ul,.rst-content .toctree-wrapper ol.simple li>*,.rst-content .toctree-wrapper ol.simple li ol,.rst-content .toctree-wrapper ol.simple li ul,.rst-content .toctree-wrapper ul.simple li>*,.rst-content .toctree-wrapper ul.simple li ol,.rst-content .toctree-wrapper ul.simple li ul,.rst-content section ol.simple li>*,.rst-content section ol.simple li ol,.rst-content section ol.simple li 
ul,.rst-content section ul.simple li>*,.rst-content section ul.simple li ol,.rst-content section ul.simple li ul{margin-top:0;margin-bottom:0}.rst-content .line-block{margin-left:0;margin-bottom:24px;line-height:24px}.rst-content .line-block .line-block{margin-left:24px;margin-bottom:0}.rst-content .topic-title{font-weight:700;margin-bottom:12px}.rst-content .toc-backref{color:#404040}.rst-content .align-right{float:right;margin:0 0 24px 24px}.rst-content .align-left{float:left;margin:0 24px 24px 0}.rst-content .align-center{margin:auto}.rst-content .align-center:not(table){display:block}.rst-content .code-block-caption .headerlink,.rst-content .eqno .headerlink,.rst-content .toctree-wrapper>p.caption .headerlink,.rst-content dl dt .headerlink,.rst-content h1 .headerlink,.rst-content h2 .headerlink,.rst-content h3 .headerlink,.rst-content h4 .headerlink,.rst-content h5 .headerlink,.rst-content h6 .headerlink,.rst-content p.caption .headerlink,.rst-content p .headerlink,.rst-content table>caption .headerlink{opacity:0;font-size:14px;font-family:FontAwesome;margin-left:.5em}.rst-content .code-block-caption .headerlink:focus,.rst-content .code-block-caption:hover .headerlink,.rst-content .eqno .headerlink:focus,.rst-content .eqno:hover .headerlink,.rst-content .toctree-wrapper>p.caption .headerlink:focus,.rst-content .toctree-wrapper>p.caption:hover .headerlink,.rst-content dl dt .headerlink:focus,.rst-content dl dt:hover .headerlink,.rst-content h1 .headerlink:focus,.rst-content h1:hover .headerlink,.rst-content h2 .headerlink:focus,.rst-content h2:hover .headerlink,.rst-content h3 .headerlink:focus,.rst-content h3:hover .headerlink,.rst-content h4 .headerlink:focus,.rst-content h4:hover .headerlink,.rst-content h5 .headerlink:focus,.rst-content h5:hover .headerlink,.rst-content h6 .headerlink:focus,.rst-content h6:hover .headerlink,.rst-content p.caption .headerlink:focus,.rst-content p.caption:hover .headerlink,.rst-content p .headerlink:focus,.rst-content p:hover .headerlink,.rst-content table>caption .headerlink:focus,.rst-content table>caption:hover .headerlink{opacity:1}.rst-content p a{overflow-wrap:anywhere}.rst-content .wy-table td p,.rst-content .wy-table td ul,.rst-content .wy-table th p,.rst-content .wy-table th ul,.rst-content table.docutils td p,.rst-content table.docutils td ul,.rst-content table.docutils th p,.rst-content table.docutils th ul,.rst-content table.field-list td p,.rst-content table.field-list td ul,.rst-content table.field-list th p,.rst-content table.field-list th ul{font-size:inherit}.rst-content .btn:focus{outline:2px solid}.rst-content table>caption .headerlink:after{font-size:12px}.rst-content .centered{text-align:center}.rst-content .sidebar{float:right;width:40%;display:block;margin:0 0 24px 24px;padding:24px;background:#f3f6f6;border:1px solid #e1e4e5}.rst-content .sidebar dl,.rst-content .sidebar p,.rst-content .sidebar ul{font-size:90%}.rst-content .sidebar .last,.rst-content .sidebar>:last-child{margin-bottom:0}.rst-content .sidebar .sidebar-title{display:block;font-family:Roboto Slab,ff-tisa-web-pro,Georgia,Arial,sans-serif;font-weight:700;background:#e1e4e5;padding:6px 12px;margin:-24px -24px 24px;font-size:100%}.rst-content .highlighted{background:#f1c40f;box-shadow:0 0 0 2px #f1c40f;display:inline;font-weight:700}.rst-content .citation-reference,.rst-content .footnote-reference{vertical-align:baseline;position:relative;top:-.4em;line-height:0;font-size:90%}.rst-content .citation-reference>span.fn-bracket,.rst-content 
.footnote-reference>span.fn-bracket{display:none}.rst-content .hlist{width:100%}.rst-content dl dt span.classifier:before{content:" : "}.rst-content dl dt span.classifier-delimiter{display:none!important}html.writer-html4 .rst-content table.docutils.citation,html.writer-html4 .rst-content table.docutils.footnote{background:none;border:none}html.writer-html4 .rst-content table.docutils.citation td,html.writer-html4 .rst-content table.docutils.citation tr,html.writer-html4 .rst-content table.docutils.footnote td,html.writer-html4 .rst-content table.docutils.footnote tr{border:none;background-color:transparent!important;white-space:normal}html.writer-html4 .rst-content table.docutils.citation td.label,html.writer-html4 .rst-content table.docutils.footnote td.label{padding-left:0;padding-right:0;vertical-align:top}html.writer-html5 .rst-content dl.citation,html.writer-html5 .rst-content dl.field-list,html.writer-html5 .rst-content dl.footnote{display:grid;grid-template-columns:auto minmax(80%,95%)}html.writer-html5 .rst-content dl.citation>dt,html.writer-html5 .rst-content dl.field-list>dt,html.writer-html5 .rst-content dl.footnote>dt{display:inline-grid;grid-template-columns:max-content auto}html.writer-html5 .rst-content aside.citation,html.writer-html5 .rst-content aside.footnote,html.writer-html5 .rst-content div.citation{display:grid;grid-template-columns:auto auto minmax(.65rem,auto) minmax(40%,95%)}html.writer-html5 .rst-content aside.citation>span.label,html.writer-html5 .rst-content aside.footnote>span.label,html.writer-html5 .rst-content div.citation>span.label{grid-column-start:1;grid-column-end:2}html.writer-html5 .rst-content aside.citation>span.backrefs,html.writer-html5 .rst-content aside.footnote>span.backrefs,html.writer-html5 .rst-content div.citation>span.backrefs{grid-column-start:2;grid-column-end:3;grid-row-start:1;grid-row-end:3}html.writer-html5 .rst-content aside.citation>p,html.writer-html5 .rst-content aside.footnote>p,html.writer-html5 .rst-content div.citation>p{grid-column-start:4;grid-column-end:5}html.writer-html5 .rst-content dl.citation,html.writer-html5 .rst-content dl.field-list,html.writer-html5 .rst-content dl.footnote{margin-bottom:24px}html.writer-html5 .rst-content dl.citation>dt,html.writer-html5 .rst-content dl.field-list>dt,html.writer-html5 .rst-content dl.footnote>dt{padding-left:1rem}html.writer-html5 .rst-content dl.citation>dd,html.writer-html5 .rst-content dl.citation>dt,html.writer-html5 .rst-content dl.field-list>dd,html.writer-html5 .rst-content dl.field-list>dt,html.writer-html5 .rst-content dl.footnote>dd,html.writer-html5 .rst-content dl.footnote>dt{margin-bottom:0}html.writer-html5 .rst-content dl.citation,html.writer-html5 .rst-content dl.footnote{font-size:.9rem}html.writer-html5 .rst-content dl.citation>dt,html.writer-html5 .rst-content dl.footnote>dt{margin:0 .5rem .5rem 0;line-height:1.2rem;word-break:break-all;font-weight:400}html.writer-html5 .rst-content dl.citation>dt>span.brackets:before,html.writer-html5 .rst-content dl.footnote>dt>span.brackets:before{content:"["}html.writer-html5 .rst-content dl.citation>dt>span.brackets:after,html.writer-html5 .rst-content dl.footnote>dt>span.brackets:after{content:"]"}html.writer-html5 .rst-content dl.citation>dt>span.fn-backref,html.writer-html5 .rst-content dl.footnote>dt>span.fn-backref{text-align:left;font-style:italic;margin-left:.65rem;word-break:break-word;word-spacing:-.1rem;max-width:5rem}html.writer-html5 .rst-content dl.citation>dt>span.fn-backref>a,html.writer-html5 
.rst-content dl.footnote>dt>span.fn-backref>a{word-break:keep-all}html.writer-html5 .rst-content dl.citation>dt>span.fn-backref>a:not(:first-child):before,html.writer-html5 .rst-content dl.footnote>dt>span.fn-backref>a:not(:first-child):before{content:" "}html.writer-html5 .rst-content dl.citation>dd,html.writer-html5 .rst-content dl.footnote>dd{margin:0 0 .5rem;line-height:1.2rem}html.writer-html5 .rst-content dl.citation>dd p,html.writer-html5 .rst-content dl.footnote>dd p{font-size:.9rem}html.writer-html5 .rst-content aside.citation,html.writer-html5 .rst-content aside.footnote,html.writer-html5 .rst-content div.citation{padding-left:1rem;padding-right:1rem;font-size:.9rem;line-height:1.2rem}html.writer-html5 .rst-content aside.citation p,html.writer-html5 .rst-content aside.footnote p,html.writer-html5 .rst-content div.citation p{font-size:.9rem;line-height:1.2rem;margin-bottom:12px}html.writer-html5 .rst-content aside.citation span.backrefs,html.writer-html5 .rst-content aside.footnote span.backrefs,html.writer-html5 .rst-content div.citation span.backrefs{text-align:left;font-style:italic;margin-left:.65rem;word-break:break-word;word-spacing:-.1rem;max-width:5rem}html.writer-html5 .rst-content aside.citation span.backrefs>a,html.writer-html5 .rst-content aside.footnote span.backrefs>a,html.writer-html5 .rst-content div.citation span.backrefs>a{word-break:keep-all}html.writer-html5 .rst-content aside.citation span.backrefs>a:not(:first-child):before,html.writer-html5 .rst-content aside.footnote span.backrefs>a:not(:first-child):before,html.writer-html5 .rst-content div.citation span.backrefs>a:not(:first-child):before{content:" "}html.writer-html5 .rst-content aside.citation span.label,html.writer-html5 .rst-content aside.footnote span.label,html.writer-html5 .rst-content div.citation span.label{line-height:1.2rem}html.writer-html5 .rst-content aside.citation-list,html.writer-html5 .rst-content aside.footnote-list,html.writer-html5 .rst-content div.citation-list{margin-bottom:24px}html.writer-html5 .rst-content dl.option-list kbd{font-size:.9rem}.rst-content table.docutils.footnote,html.writer-html4 .rst-content table.docutils.citation,html.writer-html5 .rst-content aside.footnote,html.writer-html5 .rst-content aside.footnote-list aside.footnote,html.writer-html5 .rst-content div.citation-list>div.citation,html.writer-html5 .rst-content dl.citation,html.writer-html5 .rst-content dl.footnote{color:grey}.rst-content table.docutils.footnote code,.rst-content table.docutils.footnote tt,html.writer-html4 .rst-content table.docutils.citation code,html.writer-html4 .rst-content table.docutils.citation tt,html.writer-html5 .rst-content aside.footnote-list aside.footnote code,html.writer-html5 .rst-content aside.footnote-list aside.footnote tt,html.writer-html5 .rst-content aside.footnote code,html.writer-html5 .rst-content aside.footnote tt,html.writer-html5 .rst-content div.citation-list>div.citation code,html.writer-html5 .rst-content div.citation-list>div.citation tt,html.writer-html5 .rst-content dl.citation code,html.writer-html5 .rst-content dl.citation tt,html.writer-html5 .rst-content dl.footnote code,html.writer-html5 .rst-content dl.footnote tt{color:#555}.rst-content .wy-table-responsive.citation,.rst-content .wy-table-responsive.footnote{margin-bottom:0}.rst-content .wy-table-responsive.citation+:not(.citation),.rst-content .wy-table-responsive.footnote+:not(.footnote){margin-top:24px}.rst-content .wy-table-responsive.citation:last-child,.rst-content 
.wy-table-responsive.footnote:last-child{margin-bottom:24px}.rst-content table.docutils th{border-color:#e1e4e5}html.writer-html5 .rst-content table.docutils th{border:1px solid #e1e4e5}html.writer-html5 .rst-content table.docutils td>p,html.writer-html5 .rst-content table.docutils th>p{line-height:1rem;margin-bottom:0;font-size:.9rem}.rst-content table.docutils td .last,.rst-content table.docutils td .last>:last-child{margin-bottom:0}.rst-content table.field-list,.rst-content table.field-list td{border:none}.rst-content table.field-list td p{line-height:inherit}.rst-content table.field-list td>strong{display:inline-block}.rst-content table.field-list .field-name{padding-right:10px;text-align:left;white-space:nowrap}.rst-content table.field-list .field-body{text-align:left}.rst-content code,.rst-content tt{color:#000;font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;padding:2px 5px}.rst-content code big,.rst-content code em,.rst-content tt big,.rst-content tt em{font-size:100%!important;line-height:normal}.rst-content code.literal,.rst-content tt.literal{color:#e74c3c;white-space:normal}.rst-content code.xref,.rst-content tt.xref,a .rst-content code,a .rst-content tt{font-weight:700;color:#404040;overflow-wrap:normal}.rst-content kbd,.rst-content pre,.rst-content samp{font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace}.rst-content a code,.rst-content a tt{color:#2980b9}.rst-content dl{margin-bottom:24px}.rst-content dl dt{font-weight:700;margin-bottom:12px}.rst-content dl ol,.rst-content dl p,.rst-content dl table,.rst-content dl ul{margin-bottom:12px}.rst-content dl dd{margin:0 0 12px 24px;line-height:24px}.rst-content dl dd>ol:last-child,.rst-content dl dd>p:last-child,.rst-content dl dd>table:last-child,.rst-content dl dd>ul:last-child{margin-bottom:0}html.writer-html4 .rst-content dl:not(.docutils),html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple){margin-bottom:24px}html.writer-html4 .rst-content dl:not(.docutils)>dt,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt{display:table;margin:6px 0;font-size:90%;line-height:normal;background:#e7f2fa;color:#2980b9;border-top:3px solid #6ab0de;padding:6px;position:relative}html.writer-html4 .rst-content dl:not(.docutils)>dt:before,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt:before{color:#6ab0de}html.writer-html4 .rst-content dl:not(.docutils)>dt .headerlink,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt .headerlink{color:#404040;font-size:100%!important}html.writer-html4 .rst-content dl:not(.docutils) dl:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) dl:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt{margin-bottom:6px;border:none;border-left:3px solid #ccc;background:#f0f0f0;color:#555}html.writer-html4 .rst-content dl:not(.docutils) dl:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt .headerlink,html.writer-html5 .rst-content 
dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) dl:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt .headerlink{color:#404040;font-size:100%!important}html.writer-html4 .rst-content dl:not(.docutils)>dt:first-child,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt:first-child{margin-top:0}html.writer-html4 .rst-content dl:not(.docutils) code.descclassname,html.writer-html4 .rst-content dl:not(.docutils) code.descname,html.writer-html4 .rst-content dl:not(.docutils) tt.descclassname,html.writer-html4 .rst-content dl:not(.docutils) tt.descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) code.descclassname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) code.descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) tt.descclassname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) tt.descname{background-color:transparent;border:none;padding:0;font-size:100%!important}html.writer-html4 .rst-content dl:not(.docutils) code.descname,html.writer-html4 .rst-content dl:not(.docutils) tt.descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) code.descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) tt.descname{font-weight:700}html.writer-html4 .rst-content dl:not(.docutils) .optional,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .optional{display:inline-block;padding:0 4px;color:#000;font-weight:700}html.writer-html4 .rst-content dl:not(.docutils) .property,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .property{display:inline-block;padding-right:8px;max-width:100%}html.writer-html4 .rst-content dl:not(.docutils) .k,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .k{font-style:italic}html.writer-html4 .rst-content dl:not(.docutils) .descclassname,html.writer-html4 .rst-content dl:not(.docutils) .descname,html.writer-html4 .rst-content dl:not(.docutils) .sig-name,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .descclassname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .sig-name{font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;color:#000}.rst-content .viewcode-back,.rst-content .viewcode-link{display:inline-block;color:#27ae60;font-size:80%;padding-left:24px}.rst-content .viewcode-back{display:block;float:right}.rst-content p.rubric{margin-bottom:12px;font-weight:700}.rst-content 
code.download,.rst-content tt.download{background:inherit;padding:inherit;font-weight:400;font-family:inherit;font-size:inherit;color:inherit;border:inherit;white-space:inherit}.rst-content code.download span:first-child,.rst-content tt.download span:first-child{-webkit-font-smoothing:subpixel-antialiased}.rst-content code.download span:first-child:before,.rst-content tt.download span:first-child:before{margin-right:4px}.rst-content .guilabel{border:1px solid #7fbbe3;background:#e7f2fa;font-size:80%;font-weight:700;border-radius:4px;padding:2.4px 6px;margin:auto 2px}.rst-content :not(dl.option-list)>:not(dt):not(kbd):not(.kbd)>.kbd,.rst-content :not(dl.option-list)>:not(dt):not(kbd):not(.kbd)>kbd{color:inherit;font-size:80%;background-color:#fff;border:1px solid #a6a6a6;border-radius:4px;box-shadow:0 2px grey;padding:2.4px 6px;margin:auto 0}.rst-content .versionmodified{font-style:italic}@media screen and (max-width:480px){.rst-content .sidebar{width:100%}}span[id*=MathJax-Span]{color:#404040}.math{text-align:center}@font-face{font-family:Lato;src:url(fonts/lato-normal.woff2?bd03a2cc277bbbc338d464e679fe9942) format("woff2"),url(fonts/lato-normal.woff?27bd77b9162d388cb8d4c4217c7c5e2a) format("woff");font-weight:400;font-style:normal;font-display:block}@font-face{font-family:Lato;src:url(fonts/lato-bold.woff2?cccb897485813c7c256901dbca54ecf2) format("woff2"),url(fonts/lato-bold.woff?d878b6c29b10beca227e9eef4246111b) format("woff");font-weight:700;font-style:normal;font-display:block}@font-face{font-family:Lato;src:url(fonts/lato-bold-italic.woff2?0b6bb6725576b072c5d0b02ecdd1900d) format("woff2"),url(fonts/lato-bold-italic.woff?9c7e4e9eb485b4a121c760e61bc3707c) format("woff");font-weight:700;font-style:italic;font-display:block}@font-face{font-family:Lato;src:url(fonts/lato-normal-italic.woff2?4eb103b4d12be57cb1d040ed5e162e9d) format("woff2"),url(fonts/lato-normal-italic.woff?f28f2d6482446544ef1ea1ccc6dd5892) format("woff");font-weight:400;font-style:italic;font-display:block}@font-face{font-family:Roboto Slab;font-style:normal;font-weight:400;src:url(fonts/Roboto-Slab-Regular.woff2?7abf5b8d04d26a2cafea937019bca958) format("woff2"),url(fonts/Roboto-Slab-Regular.woff?c1be9284088d487c5e3ff0a10a92e58c) format("woff");font-display:block}@font-face{font-family:Roboto Slab;font-style:normal;font-weight:700;src:url(fonts/Roboto-Slab-Bold.woff2?9984f4a9bda09be08e83f2506954adbe) format("woff2"),url(fonts/Roboto-Slab-Bold.woff?bed5564a116b05148e3b3bea6fb1162a) format("woff");font-display:block} diff --git a/css/theme_extra.css b/css/theme_extra.css new file mode 100644 index 00000000..ab0631a1 --- /dev/null +++ b/css/theme_extra.css @@ -0,0 +1,197 @@ +/* + * Wrap inline code samples, otherwise they shoot off the side and + * can't be read at all. + * + * https://github.com/mkdocs/mkdocs/issues/313 + * https://github.com/mkdocs/mkdocs/issues/233 + * https://github.com/mkdocs/mkdocs/issues/834 + */ +.rst-content code { + white-space: pre-wrap; + word-wrap: break-word; + padding: 2px 5px; +} + +/** + * Make code blocks display as blocks and give them the appropriate + * font size and padding.
+ * + * https://github.com/mkdocs/mkdocs/issues/855 + * https://github.com/mkdocs/mkdocs/issues/834 + * https://github.com/mkdocs/mkdocs/issues/233 + */ +.rst-content pre code { + white-space: pre; + word-wrap: normal; + display: block; + padding: 12px; + font-size: 12px; +} + +/** + * Fix code colors + * + * https://github.com/mkdocs/mkdocs/issues/2027 + */ +.rst-content code { + color: #E74C3C; +} + +.rst-content pre code { + color: #000; + background: #f8f8f8; +} + +/* + * Fix link colors when the link text is inline code. + * + * https://github.com/mkdocs/mkdocs/issues/718 + */ +a code { + color: #2980B9; +} +a:hover code { + color: #3091d1; +} +a:visited code { + color: #9B59B6; +} + +/* + * The CSS classes from highlight.js seem to clash with the + * ReadTheDocs theme causing some code to be incorrectly made + * bold and italic. + * + * https://github.com/mkdocs/mkdocs/issues/411 + */ +pre .cs, pre .c { + font-weight: inherit; + font-style: inherit; +} + +/* + * Fix some issues with the theme and non-highlighted code + * samples. Without any highlighting styles attached, the + * formatting is broken. + * + * https://github.com/mkdocs/mkdocs/issues/319 + */ +.rst-content .no-highlight { + display: block; + padding: 0.5em; + color: #333; +} + + +/* + * Additions specific to the search functionality provided by MkDocs + */ + +.search-results { + margin-top: 23px; +} + +.search-results article { + border-top: 1px solid #E1E4E5; + padding-top: 24px; +} + +.search-results article:first-child { + border-top: none; +} + +form .search-query { + width: 100%; + border-radius: 50px; + padding: 6px 12px; + border-color: #D1D4D5; +} + +/* + * Improve inline code blocks within admonitions. + * + * https://github.com/mkdocs/mkdocs/issues/656 + */ + .rst-content .admonition code { + color: #404040; + border: 1px solid #c7c9cb; + border: 1px solid rgba(0, 0, 0, 0.2); + background: #f8fbfd; + background: rgba(255, 255, 255, 0.7); +} + +/* + * Account for wide tables which go off the side. + * Override borders to avoid weirdness on narrow tables. + * + * https://github.com/mkdocs/mkdocs/issues/834 + * https://github.com/mkdocs/mkdocs/pull/1034 + */ +.rst-content .section .docutils { + width: 100%; + overflow: auto; + display: block; + border: none; +} + +td, th { + border: 1px solid #e1e4e5 !important; + border-collapse: collapse; +} + +/* + * Without the following amendments, the navigation in the theme will be + * slightly cut off. This is due to the fact that the .wy-nav-side has a + * padding-bottom of 2em, which does not necessarily align with the font-size of + * 90% on the .rst-current-version container, combined with the padding of 12px + * above and below. These amendments fix this in two steps: First, make sure the + * .rst-current-version container has a fixed height of 40px, achieved using + * line-height, and then apply a padding-bottom of 40px to this container. In + * a second step, the items within that container are re-aligned using flexbox. + * + * https://github.com/mkdocs/mkdocs/issues/2012 + */ + .wy-nav-side { + padding-bottom: 40px; +} + +/* For section-index only */ +.wy-menu-vertical .current-section p { + background-color: #e3e3e3; + color: #404040; +} + +/* + * The second step of the above amendment: Here we make sure the items are aligned + * correctly within the .rst-current-version container.
Using flexbox, we + * achieve it in such a way that it will look like the following: + * + * [No repo_name] + * Next >> // On the first page + * << Previous Next >> // On all subsequent pages + * + * [With repo_name] + * Next >> // On the first page + * << Previous Next >> // On all subsequent pages + * + * https://github.com/mkdocs/mkdocs/issues/2012 + */ +.rst-versions .rst-current-version { + padding: 0 12px; + display: flex; + font-size: initial; + justify-content: space-between; + align-items: center; + line-height: 40px; +} + +/* + * Please note that this amendment also involves removing certain inline-styles + * from the file ./mkdocs/themes/readthedocs/versions.html. + * + * https://github.com/mkdocs/mkdocs/issues/2012 + */ +.rst-current-version span { + flex: 1; + text-align: center; +} diff --git a/docker-compose.yml b/docker-compose.yml deleted file mode 100644 index fede1d66..00000000 --- a/docker-compose.yml +++ /dev/null @@ -1,11 +0,0 @@ -services: - db: - # postgres://postgres:postgres@127.0.0.1:5432/postgres - image: postgres - restart: always - environment: - - POSTGRES_USER=postgres - - POSTGRES_PASSWORD=postgres - - POSTGRES_DB=postgres - ports: - - "5432:5432" diff --git a/docs/checking_migrations.md b/docs/checking_migrations.md deleted file mode 100644 index 6f552f6e..00000000 --- a/docs/checking_migrations.md +++ /dev/null @@ -1,32 +0,0 @@ -# Linting Migrations - -Postgres Language Tools comes with a `check` command that can be integrated into your development workflow to catch problematic schema changes and encourage best practices. - -To run it, simply point it at your migrations directory. - -```sh -postgrestools check supabase/migrations -``` - -When you are setting it up in an existing project, you might want to ignore all migrations that are already applied. To do so, add `migrationsDir` and `after` to your `postgrestools.jsonc` file: - - -```json -{ - "migrations": { - "migrationsDir": "supabase/migrations", - "after": 1740868021 - } -} -``` - -Alternatively, pass them directly. - -```sh -postgrestools check supabase/migrations --migrations-dir="supabase/migrations" --after=1740868021 -``` - -This will only check migrations after the specified timestamp. - -For pre-commit hooks and when working locally, use `--staged` to only lint files that have been staged. In CI environments, you most likely want to use `--changed` to only lint files that have been changed compared to your `vcs.default_branch` configuration. If `default_branch` is not set in your `postgrestools.jsonc`, use `--since=REF` to specify the base branch to compare against. - diff --git a/docs/cli_reference.md b/docs/cli_reference.md deleted file mode 100644 index f8780789..00000000 --- a/docs/cli_reference.md +++ /dev/null @@ -1,270 +0,0 @@ -## CLI Reference - -[//]: # "BEGIN CLI_REF" - -# Command summary - -- [`postgrestools`↴](#postgrestools) - [`postgrestools version`↴](#postgrestools-version) - [`postgrestools check`↴](#postgrestools-check) - [`postgrestools start`↴](#postgrestools-start) - [`postgrestools stop`↴](#postgrestools-stop) - [`postgrestools init`↴](#postgrestools-init) - [`postgrestools lsp-proxy`↴](#postgrestools-lsp-proxy) - [`postgrestools clean`↴](#postgrestools-clean) - -## postgrestools - -PostgresTools official CLI. Use it to check the health of your project or run it to check single files.
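For example, both use cases go through the `check` subcommand (a quick sketch, assuming the `postgrestools` binary is on your `PATH`; the file and directory names are illustrative):

```sh
# lint a single file
postgrestools check myfile.sql

# lint only the staged files in a migrations directory, e.g. from a pre-commit hook
postgrestools check --staged supabase/migrations
```

The flags used here are documented under [`postgrestools check`](#postgrestools-check) below.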
- -**Usage**: **`postgrestools`** _`COMMAND ...`_ - -**Available options:** - -- **`-h`**, **`--help`** — - Prints help information -- **`-V`**, **`--version`** — - Prints version information - -**Available commands:** - -- **`version`** — - Shows the version information and quits. -- **`check`** — - Runs all checks on the requested files. -- **`start`** — - Starts the daemon server process. -- **`stop`** — - Stops the daemon server process. -- **`init`** — - Bootstraps a new project. Creates a configuration file with some defaults. -- **`lsp-proxy`** — - Acts as a server for the Language Server Protocol over stdin/stdout. -- **`clean`** — - Cleans the logs emitted by the daemon. - -## postgrestools version - -Shows the version information and quits. - -**Usage**: **`postgrestools`** **`version`** - -**Global options applied to all commands** - -- **` --colors`**=_``_ — - Set the formatting mode for markup: "off" prints everything as plain text, "force" forces the formatting of markup using ANSI even if the console output is determined to be incompatible -- **` --use-server`** — - Connect to a running instance of the daemon server. -- **` --skip-db`** — - Skip connecting to the database and only run checks that don't require a database connection. -- **` --verbose`** — - Print additional diagnostics, and some diagnostics show more information. Also, print out what files were processed and which ones were modified. -- **` --config-path`**=_`PATH`_ — - Set the file path to the configuration file, or the directory path to find `postgrestools.jsonc`. If used, it disables the default configuration file resolution. -- **` --max-diagnostics`**=_`>`_ — - Cap the number of diagnostics displayed. When `none` is provided, the limit is lifted. - [default: 20] -- **` --skip-errors`** — - Skip over files containing syntax errors instead of emitting an error diagnostic. -- **` --no-errors-on-unmatched`** — - Silence errors that would be emitted in case no files were processed during the execution of the command. -- **` --error-on-warnings`** — - Tell Postgres Tools to exit with an error code if some diagnostics emit warnings. -- **` --reporter`**=_``_ — - Allows changing how diagnostics and the summary are reported. -- **` --log-level`**=_``_ — - The level of logging. In order, from the most verbose to the least verbose: debug, info, warn, error. - - The value `none` won't show any logging. - - [default: none] - -- **` --log-kind`**=_``_ — - How the log should look. - [default: pretty] -- **` --diagnostic-level`**=_``_ — - The level of diagnostics to show. In order, from the lowest to the most important: info, warn, error. Passing `--diagnostic-level=error` will cause Postgres Tools to print only diagnostics that contain errors. - [default: info] - -**Available options:** - -- **`-h`**, **`--help`** — - Prints help information - -## postgrestools check - -Runs all checks on the requested files. - -**Usage**: **`postgrestools`** **`check`** \[**`--staged`**\] \[**`--changed`**\] \[**`--since`**=_`REF`_\] \[_`PATH`_\]... - -**The configuration that is contained inside the configuration file.** - -- **` --vcs-enabled`**=_``_ — - Whether we should integrate with the VCS client -- **` --vcs-client-kind`**=_``_ — - The kind of client. -- **` --vcs-use-ignore-file`**=_``_ — - Whether we should use the VCS ignore file. When [true], we will ignore the files specified in the ignore file. -- **` --vcs-root`**=_`PATH`_ — - The folder where we should check for VCS files.
By default, we will use the same folder where `postgrestools.jsonc` was found. - - If we can't find the configuration, it will attempt to use the current working directory. If no current working directory can be found, we won't use the VCS integration, and a diagnostic will be emitted. - -- **` --vcs-default-branch`**=_`BRANCH`_ — - The main branch of the project -- **` --files-max-size`**=_`NUMBER`_ — - The maximum allowed size for source code files in bytes. Files above this limit will be ignored for performance reasons. Defaults to 1 MiB -- **` --migrations-dir`**=_`ARG`_ — - The directory where the migration files are stored -- **` --after`**=_`ARG`_ — - Ignore any migrations before this timestamp -- **` --host`**=_`ARG`_ — - The host of the database. -- **` --port`**=_`ARG`_ — - The port of the database. -- **` --username`**=_`ARG`_ — - The username to connect to the database. -- **` --password`**=_`ARG`_ — - The password to connect to the database. -- **` --database`**=_`ARG`_ — - The name of the database. -- **` --conn_timeout_secs`**=_`ARG`_ — - The connection timeout in seconds. - [default: 10] - -**Global options applied to all commands** - -- **` --colors`**=_``_ — - Set the formatting mode for markup: "off" prints everything as plain text, "force" forces the formatting of markup using ANSI even if the console output is determined to be incompatible -- **` --use-server`** — - Connect to a running instance of the daemon server. -- **` --skip-db`** — - Skip connecting to the database and only run checks that don't require a database connection. -- **` --verbose`** — - Print additional diagnostics, and some diagnostics show more information. Also, print out what files were processed and which ones were modified. -- **` --config-path`**=_`PATH`_ — - Set the file path to the configuration file, or the directory path to find `postgrestools.jsonc`. If used, it disables the default configuration file resolution. -- **` --max-diagnostics`**=_`>`_ — - Cap the number of diagnostics displayed. When `none` is provided, the limit is lifted. - [default: 20] -- **` --skip-errors`** — - Skip over files containing syntax errors instead of emitting an error diagnostic. -- **` --no-errors-on-unmatched`** — - Silence errors that would be emitted in case no files were processed during the execution of the command. -- **` --error-on-warnings`** — - Tell Postgres Tools to exit with an error code if some diagnostics emit warnings. -- **` --reporter`**=_``_ — - Allows changing how diagnostics and the summary are reported. -- **` --log-level`**=_``_ — - The level of logging. In order, from the most verbose to the least verbose: debug, info, warn, error. - - The value `none` won't show any logging. - - [default: none] - -- **` --log-kind`**=_``_ — - How the log should look. - [default: pretty] -- **` --diagnostic-level`**=_``_ — - The level of diagnostics to show. In order, from the lowest to the most important: info, warn, error. Passing `--diagnostic-level=error` will cause Postgres Tools to print only diagnostics that contain errors. - [default: info] - -**Available positional items:** - -- _`PATH`_ — - Single file, single path or list of paths - -**Available options:** - -- **` --stdin-file-path`**=_`PATH`_ — - Use this option when you want to check code piped from `stdin`, and print the output to `stdout`. - - The file doesn't need to exist on disk, what matters is the extension of the file. Based on the extension, we know how to check the code.
- - Example: `echo 'select 1;' | postgrestools check --stdin-file-path=test.sql` - -- **` --staged`** — - When set to true, only the files that have been staged (the ones prepared to be committed) will be linted. This option should be used when working locally. -- **` --changed`** — - When set to true, only the files that have been changed compared to your `defaultBranch` configuration will be linted. This option should be used in CI environments. -- **` --since`**=_`REF`_ — - Use this to specify the base branch to compare against when you're using the --changed flag and the `defaultBranch` is not set in your `postgrestools.jsonc` -- **`-h`**, **`--help`** — - Prints help information - -## postgrestools start - -Starts the daemon server process. - -**Usage**: **`postgrestools`** **`start`** \[**`--config-path`**=_`PATH`_\] - -**Available options:** - -- **` --log-prefix-name`**=_`STRING`_ — - Allows changing the prefix applied to the file name of the logs. - Uses environment variable **`PGT_LOG_PREFIX_NAME`** - [default: server.log] -- **` --log-path`**=_`PATH`_ — - Allows changing the folder where logs are stored. - Uses environment variable **`PGT_LOG_PATH`** -- **` --config-path`**=_`PATH`_ — - Allows setting a custom file path to the configuration file, or a custom directory path to find `postgrestools.jsonc`. - Uses environment variable **`PGT_CONFIG_PATH`** -- **`-h`**, **`--help`** — - Prints help information - -## postgrestools stop - -Stops the daemon server process. - -**Usage**: **`postgrestools`** **`stop`** - -**Available options:** - -- **`-h`**, **`--help`** — - Prints help information - -## postgrestools init - -Bootstraps a new project. Creates a configuration file with some defaults. - -**Usage**: **`postgrestools`** **`init`** - -**Available options:** - -- **`-h`**, **`--help`** — - Prints help information - -## postgrestools lsp-proxy - -Acts as a server for the Language Server Protocol over stdin/stdout. - -**Usage**: **`postgrestools`** **`lsp-proxy`** \[**`--config-path`**=_`PATH`_\] - -**Available options:** - -- **` --log-prefix-name`**=_`STRING`_ — - Allows changing the prefix applied to the file name of the logs. - Uses environment variable **`PGT_LOG_PREFIX_NAME`** - [default: server.log] -- **` --log-path`**=_`PATH`_ — - Allows changing the folder where logs are stored. - Uses environment variable **`PGT_LOG_PATH`** -- **` --config-path`**=_`PATH`_ — - Allows setting a custom file path to the configuration file, or a custom directory path to find `postgrestools.jsonc`. - Uses environment variable **`PGT_CONFIG_PATH`** -- **`-h`**, **`--help`** — - Prints help information - -## postgrestools clean - -Cleans the logs emitted by the daemon. - -**Usage**: **`postgrestools`** **`clean`** - -**Available options:** - -- **`-h`**, **`--help`** — - Prints help information - -[//]: # "END CLI_REF" diff --git a/docs/env_variables.md b/docs/env_variables.md deleted file mode 100644 index a3091f48..00000000 --- a/docs/env_variables.md +++ /dev/null @@ -1,19 +0,0 @@ -## Environment Variables - -[//]: # (BEGIN ENV_VARS) - - -### `PGT_LOG_PATH` - - The directory where the daemon logs will be saved. - -### `PGT_LOG_PREFIX_NAME` - - A prefix that's added to the name of the log.
Default: `server.log`. - -### `PGT_CONFIG_PATH` - - A path to the configuration file. - - -[//]: # (END ENV_VARS) diff --git a/docs/index.md b/docs/index.md deleted file mode 100644 index 14490385..00000000 --- a/docs/index.md +++ /dev/null @@ -1,171 +0,0 @@ -![Postgres Language Server](images/pls-github.png) - -# Postgres Tools - -A collection of language tools and a Language Server Protocol (LSP) implementation for Postgres, focusing on developer experience and reliable SQL tooling. - ---- - -**Source Code**: https://github.com/supabase-community/postgres-language-server - ---- - -## Overview - -This project provides a toolchain for Postgres development. - -##### Postgres Language Server - -![LSP Demo](images/lsp-demo.gif) - -##### CLI Demo - -![CLI Demo](images/cli-demo.png) - -The toolchain is built on Postgres' own parser `libpg_query` to ensure 100% syntax compatibility. It uses a server-client architecture and is transport-agnostic. This means all features can be accessed through the [Language Server Protocol](https://microsoft.github.io/language-server-protocol/) as well as various interfaces like a CLI, HTTP APIs, or a WebAssembly module. - -The following features are implemented: - -- Autocompletion -- Syntax Error Highlighting -- Type-checking (via `EXPLAIN` error insights) -- Linter, inspired by [Squawk](https://squawkhq.com) - -We are currently focused on refining and enhancing these core features. For future plans and opportunities to contribute, please check out the issues and discussions. Any contributions are welcome! - -## Installation - -There are various ways to use the toolchain. - -### CLI - -Grab the executable for your platform from the [latest CLI release](https://github.com/supabase-community/postgres-language-server/releases/latest) on GitHub and give it execution permission: - -```sh -curl -L https://github.com/supabase-community/postgres-language-server/releases/download//postgrestools_aarch64-apple-darwin -o postgrestools -chmod +x postgrestools -``` - -Now you can use Postgres Tools by simply running `./postgrestools`. - -### NPM - -If you are using Node, you can install the CLI via NPM. Run the following command in a directory containing a `package.json` file. - -```sh -npm add --save-dev --save-exact @postgrestools/postgrestools -``` - -### VSCode - -The language server is available on the [VSCode Marketplace](https://marketplace.visualstudio.com/items?itemName=Supabase.postgrestools). It's published from [this repo](https://github.com/supabase-community/postgrestools-vscode). - -### Neovim - -You will have to install `nvim-lspconfig`, and follow the [instructions](https://github.com/neovim/nvim-lspconfig/blob/master/doc/configs.md#postgres_lsp). - -### Emacs - -The language client is available through [lsp-mode](https://github.com/emacs-lsp/lsp-mode). For more details, refer to their [manual page](https://emacs-lsp.github.io/lsp-mode/page/lsp-postgres/). - -### Zed - -The language server is available as an extension. It's published from [this repo](https://github.com/LoamStudios/zed-postgres-language-server). - -### GitHub Actions - -To use the CLI in GitHub Actions, you can install it via our [GitHub Action](https://github.com/supabase-community/postgrestools-cli-action). - -## Configuration - -We recommend creating a `postgrestools.jsonc` configuration file for each project. This eliminates repetitive CLI options and ensures consistent configuration in your editor. Some options are only available from a configuration file.
This step is optional: if you are happy with the defaults, you don’t need a configuration file. To create the `postgrestools.jsonc` file, run the `init` command in the root folder of your project: - -```sh -postgrestools init -``` - -You’ll now have a `postgrestools.jsonc` file in your directory: - -[//]: # "BEGIN DEFAULT_CONFIGURATION" - -```json -{ - "$schema": "https://pgtools.dev/schemas/0.0.0/schema.json", - "vcs": { - "enabled": false, - "clientKind": "git", - "useIgnoreFile": false - }, - "files": { - "ignore": [] - }, - "linter": { - "enabled": true, - "rules": { - "recommended": true - } - }, - "db": { - "host": "127.0.0.1", - "port": 5432, - "username": "postgres", - "password": "postgres", - "database": "postgres", - "connTimeoutSecs": 10, - "allowStatementExecutionsAgainst": ["127.0.0.1/*", "localhost/*"] - } -} -``` - -[//]: # "END DEFAULT_CONFIGURATION" - -Make sure to edit the database connection settings to connect to your local development database. To see all options, run `postgrestools --help`. - -## Usage - -You can use Postgres Tools via the command line or using a code editor that supports an LSP. - -#### Using the CLI - -The CLI exposes a simple `check` command that will run all checks on the given files or paths. - -```sh -postgrestools check myfile.sql -``` - -Make sure to check out the other options by running `postgrestools --help`. We will provide guides for specific use cases like linting migration files soon. - -#### Using the LSP Proxy - -Postgres Tools has a command called `lsp-proxy`. When executed, two processes will spawn: - -- a daemon that executes the requested operations; -- a server that functions as a proxy between the client (the editor) and the daemon; - If your editor is able to interact with a server and send [JSON-RPC](https://www.jsonrpc.org) requests, you only need to configure the editor to run that command. - -#### Using the daemon with the binary - -Using the binary via CLI is very efficient, although you won’t be able to provide logs to your users. The CLI allows you to bootstrap a daemon and then use the CLI commands through the daemon itself. -In order to do so, you first need to start a daemon process with the start command: - -```sh -postgrestools start -``` - -Then, every command needs to add the `--use-server` option, e.g.: - -```sh -echo "select 1" | postgrestools check --use-server --stdin-file-path=dummy.sql -``` - -#### Daemon logs - -The daemon saves logs in your file system. Logs are stored in a folder called `pgt-logs`. The path of this folder changes based on your operating system: - -- Linux: `~/.cache/pgt` -- Windows: `C:\Users\\AppData\Local\supabase-community\pgt\cache` -- macOS: `/Users//Library/Caches/dev.supabase-community.pgt` - -For other operating systems, you can find the folder in the system’s temporary directory. - -You can change the location of the `pgt-logs` folder via the `PGT_LOG_PATH` variable.
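To tie the pieces above together, a typical daemon session might look like the following (a sketch; `/tmp/pgt-logs` is an illustrative path, and the commands and flags are documented in the CLI reference):

```sh
# start the daemon, writing its logs to a custom folder
PGT_LOG_PATH=/tmp/pgt-logs postgrestools start

# run checks through the running daemon instead of a one-off process
echo "select 1;" | postgrestools check --use-server --stdin-file-path=dummy.sql

# stop the daemon once you are done
postgrestools stop
```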
diff --git a/docs/rule_sources.md b/docs/rule_sources.md deleted file mode 100644 index b5c1f49f..00000000 --- a/docs/rule_sources.md +++ /dev/null @@ -1,9 +0,0 @@ -## Exclusive rules -## Rules from other sources -### Squawk -| Squawk Rule Name | Rule Name | -| ---- | ---- | -| [adding-required-field](https://squawkhq.com/docs/adding-required-field) |[addingRequiredField](./rules/adding-required-field) | -| [ban-drop-column](https://squawkhq.com/docs/ban-drop-column) |[banDropColumn](./rules/ban-drop-column) | -| [ban-drop-not-null](https://squawkhq.com/docs/ban-drop-not-null) |[banDropNotNull](./rules/ban-drop-not-null) | -| [ban-drop-table](https://squawkhq.com/docs/ban-drop-table) |[banDropTable](./rules/ban-drop-table) | diff --git a/docs/rules.md b/docs/rules.md deleted file mode 100644 index 1f674af6..00000000 --- a/docs/rules.md +++ /dev/null @@ -1,22 +0,0 @@ -# Rules - -Below is the list of rules supported by Postgres Language Tools, divided by group. Here's a legend of the emojis: - -- The icon ✅ indicates that the rule is part of the recommended rules. - -[//]: # (BEGIN RULES_INDEX) - -## Safety - -Rules that detect potential safety issues in your code. - -| Rule name | Description | Properties | -| --- | --- | --- | -| [addingRequiredField](/rules/adding-required-field) | Adding a new column that is NOT NULL and has no default value to an existing table effectively makes it required. | | -| [banDropColumn](/rules/ban-drop-column) | Dropping a column may break existing clients. | ✅ | -| [banDropNotNull](/rules/ban-drop-not-null) | Dropping a NOT NULL constraint may break existing clients. | ✅ | -| [banDropTable](/rules/ban-drop-table) | Dropping a table may break existing clients. | ✅ | - -[//]: # (END RULES_INDEX) - - diff --git a/docs/rules/adding-required-field.md b/docs/rules/adding-required-field.md deleted file mode 100644 index 63dea281..00000000 --- a/docs/rules/adding-required-field.md +++ /dev/null @@ -1,39 +0,0 @@ -# addingRequiredField -**Diagnostic Category: `lint/safety/addingRequiredField`** - -**Since**: `vnext` - - -**Sources**: -- Inspired from: squawk/adding-required-field - -## Description -Adding a new column that is NOT NULL and has no default value to an existing table effectively makes it required. - -This will fail immediately upon running for any populated table. Furthermore, old application code that is unaware of this column will fail to INSERT into this table. - -Make new columns optional initially by omitting the NOT NULL constraint until all existing data and application code have been updated. Once no NULL values are written to or persisted in the database, set it to NOT NULL. -Alternatively, if using Postgres version 11 or later, add a DEFAULT value that is not volatile. This allows the column to keep its NOT NULL constraint. - -## Invalid - -alter table test add column count int not null; - -## Valid in Postgres >= 11 - -alter table test add column count int not null default 0; - -## How to configure -```json - -{ - "linter": { - "rules": { - "safety": { - "addingRequiredField": "error" - } - } - } -} - -``` diff --git a/docs/rules/ban-drop-column.md b/docs/rules/ban-drop-column.md deleted file mode 100644 index 49a0d054..00000000 --- a/docs/rules/ban-drop-column.md +++ /dev/null @@ -1,50 +0,0 @@ -# banDropColumn -**Diagnostic Category: `lint/safety/banDropColumn`** - -**Since**: `vnext` - -> [!NOTE] -> This rule is recommended. A diagnostic error will appear when linting your code.
- -**Sources**: -- Inspired from: squawk/ban-drop-column - -## Description -Dropping a column may break existing clients. - -Update your application code to no longer read or write the column. - -You can leave the column as nullable or delete the column once queries no longer select or modify the column. - -## Examples - -### Invalid - -```sql -alter table test drop column id; -``` - -```sh -code-block.sql lint/safety/banDropColumn ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ - - × Dropping a column may break existing clients. - - i You can leave the column as nullable or delete the column once queries no longer select or modify the column. - - -``` - -## How to configure -```json - -{ - "linter": { - "rules": { - "safety": { - "banDropColumn": "error" - } - } - } -} - -``` diff --git a/docs/rules/ban-drop-not-null.md b/docs/rules/ban-drop-not-null.md deleted file mode 100644 index ccf49f95..00000000 --- a/docs/rules/ban-drop-not-null.md +++ /dev/null @@ -1,50 +0,0 @@ -# banDropNotNull -**Diagnostic Category: `lint/safety/banDropNotNull`** - -**Since**: `vnext` - -> [!NOTE] -> This rule is recommended. A diagnostic error will appear when linting your code. - -**Sources**: -- Inspired from: squawk/ban-drop-not-null - -## Description -Dropping a NOT NULL constraint may break existing clients. - -Application code or code written in procedural languages like PL/SQL or PL/pgSQL may not expect NULL values for the column that was previously guaranteed to be NOT NULL and therefore may fail to process them correctly. - -You can consider using a marker value that represents NULL. Alternatively, create a new table allowing NULL values, copy the data from the old table, and create a view that filters NULL values. - -## Examples - -### Invalid - -```sql -alter table users alter column email drop not null; -``` - -```sh -code-block.sql lint/safety/banDropNotNull ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ - - × Dropping a NOT NULL constraint may break existing clients. - - i Consider using a marker value that represents NULL. Alternatively, create a new table allowing NULL values, copy the data from the old table, and create a view that filters NULL values. - - -``` - -## How to configure -```json - -{ - "linter": { - "rules": { - "safety": { - "banDropNotNull": "error" - } - } - } -} - -``` diff --git a/docs/rules/ban-drop-table.md b/docs/rules/ban-drop-table.md deleted file mode 100644 index f2f34156..00000000 --- a/docs/rules/ban-drop-table.md +++ /dev/null @@ -1,51 +0,0 @@ -# banDropTable -**Diagnostic Category: `lint/safety/banDropTable`** - -**Since**: `vnext` - -> [!NOTE] -> This rule is recommended. A diagnostic error will appear when linting your code. - -**Sources**: -- Inspired from: squawk/ban-drop-table - -## Description -Dropping a table may break existing clients. - -Update your application code to no longer read or write the table. - -Once the table is no longer needed, you can delete it by running the command "DROP TABLE mytable;". - -This command will permanently remove the table from the database and all its contents. -Be sure to back up the table before deleting it, just in case you need to restore it in the future. - -## Examples - -```sql -drop table some_table; -``` - -```sh -code-block.sql lint/safety/banDropTable ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ - - × Dropping a table may break existing clients. - - i Update your application code to no longer read or write the table, and only then delete the table. 
Be sure to create a backup. - - -``` - -## How to configure -```json - -{ - "linter": { - "rules": { - "safety": { - "banDropTable": "error" - } - } - } -} - -``` diff --git a/docs/troubleshooting.md b/docs/troubleshooting.md deleted file mode 100644 index f9d91561..00000000 --- a/docs/troubleshooting.md +++ /dev/null @@ -1,9 +0,0 @@ -## Troubleshooting - -This guide describes how to resolve common issues with Postgres Language Tools. - -### Incorrect and/or misplaced diagnostics - -We are employing pragmatic solutions to split a SQL file into statements, and they might be incorrect in certain cases. If you see diagnostics like `Unexpected token` in the middle of a valid statement, make sure to either end all statements with a semicolon, or separate them with double newlines. If there are still issues, it's most likely a bug in the change handler that goes away after reopening the file. Either way, please file an issue with sample code so we can fix the root cause. - - diff --git a/editors/code/.eslintignore b/editors/code/.eslintignore deleted file mode 100644 index 3a1e8e18..00000000 --- a/editors/code/.eslintignore +++ /dev/null @@ -1,2 +0,0 @@ -node_modules -.eslintrc.js diff --git a/editors/code/.eslintrc.js b/editors/code/.eslintrc.js deleted file mode 100644 index 9705c5f5..00000000 --- a/editors/code/.eslintrc.js +++ /dev/null @@ -1,46 +0,0 @@ -module.exports = { - env: { - es6: true, - node: true, - }, - extends: ["prettier"], - parser: "@typescript-eslint/parser", - parserOptions: { - project: true, - tsconfigRootDir: __dirname, - sourceType: "module", - }, - plugins: ["@typescript-eslint"], - rules: { - camelcase: ["error"], - eqeqeq: ["error", "always", { null: "ignore" }], - curly: ["error", "multi-line"], - "no-console": ["error", { allow: ["warn", "error"] }], - "prefer-const": "error", - "@typescript-eslint/member-delimiter-style": [ - "error", - { - multiline: { - delimiter: "semi", - requireLast: true, - }, - singleline: { - delimiter: "semi", - requireLast: false, - }, - }, - ], - "@typescript-eslint/semi": ["error", "always"], - "@typescript-eslint/no-unnecessary-type-assertion": "error", - "@typescript-eslint/no-floating-promises": "error", - - "@typescript-eslint/consistent-type-imports": [ - "error", - { - prefer: "type-imports", - fixStyle: "inline-type-imports", - }, - ], - "@typescript-eslint/no-import-type-side-effects": "error", - }, -}; diff --git a/editors/code/.gitignore b/editors/code/.gitignore deleted file mode 100644 index 2c975a94..00000000 --- a/editors/code/.gitignore +++ /dev/null @@ -1,7 +0,0 @@ -out -node_modules -server -.vscode-test/ -*.vsix -bundle -vscode.proposed.d.ts diff --git a/editors/code/.prettierignore b/editors/code/.prettierignore deleted file mode 100644 index 13baf68d..00000000 --- a/editors/code/.prettierignore +++ /dev/null @@ -1,3 +0,0 @@ -node_modules -.vscode-test -out diff --git a/editors/code/.prettierrc.js b/editors/code/.prettierrc.js deleted file mode 100644 index dd0f7ca1..00000000 --- a/editors/code/.prettierrc.js +++ /dev/null @@ -1,8 +0,0 @@ -module.exports = { - // use 100 because it's Rustfmt's default - // https://rust-lang.github.io/rustfmt/?version=v1.4.38&search=#max_width - printWidth: 100, - singleQuote: true, - tabWidth: 4, - trailingComma: 'none' -}; diff --git a/editors/code/.vscodeignore b/editors/code/.vscodeignore deleted file mode 100644 index 09dc2705..00000000 --- a/editors/code/.vscodeignore +++ /dev/null @@ -1,14 +0,0 @@ -** -!icon.png -!language-configuration.json -!LICENSE
-!node_modules/@hpcc-js/wasm/dist/graphvizlib.wasm -!node_modules/@hpcc-js/wasm/dist/index.min.js -!node_modules/d3-graphviz/build/d3-graphviz.min.js -!node_modules/d3/dist/d3.min.js -!out/main.js -!package-lock.json -!package.json -!ra_syntax_tree.tmGrammar.json -!server -!README.md diff --git a/editors/code/LICENSE b/editors/code/LICENSE deleted file mode 100644 index 065fae0f..00000000 --- a/editors/code/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2023 Philipp Steinrötter - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/editors/code/README.md b/editors/code/README.md deleted file mode 100644 index 3491c55b..00000000 --- a/editors/code/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# postgres_lsp for VSCode - -This extension provides VSCode support for `postgres_lsp`. 
diff --git a/editors/code/package-lock.json b/editors/code/package-lock.json deleted file mode 100644 index 6d0bb526..00000000 --- a/editors/code/package-lock.json +++ /dev/null @@ -1,4005 +0,0 @@ -{ - "name": "postgres-lsp", - "version": "0.0.0-dev", - "lockfileVersion": 3, - "requires": true, - "packages": { - "": { - "name": "postgres-lsp", - "version": "0.0.0-dev", - "license": "MIT OR Apache-2.0", - "dependencies": { - "@hpcc-js/wasm": "^2.13.0", - "anser": "^2.1.1", - "d3": "^7.8.5", - "d3-graphviz": "^5.0.2", - "vscode-languageclient": "9.0.1" - }, - "devDependencies": { - "@tsconfig/strictest": "^2.0.1", - "@types/node": "~16.11.7", - "@types/vscode": "~1.75", - "@typescript-eslint/eslint-plugin": "^6.0.0", - "@typescript-eslint/parser": "^6.0.0", - "@vscode/test-electron": "^2.3.3", - "@vscode/vsce": "^2.19.0", - "esbuild": "^0.18.12", - "eslint": "^8.44.0", - "eslint-config-prettier": "^8.8.0", - "ovsx": "^0.8.2", - "prettier": "^3.0.0", - "tslib": "^2.6.0", - "typescript": "^5.1.6" - }, - "engines": { - "vscode": "^1.75.0" - } - }, - "node_modules/@aashutoshrathi/word-wrap": { - "version": "1.2.6", - "resolved": "https://registry.npmjs.org/@aashutoshrathi/word-wrap/-/word-wrap-1.2.6.tgz", - "integrity": "sha512-1Yjs2SvM8TflER/OD3cOjhWWOZb58A2t7wpE2S9XfBYTiIl+XFhQG2bjy4Pu1I+EAlCNUzRDYDdFwFYUKvXcIA==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/@esbuild/android-arm": { - "version": "0.18.12", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.18.12.tgz", - "integrity": "sha512-LIxaNIQfkFZbTLb4+cX7dozHlAbAshhFE5PKdro0l+FnCpx1GDJaQ2WMcqm+ToXKMt8p8Uojk/MFRuGyz3V5Sw==", - "cpu": [ - "arm" - ], - "dev": true, - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@esbuild/android-arm64": { - "version": "0.18.12", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.18.12.tgz", - "integrity": "sha512-BMAlczRqC/LUt2P97E4apTBbkvS9JTJnp2DKFbCwpZ8vBvXVbNdqmvzW/OsdtI/+mGr+apkkpqGM8WecLkPgrA==", - "cpu": [ - "arm64" - ], - "dev": true, - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@esbuild/android-x64": { - "version": "0.18.12", - "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.18.12.tgz", - "integrity": "sha512-zU5MyluNsykf5cOJ0LZZZjgAHbhPJ1cWfdH1ZXVMXxVMhEV0VZiZXQdwBBVvmvbF28EizeK7obG9fs+fpmS0eQ==", - "cpu": [ - "x64" - ], - "dev": true, - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@esbuild/darwin-arm64": { - "version": "0.18.12", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.18.12.tgz", - "integrity": "sha512-zUZMep7YONnp6954QOOwEBwFX9svlKd3ov6PkxKd53LGTHsp/gy7vHaPGhhjBmEpqXEXShi6dddjIkmd+NgMsA==", - "cpu": [ - "arm64" - ], - "dev": true, - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@esbuild/darwin-x64": { - "version": "0.18.12", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.18.12.tgz", - "integrity": "sha512-ohqLPc7i67yunArPj1+/FeeJ7AgwAjHqKZ512ADk3WsE3FHU9l+m5aa7NdxXr0HmN1bjDlUslBjWNbFlD9y12Q==", - "cpu": [ - "x64" - ], - "dev": true, - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@esbuild/freebsd-arm64": { - "version": "0.18.12", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.18.12.tgz", - 
"integrity": "sha512-GIIHtQXqgeOOqdG16a/A9N28GpkvjJnjYMhOnXVbn3EDJcoItdR58v/pGN31CHjyXDc8uCcRnFWmqaJt24AYJg==", - "cpu": [ - "arm64" - ], - "dev": true, - "optional": true, - "os": [ - "freebsd" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@esbuild/freebsd-x64": { - "version": "0.18.12", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.18.12.tgz", - "integrity": "sha512-zK0b9a1/0wZY+6FdOS3BpZcPc1kcx2G5yxxfEJtEUzVxI6n/FrC2Phsxj/YblPuBchhBZ/1wwn7AyEBUyNSa6g==", - "cpu": [ - "x64" - ], - "dev": true, - "optional": true, - "os": [ - "freebsd" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@esbuild/linux-arm": { - "version": "0.18.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.18.12.tgz", - "integrity": "sha512-y75OijvrBE/1XRrXq1jtrJfG26eHeMoqLJ2dwQNwviwTuTtHGCojsDO6BJNF8gU+3jTn1KzJEMETytwsFSvc+Q==", - "cpu": [ - "arm" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@esbuild/linux-arm64": { - "version": "0.18.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.18.12.tgz", - "integrity": "sha512-JKgG8Q/LL/9sw/iHHxQyVMoQYu3rU3+a5Z87DxC+wAu3engz+EmctIrV+FGOgI6gWG1z1+5nDDbXiRMGQZXqiw==", - "cpu": [ - "arm64" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@esbuild/linux-ia32": { - "version": "0.18.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.18.12.tgz", - "integrity": "sha512-yoRIAqc0B4lDIAAEFEIu9ttTRFV84iuAl0KNCN6MhKLxNPfzwCBvEMgwco2f71GxmpBcTtn7KdErueZaM2rEvw==", - "cpu": [ - "ia32" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@esbuild/linux-loong64": { - "version": "0.18.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.18.12.tgz", - "integrity": "sha512-qYgt3dHPVvf/MgbIBpJ4Sup/yb9DAopZ3a2JgMpNKIHUpOdnJ2eHBo/aQdnd8dJ21X/+sS58wxHtA9lEazYtXQ==", - "cpu": [ - "loong64" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@esbuild/linux-mips64el": { - "version": "0.18.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.18.12.tgz", - "integrity": "sha512-wHphlMLK4ufNOONqukELfVIbnGQJrHJ/mxZMMrP2jYrPgCRZhOtf0kC4yAXBwnfmULimV1qt5UJJOw4Kh13Yfg==", - "cpu": [ - "mips64el" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@esbuild/linux-ppc64": { - "version": "0.18.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.18.12.tgz", - "integrity": "sha512-TeN//1Ft20ZZW41+zDSdOI/Os1bEq5dbvBvYkberB7PHABbRcsteeoNVZFlI0YLpGdlBqohEpjrn06kv8heCJg==", - "cpu": [ - "ppc64" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@esbuild/linux-riscv64": { - "version": "0.18.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.18.12.tgz", - "integrity": "sha512-AgUebVS4DoAblBgiB2ACQ/8l4eGE5aWBb8ZXtkXHiET9mbj7GuWt3OnsIW/zX+XHJt2RYJZctbQ2S/mDjbp0UA==", - "cpu": [ - "riscv64" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@esbuild/linux-s390x": { - "version": "0.18.12", - "resolved": 
"https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.18.12.tgz", - "integrity": "sha512-dJ3Rb3Ei2u/ysSXd6pzleGtfDdc2MuzKt8qc6ls8vreP1G3B7HInX3i7gXS4BGeVd24pp0yqyS7bJ5NHaI9ing==", - "cpu": [ - "s390x" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@esbuild/linux-x64": { - "version": "0.18.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.18.12.tgz", - "integrity": "sha512-OrNJMGQbPaVyHHcDF8ybNSwu7TDOfX8NGpXCbetwOSP6txOJiWlgQnRymfC9ocR1S0Y5PW0Wb1mV6pUddqmvmQ==", - "cpu": [ - "x64" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@esbuild/netbsd-x64": { - "version": "0.18.12", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.18.12.tgz", - "integrity": "sha512-55FzVCAiwE9FK8wWeCRuvjazNRJ1QqLCYGZVB6E8RuQuTeStSwotpSW4xoRGwp3a1wUsaVCdYcj5LGCASVJmMg==", - "cpu": [ - "x64" - ], - "dev": true, - "optional": true, - "os": [ - "netbsd" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@esbuild/openbsd-x64": { - "version": "0.18.12", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.18.12.tgz", - "integrity": "sha512-qnluf8rfb6Y5Lw2tirfK2quZOBbVqmwxut7GPCIJsM8lc4AEUj9L8y0YPdLaPK0TECt4IdyBdBD/KRFKorlK3g==", - "cpu": [ - "x64" - ], - "dev": true, - "optional": true, - "os": [ - "openbsd" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@esbuild/sunos-x64": { - "version": "0.18.12", - "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.18.12.tgz", - "integrity": "sha512-+RkKpVQR7bICjTOPUpkTBTaJ4TFqQBX5Ywyd/HSdDkQGn65VPkTsR/pL4AMvuMWy+wnXgIl4EY6q4mVpJal8Kg==", - "cpu": [ - "x64" - ], - "dev": true, - "optional": true, - "os": [ - "sunos" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@esbuild/win32-arm64": { - "version": "0.18.12", - "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.18.12.tgz", - "integrity": "sha512-GNHuciv0mFM7ouzsU0+AwY+7eV4Mgo5WnbhfDCQGtpvOtD1vbOiRjPYG6dhmMoFyBjj+pNqQu2X+7DKn0KQ/Gw==", - "cpu": [ - "arm64" - ], - "dev": true, - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@esbuild/win32-ia32": { - "version": "0.18.12", - "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.18.12.tgz", - "integrity": "sha512-kR8cezhYipbbypGkaqCTWIeu4zID17gamC8YTPXYtcN3E5BhhtTnwKBn9I0PJur/T6UVwIEGYzkffNL0lFvxEw==", - "cpu": [ - "ia32" - ], - "dev": true, - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@esbuild/win32-x64": { - "version": "0.18.12", - "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.18.12.tgz", - "integrity": "sha512-O0UYQVkvfM/jO8a4OwoV0mAKSJw+mjWTAd1MJd/1FCX6uiMdLmMRPK/w6e9OQ0ob2WGxzIm9va/KG0Ja4zIOgg==", - "cpu": [ - "x64" - ], - "dev": true, - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@eslint-community/eslint-utils": { - "version": "4.4.0", - "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.4.0.tgz", - "integrity": "sha512-1/sA4dwrzBAyeUoQ6oxahHKmrZvsnLCg4RfxW3ZFGGmQkSNQPFNLV9CUEFQP1x9EYXHTo5p6xdhZM1Ne9p/AfA==", - "dev": true, - "dependencies": { - "eslint-visitor-keys": "^3.3.0" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - }, - "peerDependencies": { - "eslint": "^6.0.0 || ^7.0.0 || 
>=8.0.0" - } - }, - "node_modules/@eslint-community/regexpp": { - "version": "4.5.1", - "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.5.1.tgz", - "integrity": "sha512-Z5ba73P98O1KUYCCJTUeVpja9RcGoMdncZ6T49FCUl2lN38JtCJ+3WgIDBv0AuY4WChU5PmtJmOCTlN6FZTFKQ==", - "dev": true, - "engines": { - "node": "^12.0.0 || ^14.0.0 || >=16.0.0" - } - }, - "node_modules/@eslint/eslintrc": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.1.0.tgz", - "integrity": "sha512-Lj7DECXqIVCqnqjjHMPna4vn6GJcMgul/wuS0je9OZ9gsL0zzDpKPVtcG1HaDVc+9y+qgXneTeUMbCqXJNpH1A==", - "dev": true, - "dependencies": { - "ajv": "^6.12.4", - "debug": "^4.3.2", - "espree": "^9.6.0", - "globals": "^13.19.0", - "ignore": "^5.2.0", - "import-fresh": "^3.2.1", - "js-yaml": "^4.1.0", - "minimatch": "^3.1.2", - "strip-json-comments": "^3.1.1" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - }, - "funding": { - "url": "https://opencollective.com/eslint" - } - }, - "node_modules/@eslint/js": { - "version": "8.44.0", - "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.44.0.tgz", - "integrity": "sha512-Ag+9YM4ocKQx9AarydN0KY2j0ErMHNIocPDrVo8zAE44xLTjEtz81OdR68/cydGtk6m6jDb5Za3r2useMzYmSw==", - "dev": true, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - } - }, - "node_modules/@hpcc-js/wasm": { - "version": "2.13.0", - "resolved": "https://registry.npmjs.org/@hpcc-js/wasm/-/wasm-2.13.0.tgz", - "integrity": "sha512-MvnUPnyMlN3/2IONCXwl/SBVWIfVOFJqvw+kFfI1QcwKjNmkwTAtG+9/m3nvofTymkASUUxNULbBmRDIr2uzIA==", - "dependencies": { - "yargs": "17.7.2" - }, - "bin": { - "dot-wasm": "bin/dot-wasm.js" - } - }, - "node_modules/@humanwhocodes/config-array": { - "version": "0.11.10", - "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.10.tgz", - "integrity": "sha512-KVVjQmNUepDVGXNuoRRdmmEjruj0KfiGSbS8LVc12LMsWDQzRXJ0qdhN8L8uUigKpfEHRhlaQFY0ib1tnUbNeQ==", - "dev": true, - "dependencies": { - "@humanwhocodes/object-schema": "^1.2.1", - "debug": "^4.1.1", - "minimatch": "^3.0.5" - }, - "engines": { - "node": ">=10.10.0" - } - }, - "node_modules/@humanwhocodes/module-importer": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz", - "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==", - "dev": true, - "engines": { - "node": ">=12.22" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/nzakas" - } - }, - "node_modules/@humanwhocodes/object-schema": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz", - "integrity": "sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==", - "dev": true - }, - "node_modules/@nodelib/fs.scandir": { - "version": "2.1.5", - "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", - "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", - "dev": true, - "dependencies": { - "@nodelib/fs.stat": "2.0.5", - "run-parallel": "^1.1.9" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/@nodelib/fs.stat": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", - "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", - "dev": 
true, - "engines": { - "node": ">= 8" - } - }, - "node_modules/@nodelib/fs.walk": { - "version": "1.2.8", - "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", - "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", - "dev": true, - "dependencies": { - "@nodelib/fs.scandir": "2.1.5", - "fastq": "^1.6.0" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/@tootallnate/once": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-1.1.2.tgz", - "integrity": "sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw==", - "dev": true, - "engines": { - "node": ">= 6" - } - }, - "node_modules/@tsconfig/strictest": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/@tsconfig/strictest/-/strictest-2.0.1.tgz", - "integrity": "sha512-7JHHCbyCsGUxLd0pDbp24yz3zjxw2t673W5oAP6HCEdr/UUhaRhYd3SSnUsGCk+VnPVJVA4mXROzbhI+nyIk+w==", - "dev": true - }, - "node_modules/@types/json-schema": { - "version": "7.0.12", - "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.12.tgz", - "integrity": "sha512-Hr5Jfhc9eYOQNPYO5WLDq/n4jqijdHNlDXjuAQkkt+mWdQR+XJToOHrsD4cPaMXpn6KO7y2+wM8AZEs8VpBLVA==", - "dev": true - }, - "node_modules/@types/node": { - "version": "16.11.68", - "resolved": "https://registry.npmjs.org/@types/node/-/node-16.11.68.tgz", - "integrity": "sha512-JkRpuVz3xCNCWaeQ5EHLR/6woMbHZz/jZ7Kmc63AkU+1HxnoUugzSWMck7dsR4DvNYX8jp9wTi9K7WvnxOIQZQ==", - "dev": true - }, - "node_modules/@types/semver": { - "version": "7.5.0", - "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.5.0.tgz", - "integrity": "sha512-G8hZ6XJiHnuhQKR7ZmysCeJWE08o8T0AXtk5darsCaTVsYZhhgUrq53jizaR2FvsoeCwJhlmwTjkXBY5Pn/ZHw==", - "dev": true - }, - "node_modules/@types/vscode": { - "version": "1.75.1", - "resolved": "https://registry.npmjs.org/@types/vscode/-/vscode-1.75.1.tgz", - "integrity": "sha512-emg7wdsTFzdi+elvoyoA+Q8keEautdQHyY5LNmHVM4PTpY8JgOTVADrGVyXGepJ6dVW2OS5/xnLUWh+nZxvdiA==", - "dev": true - }, - "node_modules/@typescript-eslint/eslint-plugin": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-6.0.0.tgz", - "integrity": "sha512-xuv6ghKGoiq856Bww/yVYnXGsKa588kY3M0XK7uUW/3fJNNULKRfZfSBkMTSpqGG/8ZCXCadfh8G/z/B4aqS/A==", - "dev": true, - "dependencies": { - "@eslint-community/regexpp": "^4.5.0", - "@typescript-eslint/scope-manager": "6.0.0", - "@typescript-eslint/type-utils": "6.0.0", - "@typescript-eslint/utils": "6.0.0", - "@typescript-eslint/visitor-keys": "6.0.0", - "debug": "^4.3.4", - "grapheme-splitter": "^1.0.4", - "graphemer": "^1.4.0", - "ignore": "^5.2.4", - "natural-compare": "^1.4.0", - "natural-compare-lite": "^1.4.0", - "semver": "^7.5.0", - "ts-api-utils": "^1.0.1" - }, - "engines": { - "node": "^16.0.0 || >=18.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependencies": { - "@typescript-eslint/parser": "^6.0.0 || ^6.0.0-alpha", - "eslint": "^7.0.0 || ^8.0.0" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } - } - }, - "node_modules/@typescript-eslint/parser": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-6.0.0.tgz", - "integrity": "sha512-TNaufYSPrr1U8n+3xN+Yp9g31vQDJqhXzzPSHfQDLcaO4tU+mCfODPxCwf4H530zo7aUBE3QIdxCXamEnG04Tg==", - "dev": true, - "dependencies": { - 
"@typescript-eslint/scope-manager": "6.0.0", - "@typescript-eslint/types": "6.0.0", - "@typescript-eslint/typescript-estree": "6.0.0", - "@typescript-eslint/visitor-keys": "6.0.0", - "debug": "^4.3.4" - }, - "engines": { - "node": "^16.0.0 || >=18.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependencies": { - "eslint": "^7.0.0 || ^8.0.0" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } - } - }, - "node_modules/@typescript-eslint/scope-manager": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.0.0.tgz", - "integrity": "sha512-o4q0KHlgCZTqjuaZ25nw5W57NeykZT9LiMEG4do/ovwvOcPnDO1BI5BQdCsUkjxFyrCL0cSzLjvIMfR9uo7cWg==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "6.0.0", - "@typescript-eslint/visitor-keys": "6.0.0" - }, - "engines": { - "node": "^16.0.0 || >=18.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/type-utils": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-6.0.0.tgz", - "integrity": "sha512-ah6LJvLgkoZ/pyJ9GAdFkzeuMZ8goV6BH7eC9FPmojrnX9yNCIsfjB+zYcnex28YO3RFvBkV6rMV6WpIqkPvoQ==", - "dev": true, - "dependencies": { - "@typescript-eslint/typescript-estree": "6.0.0", - "@typescript-eslint/utils": "6.0.0", - "debug": "^4.3.4", - "ts-api-utils": "^1.0.1" - }, - "engines": { - "node": "^16.0.0 || >=18.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependencies": { - "eslint": "^7.0.0 || ^8.0.0" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } - } - }, - "node_modules/@typescript-eslint/types": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.0.0.tgz", - "integrity": "sha512-Zk9KDggyZM6tj0AJWYYKgF0yQyrcnievdhG0g5FqyU3Y2DRxJn4yWY21sJC0QKBckbsdKKjYDV2yVrrEvuTgxg==", - "dev": true, - "engines": { - "node": "^16.0.0 || >=18.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/typescript-estree": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.0.0.tgz", - "integrity": "sha512-2zq4O7P6YCQADfmJ5OTDQTP3ktajnXIRrYAtHM9ofto/CJZV3QfJ89GEaM2BNGeSr1KgmBuLhEkz5FBkS2RQhQ==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "6.0.0", - "@typescript-eslint/visitor-keys": "6.0.0", - "debug": "^4.3.4", - "globby": "^11.1.0", - "is-glob": "^4.0.3", - "semver": "^7.5.0", - "ts-api-utils": "^1.0.1" - }, - "engines": { - "node": "^16.0.0 || >=18.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } - } - }, - "node_modules/@typescript-eslint/utils": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-6.0.0.tgz", - "integrity": "sha512-SOr6l4NB6HE4H/ktz0JVVWNXqCJTOo/mHnvIte1ZhBQ0Cvd04x5uKZa3zT6tiodL06zf5xxdK8COiDvPnQ27JQ==", - "dev": true, - "dependencies": { - "@eslint-community/eslint-utils": "^4.3.0", - "@types/json-schema": "^7.0.11", - "@types/semver": "^7.3.12", - "@typescript-eslint/scope-manager": "6.0.0", - "@typescript-eslint/types": "6.0.0", - 
"@typescript-eslint/typescript-estree": "6.0.0", - "eslint-scope": "^5.1.1", - "semver": "^7.5.0" - }, - "engines": { - "node": "^16.0.0 || >=18.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependencies": { - "eslint": "^7.0.0 || ^8.0.0" - } - }, - "node_modules/@typescript-eslint/visitor-keys": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.0.0.tgz", - "integrity": "sha512-cvJ63l8c0yXdeT5POHpL0Q1cZoRcmRKFCtSjNGJxPkcP571EfZMcNbzWAc7oK3D1dRzm/V5EwtkANTZxqvuuUA==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "6.0.0", - "eslint-visitor-keys": "^3.4.1" - }, - "engines": { - "node": "^16.0.0 || >=18.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@vscode/test-electron": { - "version": "2.3.3", - "resolved": "https://registry.npmjs.org/@vscode/test-electron/-/test-electron-2.3.3.tgz", - "integrity": "sha512-hgXCkDP0ibboF1K6seqQYyHAzCURgTwHS/6QU7slhwznDLwsRwg9bhfw1CZdyUEw8vvCmlrKWnd7BlQnI0BC4w==", - "dev": true, - "dependencies": { - "http-proxy-agent": "^4.0.1", - "https-proxy-agent": "^5.0.0", - "jszip": "^3.10.1", - "semver": "^7.3.8" - }, - "engines": { - "node": ">=16" - } - }, - "node_modules/@vscode/vsce": { - "version": "2.19.0", - "resolved": "https://registry.npmjs.org/@vscode/vsce/-/vsce-2.19.0.tgz", - "integrity": "sha512-dAlILxC5ggOutcvJY24jxz913wimGiUrHaPkk16Gm9/PGFbz1YezWtrXsTKUtJws4fIlpX2UIlVlVESWq8lkfQ==", - "dev": true, - "dependencies": { - "azure-devops-node-api": "^11.0.1", - "chalk": "^2.4.2", - "cheerio": "^1.0.0-rc.9", - "commander": "^6.1.0", - "glob": "^7.0.6", - "hosted-git-info": "^4.0.2", - "jsonc-parser": "^3.2.0", - "leven": "^3.1.0", - "markdown-it": "^12.3.2", - "mime": "^1.3.4", - "minimatch": "^3.0.3", - "parse-semver": "^1.1.1", - "read": "^1.0.7", - "semver": "^5.1.0", - "tmp": "^0.2.1", - "typed-rest-client": "^1.8.4", - "url-join": "^4.0.1", - "xml2js": "^0.5.0", - "yauzl": "^2.3.1", - "yazl": "^2.2.2" - }, - "bin": { - "vsce": "vsce" - }, - "engines": { - "node": ">= 14" - }, - "optionalDependencies": { - "keytar": "^7.7.0" - } - }, - "node_modules/@vscode/vsce/node_modules/ansi-styles": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", - "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", - "dev": true, - "dependencies": { - "color-convert": "^1.9.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/@vscode/vsce/node_modules/chalk": { - "version": "2.4.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", - "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", - "dev": true, - "dependencies": { - "ansi-styles": "^3.2.1", - "escape-string-regexp": "^1.0.5", - "supports-color": "^5.3.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/@vscode/vsce/node_modules/color-convert": { - "version": "1.9.3", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", - "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", - "dev": true, - "dependencies": { - "color-name": "1.1.3" - } - }, - "node_modules/@vscode/vsce/node_modules/color-name": { - "version": "1.1.3", - "resolved": 
"https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", - "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==", - "dev": true - }, - "node_modules/@vscode/vsce/node_modules/commander": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/commander/-/commander-6.2.1.tgz", - "integrity": "sha512-U7VdrJFnJgo4xjrHpTzu0yrHPGImdsmD95ZlgYSEajAn2JKzDhDTPG9kBTefmObL2w/ngeZnilk+OV9CG3d7UA==", - "dev": true, - "engines": { - "node": ">= 6" - } - }, - "node_modules/@vscode/vsce/node_modules/escape-string-regexp": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", - "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", - "dev": true, - "engines": { - "node": ">=0.8.0" - } - }, - "node_modules/@vscode/vsce/node_modules/has-flag": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", - "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/@vscode/vsce/node_modules/semver": { - "version": "5.7.2", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz", - "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==", - "dev": true, - "bin": { - "semver": "bin/semver" - } - }, - "node_modules/@vscode/vsce/node_modules/supports-color": { - "version": "5.5.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", - "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", - "dev": true, - "dependencies": { - "has-flag": "^3.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/@vscode/vsce/node_modules/xml2js": { - "version": "0.5.0", - "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.5.0.tgz", - "integrity": "sha512-drPFnkQJik/O+uPKpqSgr22mpuFHqKdbS835iAQrUC73L2F5WkboIRd63ai/2Yg6I1jzifPFKH2NTK+cfglkIA==", - "dev": true, - "dependencies": { - "sax": ">=0.6.0", - "xmlbuilder": "~11.0.0" - }, - "engines": { - "node": ">=4.0.0" - } - }, - "node_modules/acorn": { - "version": "8.10.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.10.0.tgz", - "integrity": "sha512-F0SAmZ8iUtS//m8DmCTA0jlh6TDKkHQyK6xc6V4KDTyZKA9dnvX9/3sRTVQrWm79glUAZbnmmNcdYwUIHWVybw==", - "dev": true, - "bin": { - "acorn": "bin/acorn" - }, - "engines": { - "node": ">=0.4.0" - } - }, - "node_modules/acorn-jsx": { - "version": "5.3.2", - "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", - "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", - "dev": true, - "peerDependencies": { - "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" - } - }, - "node_modules/agent-base": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", - "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==", - "dev": true, - "dependencies": { - "debug": "4" - }, - "engines": { - "node": ">= 6.0.0" - } - }, - "node_modules/ajv": { - "version": "6.12.6", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", - "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", - "dev": true, - 
"dependencies": { - "fast-deep-equal": "^3.1.1", - "fast-json-stable-stringify": "^2.0.0", - "json-schema-traverse": "^0.4.1", - "uri-js": "^4.2.2" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/epoberezkin" - } - }, - "node_modules/anser": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/anser/-/anser-2.1.1.tgz", - "integrity": "sha512-nqLm4HxOTpeLOxcmB3QWmV5TcDFhW9y/fyQ+hivtDFcK4OQ+pQ5fzPnXHM1Mfcm0VkLtvVi1TCPr++Qy0Q/3EQ==" - }, - "node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "engines": { - "node": ">=8" - } - }, - "node_modules/ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "dependencies": { - "color-convert": "^2.0.1" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, - "node_modules/argparse": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", - "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", - "dev": true - }, - "node_modules/array-union": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", - "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/azure-devops-node-api": { - "version": "11.2.0", - "resolved": "https://registry.npmjs.org/azure-devops-node-api/-/azure-devops-node-api-11.2.0.tgz", - "integrity": "sha512-XdiGPhrpaT5J8wdERRKs5g8E0Zy1pvOYTli7z9E8nmOn3YGp4FhtjhrOyFmX/8veWCwdI69mCHKJw6l+4J/bHA==", - "dev": true, - "dependencies": { - "tunnel": "0.0.6", - "typed-rest-client": "^1.8.4" - } - }, - "node_modules/balanced-match": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", - "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" - }, - "node_modules/base64-js": { - "version": "1.5.1", - "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", - "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "optional": true - }, - "node_modules/bl": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz", - "integrity": "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==", - "dev": true, - "optional": true, - "dependencies": { - "buffer": "^5.5.0", - "inherits": "^2.0.4", - "readable-stream": "^3.4.0" - } - }, - "node_modules/bl/node_modules/readable-stream": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", - "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", - "dev": true, 
- "optional": true, - "dependencies": { - "inherits": "^2.0.3", - "string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/boolbase": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz", - "integrity": "sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww==", - "dev": true - }, - "node_modules/brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", - "dev": true, - "dependencies": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" - } - }, - "node_modules/braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", - "dev": true, - "dependencies": { - "fill-range": "^7.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/buffer": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", - "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "optional": true, - "dependencies": { - "base64-js": "^1.3.1", - "ieee754": "^1.1.13" - } - }, - "node_modules/buffer-crc32": { - "version": "0.2.13", - "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.13.tgz", - "integrity": "sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ==", - "dev": true, - "engines": { - "node": "*" - } - }, - "node_modules/call-bind": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz", - "integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==", - "dev": true, - "dependencies": { - "function-bind": "^1.1.1", - "get-intrinsic": "^1.0.2" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/callsites": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", - "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", - "dev": true, - "engines": { - "node": ">=6" - } - }, - "node_modules/chalk": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", - "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", - "dev": true, - "dependencies": { - "ansi-styles": "^4.1.0", - "supports-color": "^7.1.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/chalk?sponsor=1" - } - }, - "node_modules/cheerio": { - "version": "1.0.0-rc.12", - "resolved": "https://registry.npmjs.org/cheerio/-/cheerio-1.0.0-rc.12.tgz", - "integrity": "sha512-VqR8m68vM46BNnuZ5NtnGBKIE/DfN0cRIzg9n40EIq9NOv90ayxLBXA8fXC5gquFRGJSTRqBq25Jt2ECLR431Q==", - "dev": true, - "dependencies": { - "cheerio-select": "^2.1.0", - "dom-serializer": "^2.0.0", - "domhandler": "^5.0.3", - "domutils": "^3.0.1", - 
"htmlparser2": "^8.0.1", - "parse5": "^7.0.0", - "parse5-htmlparser2-tree-adapter": "^7.0.0" - }, - "engines": { - "node": ">= 6" - }, - "funding": { - "url": "https://github.com/cheeriojs/cheerio?sponsor=1" - } - }, - "node_modules/cheerio-select": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/cheerio-select/-/cheerio-select-2.1.0.tgz", - "integrity": "sha512-9v9kG0LvzrlcungtnJtpGNxY+fzECQKhK4EGJX2vByejiMX84MFNQw4UxPJl3bFbTMw+Dfs37XaIkCwTZfLh4g==", - "dev": true, - "dependencies": { - "boolbase": "^1.0.0", - "css-select": "^5.1.0", - "css-what": "^6.1.0", - "domelementtype": "^2.3.0", - "domhandler": "^5.0.3", - "domutils": "^3.0.1" - }, - "funding": { - "url": "https://github.com/sponsors/fb55" - } - }, - "node_modules/chownr": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/chownr/-/chownr-1.1.4.tgz", - "integrity": "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==", - "dev": true, - "optional": true - }, - "node_modules/ci-info": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-2.0.0.tgz", - "integrity": "sha512-5tK7EtrZ0N+OLFMthtqOj4fI2Jeb88C4CAZPu25LDVUgXJ0A3Js4PMGqrn0JU1W0Mh1/Z8wZzYPxqUrXeBboCQ==", - "dev": true - }, - "node_modules/cliui": { - "version": "8.0.1", - "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", - "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", - "dependencies": { - "string-width": "^4.2.0", - "strip-ansi": "^6.0.1", - "wrap-ansi": "^7.0.0" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "dependencies": { - "color-name": "~1.1.4" - }, - "engines": { - "node": ">=7.0.0" - } - }, - "node_modules/color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" - }, - "node_modules/commander": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/commander/-/commander-7.2.0.tgz", - "integrity": "sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==", - "engines": { - "node": ">= 10" - } - }, - "node_modules/concat-map": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", - "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", - "dev": true - }, - "node_modules/core-util-is": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz", - "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==", - "dev": true - }, - "node_modules/cross-spawn": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", - "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", - "dev": true, - "dependencies": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/css-select": { - "version": "5.1.0", - "resolved": 
"https://registry.npmjs.org/css-select/-/css-select-5.1.0.tgz", - "integrity": "sha512-nwoRF1rvRRnnCqqY7updORDsuqKzqYJ28+oSMaJMMgOauh3fvwHqMS7EZpIPqK8GL+g9mKxF1vP/ZjSeNjEVHg==", - "dev": true, - "dependencies": { - "boolbase": "^1.0.0", - "css-what": "^6.1.0", - "domhandler": "^5.0.2", - "domutils": "^3.0.1", - "nth-check": "^2.0.1" - }, - "funding": { - "url": "https://github.com/sponsors/fb55" - } - }, - "node_modules/css-what": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/css-what/-/css-what-6.1.0.tgz", - "integrity": "sha512-HTUrgRJ7r4dsZKU6GjmpfRK1O76h97Z8MfS1G0FozR+oF2kG6Vfe8JE6zwrkbxigziPHinCJ+gCPjA9EaBDtRw==", - "dev": true, - "engines": { - "node": ">= 6" - }, - "funding": { - "url": "https://github.com/sponsors/fb55" - } - }, - "node_modules/d3": { - "version": "7.8.5", - "resolved": "https://registry.npmjs.org/d3/-/d3-7.8.5.tgz", - "integrity": "sha512-JgoahDG51ncUfJu6wX/1vWQEqOflgXyl4MaHqlcSruTez7yhaRKR9i8VjjcQGeS2en/jnFivXuaIMnseMMt0XA==", - "dependencies": { - "d3-array": "3", - "d3-axis": "3", - "d3-brush": "3", - "d3-chord": "3", - "d3-color": "3", - "d3-contour": "4", - "d3-delaunay": "6", - "d3-dispatch": "3", - "d3-drag": "3", - "d3-dsv": "3", - "d3-ease": "3", - "d3-fetch": "3", - "d3-force": "3", - "d3-format": "3", - "d3-geo": "3", - "d3-hierarchy": "3", - "d3-interpolate": "3", - "d3-path": "3", - "d3-polygon": "3", - "d3-quadtree": "3", - "d3-random": "3", - "d3-scale": "4", - "d3-scale-chromatic": "3", - "d3-selection": "3", - "d3-shape": "3", - "d3-time": "3", - "d3-time-format": "4", - "d3-timer": "3", - "d3-transition": "3", - "d3-zoom": "3" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-array": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/d3-array/-/d3-array-3.2.2.tgz", - "integrity": "sha512-yEEyEAbDrF8C6Ob2myOBLjwBLck1Z89jMGFee0oPsn95GqjerpaOA4ch+vc2l0FNFFwMD5N7OCSEN5eAlsUbgQ==", - "dependencies": { - "internmap": "1 - 2" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-axis": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/d3-axis/-/d3-axis-3.0.0.tgz", - "integrity": "sha512-IH5tgjV4jE/GhHkRV0HiVYPDtvfjHQlQfJHs0usq7M30XcSBvOotpmH1IgkcXsO/5gEQZD43B//fc7SRT5S+xw==", - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-brush": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/d3-brush/-/d3-brush-3.0.0.tgz", - "integrity": "sha512-ALnjWlVYkXsVIGlOsuWH1+3udkYFI48Ljihfnh8FZPF2QS9o+PzGLBslO0PjzVoHLZ2KCVgAM8NVkXPJB2aNnQ==", - "dependencies": { - "d3-dispatch": "1 - 3", - "d3-drag": "2 - 3", - "d3-interpolate": "1 - 3", - "d3-selection": "3", - "d3-transition": "3" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-chord": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/d3-chord/-/d3-chord-3.0.1.tgz", - "integrity": "sha512-VE5S6TNa+j8msksl7HwjxMHDM2yNK3XCkusIlpX5kwauBfXuyLAtNg9jCp/iHH61tgI4sb6R/EIMWCqEIdjT/g==", - "dependencies": { - "d3-path": "1 - 3" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-color": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/d3-color/-/d3-color-3.1.0.tgz", - "integrity": "sha512-zg/chbXyeBtMQ1LbD/WSoW2DpC3I0mpmPdW+ynRTj/x2DAWYrIY7qeZIHidozwV24m4iavr15lNwIwLxRmOxhA==", - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-contour": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/d3-contour/-/d3-contour-4.0.2.tgz", - "integrity": "sha512-4EzFTRIikzs47RGmdxbeUvLWtGedDUNkTcmzoeyg4sP/dvCexO47AaQL7VKy/gul85TOxw+IBgA8US2xwbToNA==", - 
"dependencies": { - "d3-array": "^3.2.0" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-delaunay": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/d3-delaunay/-/d3-delaunay-6.0.2.tgz", - "integrity": "sha512-IMLNldruDQScrcfT+MWnazhHbDJhcRJyOEBAJfwQnHle1RPh6WDuLvxNArUju2VSMSUuKlY5BGHRJ2cYyoFLQQ==", - "dependencies": { - "delaunator": "5" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-dispatch": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/d3-dispatch/-/d3-dispatch-3.0.1.tgz", - "integrity": "sha512-rzUyPU/S7rwUflMyLc1ETDeBj0NRuHKKAcvukozwhshr6g6c5d8zh4c2gQjY2bZ0dXeGLWc1PF174P2tVvKhfg==", - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-drag": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/d3-drag/-/d3-drag-3.0.0.tgz", - "integrity": "sha512-pWbUJLdETVA8lQNJecMxoXfH6x+mO2UQo8rSmZ+QqxcbyA3hfeprFgIT//HW2nlHChWeIIMwS2Fq+gEARkhTkg==", - "dependencies": { - "d3-dispatch": "1 - 3", - "d3-selection": "3" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-dsv": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/d3-dsv/-/d3-dsv-3.0.1.tgz", - "integrity": "sha512-UG6OvdI5afDIFP9w4G0mNq50dSOsXHJaRE8arAS5o9ApWnIElp8GZw1Dun8vP8OyHOZ/QJUKUJwxiiCCnUwm+Q==", - "dependencies": { - "commander": "7", - "iconv-lite": "0.6", - "rw": "1" - }, - "bin": { - "csv2json": "bin/dsv2json.js", - "csv2tsv": "bin/dsv2dsv.js", - "dsv2dsv": "bin/dsv2dsv.js", - "dsv2json": "bin/dsv2json.js", - "json2csv": "bin/json2dsv.js", - "json2dsv": "bin/json2dsv.js", - "json2tsv": "bin/json2dsv.js", - "tsv2csv": "bin/dsv2dsv.js", - "tsv2json": "bin/dsv2json.js" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-ease": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/d3-ease/-/d3-ease-3.0.1.tgz", - "integrity": "sha512-wR/XK3D3XcLIZwpbvQwQ5fK+8Ykds1ip7A2Txe0yxncXSdq1L9skcG7blcedkOX+ZcgxGAmLX1FrRGbADwzi0w==", - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-fetch": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/d3-fetch/-/d3-fetch-3.0.1.tgz", - "integrity": "sha512-kpkQIM20n3oLVBKGg6oHrUchHM3xODkTzjMoj7aWQFq5QEM+R6E4WkzT5+tojDY7yjez8KgCBRoj4aEr99Fdqw==", - "dependencies": { - "d3-dsv": "1 - 3" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-force": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/d3-force/-/d3-force-3.0.0.tgz", - "integrity": "sha512-zxV/SsA+U4yte8051P4ECydjD/S+qeYtnaIyAs9tgHCqfguma/aAQDjo85A9Z6EKhBirHRJHXIgJUlffT4wdLg==", - "dependencies": { - "d3-dispatch": "1 - 3", - "d3-quadtree": "1 - 3", - "d3-timer": "1 - 3" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-format": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/d3-format/-/d3-format-3.1.0.tgz", - "integrity": "sha512-YyUI6AEuY/Wpt8KWLgZHsIU86atmikuoOmCfommt0LYHiQSPjvX2AcFc38PX0CBpr2RCyZhjex+NS/LPOv6YqA==", - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-geo": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/d3-geo/-/d3-geo-3.1.0.tgz", - "integrity": "sha512-JEo5HxXDdDYXCaWdwLRt79y7giK8SbhZJbFWXqbRTolCHFI5jRqteLzCsq51NKbUoX0PjBVSohxrx+NoOUujYA==", - "dependencies": { - "d3-array": "2.5.0 - 3" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-graphviz": { - "version": "5.0.2", - "resolved": "https://registry.npmjs.org/d3-graphviz/-/d3-graphviz-5.0.2.tgz", - "integrity": 
"sha512-EVRow9rnFgm/L1trbbnu2PGOND11IcSEdWXbrDbz9hH0/Kj3YM2AqMkkTN/EAWgawD5/zryyCy+3Vm05oSJ1Kg==", - "dependencies": { - "@hpcc-js/wasm": "2.5.0", - "d3-dispatch": "^3.0.1", - "d3-format": "^3.1.0", - "d3-interpolate": "^3.0.1", - "d3-path": "^3.1.0", - "d3-timer": "^3.0.1", - "d3-transition": "^3.0.1", - "d3-zoom": "^3.0.0" - }, - "engines": { - "node": ">=14" - }, - "peerDependencies": { - "d3-selection": "^3.0.0" - } - }, - "node_modules/d3-graphviz/node_modules/@hpcc-js/wasm": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/@hpcc-js/wasm/-/wasm-2.5.0.tgz", - "integrity": "sha512-G26BamgaHW46f6P8bmkygapgNcy+tTDMwIvCzmMzdp39sxUS1u4gaT/vR2SSDc4x3SfL5RE4B2B8ef/wd429Hg==", - "dependencies": { - "yargs": "17.6.2" - }, - "bin": { - "dot-wasm": "bin/dot-wasm.js" - } - }, - "node_modules/d3-graphviz/node_modules/yargs": { - "version": "17.6.2", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.6.2.tgz", - "integrity": "sha512-1/9UrdHjDZc0eOU0HxOHoS78C69UD3JRMvzlJ7S79S2nTaWRA/whGCTV8o9e/N/1Va9YIV7Q4sOxD8VV4pCWOw==", - "dependencies": { - "cliui": "^8.0.1", - "escalade": "^3.1.1", - "get-caller-file": "^2.0.5", - "require-directory": "^2.1.1", - "string-width": "^4.2.3", - "y18n": "^5.0.5", - "yargs-parser": "^21.1.1" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-hierarchy": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/d3-hierarchy/-/d3-hierarchy-3.1.2.tgz", - "integrity": "sha512-FX/9frcub54beBdugHjDCdikxThEqjnR93Qt7PvQTOHxyiNCAlvMrHhclk3cD5VeAaq9fxmfRp+CnWw9rEMBuA==", - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-interpolate": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/d3-interpolate/-/d3-interpolate-3.0.1.tgz", - "integrity": "sha512-3bYs1rOD33uo8aqJfKP3JWPAibgw8Zm2+L9vBKEHJ2Rg+viTR7o5Mmv5mZcieN+FRYaAOWX5SJATX6k1PWz72g==", - "dependencies": { - "d3-color": "1 - 3" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-path": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/d3-path/-/d3-path-3.1.0.tgz", - "integrity": "sha512-p3KP5HCf/bvjBSSKuXid6Zqijx7wIfNW+J/maPs+iwR35at5JCbLUT0LzF1cnjbCHWhqzQTIN2Jpe8pRebIEFQ==", - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-polygon": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/d3-polygon/-/d3-polygon-3.0.1.tgz", - "integrity": "sha512-3vbA7vXYwfe1SYhED++fPUQlWSYTTGmFmQiany/gdbiWgU/iEyQzyymwL9SkJjFFuCS4902BSzewVGsHHmHtXg==", - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-quadtree": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/d3-quadtree/-/d3-quadtree-3.0.1.tgz", - "integrity": "sha512-04xDrxQTDTCFwP5H6hRhsRcb9xxv2RzkcsygFzmkSIOJy3PeRJP7sNk3VRIbKXcog561P9oU0/rVH6vDROAgUw==", - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-random": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/d3-random/-/d3-random-3.0.1.tgz", - "integrity": "sha512-FXMe9GfxTxqd5D6jFsQ+DJ8BJS4E/fT5mqqdjovykEB2oFbTMDVdg1MGFxfQW+FBOGoB++k8swBrgwSHT1cUXQ==", - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-scale": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/d3-scale/-/d3-scale-4.0.2.tgz", - "integrity": "sha512-GZW464g1SH7ag3Y7hXjf8RoUuAFIqklOAq3MRl4OaWabTFJY9PN/E1YklhXLh+OQ3fM9yS2nOkCoS+WLZ6kvxQ==", - "dependencies": { - "d3-array": "2.10.0 - 3", - "d3-format": "1 - 3", - "d3-interpolate": "1.2.0 - 3", - "d3-time": "2.1.1 - 3", - "d3-time-format": "2 - 4" - }, - "engines": { - "node": ">=12" - } - }, - 
"node_modules/d3-scale-chromatic": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/d3-scale-chromatic/-/d3-scale-chromatic-3.0.0.tgz", - "integrity": "sha512-Lx9thtxAKrO2Pq6OO2Ua474opeziKr279P/TKZsMAhYyNDD3EnCffdbgeSYN5O7m2ByQsxtuP2CSDczNUIZ22g==", - "dependencies": { - "d3-color": "1 - 3", - "d3-interpolate": "1 - 3" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-selection": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/d3-selection/-/d3-selection-3.0.0.tgz", - "integrity": "sha512-fmTRWbNMmsmWq6xJV8D19U/gw/bwrHfNXxrIN+HfZgnzqTHp9jOmKMhsTUjXOJnZOdZY9Q28y4yebKzqDKlxlQ==", - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-shape": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/d3-shape/-/d3-shape-3.2.0.tgz", - "integrity": "sha512-SaLBuwGm3MOViRq2ABk3eLoxwZELpH6zhl3FbAoJ7Vm1gofKx6El1Ib5z23NUEhF9AsGl7y+dzLe5Cw2AArGTA==", - "dependencies": { - "d3-path": "^3.1.0" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-time": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/d3-time/-/d3-time-3.1.0.tgz", - "integrity": "sha512-VqKjzBLejbSMT4IgbmVgDjpkYrNWUYJnbCGo874u7MMKIWsILRX+OpX/gTk8MqjpT1A/c6HY2dCA77ZN0lkQ2Q==", - "dependencies": { - "d3-array": "2 - 3" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-time-format": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/d3-time-format/-/d3-time-format-4.1.0.tgz", - "integrity": "sha512-dJxPBlzC7NugB2PDLwo9Q8JiTR3M3e4/XANkreKSUxF8vvXKqm1Yfq4Q5dl8budlunRVlUUaDUgFt7eA8D6NLg==", - "dependencies": { - "d3-time": "1 - 3" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-timer": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/d3-timer/-/d3-timer-3.0.1.tgz", - "integrity": "sha512-ndfJ/JxxMd3nw31uyKoY2naivF+r29V+Lc0svZxe1JvvIRmi8hUsrMvdOwgS1o6uBHmiz91geQ0ylPP0aj1VUA==", - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-transition": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/d3-transition/-/d3-transition-3.0.1.tgz", - "integrity": "sha512-ApKvfjsSR6tg06xrL434C0WydLr7JewBB3V+/39RMHsaXTOG0zmt/OAXeng5M5LBm0ojmxJrpomQVZ1aPvBL4w==", - "dependencies": { - "d3-color": "1 - 3", - "d3-dispatch": "1 - 3", - "d3-ease": "1 - 3", - "d3-interpolate": "1 - 3", - "d3-timer": "1 - 3" - }, - "engines": { - "node": ">=12" - }, - "peerDependencies": { - "d3-selection": "2 - 3" - } - }, - "node_modules/d3-zoom": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/d3-zoom/-/d3-zoom-3.0.0.tgz", - "integrity": "sha512-b8AmV3kfQaqWAuacbPuNbL6vahnOJflOhexLzMMNLga62+/nh0JzvJ0aO/5a5MVgUFGS7Hu1P9P03o3fJkDCyw==", - "dependencies": { - "d3-dispatch": "1 - 3", - "d3-drag": "2 - 3", - "d3-interpolate": "1 - 3", - "d3-selection": "2 - 3", - "d3-transition": "2 - 3" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/debug": { - "version": "4.3.4", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", - "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", - "dev": true, - "dependencies": { - "ms": "2.1.2" - }, - "engines": { - "node": ">=6.0" - }, - "peerDependenciesMeta": { - "supports-color": { - "optional": true - } - } - }, - "node_modules/decompress-response": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-6.0.0.tgz", - "integrity": 
"sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==", - "dev": true, - "optional": true, - "dependencies": { - "mimic-response": "^3.1.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/deep-extend": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.6.0.tgz", - "integrity": "sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==", - "dev": true, - "optional": true, - "engines": { - "node": ">=4.0.0" - } - }, - "node_modules/deep-is": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", - "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", - "dev": true - }, - "node_modules/delaunator": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/delaunator/-/delaunator-5.0.0.tgz", - "integrity": "sha512-AyLvtyJdbv/U1GkiS6gUUzclRoAY4Gs75qkMygJJhU75LW4DNuSF2RMzpxs9jw9Oz1BobHjTdkG3zdP55VxAqw==", - "dependencies": { - "robust-predicates": "^3.0.0" - } - }, - "node_modules/detect-libc": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.1.tgz", - "integrity": "sha512-463v3ZeIrcWtdgIg6vI6XUncguvr2TnGl4SzDXinkt9mSLpBJKXT3mW6xT3VQdDN11+WVs29pgvivTc4Lp8v+w==", - "dev": true, - "optional": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/dir-glob": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", - "integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==", - "dev": true, - "dependencies": { - "path-type": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/doctrine": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", - "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==", - "dev": true, - "dependencies": { - "esutils": "^2.0.2" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/dom-serializer": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-2.0.0.tgz", - "integrity": "sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg==", - "dev": true, - "dependencies": { - "domelementtype": "^2.3.0", - "domhandler": "^5.0.2", - "entities": "^4.2.0" - }, - "funding": { - "url": "https://github.com/cheeriojs/dom-serializer?sponsor=1" - } - }, - "node_modules/domelementtype": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-2.3.0.tgz", - "integrity": "sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/fb55" - } - ] - }, - "node_modules/domhandler": { - "version": "5.0.3", - "resolved": "https://registry.npmjs.org/domhandler/-/domhandler-5.0.3.tgz", - "integrity": "sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w==", - "dev": true, - "dependencies": { - "domelementtype": "^2.3.0" - }, - "engines": { - "node": ">= 4" - }, - "funding": { - "url": "https://github.com/fb55/domhandler?sponsor=1" - } - }, - "node_modules/domutils": { - "version": "3.0.1", - "resolved": 
"https://registry.npmjs.org/domutils/-/domutils-3.0.1.tgz", - "integrity": "sha512-z08c1l761iKhDFtfXO04C7kTdPBLi41zwOZl00WS8b5eiaebNpY00HKbztwBq+e3vyqWNwWF3mP9YLUeqIrF+Q==", - "dev": true, - "dependencies": { - "dom-serializer": "^2.0.0", - "domelementtype": "^2.3.0", - "domhandler": "^5.0.1" - }, - "funding": { - "url": "https://github.com/fb55/domutils?sponsor=1" - } - }, - "node_modules/emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" - }, - "node_modules/end-of-stream": { - "version": "1.4.4", - "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", - "integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==", - "dev": true, - "optional": true, - "dependencies": { - "once": "^1.4.0" - } - }, - "node_modules/entities": { - "version": "4.4.0", - "resolved": "https://registry.npmjs.org/entities/-/entities-4.4.0.tgz", - "integrity": "sha512-oYp7156SP8LkeGD0GF85ad1X9Ai79WtRsZ2gxJqtBuzH+98YUV6jkHEKlZkMbcrjJjIVJNIDP/3WL9wQkoPbWA==", - "dev": true, - "engines": { - "node": ">=0.12" - }, - "funding": { - "url": "https://github.com/fb55/entities?sponsor=1" - } - }, - "node_modules/esbuild": { - "version": "0.18.12", - "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.18.12.tgz", - "integrity": "sha512-XuOVLDdtsDslXStStduT41op21Ytmf4/BDS46aa3xPJ7X5h2eMWBF1oAe3QjUH3bDksocNXgzGUZ7XHIBya6Tg==", - "dev": true, - "hasInstallScript": true, - "bin": { - "esbuild": "bin/esbuild" - }, - "engines": { - "node": ">=12" - }, - "optionalDependencies": { - "@esbuild/android-arm": "0.18.12", - "@esbuild/android-arm64": "0.18.12", - "@esbuild/android-x64": "0.18.12", - "@esbuild/darwin-arm64": "0.18.12", - "@esbuild/darwin-x64": "0.18.12", - "@esbuild/freebsd-arm64": "0.18.12", - "@esbuild/freebsd-x64": "0.18.12", - "@esbuild/linux-arm": "0.18.12", - "@esbuild/linux-arm64": "0.18.12", - "@esbuild/linux-ia32": "0.18.12", - "@esbuild/linux-loong64": "0.18.12", - "@esbuild/linux-mips64el": "0.18.12", - "@esbuild/linux-ppc64": "0.18.12", - "@esbuild/linux-riscv64": "0.18.12", - "@esbuild/linux-s390x": "0.18.12", - "@esbuild/linux-x64": "0.18.12", - "@esbuild/netbsd-x64": "0.18.12", - "@esbuild/openbsd-x64": "0.18.12", - "@esbuild/sunos-x64": "0.18.12", - "@esbuild/win32-arm64": "0.18.12", - "@esbuild/win32-ia32": "0.18.12", - "@esbuild/win32-x64": "0.18.12" - } - }, - "node_modules/escalade": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", - "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==", - "engines": { - "node": ">=6" - } - }, - "node_modules/escape-string-regexp": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", - "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", - "dev": true, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/eslint": { - "version": "8.44.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.44.0.tgz", - "integrity": "sha512-0wpHoUbDUHgNCyvFB5aXLiQVfK9B0at6gUvzy83k4kAsQ/u769TQDX6iKC+aO4upIHO9WSaA3QoXYQDHbNwf1A==", - "dev": true, - "dependencies": { - "@eslint-community/eslint-utils": "^4.2.0", - 
"@eslint-community/regexpp": "^4.4.0", - "@eslint/eslintrc": "^2.1.0", - "@eslint/js": "8.44.0", - "@humanwhocodes/config-array": "^0.11.10", - "@humanwhocodes/module-importer": "^1.0.1", - "@nodelib/fs.walk": "^1.2.8", - "ajv": "^6.10.0", - "chalk": "^4.0.0", - "cross-spawn": "^7.0.2", - "debug": "^4.3.2", - "doctrine": "^3.0.0", - "escape-string-regexp": "^4.0.0", - "eslint-scope": "^7.2.0", - "eslint-visitor-keys": "^3.4.1", - "espree": "^9.6.0", - "esquery": "^1.4.2", - "esutils": "^2.0.2", - "fast-deep-equal": "^3.1.3", - "file-entry-cache": "^6.0.1", - "find-up": "^5.0.0", - "glob-parent": "^6.0.2", - "globals": "^13.19.0", - "graphemer": "^1.4.0", - "ignore": "^5.2.0", - "import-fresh": "^3.0.0", - "imurmurhash": "^0.1.4", - "is-glob": "^4.0.0", - "is-path-inside": "^3.0.3", - "js-yaml": "^4.1.0", - "json-stable-stringify-without-jsonify": "^1.0.1", - "levn": "^0.4.1", - "lodash.merge": "^4.6.2", - "minimatch": "^3.1.2", - "natural-compare": "^1.4.0", - "optionator": "^0.9.3", - "strip-ansi": "^6.0.1", - "strip-json-comments": "^3.1.0", - "text-table": "^0.2.0" - }, - "bin": { - "eslint": "bin/eslint.js" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - }, - "funding": { - "url": "https://opencollective.com/eslint" - } - }, - "node_modules/eslint-config-prettier": { - "version": "8.8.0", - "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-8.8.0.tgz", - "integrity": "sha512-wLbQiFre3tdGgpDv67NQKnJuTlcUVYHas3k+DZCc2U2BadthoEY4B7hLPvAxaqdyOGCzuLfii2fqGph10va7oA==", - "dev": true, - "bin": { - "eslint-config-prettier": "bin/cli.js" - }, - "peerDependencies": { - "eslint": ">=7.0.0" - } - }, - "node_modules/eslint-scope": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz", - "integrity": "sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==", - "dev": true, - "dependencies": { - "esrecurse": "^4.3.0", - "estraverse": "^4.1.1" - }, - "engines": { - "node": ">=8.0.0" - } - }, - "node_modules/eslint-visitor-keys": { - "version": "3.4.1", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.1.tgz", - "integrity": "sha512-pZnmmLwYzf+kWaM/Qgrvpen51upAktaaiI01nsJD/Yr3lMOdNtq0cxkrrg16w64VtisN6okbs7Q8AfGqj4c9fA==", - "dev": true, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - }, - "funding": { - "url": "https://opencollective.com/eslint" - } - }, - "node_modules/eslint/node_modules/eslint-scope": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.2.0.tgz", - "integrity": "sha512-DYj5deGlHBfMt15J7rdtyKNq/Nqlv5KfU4iodrQ019XESsRnwXH9KAE0y3cwtUHDo2ob7CypAnCqefh6vioWRw==", - "dev": true, - "dependencies": { - "esrecurse": "^4.3.0", - "estraverse": "^5.2.0" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - }, - "funding": { - "url": "https://opencollective.com/eslint" - } - }, - "node_modules/eslint/node_modules/estraverse": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", - "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", - "dev": true, - "engines": { - "node": ">=4.0" - } - }, - "node_modules/espree": { - "version": "9.6.0", - "resolved": "https://registry.npmjs.org/espree/-/espree-9.6.0.tgz", - "integrity": "sha512-1FH/IiruXZ84tpUlm0aCUEwMl2Ho5ilqVh0VvQXw+byAz/4SAciyHLlfmL5WYqsvD38oymdUwBss0LtK8m4s/A==", - "dev": true, - 
"dependencies": { - "acorn": "^8.9.0", - "acorn-jsx": "^5.3.2", - "eslint-visitor-keys": "^3.4.1" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - }, - "funding": { - "url": "https://opencollective.com/eslint" - } - }, - "node_modules/esquery": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.5.0.tgz", - "integrity": "sha512-YQLXUplAwJgCydQ78IMJywZCceoqk1oH01OERdSAJc/7U2AylwjhSCLDEtqwg811idIS/9fIU5GjG73IgjKMVg==", - "dev": true, - "dependencies": { - "estraverse": "^5.1.0" - }, - "engines": { - "node": ">=0.10" - } - }, - "node_modules/esquery/node_modules/estraverse": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", - "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", - "dev": true, - "engines": { - "node": ">=4.0" - } - }, - "node_modules/esrecurse": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", - "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", - "dev": true, - "dependencies": { - "estraverse": "^5.2.0" - }, - "engines": { - "node": ">=4.0" - } - }, - "node_modules/esrecurse/node_modules/estraverse": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", - "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", - "dev": true, - "engines": { - "node": ">=4.0" - } - }, - "node_modules/estraverse": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", - "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==", - "dev": true, - "engines": { - "node": ">=4.0" - } - }, - "node_modules/esutils": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", - "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/expand-template": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/expand-template/-/expand-template-2.0.3.tgz", - "integrity": "sha512-XYfuKMvj4O35f/pOXLObndIRvyQ+/+6AhODh+OKWj9S9498pHHn/IMszH+gt0fBCRWMNfk1ZSp5x3AifmnI2vg==", - "dev": true, - "optional": true, - "engines": { - "node": ">=6" - } - }, - "node_modules/fast-deep-equal": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", - "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", - "dev": true - }, - "node_modules/fast-glob": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.0.tgz", - "integrity": "sha512-ChDuvbOypPuNjO8yIDf36x7BlZX1smcUMTTcyoIjycexOxd6DFsKsg21qVBzEmr3G7fUKIRy2/psii+CIUt7FA==", - "dev": true, - "dependencies": { - "@nodelib/fs.stat": "^2.0.2", - "@nodelib/fs.walk": "^1.2.3", - "glob-parent": "^5.1.2", - "merge2": "^1.3.0", - "micromatch": "^4.0.4" - }, - "engines": { - "node": ">=8.6.0" - } - }, - "node_modules/fast-glob/node_modules/glob-parent": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", - "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", - "dev": true, - 
"dependencies": { - "is-glob": "^4.0.1" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/fast-json-stable-stringify": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", - "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", - "dev": true - }, - "node_modules/fast-levenshtein": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", - "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==", - "dev": true - }, - "node_modules/fastq": { - "version": "1.15.0", - "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.15.0.tgz", - "integrity": "sha512-wBrocU2LCXXa+lWBt8RoIRD89Fi8OdABODa/kEnyeyjS5aZO5/GNvI5sEINADqP/h8M29UHTHUb53sUu5Ihqdw==", - "dev": true, - "dependencies": { - "reusify": "^1.0.4" - } - }, - "node_modules/fd-slicer": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/fd-slicer/-/fd-slicer-1.1.0.tgz", - "integrity": "sha512-cE1qsB/VwyQozZ+q1dGxR8LBYNZeofhEdUNGSMbQD3Gw2lAzX9Zb3uIU6Ebc/Fmyjo9AWWfnn0AUCHqtevs/8g==", - "dev": true, - "dependencies": { - "pend": "~1.2.0" - } - }, - "node_modules/file-entry-cache": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz", - "integrity": "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==", - "dev": true, - "dependencies": { - "flat-cache": "^3.0.4" - }, - "engines": { - "node": "^10.12.0 || >=12.0.0" - } - }, - "node_modules/fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - "dev": true, - "dependencies": { - "to-regex-range": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/find-up": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", - "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", - "dev": true, - "dependencies": { - "locate-path": "^6.0.0", - "path-exists": "^4.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/flat-cache": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.0.4.tgz", - "integrity": "sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg==", - "dev": true, - "dependencies": { - "flatted": "^3.1.0", - "rimraf": "^3.0.2" - }, - "engines": { - "node": "^10.12.0 || >=12.0.0" - } - }, - "node_modules/flatted": { - "version": "3.2.7", - "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.2.7.tgz", - "integrity": "sha512-5nqDSxl8nn5BSNxyR3n4I6eDmbolI6WT+QqR547RwxQapgjQBmtktdP+HTBb/a/zLsbzERTONyUB5pefh5TtjQ==", - "dev": true - }, - "node_modules/follow-redirects": { - "version": "1.15.6", - "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.6.tgz", - "integrity": "sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA==", - "dev": true, - "funding": [ - { - "type": "individual", - "url": "https://github.com/sponsors/RubenVerborgh" - } - ], - "engines": { - "node": ">=4.0" - }, - "peerDependenciesMeta": 
{ - "debug": { - "optional": true - } - } - }, - "node_modules/fs-constants": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/fs-constants/-/fs-constants-1.0.0.tgz", - "integrity": "sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==", - "dev": true, - "optional": true - }, - "node_modules/fs.realpath": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", - "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", - "dev": true - }, - "node_modules/function-bind": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", - "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==", - "dev": true - }, - "node_modules/get-caller-file": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", - "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", - "engines": { - "node": "6.* || 8.* || >= 10.*" - } - }, - "node_modules/get-intrinsic": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.0.tgz", - "integrity": "sha512-L049y6nFOuom5wGyRc3/gdTLO94dySVKRACj1RmJZBQXlbTMhtNIgkWkUHq+jYmZvKf14EW1EoJnnjbmoHij0Q==", - "dev": true, - "dependencies": { - "function-bind": "^1.1.1", - "has": "^1.0.3", - "has-symbols": "^1.0.3" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/github-from-package": { - "version": "0.0.0", - "resolved": "https://registry.npmjs.org/github-from-package/-/github-from-package-0.0.0.tgz", - "integrity": "sha512-SyHy3T1v2NUXn29OsWdxmK6RwHD+vkj3v8en8AOBZ1wBQ/hCAQ5bAQTD02kW4W9tUp/3Qh6J8r9EvntiyCmOOw==", - "dev": true, - "optional": true - }, - "node_modules/glob": { - "version": "7.2.3", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", - "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", - "dev": true, - "dependencies": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.1.1", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - }, - "engines": { - "node": "*" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/glob-parent": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", - "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", - "dev": true, - "dependencies": { - "is-glob": "^4.0.3" - }, - "engines": { - "node": ">=10.13.0" - } - }, - "node_modules/globals": { - "version": "13.20.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-13.20.0.tgz", - "integrity": "sha512-Qg5QtVkCy/kv3FUSlu4ukeZDVf9ee0iXLAUYX13gbR17bnejFTzr4iS9bY7kwCf1NztRNm1t91fjOiyx4CSwPQ==", - "dev": true, - "dependencies": { - "type-fest": "^0.20.2" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/globby": { - "version": "11.1.0", - "resolved": "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz", - "integrity": "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==", - "dev": true, - "dependencies": { - "array-union": "^2.1.0", - "dir-glob": "^3.0.1", 
- "fast-glob": "^3.2.9", - "ignore": "^5.2.0", - "merge2": "^1.4.1", - "slash": "^3.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/grapheme-splitter": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/grapheme-splitter/-/grapheme-splitter-1.0.4.tgz", - "integrity": "sha512-bzh50DW9kTPM00T8y4o8vQg89Di9oLJVLW/KaOGIXJWP/iqCN6WKYkbNOF04vFLJhwcpYUh9ydh/+5vpOqV4YQ==", - "dev": true - }, - "node_modules/graphemer": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz", - "integrity": "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==", - "dev": true - }, - "node_modules/has": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", - "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", - "dev": true, - "dependencies": { - "function-bind": "^1.1.1" - }, - "engines": { - "node": ">= 0.4.0" - } - }, - "node_modules/has-flag": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", - "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/has-symbols": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", - "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==", - "dev": true, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/hosted-git-info": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-4.1.0.tgz", - "integrity": "sha512-kyCuEOWjJqZuDbRHzL8V93NzQhwIB71oFWSyzVo+KPZI+pnQPPxucdkrOZvkLRnrf5URsQM+IJ09Dw29cRALIA==", - "dev": true, - "dependencies": { - "lru-cache": "^6.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/htmlparser2": { - "version": "8.0.1", - "resolved": "https://registry.npmjs.org/htmlparser2/-/htmlparser2-8.0.1.tgz", - "integrity": "sha512-4lVbmc1diZC7GUJQtRQ5yBAeUCL1exyMwmForWkRLnwyzWBFxN633SALPMGYaWZvKe9j1pRZJpauvmxENSp/EA==", - "dev": true, - "funding": [ - "https://github.com/fb55/htmlparser2?sponsor=1", - { - "type": "github", - "url": "https://github.com/sponsors/fb55" - } - ], - "dependencies": { - "domelementtype": "^2.3.0", - "domhandler": "^5.0.2", - "domutils": "^3.0.1", - "entities": "^4.3.0" - } - }, - "node_modules/http-proxy-agent": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-4.0.1.tgz", - "integrity": "sha512-k0zdNgqWTGA6aeIRVpvfVob4fL52dTfaehylg0Y4UvSySvOq/Y+BOyPrgpUrA7HylqvU8vIZGsRuXmspskV0Tg==", - "dev": true, - "dependencies": { - "@tootallnate/once": "1", - "agent-base": "6", - "debug": "4" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/https-proxy-agent": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz", - "integrity": "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==", - "dev": true, - "dependencies": { - "agent-base": "6", - "debug": "4" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/iconv-lite": { - "version": "0.6.3", - "resolved": 
"https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", - "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", - "dependencies": { - "safer-buffer": ">= 2.1.2 < 3.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/ieee754": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", - "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "optional": true - }, - "node_modules/ignore": { - "version": "5.2.4", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.2.4.tgz", - "integrity": "sha512-MAb38BcSbH0eHNBxn7ql2NH/kX33OkB3lZ1BNdh7ENeRChHTYsTvWrMubiIAMNS2llXEEgZ1MUOBtXChP3kaFQ==", - "dev": true, - "engines": { - "node": ">= 4" - } - }, - "node_modules/immediate": { - "version": "3.0.6", - "resolved": "https://registry.npmjs.org/immediate/-/immediate-3.0.6.tgz", - "integrity": "sha512-XXOFtyqDjNDAQxVfYxuF7g9Il/IbWmmlQg2MYKOH8ExIT1qg6xc4zyS3HaEEATgs1btfzxq15ciUiY7gjSXRGQ==", - "dev": true - }, - "node_modules/import-fresh": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz", - "integrity": "sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==", - "dev": true, - "dependencies": { - "parent-module": "^1.0.0", - "resolve-from": "^4.0.0" - }, - "engines": { - "node": ">=6" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/imurmurhash": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", - "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", - "dev": true, - "engines": { - "node": ">=0.8.19" - } - }, - "node_modules/inflight": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", - "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", - "dev": true, - "dependencies": { - "once": "^1.3.0", - "wrappy": "1" - } - }, - "node_modules/inherits": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", - "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", - "dev": true - }, - "node_modules/ini": { - "version": "1.3.8", - "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz", - "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==", - "dev": true, - "optional": true - }, - "node_modules/internmap": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/internmap/-/internmap-2.0.3.tgz", - "integrity": "sha512-5Hh7Y1wQbvY5ooGgPbDaL5iYLAPzMTUrjMulskHLH6wnv/A+1q5rgEaiuqEjB+oxGXIVZs1FF+R/KPN3ZSQYYg==", - "engines": { - "node": ">=12" - } - }, - "node_modules/is-ci": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/is-ci/-/is-ci-2.0.0.tgz", - "integrity": "sha512-YfJT7rkpQB0updsdHLGWrvhBJfcfzNNawYDNIyQXJz0IViGf75O8EBPKSdvw2rF+LGCsX4FZ8tcr3b19LcZq4w==", - "dev": true, - "dependencies": { - "ci-info": "^2.0.0" - }, - "bin": { - "is-ci": 
"bin.js" - } - }, - "node_modules/is-extglob": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", - "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-fullwidth-code-point": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", - "engines": { - "node": ">=8" - } - }, - "node_modules/is-glob": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", - "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", - "dev": true, - "dependencies": { - "is-extglob": "^2.1.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "dev": true, - "engines": { - "node": ">=0.12.0" - } - }, - "node_modules/is-path-inside": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz", - "integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/isarray": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", - "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==", - "dev": true - }, - "node_modules/isexe": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", - "dev": true - }, - "node_modules/js-yaml": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", - "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", - "dev": true, - "dependencies": { - "argparse": "^2.0.1" - }, - "bin": { - "js-yaml": "bin/js-yaml.js" - } - }, - "node_modules/json-schema-traverse": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", - "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", - "dev": true - }, - "node_modules/json-stable-stringify-without-jsonify": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", - "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==", - "dev": true - }, - "node_modules/jsonc-parser": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/jsonc-parser/-/jsonc-parser-3.2.0.tgz", - "integrity": "sha512-gfFQZrcTc8CnKXp6Y4/CBT3fTc0OVuDofpre4aEeEpSBPV5X5v4+Vmx+8snU7RLPrNHPKSgLxGo9YuQzz20o+w==", - "dev": true - }, - "node_modules/jszip": { - "version": "3.10.1", - "resolved": "https://registry.npmjs.org/jszip/-/jszip-3.10.1.tgz", - "integrity": 
"sha512-xXDvecyTpGLrqFrvkrUSoxxfJI5AH7U8zxxtVclpsUtMCq4JQ290LY8AW5c7Ggnr/Y/oK+bQMbqK2qmtk3pN4g==", - "dev": true, - "dependencies": { - "lie": "~3.3.0", - "pako": "~1.0.2", - "readable-stream": "~2.3.6", - "setimmediate": "^1.0.5" - } - }, - "node_modules/keytar": { - "version": "7.9.0", - "resolved": "https://registry.npmjs.org/keytar/-/keytar-7.9.0.tgz", - "integrity": "sha512-VPD8mtVtm5JNtA2AErl6Chp06JBfy7diFQ7TQQhdpWOl6MrCRB+eRbvAZUsbGQS9kiMq0coJsy0W0vHpDCkWsQ==", - "dev": true, - "hasInstallScript": true, - "optional": true, - "dependencies": { - "node-addon-api": "^4.3.0", - "prebuild-install": "^7.0.1" - } - }, - "node_modules/leven": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/leven/-/leven-3.1.0.tgz", - "integrity": "sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==", - "dev": true, - "engines": { - "node": ">=6" - } - }, - "node_modules/levn": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", - "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", - "dev": true, - "dependencies": { - "prelude-ls": "^1.2.1", - "type-check": "~0.4.0" - }, - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/lie": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/lie/-/lie-3.3.0.tgz", - "integrity": "sha512-UaiMJzeWRlEujzAuw5LokY1L5ecNQYZKfmyZ9L7wDHb/p5etKaxXhohBcrw0EYby+G/NA52vRSN4N39dxHAIwQ==", - "dev": true, - "dependencies": { - "immediate": "~3.0.5" - } - }, - "node_modules/linkify-it": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/linkify-it/-/linkify-it-3.0.3.tgz", - "integrity": "sha512-ynTsyrFSdE5oZ/O9GEf00kPngmOfVwazR5GKDq6EYfhlpFug3J2zybX56a2PRRpc9P+FuSoGNAwjlbDs9jJBPQ==", - "dev": true, - "dependencies": { - "uc.micro": "^1.0.1" - } - }, - "node_modules/locate-path": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", - "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", - "dev": true, - "dependencies": { - "p-locate": "^5.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/lodash.merge": { - "version": "4.6.2", - "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", - "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", - "dev": true - }, - "node_modules/lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/markdown-it": { - "version": "12.3.2", - "resolved": "https://registry.npmjs.org/markdown-it/-/markdown-it-12.3.2.tgz", - "integrity": "sha512-TchMembfxfNVpHkbtriWltGWc+m3xszaRD0CZup7GFFhzIgQqxIfn3eGj1yZpfuflzPvfkt611B2Q/Bsk1YnGg==", - "dev": true, - "dependencies": { - "argparse": "^2.0.1", - "entities": "~2.1.0", - "linkify-it": "^3.0.1", - "mdurl": "^1.0.1", - "uc.micro": "^1.0.5" - }, - "bin": { - "markdown-it": "bin/markdown-it.js" - } - }, - "node_modules/markdown-it/node_modules/entities": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/entities/-/entities-2.1.0.tgz", - "integrity": 
"sha512-hCx1oky9PFrJ611mf0ifBLBRW8lUUVRlFolb5gWRfIELabBlbp9xZvrqZLZAs+NxFnbfQoeGd8wDkygjg7U85w==", - "dev": true, - "funding": { - "url": "https://github.com/fb55/entities?sponsor=1" - } - }, - "node_modules/mdurl": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/mdurl/-/mdurl-1.0.1.tgz", - "integrity": "sha512-/sKlQJCBYVY9Ers9hqzKou4H6V5UWc/M59TH2dvkt+84itfnq7uFOMLpOiOS4ujvHP4etln18fmIxA5R5fll0g==", - "dev": true - }, - "node_modules/merge2": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", - "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", - "dev": true, - "engines": { - "node": ">= 8" - } - }, - "node_modules/micromatch": { - "version": "4.0.5", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz", - "integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==", - "dev": true, - "dependencies": { - "braces": "^3.0.2", - "picomatch": "^2.3.1" - }, - "engines": { - "node": ">=8.6" - } - }, - "node_modules/mime": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", - "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==", - "dev": true, - "bin": { - "mime": "cli.js" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/mimic-response": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-3.1.0.tgz", - "integrity": "sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==", - "dev": true, - "optional": true, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/minimatch": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", - "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", - "dev": true, - "dependencies": { - "brace-expansion": "^1.1.7" - }, - "engines": { - "node": "*" - } - }, - "node_modules/minimist": { - "version": "1.2.7", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.7.tgz", - "integrity": "sha512-bzfL1YUZsP41gmu/qjrEk0Q6i2ix/cVeAhbCbqH9u3zYutS1cLg00qhrD0M2MVdCcx4Sc0UpP2eBWo9rotpq6g==", - "dev": true, - "optional": true, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/mkdirp-classic": { - "version": "0.5.3", - "resolved": "https://registry.npmjs.org/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz", - "integrity": "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==", - "dev": true, - "optional": true - }, - "node_modules/ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", - "dev": true - }, - "node_modules/mute-stream": { - "version": "0.0.8", - "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.8.tgz", - "integrity": "sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA==", - "dev": true - }, - "node_modules/napi-build-utils": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/napi-build-utils/-/napi-build-utils-1.0.2.tgz", - "integrity": 
"sha512-ONmRUqK7zj7DWX0D9ADe03wbwOBZxNAfF20PlGfCWQcD3+/MakShIHrMqx9YwPTfxDdF1zLeL+RGZiR9kGMLdg==", - "dev": true, - "optional": true - }, - "node_modules/natural-compare": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", - "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==", - "dev": true - }, - "node_modules/natural-compare-lite": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/natural-compare-lite/-/natural-compare-lite-1.4.0.tgz", - "integrity": "sha512-Tj+HTDSJJKaZnfiuw+iaF9skdPpTo2GtEly5JHnWV/hfv2Qj/9RKsGISQtLh2ox3l5EAGw487hnBee0sIJ6v2g==", - "dev": true - }, - "node_modules/node-abi": { - "version": "3.31.0", - "resolved": "https://registry.npmjs.org/node-abi/-/node-abi-3.31.0.tgz", - "integrity": "sha512-eSKV6s+APenqVh8ubJyiu/YhZgxQpGP66ntzUb3lY1xB9ukSRaGnx0AIxI+IM+1+IVYC1oWobgG5L3Lt9ARykQ==", - "dev": true, - "optional": true, - "dependencies": { - "semver": "^7.3.5" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/node-addon-api": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-4.3.0.tgz", - "integrity": "sha512-73sE9+3UaLYYFmDsFZnqCInzPyh3MqIwZO9cw58yIqAZhONrrabrYyYe3TuIqtIiOuTXVhsGau8hcrhhwSsDIQ==", - "dev": true, - "optional": true - }, - "node_modules/nth-check": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/nth-check/-/nth-check-2.1.1.tgz", - "integrity": "sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w==", - "dev": true, - "dependencies": { - "boolbase": "^1.0.0" - }, - "funding": { - "url": "https://github.com/fb55/nth-check?sponsor=1" - } - }, - "node_modules/object-inspect": { - "version": "1.12.3", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.3.tgz", - "integrity": "sha512-geUvdk7c+eizMNUDkRpW1wJwgfOiOeHbxBR/hLXK1aT6zmVSO0jsQcs7fj6MGw89jC/cjGfLcNOrtMYtGqm81g==", - "dev": true, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/once": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", - "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", - "dev": true, - "dependencies": { - "wrappy": "1" - } - }, - "node_modules/optionator": { - "version": "0.9.3", - "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.3.tgz", - "integrity": "sha512-JjCoypp+jKn1ttEFExxhetCKeJt9zhAgAve5FXHixTvFDW/5aEktX9bufBKLRRMdU7bNtpLfcGu94B3cdEJgjg==", - "dev": true, - "dependencies": { - "@aashutoshrathi/word-wrap": "^1.2.3", - "deep-is": "^0.1.3", - "fast-levenshtein": "^2.0.6", - "levn": "^0.4.1", - "prelude-ls": "^1.2.1", - "type-check": "^0.4.0" - }, - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/ovsx": { - "version": "0.8.2", - "resolved": "https://registry.npmjs.org/ovsx/-/ovsx-0.8.2.tgz", - "integrity": "sha512-btDXZorXlmwN9+9Un3khrVygCXmhwbrtg8gifNXw92rZPXcRBAiLG/L09Kb6srhGEratsFt42AktfD8t9XhzoA==", - "dev": true, - "dependencies": { - "@vscode/vsce": "^2.19.0", - "commander": "^6.1.0", - "follow-redirects": "^1.14.6", - "is-ci": "^2.0.0", - "leven": "^3.1.0", - "semver": "^7.5.2", - "tmp": "^0.2.1" - }, - "bin": { - "ovsx": "lib/ovsx" - }, - "engines": { - "node": ">= 14" - } - }, - "node_modules/ovsx/node_modules/commander": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/commander/-/commander-6.2.1.tgz", - 
"integrity": "sha512-U7VdrJFnJgo4xjrHpTzu0yrHPGImdsmD95ZlgYSEajAn2JKzDhDTPG9kBTefmObL2w/ngeZnilk+OV9CG3d7UA==", - "dev": true, - "engines": { - "node": ">= 6" - } - }, - "node_modules/p-limit": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", - "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", - "dev": true, - "dependencies": { - "yocto-queue": "^0.1.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/p-locate": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", - "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", - "dev": true, - "dependencies": { - "p-limit": "^3.0.2" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/pako": { - "version": "1.0.11", - "resolved": "https://registry.npmjs.org/pako/-/pako-1.0.11.tgz", - "integrity": "sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw==", - "dev": true - }, - "node_modules/parent-module": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", - "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", - "dev": true, - "dependencies": { - "callsites": "^3.0.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/parse-semver": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/parse-semver/-/parse-semver-1.1.1.tgz", - "integrity": "sha512-Eg1OuNntBMH0ojvEKSrvDSnwLmvVuUOSdylH/pSCPNMIspLlweJyIWXCE+k/5hm3cj/EBUYwmWkjhBALNP4LXQ==", - "dev": true, - "dependencies": { - "semver": "^5.1.0" - } - }, - "node_modules/parse-semver/node_modules/semver": { - "version": "5.7.2", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz", - "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==", - "dev": true, - "bin": { - "semver": "bin/semver" - } - }, - "node_modules/parse5": { - "version": "7.1.2", - "resolved": "https://registry.npmjs.org/parse5/-/parse5-7.1.2.tgz", - "integrity": "sha512-Czj1WaSVpaoj0wbhMzLmWD69anp2WH7FXMB9n1Sy8/ZFF9jolSQVMu1Ij5WIyGmcBmhk7EOndpO4mIpihVqAXw==", - "dev": true, - "dependencies": { - "entities": "^4.4.0" - }, - "funding": { - "url": "https://github.com/inikulin/parse5?sponsor=1" - } - }, - "node_modules/parse5-htmlparser2-tree-adapter": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/parse5-htmlparser2-tree-adapter/-/parse5-htmlparser2-tree-adapter-7.0.0.tgz", - "integrity": "sha512-B77tOZrqqfUfnVcOrUvfdLbz4pu4RopLD/4vmu3HUPswwTA8OH0EMW9BlWR2B0RCoiZRAHEUu7IxeP1Pd1UU+g==", - "dev": true, - "dependencies": { - "domhandler": "^5.0.2", - "parse5": "^7.0.0" - }, - "funding": { - "url": "https://github.com/inikulin/parse5?sponsor=1" - } - }, - "node_modules/path-exists": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", - "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/path-is-absolute": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", - 
"integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/path-key": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/path-type": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", - "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/pend": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/pend/-/pend-1.2.0.tgz", - "integrity": "sha512-F3asv42UuXchdzt+xXqfW1OGlVBe+mxa2mqI0pg5yAHZPvFmY3Y6drSf/GQ1A86WgWEN9Kzh/WrgKa6iGcHXLg==", - "dev": true - }, - "node_modules/picomatch": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", - "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", - "dev": true, - "engines": { - "node": ">=8.6" - }, - "funding": { - "url": "https://github.com/sponsors/jonschlinkert" - } - }, - "node_modules/prebuild-install": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/prebuild-install/-/prebuild-install-7.1.1.tgz", - "integrity": "sha512-jAXscXWMcCK8GgCoHOfIr0ODh5ai8mj63L2nWrjuAgXE6tDyYGnx4/8o/rCgU+B4JSyZBKbeZqzhtwtC3ovxjw==", - "dev": true, - "optional": true, - "dependencies": { - "detect-libc": "^2.0.0", - "expand-template": "^2.0.3", - "github-from-package": "0.0.0", - "minimist": "^1.2.3", - "mkdirp-classic": "^0.5.3", - "napi-build-utils": "^1.0.1", - "node-abi": "^3.3.0", - "pump": "^3.0.0", - "rc": "^1.2.7", - "simple-get": "^4.0.0", - "tar-fs": "^2.0.0", - "tunnel-agent": "^0.6.0" - }, - "bin": { - "prebuild-install": "bin.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/prelude-ls": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", - "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", - "dev": true, - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/prettier": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.0.0.tgz", - "integrity": "sha512-zBf5eHpwHOGPC47h0zrPyNn+eAEIdEzfywMoYn2XPi0P44Zp0tSq64rq0xAREh4auw2cJZHo9QUob+NqCQky4g==", - "dev": true, - "bin": { - "prettier": "bin/prettier.cjs" - }, - "engines": { - "node": ">=14" - }, - "funding": { - "url": "https://github.com/prettier/prettier?sponsor=1" - } - }, - "node_modules/process-nextick-args": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", - "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==", - "dev": true - }, - "node_modules/pump": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz", - "integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==", - "dev": true, - "optional": true, - "dependencies": { - "end-of-stream": "^1.1.0", - "once": "^1.3.1" - } - }, - "node_modules/punycode": { - "version": "2.3.0", - "resolved": 
"https://registry.npmjs.org/punycode/-/punycode-2.3.0.tgz", - "integrity": "sha512-rRV+zQD8tVFys26lAGR9WUuS4iUAngJScM+ZRSKtvl5tKeZ2t5bvdNFdNHBW9FWR4guGHlgmsZ1G7BSm2wTbuA==", - "dev": true, - "engines": { - "node": ">=6" - } - }, - "node_modules/qs": { - "version": "6.11.0", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz", - "integrity": "sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==", - "dev": true, - "dependencies": { - "side-channel": "^1.0.4" - }, - "engines": { - "node": ">=0.6" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/queue-microtask": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", - "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ] - }, - "node_modules/rc": { - "version": "1.2.8", - "resolved": "https://registry.npmjs.org/rc/-/rc-1.2.8.tgz", - "integrity": "sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==", - "dev": true, - "optional": true, - "dependencies": { - "deep-extend": "^0.6.0", - "ini": "~1.3.0", - "minimist": "^1.2.0", - "strip-json-comments": "~2.0.1" - }, - "bin": { - "rc": "cli.js" - } - }, - "node_modules/rc/node_modules/strip-json-comments": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", - "integrity": "sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ==", - "dev": true, - "optional": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/read": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/read/-/read-1.0.7.tgz", - "integrity": "sha512-rSOKNYUmaxy0om1BNjMN4ezNT6VKK+2xF4GBhc81mkH7L60i6dp8qPYrkndNLT3QPphoII3maL9PVC9XmhHwVQ==", - "dev": true, - "dependencies": { - "mute-stream": "~0.0.4" - }, - "engines": { - "node": ">=0.8" - } - }, - "node_modules/readable-stream": { - "version": "2.3.8", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", - "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", - "dev": true, - "dependencies": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": "~5.1.1", - "string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" - } - }, - "node_modules/require-directory": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", - "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/resolve-from": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", - "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/reusify": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", - "integrity": 
"sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==", - "dev": true, - "engines": { - "iojs": ">=1.0.0", - "node": ">=0.10.0" - } - }, - "node_modules/rimraf": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", - "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", - "dev": true, - "dependencies": { - "glob": "^7.1.3" - }, - "bin": { - "rimraf": "bin.js" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/robust-predicates": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/robust-predicates/-/robust-predicates-3.0.1.tgz", - "integrity": "sha512-ndEIpszUHiG4HtDsQLeIuMvRsDnn8c8rYStabochtUeCvfuvNptb5TUbVD68LRAILPX7p9nqQGh4xJgn3EHS/g==" - }, - "node_modules/run-parallel": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", - "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "dependencies": { - "queue-microtask": "^1.2.2" - } - }, - "node_modules/rw": { - "version": "1.3.3", - "resolved": "https://registry.npmjs.org/rw/-/rw-1.3.3.tgz", - "integrity": "sha512-PdhdWy89SiZogBLaw42zdeqtRJ//zFd2PgQavcICDUgJT5oW10QCRKbJ6bg4r0/UY2M6BWd5tkxuGFRvCkgfHQ==" - }, - "node_modules/safe-buffer": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", - "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", - "dev": true - }, - "node_modules/safer-buffer": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", - "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" - }, - "node_modules/sax": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz", - "integrity": "sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw==", - "dev": true - }, - "node_modules/semver": { - "version": "7.5.4", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", - "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", - "dependencies": { - "lru-cache": "^6.0.0" - }, - "bin": { - "semver": "bin/semver.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/setimmediate": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/setimmediate/-/setimmediate-1.0.5.tgz", - "integrity": "sha512-MATJdZp8sLqDl/68LfQmbP8zKPLQNV6BIZoIgrscFDQ+RsvK/BxeDQOgyxKKoh0y/8h3BqVFnCqQ/gd+reiIXA==", - "dev": true - }, - "node_modules/shebang-command": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "dev": true, - "dependencies": { - "shebang-regex": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/shebang-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - 
"integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/side-channel": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz", - "integrity": "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==", - "dev": true, - "dependencies": { - "call-bind": "^1.0.0", - "get-intrinsic": "^1.0.2", - "object-inspect": "^1.9.0" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/simple-concat": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/simple-concat/-/simple-concat-1.0.1.tgz", - "integrity": "sha512-cSFtAPtRhljv69IK0hTVZQ+OfE9nePi/rtJmw5UjHeVyVroEqJXP1sFztKUy1qU+xvz3u/sfYJLa947b7nAN2Q==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "optional": true - }, - "node_modules/simple-get": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/simple-get/-/simple-get-4.0.1.tgz", - "integrity": "sha512-brv7p5WgH0jmQJr1ZDDfKDOSeWWg+OVypG99A/5vYGPqJ6pxiaHLy8nxtFjBA7oMa01ebA9gfh1uMCFqOuXxvA==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "optional": true, - "dependencies": { - "decompress-response": "^6.0.0", - "once": "^1.3.1", - "simple-concat": "^1.0.0" - } - }, - "node_modules/slash": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", - "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/string_decoder": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", - "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", - "dev": true, - "dependencies": { - "safe-buffer": "~5.1.0" - } - }, - "node_modules/string-width": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/strip-json-comments": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", - "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", - "dev": true, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - 
"node_modules/supports-color": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", - "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", - "dev": true, - "dependencies": { - "has-flag": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/tar-fs": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.1.1.tgz", - "integrity": "sha512-V0r2Y9scmbDRLCNex/+hYzvp/zyYjvFbHPNgVTKfQvVrb6guiE/fxP+XblDNR011utopbkex2nM4dHNV6GDsng==", - "dev": true, - "optional": true, - "dependencies": { - "chownr": "^1.1.1", - "mkdirp-classic": "^0.5.2", - "pump": "^3.0.0", - "tar-stream": "^2.1.4" - } - }, - "node_modules/tar-stream": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-2.2.0.tgz", - "integrity": "sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ==", - "dev": true, - "optional": true, - "dependencies": { - "bl": "^4.0.3", - "end-of-stream": "^1.4.1", - "fs-constants": "^1.0.0", - "inherits": "^2.0.3", - "readable-stream": "^3.1.1" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/tar-stream/node_modules/readable-stream": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", - "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", - "dev": true, - "optional": true, - "dependencies": { - "inherits": "^2.0.3", - "string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/text-table": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", - "integrity": "sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==", - "dev": true - }, - "node_modules/tmp": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.1.tgz", - "integrity": "sha512-76SUhtfqR2Ijn+xllcI5P1oyannHNHByD80W1q447gU3mp9G9PSpGdWmjUOHRDPiHYacIk66W7ubDTuPF3BEtQ==", - "dev": true, - "dependencies": { - "rimraf": "^3.0.0" - }, - "engines": { - "node": ">=8.17.0" - } - }, - "node_modules/to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "dev": true, - "dependencies": { - "is-number": "^7.0.0" - }, - "engines": { - "node": ">=8.0" - } - }, - "node_modules/ts-api-utils": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-1.0.1.tgz", - "integrity": "sha512-lC/RGlPmwdrIBFTX59wwNzqh7aR2otPNPR/5brHZm/XKFYKsfqxihXUe9pU3JI+3vGkl+vyCoNNnPhJn3aLK1A==", - "dev": true, - "engines": { - "node": ">=16.13.0" - }, - "peerDependencies": { - "typescript": ">=4.2.0" - } - }, - "node_modules/tslib": { - "version": "2.6.0", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.0.tgz", - "integrity": "sha512-7At1WUettjcSRHXCyYtTselblcHl9PJFFVKiCAy/bY97+BPZXSQ2wbq0P9s8tK2G7dFQfNnlJnPAiArVBVBsfA==", - "dev": true - }, - "node_modules/tunnel": { - "version": "0.0.6", - "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz", - "integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==", - "dev": true, - "engines": { - "node": ">=0.6.11 
<=0.7.0 || >=0.7.3" - } - }, - "node_modules/tunnel-agent": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", - "integrity": "sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w==", - "dev": true, - "optional": true, - "dependencies": { - "safe-buffer": "^5.0.1" - }, - "engines": { - "node": "*" - } - }, - "node_modules/type-check": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", - "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", - "dev": true, - "dependencies": { - "prelude-ls": "^1.2.1" - }, - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/type-fest": { - "version": "0.20.2", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", - "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", - "dev": true, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/typed-rest-client": { - "version": "1.8.9", - "resolved": "https://registry.npmjs.org/typed-rest-client/-/typed-rest-client-1.8.9.tgz", - "integrity": "sha512-uSmjE38B80wjL85UFX3sTYEUlvZ1JgCRhsWj/fJ4rZ0FqDUFoIuodtiVeE+cUqiVTOKPdKrp/sdftD15MDek6g==", - "dev": true, - "dependencies": { - "qs": "^6.9.1", - "tunnel": "0.0.6", - "underscore": "^1.12.1" - } - }, - "node_modules/typescript": { - "version": "5.1.6", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.1.6.tgz", - "integrity": "sha512-zaWCozRZ6DLEWAWFrVDz1H6FVXzUSfTy5FUMWsQlU8Ym5JP9eO4xkTIROFCQvhQf61z6O/G6ugw3SgAnvvm+HA==", - "dev": true, - "bin": { - "tsc": "bin/tsc", - "tsserver": "bin/tsserver" - }, - "engines": { - "node": ">=14.17" - } - }, - "node_modules/uc.micro": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/uc.micro/-/uc.micro-1.0.6.tgz", - "integrity": "sha512-8Y75pvTYkLJW2hWQHXxoqRgV7qb9B+9vFEtidML+7koHUFapnVJAZ6cKs+Qjz5Aw3aZWHMC6u0wJE3At+nSGwA==", - "dev": true - }, - "node_modules/underscore": { - "version": "1.13.6", - "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.13.6.tgz", - "integrity": "sha512-+A5Sja4HP1M08MaXya7p5LvjuM7K6q/2EaC0+iovj/wOcMsTzMvDFbasi/oSapiwOlt252IqsKqPjCl7huKS0A==", - "dev": true - }, - "node_modules/uri-js": { - "version": "4.4.1", - "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", - "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", - "dev": true, - "dependencies": { - "punycode": "^2.1.0" - } - }, - "node_modules/url-join": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/url-join/-/url-join-4.0.1.tgz", - "integrity": "sha512-jk1+QP6ZJqyOiuEI9AEWQfju/nB2Pw466kbA0LEZljHwKeMgd9WrAEgEGxjPDD2+TNbbb37rTyhEfrCXfuKXnA==", - "dev": true - }, - "node_modules/util-deprecate": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", - "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", - "dev": true - }, - "node_modules/vscode-jsonrpc": { - "version": "8.2.0", - "resolved": "https://registry.npmjs.org/vscode-jsonrpc/-/vscode-jsonrpc-8.2.0.tgz", - "integrity": "sha512-C+r0eKJUIfiDIfwJhria30+TYWPtuHJXHtI7J0YlOmKAo7ogxP20T0zxB7HZQIFhIyvoBPwWskjxrvAtfjyZfA==", - "license": "MIT", - "engines": { - "node": ">=14.0.0" - } - }, 
- "node_modules/vscode-languageclient": { - "version": "9.0.1", - "resolved": "https://registry.npmjs.org/vscode-languageclient/-/vscode-languageclient-9.0.1.tgz", - "integrity": "sha512-JZiimVdvimEuHh5olxhxkht09m3JzUGwggb5eRUkzzJhZ2KjCN0nh55VfiED9oez9DyF8/fz1g1iBV3h+0Z2EA==", - "license": "MIT", - "dependencies": { - "minimatch": "^5.1.0", - "semver": "^7.3.7", - "vscode-languageserver-protocol": "3.17.5" - }, - "engines": { - "vscode": "^1.82.0" - } - }, - "node_modules/vscode-languageclient/node_modules/brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", - "dependencies": { - "balanced-match": "^1.0.0" - } - }, - "node_modules/vscode-languageclient/node_modules/minimatch": { - "version": "5.1.6", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz", - "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", - "dependencies": { - "brace-expansion": "^2.0.1" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/vscode-languageserver-protocol": { - "version": "3.17.5", - "resolved": "https://registry.npmjs.org/vscode-languageserver-protocol/-/vscode-languageserver-protocol-3.17.5.tgz", - "integrity": "sha512-mb1bvRJN8SVznADSGWM9u/b07H7Ecg0I3OgXDuLdn307rl/J3A9YD6/eYOssqhecL27hK1IPZAsaqh00i/Jljg==", - "license": "MIT", - "dependencies": { - "vscode-jsonrpc": "8.2.0", - "vscode-languageserver-types": "3.17.5" - } - }, - "node_modules/vscode-languageserver-types": { - "version": "3.17.5", - "resolved": "https://registry.npmjs.org/vscode-languageserver-types/-/vscode-languageserver-types-3.17.5.tgz", - "integrity": "sha512-Ld1VelNuX9pdF39h2Hgaeb5hEZM2Z3jUrrMgWQAu82jMtZp7p3vJT3BzToKtZI7NgQssZje5o0zryOrhQvzQAg==", - "license": "MIT" - }, - "node_modules/which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "dev": true, - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "node-which": "bin/node-which" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/wrap-ansi": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", - "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", - "dependencies": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/wrap-ansi?sponsor=1" - } - }, - "node_modules/wrappy": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", - "dev": true - }, - "node_modules/xmlbuilder": { - "version": "11.0.1", - "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-11.0.1.tgz", - "integrity": "sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA==", - "dev": true, - "engines": { - "node": ">=4.0" - } - }, - "node_modules/y18n": { - "version": "5.0.8", - "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", - "integrity": 
"sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", - "engines": { - "node": ">=10" - } - }, - "node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" - }, - "node_modules/yargs": { - "version": "17.7.2", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz", - "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==", - "dependencies": { - "cliui": "^8.0.1", - "escalade": "^3.1.1", - "get-caller-file": "^2.0.5", - "require-directory": "^2.1.1", - "string-width": "^4.2.3", - "y18n": "^5.0.5", - "yargs-parser": "^21.1.1" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/yargs-parser": { - "version": "21.1.1", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", - "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", - "engines": { - "node": ">=12" - } - }, - "node_modules/yauzl": { - "version": "2.10.0", - "resolved": "https://registry.npmjs.org/yauzl/-/yauzl-2.10.0.tgz", - "integrity": "sha512-p4a9I6X6nu6IhoGmBqAcbJy1mlC4j27vEPZX9F4L4/vZT3Lyq1VkFHw/V/PUcB9Buo+DG3iHkT0x3Qya58zc3g==", - "dev": true, - "dependencies": { - "buffer-crc32": "~0.2.3", - "fd-slicer": "~1.1.0" - } - }, - "node_modules/yazl": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/yazl/-/yazl-2.5.1.tgz", - "integrity": "sha512-phENi2PLiHnHb6QBVot+dJnaAZ0xosj7p3fWl+znIjBDlnMI2PsZCJZ306BPTFOaHf5qdDEI8x5qFrSOBN5vrw==", - "dev": true, - "dependencies": { - "buffer-crc32": "~0.2.3" - } - }, - "node_modules/yocto-queue": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", - "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", - "dev": true, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - } - } -} diff --git a/editors/code/package.json b/editors/code/package.json deleted file mode 100644 index c1ae259f..00000000 --- a/editors/code/package.json +++ /dev/null @@ -1,96 +0,0 @@ -{ - "name": "postgres-lsp", - "displayName": "postgres_lsp", - "description": "postgres_lsp support for Visual Studio Code", - "private": true, - "version": "0.0.0-dev", - "releaseTag": null, - "publisher": "supabase", - "repository": { - "url": "https://github.com/supabase/postgres_lsp.git", - "type": "git" - }, - "homepage": "https://github.com/supabase/postgres_lsp", - "license": "MIT OR Apache-2.0", - "keywords": [ - "postgres" - ], - "categories": [ - "Programming Languages" - ], - "capabilities": { - "untrustedWorkspaces": { - "supported": false, - "description": "postgres_lsp invokes binaries set up by its configuration as well as the Rust toolchain's binaries. A malicious actor could exploit this to run arbitrary code on your machine." 
- } - }, - "engines": { - "vscode": "^1.75.0" - }, - "enabledApiProposals": [], - "scripts": { - "vscode:prepublish": "npm run build-base -- --minify", - "package": "vsce package -o postgres_lsp.vsix", - "build-base": "esbuild ./src/main.ts --bundle --outfile=out/main.js --external:vscode --format=cjs --platform=node --target=node16", - "build": "npm run build-base -- --sourcemap", - "watch": "npm run build-base -- --sourcemap --watch", - "format": "prettier --write .", - "format:check": "prettier --check .", - "lint": "eslint -c .eslintrc.js --ext ts ./src ./tests", - "lint:fix": "npm run lint -- --fix", - "typecheck": "tsc", - "pretest": "npm run typecheck && npm run build", - "test": "node ./out/tests/runTests.js" - }, - "dependencies": { - "@hpcc-js/wasm": "^2.13.0", - "anser": "^2.1.1", - "d3": "^7.8.5", - "d3-graphviz": "^5.0.2", - "vscode-languageclient": "9.0.1" - }, - "devDependencies": { - "@tsconfig/strictest": "^2.0.1", - "@types/node": "~16.11.7", - "@types/vscode": "~1.75", - "@typescript-eslint/eslint-plugin": "^6.0.0", - "@typescript-eslint/parser": "^6.0.0", - "@vscode/test-electron": "^2.3.3", - "@vscode/vsce": "^2.19.0", - "esbuild": "^0.18.12", - "eslint": "^8.44.0", - "eslint-config-prettier": "^8.8.0", - "ovsx": "^0.8.2", - "prettier": "^3.0.0", - "tslib": "^2.6.0", - "typescript": "^5.1.6" - }, - "activationEvents": [ - "onLanguage:sql" - ], - "main": "./out/main", - "contributes": { - "configuration": { - "type": "object", - "title": "postgres_lsp", - "properties": { - "postgres_lsp.trace.server": { - "scope": "window", - "type": "string", - "enum": [ - "off", - "messages", - "verbose" - ], - "default": "off", - "description": "Traces the communication between VS Code and the language server." - }, - "pglt.databaseUrl": { - "type": "string", - "default": "", - "description": "Your Postgres Database URL" - } - } - } - } -} \ No newline at end of file diff --git a/editors/code/src/main.ts b/editors/code/src/main.ts deleted file mode 100644 index accdb3b3..00000000 --- a/editors/code/src/main.ts +++ /dev/null @@ -1,47 +0,0 @@ -import { type ExtensionContext, window } from 'vscode'; - -import { - type Executable, - LanguageClient, - type LanguageClientOptions, - type ServerOptions -} from 'vscode-languageclient/node'; - -let client: LanguageClient; - -export function activate(_context: ExtensionContext) { - // If the extension is launched in debug mode then the debug server options are used - // Otherwise the run options are used - const run: Executable = { - command: 'pg_cli', - args: ['lsp-proxy'] - }; - - const outputChannel = window.createOutputChannel('Postgres LSP', { log: true }); - - const serverOptions: ServerOptions = { - run, - debug: run - }; - - // Options to control the language client - const clientOptions: LanguageClientOptions = { - // Register the server for plain text documents - documentSelector: [{ scheme: 'file', language: 'sql' }], - outputChannel - }; - - // Create the language client and start the client. - client = new LanguageClient('postgres_lsp', 'Postgres LSP', serverOptions, clientOptions); - - // Start the client. 
This will also launch the server - void client.start(); -} - -export function deactivate(): Thenable | undefined { - console.log('Deactivating client...'); - if (!client) { - return undefined; - } - return client.stop(); -} diff --git a/editors/code/tsconfig.eslint.json b/editors/code/tsconfig.eslint.json deleted file mode 100644 index 5e2b33ca..00000000 --- a/editors/code/tsconfig.eslint.json +++ /dev/null @@ -1,11 +0,0 @@ -// Special typescript project file, used by eslint only. -{ - "extends": "./tsconfig.json", - "include": [ - // repeated from base config's "include" setting - "src", - "tests", - // these are the eslint-only inclusions - ".eslintrc.js" - ] -} diff --git a/editors/code/tsconfig.json b/editors/code/tsconfig.json deleted file mode 100644 index 125021ee..00000000 --- a/editors/code/tsconfig.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "extends": "@tsconfig/strictest/tsconfig.json", - "compilerOptions": { - "esModuleInterop": false, - "module": "Node16", - "moduleResolution": "node16", - "target": "es2021", - "outDir": "out", - "lib": ["es2021"], - "sourceMap": true, - "rootDir": ".", - "newLine": "LF", - - // FIXME: https://github.com/rust-lang/rust-analyzer/issues/15253 - "exactOptionalPropertyTypes": false - }, - "exclude": ["node_modules", ".vscode-test"], - "include": ["src", "tests"] -} diff --git a/env_variables/index.html b/env_variables/index.html new file mode 100644 index 00000000..46c87978 --- /dev/null +++ b/env_variables/index.html @@ -0,0 +1,147 @@

Environment Variables

PGT_LOG_PATH

The directory where the Daemon logs will be saved.

PGT_LOG_PREFIX_NAME

A prefix that's added to the name of the log. Default: server.log.

PGT_CONFIG_PATH

A path to the configuration file.

diff --git a/example/file.sql b/example/file.sql deleted file mode 100644 index 6600096d..00000000 --- a/example/file.sql +++ /dev/null @@ -1,12 +0,0 @@ - -select id, name from users where id = '1224'; - - -select select; - - - -select 1; - - - diff --git a/docs/images/cli-demo.png b/images/cli-demo.png similarity index 100% rename from docs/images/cli-demo.png rename to images/cli-demo.png diff --git a/docs/images/lsp-demo.gif b/images/lsp-demo.gif similarity index 100% rename from docs/images/lsp-demo.gif rename to images/lsp-demo.gif diff --git a/docs/images/pls-github.png b/images/pls-github.png similarity index 100% rename from docs/images/pls-github.png rename to images/pls-github.png diff --git a/img/favicon.ico b/img/favicon.ico new file mode 100644 index 00000000..e85006a3 Binary files /dev/null and b/img/favicon.ico differ diff --git a/index.html b/index.html new file mode 100644 index 00000000..933a4ca7 --- /dev/null +++ b/index.html @@ -0,0 +1,285 @@ + Postgres Language Server
Postgres Tools

A collection of language tools and a Language Server Protocol (LSP) implementation for Postgres, focusing on developer experience and reliable SQL tooling.

Source Code: https://github.com/supabase-community/postgres-language-server

Overview

This project provides a toolchain for Postgres development.

[LSP Demo]

[CLI Demo]

The toolchain is built on Postgres' own parser, libpg_query, to ensure 100% syntax compatibility. It uses a server-client architecture and is transport-agnostic. This means all features can be accessed through the Language Server Protocol as well as other interfaces like a CLI, HTTP APIs, or a WebAssembly module.
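Because the workspace is transport-agnostic, it can also be driven without an editor. As a minimal sketch (not an official example), here is how the @postgrestools/backend-jsonrpc bindings that are removed later in this diff can be used to pull diagnostics over JSON-RPC; it assumes a prebuilt daemon binary exists for your platform, and the file name demo.sql is a placeholder:

import { createWorkspace } from "@postgrestools/backend-jsonrpc";

async function main() {
  // Resolves the platform-specific binary, spawns the daemon, and sends
  // the JSON-RPC "initialize" request; returns null on unsupported platforms.
  const workspace = await createWorkspace();
  if (!workspace) throw new Error("no prebuilt binary for this platform");

  await workspace.openFile({
    path: { path: "demo.sql", was_written: false, kind: ["Handleable"] },
    content: "select 1 from",
    version: 0,
  });

  const { diagnostics } = await workspace.pullDiagnostics({
    only: [],
    skip: [],
    max_diagnostics: 100,
    categories: [],
    path: { path: "demo.sql", was_written: false, kind: ["Handleable"] },
  });

  for (const diagnostic of diagnostics) {
    console.log(diagnostic.severity, diagnostic.description);
  }

  workspace.destroy();
}

main();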

The following features are implemented:

• Autocompletion
• Syntax Error Highlighting
• Type-checking (via EXPLAIN error insights)
• Linter, inspired by Squawk

We are currently focused on refining and enhancing these core features. For future plans and opportunities to contribute, please check out the issues and discussions. Any contributions are welcome!

Installation

There are various ways to use the toolchain.

CLI

Grab the executable for your platform from the latest CLI release on GitHub and give it execution permission:

curl -L https://github.com/supabase-community/postgres-language-server/releases/download/<version>/postgrestools_aarch64-apple-darwin -o postgrestools
chmod +x postgrestools

Now you can use Postgres Tools by simply running ./postgrestools.

NPM

If you are using Node, you can install the CLI via NPM. Run the following command in a directory containing a package.json file.

npm add --save-dev --save-exact @postgrestools/postgrestools
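If you prefer to invoke the installed CLI from a Node script rather than a package.json script, a small sketch (the npx-based resolution is an assumption; the package ships a postgrestools bin that picks the right platform binary, as the launcher script later in this diff shows):

import { spawnSync } from "node:child_process";

// Runs the locally installed CLI and mirrors its exit code.
// "myfile.sql" is a placeholder path.
const result = spawnSync("npx", ["postgrestools", "check", "myfile.sql"], {
  stdio: "inherit",
});

process.exitCode = result.status ?? 1;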

VSCode

The language server is available on the VSCode Marketplace. It's published from this repo.

Neovim

You will have to install nvim-lspconfig and follow the instructions.

Emacs

The language client is available through lsp-mode. For more details, refer to their manual page.

Zed

The language server is available as an Extension. It's published from this repo.

GitHub Actions

To use the CLI in GitHub Actions, you can install it via our GitHub Action.

Configuration

We recommend creating a postgrestools.jsonc configuration file for each project. This eliminates repetitive CLI options and ensures consistent configuration in your editor. Some options are only available from a configuration file. This step is optional: if you are happy with the defaults, you don’t need a configuration file. To create the postgrestools.jsonc file, run the init command in the root folder of your project:

postgrestools init

You’ll now have a postgrestools.jsonc file in your directory:

{
  "$schema": "https://pgtools.dev/schemas/0.0.0/schema.json",
  "vcs": {
    "enabled": false,
    "clientKind": "git",
    "useIgnoreFile": false
  },
  "files": {
    "ignore": []
  },
  "linter": {
    "enabled": true,
    "rules": {
      "recommended": true
    }
  },
  "db": {
    "host": "127.0.0.1",
    "port": 5432,
    "username": "postgres",
    "password": "postgres",
    "database": "postgres",
    "connTimeoutSecs": 10,
    "allowStatementExecutionsAgainst": ["127.0.0.1/*", "localhost/*"]
  }
}

Make sure to edit the database connection settings to connect to your local development database. To see all options, run postgrestools --help.
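The same settings can also be applied programmatically through the daemon's JSON-RPC workspace API. A minimal sketch using the @postgrestools/backend-jsonrpc bindings removed later in this diff; the values mirror the defaults above, and updateSettings with its parameter shape comes from the generated workspace bindings:

import { createWorkspace } from "@postgrestools/backend-jsonrpc";

const workspace = await createWorkspace();
if (workspace) {
  // PartialConfiguration mirrors the postgrestools.jsonc shown above.
  await workspace.updateSettings({
    configuration: {
      db: {
        host: "127.0.0.1",
        port: 5432,
        username: "postgres",
        password: "postgres",
        database: "postgres",
        connTimeoutSecs: 10,
      },
      linter: { enabled: true, rules: { recommended: true } },
    },
    gitignore_matches: [],
  });
  workspace.destroy();
}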

Usage

You can use Postgres Tools via the command line or using a code editor that supports an LSP.

Using the CLI

The CLI exposes a simple check command that will run all checks on the given files or paths.

postgrestools check myfile.sql

Make sure to check out the other options by running postgrestools --help. We will provide guides for specific use cases like linting migration files soon.

Using the LSP Proxy

Postgres Tools has a command called lsp-proxy. When executed, two processes will spawn:

• a daemon that executes the requested operations;
• a server that functions as a proxy between the requests of the client (the editor) and the daemon.

If your editor is able to interact with a server and send JSON-RPC requests, you only need to configure the editor to run that command, as in the sketch below.
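As a sketch of that wiring, this is roughly what the VS Code extension deleted earlier in this diff does, updated to spawn postgrestools (the binary name is an assumption; the old extension invoked pg_cli):

import { type ExtensionContext, window } from "vscode";
import {
  type Executable,
  LanguageClient,
  type LanguageClientOptions,
  type ServerOptions,
} from "vscode-languageclient/node";

let client: LanguageClient;

export function activate(_context: ExtensionContext) {
  // Assumes the CLI is on PATH; lsp-proxy relays JSON-RPC between editor and daemon.
  const run: Executable = { command: "postgrestools", args: ["lsp-proxy"] };
  const serverOptions: ServerOptions = { run, debug: run };

  const clientOptions: LanguageClientOptions = {
    // Attach to SQL files on disk.
    documentSelector: [{ scheme: "file", language: "sql" }],
    outputChannel: window.createOutputChannel("Postgres Tools", { log: true }),
  };

  client = new LanguageClient("postgrestools", "Postgres Tools", serverOptions, clientOptions);
  // Starting the client also launches the server process.
  void client.start();
}

export function deactivate(): Thenable<void> | undefined {
  if (!client) {
    return undefined;
  }
  return client.stop();
}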

Using the daemon with the binary

Using the binary via CLI is very efficient, although you won’t be able to provide logs to your users. The CLI allows you to bootstrap a daemon and then use the CLI commands through the daemon itself. In order to do so, you first need to start a daemon process with the start command:

postgrestools start

Then, every command needs to add the --use-server option, e.g.:

echo "select 1" | postgrestools check --use-server --stdin-file-path=dummy.sql

Daemon logs

The daemon saves logs in your file system. Logs are stored in a folder called pgt-logs. The path of this folder changes based on your operating system:

• Linux: ~/.cache/pgt
• Windows: C:\Users\<UserName>\AppData\Local\supabase-community\pgt\cache
• macOS: /Users/<UserName>/Library/Caches/dev.supabase-community.pgt

For other operating systems, you can find the folder in the system’s temporary directory.

You can change the location of the pgt-logs folder via the PGT_LOG_PATH variable.

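For example, a small Node sketch that boots the daemon with a custom log directory (the /tmp/pgt-logs path is purely illustrative):

import { spawn } from "node:child_process";

// PGT_LOG_PATH overrides where the daemon writes its pgt-logs folder.
const daemon = spawn("postgrestools", ["start"], {
  env: { ...process.env, PGT_LOG_PATH: "/tmp/pgt-logs" },
  stdio: "inherit",
});

daemon.on("exit", (code) => {
  console.log(`daemon exited with code ${code}`);
});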
diff --git a/js/html5shiv.min.js b/js/html5shiv.min.js new file mode 100644 index 00000000..1a01c94b --- /dev/null +++ b/js/html5shiv.min.js @@ -0,0 +1,4 @@ +/** +* @preserve HTML5 Shiv 3.7.3 | @afarkas @jdalton @jon_neal @rem | MIT/GPL2 Licensed +*/ +[minified vendor script]
diff --git a/js/jquery-3.6.0.min.js b/js/jquery-3.6.0.min.js new file mode 100644 index 00000000..c4c6022f --- /dev/null +++ b/js/jquery-3.6.0.min.js @@ -0,0 +1,2 @@ +/*! jQuery v3.6.0 | (c) OpenJS Foundation and other contributors | jquery.org/license */ +[minified vendor script]
[minified theme script and the diff header of the deleted Rust tree-sitter binding lost in extraction]
-use tree_sitter::Language; - -extern "C" { - fn tree_sitter_sql() -> Language; -} - -pub fn language() -> Language { - unsafe { tree_sitter_sql() } -} - -#[cfg(test)] -mod tests { - use tree_sitter::{Query, QueryCursor}; - - #[test] - fn test_can_load_grammar() { - let mut parser = tree_sitter::Parser::new(); - parser - .set_language(super::language()) - .expect("Error loading sql language"); - let source_code = "SELECT 1 FROM public.table where id = 4"; - - let query = Query::new( - parser.language().unwrap(), - "( - relation ( - ( - object_reference - schema: (identifier) - name: (identifier) - ) @reference - ) -) -", - ) - .unwrap(); - - let tree = parser.parse(source_code, None).unwrap(); - - let mut cursor = QueryCursor::new(); - - let mut captures = cursor.captures(&query, tree.root_node(), source_code.as_bytes()); - let (match_, idx) = captures.next().unwrap(); - let capture = match_.captures[idx]; - assert_eq!(capture.node.kind(), "object_reference"); - } -} diff --git a/lib/tree_sitter_sql/tree-sitter-sql b/lib/tree_sitter_sql/tree-sitter-sql deleted file mode 160000 index e7e8e1d1..00000000 --- a/lib/tree_sitter_sql/tree-sitter-sql +++ /dev/null @@ -1 +0,0 @@ -Subproject commit e7e8e1d10890102a4d7a46db651fd832e9b7b5dc diff --git a/libpg_query b/libpg_query deleted file mode 160000 index 1c1a32ed..00000000 --- a/libpg_query +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 1c1a32ed2f4c7799830d50bf4cb159222aafec48 diff --git a/mkdocs.yml b/mkdocs.yml deleted file mode 100644 index 3597e08c..00000000 --- a/mkdocs.yml +++ /dev/null @@ -1,39 +0,0 @@ -site_name: Postgres Language Server -site_url: https://pgtools.dev -site_description: A collection of language tools and a Language Server Protocol (LSP)
implementation for Postgres, focusing on developer experience and reliable SQL tooling. - -repo_name: supabase-community/postgres-language-server -repo_url: https://github.com/supabase-community/postgres-language-server - -theme: - name: 'readthedocs' - features: - - navigation.expand - palette: - primary: grey - accent: red -nav: - - Introduction: index.md - - Guides: - - Linting Migrations: checking_migrations.md - - Troubleshooting: troubleshooting.md - - Reference: - - Rules: rules.md - - Rule Sources: rule_sources.md - - CLI: cli_reference.md - - Environment Variables: env_variables.md - -plugins: - - gh-admonitions - -markdown_extensions: - - admonition - # - pymdownx.highlight: - # anchor_linenums: true - # line_spans: __span - # pygments_lang_class: true - # - pymdownx.inlinehilite - # - pymdownx.snippets - # - pymdownx.superfences - # - pymdownx.tabbed: - # alternate_style: true diff --git a/package.json b/package.json deleted file mode 100644 index bf930300..00000000 --- a/package.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "name": "@postgrestools/monorepo", - "version": "0.0.0", - "private": true, - "devDependencies": { - "@biomejs/biome": "1.9.4", - "@types/bun": "latest" - }, - "peerDependencies": { - "typescript": "^5" - }, - "workspaces": [ - "packages/@postgrestools/postgrestools", - "packages/@postgrestools/backend-jsonrpc" - ], - "keywords": [], - "author": "Supabase Community", - "license": "MIT OR Apache-2.0", - "packageManager": "bun@1" -} diff --git a/packages/@postgrestools/backend-jsonrpc/package.json b/packages/@postgrestools/backend-jsonrpc/package.json deleted file mode 100644 index d8e0db2c..00000000 --- a/packages/@postgrestools/backend-jsonrpc/package.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "name": "@postgrestools/backend-jsonrpc", - "version": "", - "main": "dist/index.js", - "scripts": { - "test": "bun test", - "test:ci": "bun build && bun test", - "build": "bun build ./src/index.ts --outdir ./dist --target node" - }, - "files": ["dist/", "README.md"], - "repository": { - "type": "git", - "url": "git+https://github.com/supabase-community/postgres-language-server.git", - "directory": "packages/@postgrestools/backend-jsonrpc" - }, - "author": "Supabase Community", - "bugs": "https://github.com/supabase-community/postgres-language-server/issues", - "description": "Bindings to the JSON-RPC Workspace API of the Postgres Language Tools daemon", - "keywords": ["TypeScript", "Postgres"], - "license": "MIT", - "publishConfig": { - "provenance": true - }, - "optionalDependencies": { - "@postgrestools/cli-win32-x64": "", - "@postgrestools/cli-win32-arm64": "", - "@postgrestools/cli-darwin-x64": "", - "@postgrestools/cli-darwin-arm64": "", - "@postgrestools/cli-linux-x64": "", - "@postgrestools/cli-linux-arm64": "" - } -} diff --git a/packages/@postgrestools/backend-jsonrpc/src/command.ts b/packages/@postgrestools/backend-jsonrpc/src/command.ts deleted file mode 100644 index 1f3ef398..00000000 --- a/packages/@postgrestools/backend-jsonrpc/src/command.ts +++ /dev/null @@ -1,36 +0,0 @@ -/** - * Gets the path of the binary for the current platform - * - * @returns Filesystem path to the binary, or null if no prebuilt distribution exists for the current platform - */ -export function getCommand(): string | null { - const { platform, arch } = process; - - type PlatformPaths = { - [P in NodeJS.Platform]?: { - [A in NodeJS.Architecture]?: string; - }; - }; - - const PLATFORMS: PlatformPaths = { - win32: { - x64: "@postgrestools/cli-win32-x64/postgrestools.exe", - arm64:
"@postgrestools/cli-win32-arm64/postgrestools.exe", - }, - darwin: { - x64: "@postgrestools/cli-darwin-x64/postgrestools", - arm64: "@postgrestools/cli-darwin-arm64/postgrestools", - }, - linux: { - x64: "@postgrestools/cli-linux-x64/postgrestools", - arm64: "@postgrestools/cli-linux-arm64/postgrestools", - }, - }; - - const binPath = PLATFORMS?.[platform]?.[arch]; - if (!binPath) { - return null; - } - - return require.resolve(binPath); -} diff --git a/packages/@postgrestools/backend-jsonrpc/src/index.ts b/packages/@postgrestools/backend-jsonrpc/src/index.ts deleted file mode 100644 index 92ed6660..00000000 --- a/packages/@postgrestools/backend-jsonrpc/src/index.ts +++ /dev/null @@ -1,46 +0,0 @@ -import { getCommand } from "./command"; -import { createSocket } from "./socket"; -import { Transport } from "./transport"; -import { type Workspace, createWorkspace as wrapTransport } from "./workspace"; - -/** - * Create an instance of the Workspace client connected to a remote daemon - * instance through the JSON-RPC protocol - * - * @returns A Workspace client, or null if the underlying platform is not supported - */ -export async function createWorkspace(): Promise { - const command = getCommand(); - if (!command) { - return null; - } - - return createWorkspaceWithBinary(command); -} - -/** - * Create an instance of the Workspace client connected to a remote daemon - * instance through the JSON-RPC protocol, using the provided command to spawn - * the daemon if necessary - * - * @param command Path to the binary - * @returns A Workspace client, or null if the underlying platform is not supported - */ -export async function createWorkspaceWithBinary( - command: string, -): Promise { - const socket = await createSocket(command); - const transport = new Transport(socket); - - await transport.request("initialize", { - capabilities: {}, - client_info: { - name: "@postgrestools/backend-jsonrpc", - version: "0.0.0", - }, - }); - - return wrapTransport(transport); -} - -export * from "./workspace"; diff --git a/packages/@postgrestools/backend-jsonrpc/src/socket.ts b/packages/@postgrestools/backend-jsonrpc/src/socket.ts deleted file mode 100644 index 6fd2902f..00000000 --- a/packages/@postgrestools/backend-jsonrpc/src/socket.ts +++ /dev/null @@ -1,47 +0,0 @@ -import { spawn } from "node:child_process"; -import { type Socket, connect } from "node:net"; - -function getSocket(command: string): Promise { - return new Promise((resolve, reject) => { - const process = spawn(command, ["__print_socket"], { - stdio: "pipe", - }); - - process.on("error", reject); - - let pipeName = ""; - process.stdout.on("data", (data) => { - pipeName += data.toString("utf-8"); - }); - - process.on("exit", (code) => { - if (code === 0) { - resolve(pipeName.trimEnd()); - } else { - reject( - new Error( - `Command '${command} __print_socket' exited with code ${code}`, - ), - ); - } - }); - }); -} - -/** - * Ensure the daemon server is running and create a Socket connected to the RPC channel - * - * @param command Path to the daemon binary - * @returns Socket instance connected to the daemon - */ -export async function createSocket(command: string): Promise { - const path = await getSocket(command); - const socket = connect(path); - - await new Promise((resolve, reject) => { - socket.once("error", reject); - socket.once("ready", resolve); - }); - - return socket; -} diff --git a/packages/@postgrestools/backend-jsonrpc/src/transport.ts b/packages/@postgrestools/backend-jsonrpc/src/transport.ts deleted file mode 100644 index 
b1cdad44..00000000 --- a/packages/@postgrestools/backend-jsonrpc/src/transport.ts +++ /dev/null @@ -1,293 +0,0 @@ -interface Socket { - on(event: "data", fn: (data: Buffer) => void): void; - write(data: Buffer): void; - destroy(): void; -} - -enum ReaderStateKind { - Header = 0, - Body = 1, -} - -interface ReaderStateHeader { - readonly kind: ReaderStateKind.Header; - contentLength?: number; - contentType?: string; -} - -interface ReaderStateBody { - readonly kind: ReaderStateKind.Body; - readonly contentLength: number; - readonly contentType?: string; -} - -type ReaderState = ReaderStateHeader | ReaderStateBody; - -interface JsonRpcRequest { - jsonrpc: "2.0"; - id: number; - method: string; - params: unknown; -} - -function isJsonRpcRequest(message: JsonRpcMessage): message is JsonRpcRequest { - return ( - "id" in message && - typeof message.id === "number" && - "method" in message && - typeof message.method === "string" && - "params" in message - ); -} - -interface JsonRpcNotification { - jsonrpc: "2.0"; - method: string; - params: unknown; -} - -function isJsonRpcNotification( - message: JsonRpcMessage, -): message is JsonRpcNotification { - return ( - !("id" in message) && - "method" in message && - typeof message.method === "string" && - "params" in message - ); -} - -type JsonRpcResponse = - | { - jsonrpc: "2.0"; - id: number; - result: unknown; - } - | { - jsonrpc: "2.0"; - id: number; - error: unknown; - }; - -function isJsonRpcResponse( - message: JsonRpcMessage, -): message is JsonRpcResponse { - return ( - "id" in message && - typeof message.id === "number" && - !("method" in message) && - ("result" in message || "error" in message) - ); -} - -type JsonRpcMessage = JsonRpcRequest | JsonRpcNotification | JsonRpcResponse; - -function isJsonRpcMessage(message: unknown): message is JsonRpcMessage { - return ( - typeof message === "object" && - message !== null && - "jsonrpc" in message && - message.jsonrpc === "2.0" - ); -} - -interface PendingRequest { - resolve(result: unknown): void; - reject(error: unknown): void; -} - -const MIME_JSONRPC = "application/vscode-jsonrpc"; - -/** - * Implements the daemon server JSON-RPC protocol over a Socket instance - */ -export class Transport { - /** - * Counter incremented for each outgoing request to generate a unique ID - */ - private nextRequestId = 0; - - /** - * Storage for the promise resolver functions of pending requests, - * keyed by ID of the request - */ - private pendingRequests: Map = new Map(); - - constructor(private socket: Socket) { - socket.on("data", (data) => { - this.processIncoming(data); - }); - } - - /** - * Send a request to the remote server - * - * @param method Name of the remote method to call - * @param params Parameters object the remote method should be called with - * @return Promise resolving with the value returned by the remote method, or rejecting with an RPC error if the remote call failed - */ - // biome-ignore lint/suspicious/noExplicitAny: if i change it to Promise typescript breaks - request(method: string, params: unknown): Promise { - return new Promise((resolve, reject) => { - const id = this.nextRequestId++; - this.pendingRequests.set(id, { resolve, reject }); - this.sendMessage({ - jsonrpc: "2.0", - id, - method, - params, - }); - }); - } - - /** - * Send a notification message to the remote server - * - * @param method Name of the remote method to call - * @param params Parameters object the remote method should be called with - */ - notify(method: string, params: unknown) { - this.sendMessage({ 
- jsonrpc: "2.0", - method, - params, - }); - } - - /** - * Destroy the internal socket instance for this Transport - */ - destroy() { - this.socket.destroy(); - } - - private sendMessage(message: JsonRpcMessage) { - const body = Buffer.from(JSON.stringify(message)); - const headers = Buffer.from( - `Content-Length: ${body.length}\r\nContent-Type: ${MIME_JSONRPC};charset=utf-8\r\n\r\n`, - ); - this.socket.write(Buffer.concat([headers, body])); - } - - private pendingData = Buffer.from(""); - private readerState: ReaderState = { - kind: ReaderStateKind.Header, - }; - - private processIncoming(data: Buffer) { - this.pendingData = Buffer.concat([this.pendingData, data]); - - while (this.pendingData.length > 0) { - if (this.readerState.kind === ReaderStateKind.Header) { - const lineBreakIndex = this.pendingData.indexOf("\n"); - if (lineBreakIndex < 0) { - break; - } - - const header = this.pendingData.subarray(0, lineBreakIndex + 1); - this.pendingData = this.pendingData.subarray(lineBreakIndex + 1); - this.processIncomingHeader(this.readerState, header.toString("utf-8")); - } else if (this.pendingData.length >= this.readerState.contentLength) { - const body = this.pendingData.subarray( - 0, - this.readerState.contentLength, - ); - this.pendingData = this.pendingData.subarray( - this.readerState.contentLength, - ); - this.processIncomingBody(body); - - this.readerState = { - kind: ReaderStateKind.Header, - }; - } else { - break; - } - } - } - - private processIncomingHeader(readerState: ReaderStateHeader, line: string) { - if (line === "\r\n") { - const { contentLength, contentType } = readerState; - if (typeof contentLength !== "number") { - throw new Error( - "incoming message from the remote workspace is missing the Content-Length header", - ); - } - - this.readerState = { - kind: ReaderStateKind.Body, - contentLength, - contentType, - }; - return; - } - - const colonIndex = line.indexOf(":"); - if (colonIndex < 0) { - throw new Error(`could not find colon token in "${line}"`); - } - - const headerName = line.substring(0, colonIndex); - const headerValue = line.substring(colonIndex + 1).trim(); - - switch (headerName) { - case "Content-Length": { - const value = Number.parseInt(headerValue); - readerState.contentLength = value; - break; - } - case "Content-Type": { - if (!headerValue.startsWith(MIME_JSONRPC)) { - throw new Error( - `invalid value for Content-Type expected "${MIME_JSONRPC}", got "${headerValue}"`, - ); - } - - readerState.contentType = headerValue; - break; - } - default: - console.warn(`ignoring unknown header "${headerName}"`); - } - } - - private processIncomingBody(buffer: Buffer) { - const data = buffer.toString("utf-8"); - const body = JSON.parse(data); - - if (isJsonRpcMessage(body)) { - if (isJsonRpcRequest(body)) { - // TODO: Not implemented at the moment - return; - } - - if (isJsonRpcNotification(body)) { - // TODO: Not implemented at the moment - return; - } - - if (isJsonRpcResponse(body)) { - const pendingRequest = this.pendingRequests.get(body.id); - if (pendingRequest) { - this.pendingRequests.delete(body.id); - const { resolve, reject } = pendingRequest; - if ("result" in body) { - resolve(body.result); - } else { - reject(body.error); - } - } else { - throw new Error( - `could not find any pending request matching RPC response ID ${body.id}`, - ); - } - return; - } - } - - throw new Error( - `failed to deserialize incoming message from remote workspace, "${data}" is not a valid JSON-RPC message body`, - ); - } -} diff --git 
a/packages/@postgrestools/backend-jsonrpc/src/workspace.ts b/packages/@postgrestools/backend-jsonrpc/src/workspace.ts deleted file mode 100644 index a35dad81..00000000 --- a/packages/@postgrestools/backend-jsonrpc/src/workspace.ts +++ /dev/null @@ -1,469 +0,0 @@ -// Generated file, do not edit by hand, see `xtask/codegen` -import type { Transport } from "./transport"; -export interface IsPathIgnoredParams { - pgt_path: PgTPath; -} -export interface PgTPath { - /** - * Determines the kind of the file inside Postgres Tools. Some files are considered as configuration files, others as manifest files, and others as files to handle - */ - kind: FileKind; - path: string; - /** - * Whether this path (usually a file) was fixed as a result of a format/lint/check command with the `--write` flag. - */ - was_written: boolean; -} -export type FileKind = FileKind2[]; -/** - * The priority of the file - */ -export type FileKind2 = "Config" | "Ignore" | "Inspectable" | "Handleable"; -export interface GetFileContentParams { - path: PgTPath; -} -export interface PullDiagnosticsParams { - categories: RuleCategories; - max_diagnostics: number; - only: RuleCode[]; - path: PgTPath; - skip: RuleCode[]; -} -export type RuleCategories = RuleCategory[]; -export type RuleCode = string; -export type RuleCategory = "Lint" | "Action" | "Transformation"; -export interface PullDiagnosticsResult { - diagnostics: Diagnostic[]; - errors: number; - skipped_diagnostics: number; -} -/** - * Serializable representation for a [Diagnostic](super::Diagnostic). - */ -export interface Diagnostic { - advices: Advices; - category?: Category; - description: string; - location: Location; - message: MarkupBuf; - severity: Severity; - source?: Diagnostic; - tags: DiagnosticTags; - verboseAdvices: Advices; -} -/** - * Implementation of [Visitor] collecting serializable [Advice] into a vector. - */ -export interface Advices { - advices: Advice[]; -} -export type Category = - | "lint/safety/addingRequiredField" - | "lint/safety/banDropColumn" - | "lint/safety/banDropNotNull" - | "lint/safety/banDropTable" - | "stdin" - | "check" - | "configuration" - | "database/connection" - | "internalError/io" - | "internalError/runtime" - | "internalError/fs" - | "flags/invalid" - | "project" - | "typecheck" - | "internalError/panic" - | "syntax" - | "dummy" - | "lint" - | "lint/performance" - | "lint/safety"; -export interface Location { - path?: Resource_for_String; - sourceCode?: string; - span?: TextRange; -} -export type MarkupBuf = MarkupNodeBuf[]; -/** - * The severity to associate to a diagnostic. - */ -export type Severity = "hint" | "information" | "warning" | "error" | "fatal"; -export type DiagnosticTags = DiagnosticTag[]; -/** - * Serializable representation of a [Diagnostic](super::Diagnostic) advice - -See the [Visitor] trait for additional documentation on all the supported advice types. - */ -export type Advice = - | { log: [LogCategory, MarkupBuf] } - | { list: MarkupBuf[] } - | { frame: Location } - | { diff: TextEdit } - | { backtrace: [MarkupBuf, Backtrace] } - | { command: string } - | { group: [MarkupBuf, Advices] }; -/** - * Represents the resource a diagnostic is associated with.
- */ -export type Resource_for_String = "argv" | "memory" | { file: string }; -export type TextRange = [TextSize, TextSize]; -export interface MarkupNodeBuf { - content: string; - elements: MarkupElement[]; -} -/** - * Internal enum used to automatically generate bit offsets for [DiagnosticTags] and help with the implementation of `serde` and `schemars` for tags. - */ -export type DiagnosticTag = - | "fixable" - | "internal" - | "unnecessaryCode" - | "deprecatedCode" - | "verbose"; -/** - * The category for a log advice, defines how the message should be presented to the user. - */ -export type LogCategory = "none" | "info" | "warn" | "error"; -export interface TextEdit { - dictionary: string; - ops: CompressedOp[]; -} -export type Backtrace = BacktraceFrame[]; -export type TextSize = number; -/** - * Enumeration of all the supported markup elements - */ -export type MarkupElement = - | "Emphasis" - | "Dim" - | "Italic" - | "Underline" - | "Error" - | "Success" - | "Warn" - | "Info" - | "Debug" - | "Trace" - | "Inverse" - | { Hyperlink: { href: string } }; -export type CompressedOp = - | { diffOp: DiffOp } - | { equalLines: { line_count: number } }; -/** - * Serializable representation of a backtrace frame. - */ -export interface BacktraceFrame { - ip: number; - symbols: BacktraceSymbol[]; -} -export type DiffOp = - | { equal: { range: TextRange } } - | { insert: { range: TextRange } } - | { delete: { range: TextRange } }; -/** - * Serializable representation of a backtrace frame symbol. - */ -export interface BacktraceSymbol { - colno?: number; - filename?: string; - lineno?: number; - name?: string; -} -export interface GetCompletionsParams { - /** - * The File for which a completion is requested. - */ - path: PgTPath; - /** - * The Cursor position in the file for which a completion is requested. - */ - position: TextSize; -} -export interface CompletionsResult { - items: CompletionItem[]; -} -export interface CompletionItem { - completion_text?: CompletionText; - description: string; - kind: CompletionItemKind; - label: string; - preselected: boolean; - /** - * String used for sorting by LSP clients. - */ - sort_text: string; -} -/** - * The text that the editor should fill in. If `None`, the `label` should be used. Tables, for example, might have different completion_texts: - -label: "users", description: "Schema: auth", completion_text: "auth.users". - */ -export interface CompletionText { - /** - * A `range` is required because some editors replace the current token, others naively insert the text. Having a range where start == end makes it an insertion. - */ - range: TextRange; - text: string; -} -export type CompletionItemKind = "table" | "function" | "column" | "schema"; -export interface UpdateSettingsParams { - configuration: PartialConfiguration; - gitignore_matches: string[]; - vcs_base_path?: string; - workspace_directory?: string; -} -/** - * The configuration that is contained inside the configuration file. 
- */ -export interface PartialConfiguration { - /** - * A field for the [JSON schema](https://json-schema.org/) specification - */ - $schema?: string; - /** - * The configuration of the database connection - */ - db?: PartialDatabaseConfiguration; - /** - * The configuration of the filesystem - */ - files?: PartialFilesConfiguration; - /** - * The configuration for the linter - */ - linter?: PartialLinterConfiguration; - /** - * Configure migrations - */ - migrations?: PartialMigrationsConfiguration; - /** - * The configuration of the VCS integration - */ - vcs?: PartialVcsConfiguration; -} -/** - * The configuration of the database connection. - */ -export interface PartialDatabaseConfiguration { - allowStatementExecutionsAgainst?: StringSet; - /** - * The connection timeout in seconds. - */ - connTimeoutSecs?: number; - /** - * The name of the database. - */ - database?: string; - /** - * The host of the database. Required if you want database-related features. All else falls back to sensible defaults. - */ - host?: string; - /** - * The password to connect to the database. - */ - password?: string; - /** - * The port of the database. - */ - port?: number; - /** - * The username to connect to the database. - */ - username?: string; -} -/** - * The configuration of the filesystem - */ -export interface PartialFilesConfiguration { - /** - * A list of Unix shell style patterns. Will ignore files/folders that will match these patterns. - */ - ignore?: StringSet; - /** - * A list of Unix shell style patterns. Will handle only those files/folders that will match these patterns. - */ - include?: StringSet; - /** - * The maximum allowed size for source code files in bytes. Files above this limit will be ignored for performance reasons. Defaults to 1 MiB - */ - maxSize?: number; -} -export interface PartialLinterConfiguration { - /** - * if `false`, it disables the feature and the linter won't be executed. `true` by default - */ - enabled?: boolean; - /** - * A list of Unix shell style patterns. The formatter will ignore files/folders that will match these patterns. - */ - ignore?: StringSet; - /** - * A list of Unix shell style patterns. The formatter will include files/folders that will match these patterns. - */ - include?: StringSet; - /** - * List of rules - */ - rules?: Rules; -} -/** - * The configuration of the filesystem - */ -export interface PartialMigrationsConfiguration { - /** - * Ignore any migrations before this timestamp - */ - after?: number; - /** - * The directory where the migration files are stored - */ - migrationsDir?: string; -} -/** - * Set of properties to integrate with a VCS software. - */ -export interface PartialVcsConfiguration { - /** - * The kind of client. - */ - clientKind?: VcsClientKind; - /** - * The main branch of the project - */ - defaultBranch?: string; - /** - * Whether we should integrate itself with the VCS client - */ - enabled?: boolean; - /** - * The folder where we should check for VCS files. By default, we will use the same folder where `postgrestools.jsonc` was found. - -If we can't find the configuration, it will attempt to use the current working directory. If no current working directory can't be found, we won't use the VCS integration, and a diagnostic will be emitted - */ - root?: string; - /** - * Whether we should use the VCS ignore file. When [true], we will ignore the files specified in the ignore file. - */ - useIgnoreFile?: boolean; -} -export type StringSet = string[]; -export interface Rules { - /** - * It enables ALL rules. 
The rules that belong to `nursery` won't be enabled. - */ - all?: boolean; - /** - * It enables the lint rules recommended by Postgres Tools. `true` by default. - */ - recommended?: boolean; - safety?: Safety; -} -export type VcsClientKind = "git"; -/** - * A list of rules that belong to this group - */ -export interface Safety { - /** - * Adding a new column that is NOT NULL and has no default value to an existing table effectively makes it required. - */ - addingRequiredField?: RuleConfiguration_for_Null; - /** - * It enables ALL rules for this group. - */ - all?: boolean; - /** - * Dropping a column may break existing clients. - */ - banDropColumn?: RuleConfiguration_for_Null; - /** - * Dropping a NOT NULL constraint may break existing clients. - */ - banDropNotNull?: RuleConfiguration_for_Null; - /** - * Dropping a table may break existing clients. - */ - banDropTable?: RuleConfiguration_for_Null; - /** - * It enables the recommended rules for this group - */ - recommended?: boolean; -} -export type RuleConfiguration_for_Null = - | RulePlainConfiguration - | RuleWithOptions_for_Null; -export type RulePlainConfiguration = "warn" | "error" | "info" | "off"; -export interface RuleWithOptions_for_Null { - /** - * The severity of the emitted diagnostics by the rule - */ - level: RulePlainConfiguration; - /** - * Rule's options - */ - options: null; -} -export interface OpenFileParams { - content: string; - path: PgTPath; - version: number; -} -export interface ChangeFileParams { - changes: ChangeParams[]; - path: PgTPath; - version: number; -} -export interface ChangeParams { - /** - * The range of the file that changed. If `None`, the whole file changed. - */ - range?: TextRange; - text: string; -} -export interface CloseFileParams { - path: PgTPath; -} -export type Configuration = PartialConfiguration; -export interface Workspace { - isPathIgnored(params: IsPathIgnoredParams): Promise<boolean>; - getFileContent(params: GetFileContentParams): Promise<string>; - pullDiagnostics( - params: PullDiagnosticsParams, - ): Promise<PullDiagnosticsResult>; - getCompletions(params: GetCompletionsParams): Promise<CompletionsResult>; - updateSettings(params: UpdateSettingsParams): Promise<void>; - openFile(params: OpenFileParams): Promise<void>; - changeFile(params: ChangeFileParams): Promise<void>; - closeFile(params: CloseFileParams): Promise<void>; - destroy(): void; -} -export function createWorkspace(transport: Transport): Workspace { - return { - isPathIgnored(params) { - return transport.request("pgt/is_path_ignored", params); - }, - getFileContent(params) { - return transport.request("pgt/get_file_content", params); - }, - pullDiagnostics(params) { - return transport.request("pgt/pull_diagnostics", params); - }, - getCompletions(params) { - return transport.request("pgt/get_completions", params); - }, - updateSettings(params) { - return transport.request("pgt/update_settings", params); - }, - openFile(params) { - return transport.request("pgt/open_file", params); - }, - changeFile(params) { - return transport.request("pgt/change_file", params); - }, - closeFile(params) { - return transport.request("pgt/close_file", params); - }, - destroy() { - transport.destroy(); - }, - }; -} diff --git a/packages/@postgrestools/backend-jsonrpc/tests/transport.test.mjs b/packages/@postgrestools/backend-jsonrpc/tests/transport.test.mjs deleted file mode 100644 index 32a103ee..00000000 --- a/packages/@postgrestools/backend-jsonrpc/tests/transport.test.mjs +++ /dev/null @@ -1,160 +0,0 @@ -import { describe, expect, it, mock } from "bun:test"; - -import { Transport } from "../src/transport"; -
-function makeMessage(body) { - const content = JSON.stringify(body); - return Buffer.from( - `Content-Length: ${content.length}\r\nContent-Type: application/vscode-jsonrpc;charset=utf-8\r\n\r\n${content}`, - ); -} - -describe("Transport Layer", () => { - it("should encode requests into the socket", async () => { - let onData = null; - const socket = { - on(event, fn) { - expect(event).toBe("data"); - onData = fn; - }, - write: mock(), - destroy: mock(), - }; - - const transport = new Transport(socket); - - const result = transport.request("method", "params"); - - expect(socket.write).toHaveBeenCalledWith( - makeMessage({ - jsonrpc: "2.0", - id: 0, - method: "method", - params: "params", - }), - ); - - onData( - makeMessage({ - jsonrpc: "2.0", - id: 0, - result: "result", - }), - ); - - const response = await result; - expect(response).toBe("result"); - - transport.destroy(); - expect(socket.destroy).toHaveBeenCalledOnce(); - }); - - it("should throw on missing Content-Length headers", async () => { - let onData = null; - const socket = { - on(event, fn) { - expect(event).toBe("data"); - onData = fn; - }, - write: mock(), - destroy: mock(), - }; - - const transport = new Transport(socket); - - expect(() => onData(Buffer.from("\r\n"))).toThrowError( - "incoming message from the remote workspace is missing the Content-Length header", - ); - - transport.destroy(); - expect(socket.destroy).toHaveBeenCalledOnce(); - }); - - it("should throw on missing colon token", async () => { - let onData = null; - const socket = { - on(event, fn) { - expect(event).toBe("data"); - onData = fn; - }, - write: mock(), - destroy: mock(), - }; - - const transport = new Transport(socket); - - expect(() => onData(Buffer.from("Content-Length\r\n"))).toThrowError( - 'could not find colon token in "Content-Length\r\n"', - ); - - transport.destroy(); - expect(socket.destroy).toHaveBeenCalledOnce(); - }); - - it("should throw on invalid Content-Type", async () => { - let onData = null; - const socket = { - on(event, fn) { - expect(event).toBe("data"); - onData = fn; - }, - write: mock(), - destroy: mock(), - }; - - const transport = new Transport(socket); - - expect(() => - onData(Buffer.from("Content-Type: text/plain\r\n")), - ).toThrowError( - 'invalid value for Content-Type expected "application/vscode-jsonrpc", got "text/plain"', - ); - - transport.destroy(); - expect(socket.destroy).toHaveBeenCalledOnce(); - }); - - it("should throw on unknown request ID", async () => { - let onData = null; - const socket = { - on(event, fn) { - expect(event).toBe("data"); - onData = fn; - }, - write: mock(), - destroy: mock(), - }; - - const transport = new Transport(socket); - - expect(() => - onData(makeMessage({ jsonrpc: "2.0", id: 0, result: "result" })), - ).toThrowError( - "could not find any pending request matching RPC response ID 0", - ); - - transport.destroy(); - expect(socket.destroy).toHaveBeenCalledOnce(); - }); - - it("should throw on invalid messages", async () => { - let onData = null; - const socket = { - on(event, fn) { - expect(event).toBe("data"); - onData = fn; - }, - write: mock(), - destroy: mock(), - }; - - const transport = new Transport(socket); - - expect(() => onData(makeMessage({}))).toThrowError( - 'failed to deserialize incoming message from remote workspace, "{}" is not a valid JSON-RPC message body', - ); - - transport.destroy(); - expect(socket.destroy).toHaveBeenCalledOnce(); - }); -}); diff --git a/packages/@postgrestools/backend-jsonrpc/tests/workspace.test.mjs 
b/packages/@postgrestools/backend-jsonrpc/tests/workspace.test.mjs deleted file mode 100644 index c83d5e44..00000000 --- a/packages/@postgrestools/backend-jsonrpc/tests/workspace.test.mjs +++ /dev/null @@ -1,54 +0,0 @@ -import { resolve } from "node:path"; -import { fileURLToPath } from "node:url"; -import { describe, expect, it } from "vitest"; - -import { createWorkspaceWithBinary } from "../dist"; - -describe("Workspace API", () => { - it("should process remote requests", async () => { - const extension = process.platform === "win32" ? ".exe" : ""; - const command = resolve( - fileURLToPath(import.meta.url), - "../../../../..", - `target/release/postgrestools${extension}`, - ); - - const workspace = await createWorkspaceWithBinary(command); - await workspace.openFile({ - path: { - path: "test.sql", - was_written: false, - kind: ["Handleable"], - }, - content: "select 1 from", - version: 0, - }); - - const { diagnostics } = await workspace.pullDiagnostics({ - only: [], - skip: [], - max_diagnostics: 100, - categories: [], - path: { - path: "test.sql", - was_written: false, - kind: ["Handleable"], - }, - }); - - expect(diagnostics).toHaveLength(1); - expect(diagnostics[0].description).toBe( - "Invalid statement: syntax error at end of input", - ); - - await workspace.closeFile({ - path: { - path: "test.sql", - was_written: false, - kind: ["Handleable"], - }, - }); - - workspace.destroy(); - }); -}); diff --git a/packages/@postgrestools/postgrestools/bin/postgrestools b/packages/@postgrestools/postgrestools/bin/postgrestools deleted file mode 100755 index 63461f0c..00000000 --- a/packages/@postgrestools/postgrestools/bin/postgrestools +++ /dev/null @@ -1,46 +0,0 @@ -#!/usr/bin/env node -const { platform, arch, env } = process; - -/** - * platform and arch are values injected into the node runtime. - * We use the values documented on https://nodejs.org. - */ -const PLATFORMS = { - win32: { - x64: "@postgrestools/cli-x86_64-windows-msvc/postgrestools.exe", - arm64: "@postgrestools/cli-aarch64-windows-msvc/postgrestools.exe", - }, - darwin: { - x64: "@postgrestools/cli-x86_64-apple-darwin/postgrestools", - arm64: "@postgrestools/cli-aarch64-apple-darwin/postgrestools", - }, - linux: { - x64: "@postgrestools/cli-x86_64-linux-gnu/postgrestools", - arm64: "@postgrestools/cli-aarch64-linux-gnu/postgrestools", - }, -}; - -const binPath = env.POSTGRESTOOLS_BINARY || PLATFORMS?.[platform]?.[arch]; - -if (binPath) { - const result = require("child_process").spawnSync( - require.resolve(binPath), - process.argv.slice(2), - { - shell: false, - stdio: "inherit", - env, - } - ); - - if (result.error) { - throw result.error; - } - - process.exitCode = result.status; -} else { - console.error( - "The PostgresTools CLI package doesn't ship with prebuilt binaries for your platform yet. Please file an issue in the main repository." 
- ); - process.exitCode = 1; -} diff --git a/packages/@postgrestools/postgrestools/package.json b/packages/@postgrestools/postgrestools/package.json deleted file mode 100644 index 5bd8601e..00000000 --- a/packages/@postgrestools/postgrestools/package.json +++ /dev/null @@ -1,40 +0,0 @@ -{ - "name": "@postgrestools/postgrestools", - "version": "", - "bin": { - "postgrestools": "bin/postgrestools" - }, - "repository": { - "type": "git", - "url": "git+https://github.com/supabase-community/postgres-language-server.git", - "directory": "packages/@postgrestools/postgrestools" - }, - "author": "Supabase Community", - "contributors": [ - { - "name": "Philipp Steinrötter", - "url": "https://github.com/psteinroe" - }, - { - "name": "Julian Domke", - "url": "https://github.com/juleswritescode" - } - ], - "license": "MIT or Apache-2.0", - "description": "A collection of language tools and a Language Server Protocol (LSP) implementation for Postgres, focusing on developer experience and reliable SQL tooling.", - "files": ["bin/postgrestools", "schema.json"], - "engines": { - "node": ">=20" - }, - "publishConfig": { - "provenance": true - }, - "optionalDependencies": { - "@postgrestools/cli-x86_64-windows-msvc": "", - "@postgrestools/cli-aarch64-windows-msvc": "", - "@postgrestools/cli-x86_64-apple-darwin": "", - "@postgrestools/cli-aarch64-apple-darwin": "", - "@postgrestools/cli-x86_64-linux-gnu": "", - "@postgrestools/cli-aarch64-linux-gnu": "" - } -} diff --git a/packages/@postgrestools/postgrestools/scripts/generate-packages.mjs b/packages/@postgrestools/postgrestools/scripts/generate-packages.mjs deleted file mode 100644 index 34193a92..00000000 --- a/packages/@postgrestools/postgrestools/scripts/generate-packages.mjs +++ /dev/null @@ -1,257 +0,0 @@ -import assert from "node:assert"; -import * as fs from "node:fs"; -import { pipeline } from "node:stream"; -import { resolve } from "node:path"; -import { fileURLToPath } from "node:url"; -import { promisify } from "node:util"; -const streamPipeline = promisify(pipeline); - -const CLI_ROOT = resolve(fileURLToPath(import.meta.url), "../.."); -const PACKAGES_POSTGRESTOOLS_ROOT = resolve(CLI_ROOT, ".."); -const POSTGRESTOOLS_ROOT = resolve(PACKAGES_POSTGRESTOOLS_ROOT, "../.."); -const SUPPORTED_PLATFORMS = [ - "pc-windows-msvc", - "apple-darwin", - "unknown-linux-gnu", -]; -const MANIFEST_PATH = resolve(CLI_ROOT, "package.json"); -const SUPPORTED_ARCHITECTURES = ["x86_64", "aarch64"]; - -async function downloadSchema(releaseTag, githubToken) { - const assetUrl = `https://github.com/supabase-community/postgres-language-server/releases/download/${releaseTag}/schema.json`; - - const response = await fetch(assetUrl.trim(), { - headers: { - Authorization: `token ${githubToken}`, - Accept: "application/octet-stream", - }, - }); - - if (!response.ok) { - throw new Error(`Failed to Fetch Asset from ${assetUrl}`); - } - - // download to root. 
- const fileStream = fs.createWriteStream( - resolve(POSTGRESTOOLS_ROOT, "schema.json"), - ); - - await streamPipeline(response.body, fileStream); - - console.log(`Downloaded schema for ${releaseTag}`); -} - -async function downloadBinary(platform, arch, os, releaseTag, githubToken) { - const buildName = getBuildName(platform, arch); - - const assetUrl = `https://github.com/supabase-community/postgres-language-server/releases/download/${releaseTag}/${buildName}`; - - const response = await fetch(assetUrl.trim(), { - headers: { - Authorization: `token ${githubToken}`, - Accept: "application/octet-stream", - }, - }); - - if (!response.ok) { - const error = await response.text(); - throw new Error( - `Failed to Fetch Asset from ${assetUrl} (Reason: ${error})`, - ); - } - - // just download to root. - const fileStream = fs.createWriteStream(getBinarySource(platform, arch, os)); - - await streamPipeline(response.body, fileStream); - - console.log(`Downloaded asset for ${buildName} (v${releaseTag})`); -} - -async function writeManifest(packagePath, version) { - const manifestPath = resolve( - PACKAGES_POSTGRESTOOLS_ROOT, - packagePath, - "package.json", - ); - - const manifestData = JSON.parse( - fs.readFileSync(manifestPath).toString("utf-8"), - ); - - const nativePackages = SUPPORTED_PLATFORMS.flatMap((platform) => - SUPPORTED_ARCHITECTURES.map((arch) => [ - getPackageName(platform, arch), - version, - ]), - ); - - manifestData.version = version; - manifestData.optionalDependencies = Object.fromEntries(nativePackages); - - console.log(`Update manifest ${manifestPath}`); - const content = JSON.stringify(manifestData, null, 2); - - /** - * writeFileSync seemed to not work reliably? - */ - await new Promise((res, rej) => { - fs.writeFile(manifestPath, content, (e) => (e ? rej(e) : res())); - }); -} - -async function makePackageDir(platform, arch) { - const buildName = getBuildName(platform, arch); - const packageRoot = resolve(PACKAGES_POSTGRESTOOLS_ROOT, buildName); - - await new Promise((res, rej) => { - fs.mkdir(packageRoot, {}, (e) => (e ? rej(e) : res())); - }); -} - -function copyBinaryToNativePackage(platform, arch, os) { - // Update the package.json manifest - const buildName = getBuildName(platform, arch); - const packageRoot = resolve(PACKAGES_POSTGRESTOOLS_ROOT, buildName); - const packageName = getPackageName(platform, arch); - - const { version, license, repository, engines } = rootManifest(); - - /** - * We need to map rust triplets to NPM-known values. - * Otherwise, npm will abort the package installation. - */ - const npm_arch = arch === "aarch64" ? 
"arm64" : "x64"; - let libc = undefined; - let npm_os = undefined; - - switch (os) { - case "linux": { - libc = "gnu"; - npm_os = "linux"; - break; - } - case "windows": { - libc = "msvc"; - npm_os = "win32"; - break; - } - case "darwin": { - libc = undefined; - npm_os = "darwin"; - break; - } - default: { - throw new Error(`Unsupported os: ${os}`); - } - } - - const manifest = JSON.stringify( - { - name: packageName, - version, - license, - repository, - engines, - os: [npm_os], - cpu: [npm_arch], - libc, - }, - null, - 2, - ); - - const ext = getBinaryExt(os); - const manifestPath = resolve(packageRoot, "package.json"); - console.info(`Update manifest ${manifestPath}`); - fs.writeFileSync(manifestPath, manifest); - - // Copy the CLI binary - const binarySource = getBinarySource(platform, arch, os); - const binaryTarget = resolve(packageRoot, `postgrestools${ext}`); - - if (!fs.existsSync(binarySource)) { - console.error( - `Source for binary for ${buildName} not found at: ${binarySource}`, - ); - process.exit(1); - } - - console.info(`Copy binary ${binaryTarget}`); - fs.copyFileSync(binarySource, binaryTarget); - fs.chmodSync(binaryTarget, 0o755); -} - -function copySchemaToNativePackage(platform, arch) { - const buildName = getBuildName(platform, arch); - const packageRoot = resolve(PACKAGES_POSTGRESTOOLS_ROOT, buildName); - - const schemaSrc = resolve(POSTGRESTOOLS_ROOT, "schema.json"); - const schemaTarget = resolve(packageRoot, "schema.json"); - - if (!fs.existsSync(schemaSrc)) { - console.error(`schema.json not found at: ${schemaSrc}`); - process.exit(1); - } - - console.info("Copying schema.json"); - fs.copyFileSync(schemaSrc, schemaTarget); - fs.chmodSync(schemaTarget, 0o666); -} - -const rootManifest = () => - JSON.parse(fs.readFileSync(MANIFEST_PATH).toString("utf-8")); - -function getBinaryExt(os) { - return os === "windows" ? ".exe" : ""; -} - -function getBinarySource(platform, arch, os) { - const ext = getBinaryExt(os); - return resolve(POSTGRESTOOLS_ROOT, `${getBuildName(platform, arch)}${ext}`); -} - -function getBuildName(platform, arch) { - return `postgrestools_${arch}-${platform}`; -} - -function getPackageName(platform, arch) { - // trim the "unknown" from linux and the "pc" from windows - const platformName = platform.split("-").slice(-2).join("-"); - return `@postgrestools/cli-${arch}-${platformName}`; -} - -function getOs(platform) { - return platform.split("-").find((_, idx) => idx === 1); -} - -function getVersion(releaseTag, isPrerelease) { - return releaseTag + (isPrerelease ? 
"-rc" : ""); -} - -(async function main() { - const githubToken = process.env.GITHUB_TOKEN; - const releaseTag = process.env.RELEASE_TAG; - assert(githubToken, "GITHUB_TOKEN not defined!"); - assert(releaseTag, "RELEASE_TAG not defined!"); - - const isPrerelease = process.env.PRERELEASE === "true"; - - await downloadSchema(releaseTag, githubToken); - const version = getVersion(releaseTag, isPrerelease); - await writeManifest("postgrestools", version); - await writeManifest("backend-jsonrpc", version); - - for (const platform of SUPPORTED_PLATFORMS) { - const os = getOs(platform); - - for (const arch of SUPPORTED_ARCHITECTURES) { - await makePackageDir(platform, arch); - await downloadBinary(platform, arch, os, releaseTag, githubToken); - copyBinaryToNativePackage(platform, arch, os); - copySchemaToNativePackage(platform, arch); - } - } - - process.exit(0); -})(); diff --git a/postgrestools.jsonc b/postgrestools.jsonc deleted file mode 100644 index 325c7861..00000000 --- a/postgrestools.jsonc +++ /dev/null @@ -1,27 +0,0 @@ -{ - "$schema": "./docs/schemas/latest/schema.json", - "vcs": { - "enabled": false, - "clientKind": "git", - "useIgnoreFile": false - }, - "files": { - "ignore": [] - }, - "linter": { - "enabled": true, - "rules": { - "recommended": true - } - }, - // YOU CAN COMMENT ME OUT :) - "db": { - "host": "127.0.0.1", - "port": 5432, - "username": "postgres", - "password": "postgres", - "database": "postgres", - "connTimeoutSecs": 10, - "allowStatementExecutionsAgainst": ["127.0.0.1/*", "localhost/*"] - } -} diff --git a/pyproject.toml b/pyproject.toml deleted file mode 100644 index 73ee0fa8..00000000 --- a/pyproject.toml +++ /dev/null @@ -1,10 +0,0 @@ -[project] -name = "postgrestools" -version = "0.1.0" -description = "A collection of language tools and a Language Server Protocol (LSP) implementation for Postgres, focusing on developer experience and reliable SQL tooling." -readme = "README.md" -requires-python = ">=3.13" -dependencies = [ - "mkdocs>=1.6.1", - "mkdocs-github-admonitions-plugin>=0.0.3", -] diff --git a/rule_sources/index.html b/rule_sources/index.html new file mode 100644 index 00000000..b04e719e --- /dev/null +++ b/rule_sources/index.html @@ -0,0 +1,166 @@ + + + + + + + + Rule Sources - Postgres Language Server + + + + + + + + + + + + + +
Rule Sources

Exclusive rules

Rules from other sources

Squawk

| Squawk Rule Name       | Rule Name           |
| ---------------------- | ------------------- |
| adding-required-field  | addingRequiredField |
| ban-drop-column        | banDropColumn       |
| ban-drop-not-null      | banDropNotNull      |
| ban-drop-table         | banDropTable        |
+ + + + + + + + + + diff --git a/rules/adding-required-field/index.html b/rules/adding-required-field/index.html new file mode 100644 index 00000000..082d7578 --- /dev/null +++ b/rules/adding-required-field/index.html @@ -0,0 +1,153 @@ + + + + + + + + addingRequiredField - Postgres Language Server + + + + + + + + + + + + + +

addingRequiredField

+

Diagnostic Category: lint/safety/addingRequiredField

+

Since: vnext

+

Sources:
- Inspired by: squawk/adding-required-field

+

Description

+

Adding a new column that is NOT NULL and has no default value to an existing table effectively makes it required.

+

This will fail immediately upon running for any populated table. Furthermore, old application code that is unaware of this column will fail to INSERT into this table.

+

Make new columns optional initially by omitting the NOT NULL constraint until all existing data and application code have been updated. Once no NULL values are written to or persisted in the database, set the column to NOT NULL.
Alternatively, if you are using Postgres 11 or later, add a DEFAULT value that is not volatile. This allows the column to keep its NOT NULL constraint.

+
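As a rough sketch, the staged approach might look like this (the table and column names are placeholders that mirror the examples below):

alter table test add column count int;             -- 1. add as nullable ("test"/"count" are placeholders)
update test set count = 0;                         -- 2. backfill existing rows
alter table test alter column count set not null;  -- 3. enforce once no NULLs remain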

Invalid

+

alter table test add column count int not null;

+

Valid in Postgres >= 11

+

alter table test add column count int not null default 0;

+

How to configure

+

+{
+  "linter": {
+    "rules": {
+      "safety": {
+        "addingRequiredField": "error"
+      }
+    }
+  }
+}
+ + + + + + + + + + diff --git a/rules/ban-drop-column/index.html b/rules/ban-drop-column/index.html new file mode 100644 index 00000000..d2f19f71 --- /dev/null +++ b/rules/ban-drop-column/index.html @@ -0,0 +1,164 @@ + + + + + + + + banDropColumn - Postgres Language Server + + + + + + + + + + + + + +

banDropColumn

+

Diagnostic Category: lint/safety/banDropColumn

+

Since: vnext

+
+

Note

+

This rule is recommended. A diagnostic error will appear when linting your code.

+
+

Sources:
- Inspired by: squawk/ban-drop-column

+

Description

+

Dropping a column may break existing clients.

+

Update your application code to no longer read or write the column.

+

You can leave the column as nullable or delete the column once queries no longer select or modify the column.

+
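A minimal sketch of that staged removal, assuming a hypothetical legacy_code column (the point is that the DROP ships only after no queries reference the column):

-- 1. deploy application code that no longer reads or writes legacy_code (a placeholder name)
-- 2. optionally leave the column nullable in the meantime
alter table test alter column legacy_code drop not null;
-- 3. drop the column in a later migration, once nothing selects or modifies it
alter table test drop column legacy_code;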

Examples

+

Invalid

+
alter table test drop column id;
+
+
code-block.sql lint/safety/banDropColumn ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+  × Dropping a column may break existing clients.
+
+  i You can leave the column as nullable or delete the column once queries no longer select or modify the column.
+
+
+
+

How to configure

+

+{
+  "linter": {
+    "rules": {
+      "safety": {
+        "banDropColumn": "error"
+      }
+    }
+  }
+}
+ + + + + + + + + + diff --git a/rules/ban-drop-not-null/index.html b/rules/ban-drop-not-null/index.html new file mode 100644 index 00000000..83d28327 --- /dev/null +++ b/rules/ban-drop-not-null/index.html @@ -0,0 +1,164 @@ + + + + + + + + banDropNotNull - Postgres Language Server + + + + + + + + + + + + + +

banDropNotNull

+

Diagnostic Category: lint/safety/banDropNotNull

+

Since: vnext

+
+

Note

+

This rule is recommended. A diagnostic error will appear when linting your code.

+
+

Sources:
- Inspired by: squawk/ban-drop-not-null

+

Description

+

Dropping a NOT NULL constraint may break existing clients.

+

Application code or code written in procedural languages like PL/SQL or PL/pgSQL may not expect NULL values for the column that was previously guaranteed to be NOT NULL and therefore may fail to process them correctly.

+

You can consider using a marker value that represents NULL. Alternatively, create a new table allowing NULL values, copy the data from the old table, and create a view that filters NULL values.

+
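A sketch of the "new table plus filtering view" alternative, assuming the users/email schema from the example below (the _v2 and view names are placeholders):

create table users_v2 (like users including all);       -- copy the schema (placeholder name)
alter table users_v2 alter column email drop not null;  -- allow NULLs in the new table
insert into users_v2 select * from users;               -- copy the data
create view users_with_email as                         -- old clients keep a NOT NULL view
  select * from users_v2 where email is not null;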

Examples

+

Invalid

+
alter table users alter column email drop not null;
+
+
code-block.sql lint/safety/banDropNotNull ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+  × Dropping a NOT NULL constraint may break existing clients.
+
+  i Consider using a marker value that represents NULL. Alternatively, create a new table allowing NULL values, copy the data from the old table, and create a view that filters NULL values.
+
+
+
+

How to configure

+

+{
+  "linter": {
+    "rules": {
+      "safety": {
+        "banDropNotNull": "error"
+      }
+    }
+  }
+}
+ + + + + + + + + + diff --git a/rules/ban-drop-table/index.html b/rules/ban-drop-table/index.html new file mode 100644 index 00000000..8b5d2cf7 --- /dev/null +++ b/rules/ban-drop-table/index.html @@ -0,0 +1,165 @@ + + + + + + + + banDropTable - Postgres Language Server + + + + + + + + + + + + + +

banDropTable

+

Diagnostic Category: lint/safety/banDropTable

+

Since: vnext

+
+

Note

+

This rule is recommended. A diagnostic error will appear when linting your code.

+
+

Sources:
- Inspired by: squawk/ban-drop-table

+

Description

+

Dropping a table may break existing clients.

+

Update your application code to no longer read or write the table.

+

Once the table is no longer needed, you can delete it by running the command "DROP TABLE mytable;".

+

This command will permanently remove the table from the database and all its contents. +Be sure to back up the table before deleting it, just in case you need to restore it in the future.

+
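For example, a cautious migration might keep a copy before dropping (a sketch using the example table below; the backup name is a placeholder):

create table some_table_backup as table some_table;  -- preserve the data first
drop table some_table;                               -- then remove the original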

Examples

+
drop table some_table;
+
+
code-block.sql lint/safety/banDropTable ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+  × Dropping a table may break existing clients.
+
+  i Update your application code to no longer read or write the table, and only then delete the table. Be sure to create a backup.
+
+
+
+

How to configure

+

+{
+  "linter": {
+    "rules": {
+      "safety": {
+        "banDropTable": "error"
+      }
+    }
+  }
+}
+
+ + + + + + + + + + diff --git a/rules/index.html b/rules/index.html new file mode 100644 index 00000000..de4b5882 --- /dev/null +++ b/rules/index.html @@ -0,0 +1,177 @@ + + + + + + + + Rules - Postgres Language Server + + + + + + + + + + + + + +

Rules

+

Below is the list of rules supported by Postgres Language Tools, divided by group. Here's a legend of the emojis:

+
  • The icon ✅ indicates that the rule is part of the recommended rules.

Safety

+

Rules that detect potential safety issues in your code.

| Rule name           | Description                                                                                                        | Properties |
| ------------------- | ------------------------------------------------------------------------------------------------------------------ | ---------- |
| addingRequiredField | Adding a new column that is NOT NULL and has no default value to an existing table effectively makes it required.   |            |
| banDropColumn       | Dropping a column may break existing clients.                                                                        | ✅         |
| banDropNotNull      | Dropping a NOT NULL constraint may break existing clients.                                                           | ✅         |
| banDropTable        | Dropping a table may break existing clients.                                                                         | ✅         |
+ + + + + + + + + + diff --git a/rust-toolchain.toml b/rust-toolchain.toml deleted file mode 100644 index 4501f2a1..00000000 --- a/rust-toolchain.toml +++ /dev/null @@ -1,3 +0,0 @@ -[toolchain] -profile = "default" -channel = "1.86.0" diff --git a/rustfmt.toml b/rustfmt.toml deleted file mode 100644 index 53e6b41c..00000000 --- a/rustfmt.toml +++ /dev/null @@ -1,2 +0,0 @@ -newline_style = "Unix" -edition = "2024" diff --git a/docs/schemas/0.0.0/schema.json b/schemas/0.0.0/schema.json similarity index 100% rename from docs/schemas/0.0.0/schema.json rename to schemas/0.0.0/schema.json diff --git a/docs/schemas/latest/schema.json b/schemas/latest/schema.json similarity index 100% rename from docs/schemas/latest/schema.json rename to schemas/latest/schema.json diff --git a/sitemap.xml b/sitemap.xml new file mode 100644 index 00000000..d5b06085 --- /dev/null +++ b/sitemap.xml @@ -0,0 +1,47 @@ + + + + https://pgtools.dev/ + 2025-05-25 + + + https://pgtools.dev/checking_migrations/ + 2025-05-25 + + + https://pgtools.dev/cli_reference/ + 2025-05-25 + + + https://pgtools.dev/env_variables/ + 2025-05-25 + + + https://pgtools.dev/rule_sources/ + 2025-05-25 + + + https://pgtools.dev/rules/ + 2025-05-25 + + + https://pgtools.dev/troubleshooting/ + 2025-05-25 + + + https://pgtools.dev/rules/adding-required-field/ + 2025-05-25 + + + https://pgtools.dev/rules/ban-drop-column/ + 2025-05-25 + + + https://pgtools.dev/rules/ban-drop-not-null/ + 2025-05-25 + + + https://pgtools.dev/rules/ban-drop-table/ + 2025-05-25 + + \ No newline at end of file diff --git a/sitemap.xml.gz b/sitemap.xml.gz new file mode 100644 index 00000000..ab870a28 Binary files /dev/null and b/sitemap.xml.gz differ diff --git a/taplo.toml b/taplo.toml deleted file mode 100644 index c11e58c2..00000000 --- a/taplo.toml +++ /dev/null @@ -1,7 +0,0 @@ -include = ["Cargo.toml", "crates/**/Cargo.toml", ".cargo/config.toml", "xtask/**/*.toml", "knope.toml"] -exclude = ["./benchmark/**/*.toml"] - -[formatting] -align_entries = true -column_width = 120 -reorder_keys = true diff --git a/test-db/seed.sql b/test-db/seed.sql deleted file mode 100644 index 97f641b9..00000000 --- a/test-db/seed.sql +++ /dev/null @@ -1,6 +0,0 @@ -create table public.contact ( - id serial primary key not null, - created_at timestamp with time zone not null default now(), - username text -); - diff --git a/test.sql b/test.sql deleted file mode 100644 index 88b7310d..00000000 --- a/test.sql +++ /dev/null @@ -1,11 +0,0 @@ -create table - unknown_users (id serial primary key, address text, email text); - -drop table unknown_users; - -select - * -from - unknown_users; - -sel 1; diff --git a/troubleshooting/index.html b/troubleshooting/index.html new file mode 100644 index 00000000..d7e8ed09 --- /dev/null +++ b/troubleshooting/index.html @@ -0,0 +1,143 @@ + + + + + + + + Troubleshooting - Postgres Language Server + + + + + + + + + + + + + +

Troubleshooting

+

This guide describes how to resolve common issues with Postgres Language Tools.

+

Incorrect and/or misplaced diagnostics

+

We employ pragmatic solutions to split a SQL file into statements, and they might be incorrect in certain cases. If you see diagnostics like Unexpected token in the middle of a valid statement, make sure to either end all statements with a semicolon or separate them with double newlines. If there are still issues, it's most likely a bug in the change handler that goes away after reopening the file. Please still file an issue with sample code so we can fix the root cause.
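For example, the following file splits cleanly because every statement is explicitly terminated (a minimal illustration; the contact table mirrors this repository's seed data):

create table contact (id serial primary key, username text);

select username from contact;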

+ + + + + + + + + + diff --git a/tsconfig.json b/tsconfig.json deleted file mode 100644 index 238655f2..00000000 --- a/tsconfig.json +++ /dev/null @@ -1,27 +0,0 @@ -{ - "compilerOptions": { - // Enable latest features - "lib": ["ESNext", "DOM"], - "target": "ESNext", - "module": "ESNext", - "moduleDetection": "force", - "jsx": "react-jsx", - "allowJs": true, - - // Bundler mode - "moduleResolution": "bundler", - "allowImportingTsExtensions": true, - "verbatimModuleSyntax": true, - "noEmit": true, - - // Best practices - "strict": true, - "skipLibCheck": true, - "noFallthroughCasesInSwitch": true, - - // Some stricter flags (disabled by default) - "noUnusedLocals": false, - "noUnusedParameters": false, - "noPropertyAccessFromIndexSignature": false - } -} diff --git a/uv.lock b/uv.lock deleted file mode 100644 index 8ec36cbc..00000000 --- a/uv.lock +++ /dev/null @@ -1,257 +0,0 @@ -version = 1 -revision = 1 -requires-python = ">=3.13" - -[[package]] -name = "click" -version = "8.1.8" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "colorama", marker = "sys_platform == 'win32'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/b9/2e/0090cbf739cee7d23781ad4b89a9894a41538e4fcf4c31dcdd705b78eb8b/click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a", size = 226593 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/7e/d4/7ebdbd03970677812aac39c869717059dbb71a4cfc033ca6e5221787892c/click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2", size = 98188 }, -] - -[[package]] -name = "colorama" -version = "0.4.6" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 }, -] - -[[package]] -name = "ghp-import" -version = "2.1.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "python-dateutil" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/d9/29/d40217cbe2f6b1359e00c6c307bb3fc876ba74068cbab3dde77f03ca0dc4/ghp-import-2.1.0.tar.gz", hash = "sha256:9c535c4c61193c2df8871222567d7fd7e5014d835f97dc7b7439069e2413d343", size = 10943 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f7/ec/67fbef5d497f86283db54c22eec6f6140243aae73265799baaaa19cd17fb/ghp_import-2.1.0-py3-none-any.whl", hash = "sha256:8337dd7b50877f163d4c0289bc1f1c7f127550241988d568c1db512c4324a619", size = 11034 }, -] - -[[package]] -name = "jinja2" -version = "3.1.5" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "markupsafe" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/af/92/b3130cbbf5591acf9ade8708c365f3238046ac7cb8ccba6e81abccb0ccff/jinja2-3.1.5.tar.gz", hash = "sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb", size = 244674 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/bd/0f/2ba5fbcd631e3e88689309dbe978c5769e883e4b84ebfe7da30b43275c5a/jinja2-3.1.5-py3-none-any.whl", hash = 
"sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb", size = 134596 }, -] - -[[package]] -name = "markdown" -version = "3.7" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/54/28/3af612670f82f4c056911fbbbb42760255801b3068c48de792d354ff4472/markdown-3.7.tar.gz", hash = "sha256:2ae2471477cfd02dbbf038d5d9bc226d40def84b4fe2986e49b59b6b472bbed2", size = 357086 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/3f/08/83871f3c50fc983b88547c196d11cf8c3340e37c32d2e9d6152abe2c61f7/Markdown-3.7-py3-none-any.whl", hash = "sha256:7eb6df5690b81a1d7942992c97fad2938e956e79df20cbc6186e9c3a77b1c803", size = 106349 }, -] - -[[package]] -name = "markupsafe" -version = "3.0.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b2/97/5d42485e71dfc078108a86d6de8fa46db44a1a9295e89c5d6d4a06e23a62/markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", size = 20537 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/83/0e/67eb10a7ecc77a0c2bbe2b0235765b98d164d81600746914bebada795e97/MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd", size = 14274 }, - { url = "https://files.pythonhosted.org/packages/2b/6d/9409f3684d3335375d04e5f05744dfe7e9f120062c9857df4ab490a1031a/MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430", size = 12352 }, - { url = "https://files.pythonhosted.org/packages/d2/f5/6eadfcd3885ea85fe2a7c128315cc1bb7241e1987443d78c8fe712d03091/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094", size = 24122 }, - { url = "https://files.pythonhosted.org/packages/0c/91/96cf928db8236f1bfab6ce15ad070dfdd02ed88261c2afafd4b43575e9e9/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396", size = 23085 }, - { url = "https://files.pythonhosted.org/packages/c2/cf/c9d56af24d56ea04daae7ac0940232d31d5a8354f2b457c6d856b2057d69/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79", size = 22978 }, - { url = "https://files.pythonhosted.org/packages/2a/9f/8619835cd6a711d6272d62abb78c033bda638fdc54c4e7f4272cf1c0962b/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a", size = 24208 }, - { url = "https://files.pythonhosted.org/packages/f9/bf/176950a1792b2cd2102b8ffeb5133e1ed984547b75db47c25a67d3359f77/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca", size = 23357 }, - { url = "https://files.pythonhosted.org/packages/ce/4f/9a02c1d335caabe5c4efb90e1b6e8ee944aa245c1aaaab8e8a618987d816/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c", size = 23344 }, - { url = "https://files.pythonhosted.org/packages/ee/55/c271b57db36f748f0e04a759ace9f8f759ccf22b4960c270c78a394f58be/MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = 
"sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1", size = 15101 }, - { url = "https://files.pythonhosted.org/packages/29/88/07df22d2dd4df40aba9f3e402e6dc1b8ee86297dddbad4872bd5e7b0094f/MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f", size = 15603 }, - { url = "https://files.pythonhosted.org/packages/62/6a/8b89d24db2d32d433dffcd6a8779159da109842434f1dd2f6e71f32f738c/MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c", size = 14510 }, - { url = "https://files.pythonhosted.org/packages/7a/06/a10f955f70a2e5a9bf78d11a161029d278eeacbd35ef806c3fd17b13060d/MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb", size = 12486 }, - { url = "https://files.pythonhosted.org/packages/34/cf/65d4a571869a1a9078198ca28f39fba5fbb910f952f9dbc5220afff9f5e6/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c", size = 25480 }, - { url = "https://files.pythonhosted.org/packages/0c/e3/90e9651924c430b885468b56b3d597cabf6d72be4b24a0acd1fa0e12af67/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d", size = 23914 }, - { url = "https://files.pythonhosted.org/packages/66/8c/6c7cf61f95d63bb866db39085150df1f2a5bd3335298f14a66b48e92659c/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe", size = 23796 }, - { url = "https://files.pythonhosted.org/packages/bb/35/cbe9238ec3f47ac9a7c8b3df7a808e7cb50fe149dc7039f5f454b3fba218/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5", size = 25473 }, - { url = "https://files.pythonhosted.org/packages/e6/32/7621a4382488aa283cc05e8984a9c219abad3bca087be9ec77e89939ded9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a", size = 24114 }, - { url = "https://files.pythonhosted.org/packages/0d/80/0985960e4b89922cb5a0bac0ed39c5b96cbc1a536a99f30e8c220a996ed9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9", size = 24098 }, - { url = "https://files.pythonhosted.org/packages/82/78/fedb03c7d5380df2427038ec8d973587e90561b2d90cd472ce9254cf348b/MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6", size = 15208 }, - { url = "https://files.pythonhosted.org/packages/4f/65/6079a46068dfceaeabb5dcad6d674f5f5c61a6fa5673746f42a9f4c233b3/MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f", size = 15739 }, -] - -[[package]] -name = "mergedeep" -version = "1.3.4" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/3a/41/580bb4006e3ed0361b8151a01d324fb03f420815446c7def45d02f74c270/mergedeep-1.3.4.tar.gz", hash = "sha256:0096d52e9dad9939c3d975a774666af186eda617e6ca84df4c94dec30004f2a8", size = 4661 } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/2c/19/04f9b178c2d8a15b076c8b5140708fa6ffc5601fb6f1e975537072df5b2a/mergedeep-1.3.4-py3-none-any.whl", hash = "sha256:70775750742b25c0d8f36c55aed03d24c3384d17c951b3175d898bd778ef0307", size = 6354 }, -] - -[[package]] -name = "mkdocs" -version = "1.6.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "click" }, - { name = "colorama", marker = "sys_platform == 'win32'" }, - { name = "ghp-import" }, - { name = "jinja2" }, - { name = "markdown" }, - { name = "markupsafe" }, - { name = "mergedeep" }, - { name = "mkdocs-get-deps" }, - { name = "packaging" }, - { name = "pathspec" }, - { name = "pyyaml" }, - { name = "pyyaml-env-tag" }, - { name = "watchdog" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/bc/c6/bbd4f061bd16b378247f12953ffcb04786a618ce5e904b8c5a01a0309061/mkdocs-1.6.1.tar.gz", hash = "sha256:7b432f01d928c084353ab39c57282f29f92136665bdd6abf7c1ec8d822ef86f2", size = 3889159 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/22/5b/dbc6a8cddc9cfa9c4971d59fb12bb8d42e161b7e7f8cc89e49137c5b279c/mkdocs-1.6.1-py3-none-any.whl", hash = "sha256:db91759624d1647f3f34aa0c3f327dd2601beae39a366d6e064c03468d35c20e", size = 3864451 }, -] - -[[package]] -name = "mkdocs-get-deps" -version = "0.2.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "mergedeep" }, - { name = "platformdirs" }, - { name = "pyyaml" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/98/f5/ed29cd50067784976f25ed0ed6fcd3c2ce9eb90650aa3b2796ddf7b6870b/mkdocs_get_deps-0.2.0.tar.gz", hash = "sha256:162b3d129c7fad9b19abfdcb9c1458a651628e4b1dea628ac68790fb3061c60c", size = 10239 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/9f/d4/029f984e8d3f3b6b726bd33cafc473b75e9e44c0f7e80a5b29abc466bdea/mkdocs_get_deps-0.2.0-py3-none-any.whl", hash = "sha256:2bf11d0b133e77a0dd036abeeb06dec8775e46efa526dc70667d8863eefc6134", size = 9521 }, -] - -[[package]] -name = "mkdocs-github-admonitions-plugin" -version = "0.0.3" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "mkdocs" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/65/13/a2b2b81604481569982fdaf51f0746f320df303efbd13d7b74fbf7b2c3a4/mkdocs_github_admonitions_plugin-0.0.3.tar.gz", hash = "sha256:4fd3ca88157c18c5f0cc4420c1a7f73ed1ed3f1886f41d6ce869932e90f38c48", size = 3998 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d2/87/650f2cbd07f142034d84357ac651586748032546287ba70e90244180b92c/mkdocs_github_admonitions_plugin-0.0.3-py3-none-any.whl", hash = "sha256:cb06f56e5b51e5d7b22fcbb4ab632079e3082b7f37bdbeb20cc9fd8a7c5e1657", size = 5043 }, -] - -[[package]] -name = "packaging" -version = "24.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d0/63/68dbb6eb2de9cb10ee4c9c14a0148804425e13c4fb20d61cce69f53106da/packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f", size = 163950 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/88/ef/eb23f262cca3c0c4eb7ab1933c3b1f03d021f2c48f54763065b6f0e321be/packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759", size = 65451 }, -] - -[[package]] -name = "pathspec" -version = "0.12.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191 }, -] - -[[package]] -name = "platformdirs" -version = "4.3.6" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/13/fc/128cc9cb8f03208bdbf93d3aa862e16d376844a14f9a0ce5cf4507372de4/platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907", size = 21302 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/3c/a6/bc1012356d8ece4d66dd75c4b9fc6c1f6650ddd5991e421177d9f8f671be/platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb", size = 18439 }, -] - -[[package]] -name = "postgrestools" -version = "0.1.0" -source = { virtual = "." } -dependencies = [ - { name = "mkdocs" }, - { name = "mkdocs-github-admonitions-plugin" }, -] - -[package.metadata] -requires-dist = [ - { name = "mkdocs", specifier = ">=1.6.1" }, - { name = "mkdocs-github-admonitions-plugin", specifier = ">=0.0.3" }, -] - -[[package]] -name = "python-dateutil" -version = "2.9.0.post0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "six" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892 }, -] - -[[package]] -name = "pyyaml" -version = "6.0.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ef/e3/3af305b830494fa85d95f6d95ef7fa73f2ee1cc8ef5b495c7c3269fb835f/PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba", size = 181309 }, - { url = "https://files.pythonhosted.org/packages/45/9f/3b1c20a0b7a3200524eb0076cc027a970d320bd3a6592873c85c92a08731/PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1", size = 171679 }, - { url = "https://files.pythonhosted.org/packages/7c/9a/337322f27005c33bcb656c655fa78325b730324c78620e8328ae28b64d0c/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133", size = 733428 }, - { url = "https://files.pythonhosted.org/packages/a3/69/864fbe19e6c18ea3cc196cbe5d392175b4cf3d5d0ac1403ec3f2d237ebb5/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484", size = 763361 }, - { url = "https://files.pythonhosted.org/packages/04/24/b7721e4845c2f162d26f50521b825fb061bc0a5afcf9a386840f23ea19fa/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5", size = 759523 }, - { url = "https://files.pythonhosted.org/packages/2b/b2/e3234f59ba06559c6ff63c4e10baea10e5e7df868092bf9ab40e5b9c56b6/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc", size = 726660 }, - { url = "https://files.pythonhosted.org/packages/fe/0f/25911a9f080464c59fab9027482f822b86bf0608957a5fcc6eaac85aa515/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652", size = 751597 }, - { url = "https://files.pythonhosted.org/packages/14/0d/e2c3b43bbce3cf6bd97c840b46088a3031085179e596d4929729d8d68270/PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183", size = 140527 }, - { url = "https://files.pythonhosted.org/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446 }, -] - -[[package]] -name = "pyyaml-env-tag" -version = "0.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "pyyaml" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/fb/8e/da1c6c58f751b70f8ceb1eb25bc25d524e8f14fe16edcce3f4e3ba08629c/pyyaml_env_tag-0.1.tar.gz", hash = "sha256:70092675bda14fdec33b31ba77e7543de9ddc88f2e5b99160396572d11525bdb", size = 5631 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/5a/66/bbb1dd374f5c870f59c5bb1db0e18cbe7fa739415a24cbd95b2d1f5ae0c4/pyyaml_env_tag-0.1-py3-none-any.whl", hash = "sha256:af31106dec8a4d68c60207c1886031cbf839b68aa7abccdb19868200532c2069", size = 3911 }, -] - -[[package]] -name = "six" -version = "1.17.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050 }, -] - -[[package]] -name = "watchdog" -version = "6.0.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/db/7d/7f3d619e951c88ed75c6037b246ddcf2d322812ee8ea189be89511721d54/watchdog-6.0.0.tar.gz", hash = "sha256:9ddf7c82fda3ae8e24decda1338ede66e1c99883db93711d8fb941eaa2d8c282", size = 131220 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/68/98/b0345cabdce2041a01293ba483333582891a3bd5769b08eceb0d406056ef/watchdog-6.0.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:490ab2ef84f11129844c23fb14ecf30ef3d8a6abafd3754a6f75ca1e6654136c", size = 96480 }, - { url = "https://files.pythonhosted.org/packages/85/83/cdf13902c626b28eedef7ec4f10745c52aad8a8fe7eb04ed7b1f111ca20e/watchdog-6.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:76aae96b00ae814b181bb25b1b98076d5fc84e8a53cd8885a318b42b6d3a5134", size = 88451 }, - { url = "https://files.pythonhosted.org/packages/fe/c4/225c87bae08c8b9ec99030cd48ae9c4eca050a59bf5c2255853e18c87b50/watchdog-6.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a175f755fc2279e0b7312c0035d52e27211a5bc39719dd529625b1930917345b", size = 89057 }, - { url = "https://files.pythonhosted.org/packages/a9/c7/ca4bf3e518cb57a686b2feb4f55a1892fd9a3dd13f470fca14e00f80ea36/watchdog-6.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:7607498efa04a3542ae3e05e64da8202e58159aa1fa4acddf7678d34a35d4f13", size = 79079 }, - { url = "https://files.pythonhosted.org/packages/5c/51/d46dc9332f9a647593c947b4b88e2381c8dfc0942d15b8edc0310fa4abb1/watchdog-6.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:9041567ee8953024c83343288ccc458fd0a2d811d6a0fd68c4c22609e3490379", size = 79078 }, - { url = "https://files.pythonhosted.org/packages/d4/57/04edbf5e169cd318d5f07b4766fee38e825d64b6913ca157ca32d1a42267/watchdog-6.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:82dc3e3143c7e38ec49d61af98d6558288c415eac98486a5c581726e0737c00e", size = 79076 }, - { url = "https://files.pythonhosted.org/packages/ab/cc/da8422b300e13cb187d2203f20b9253e91058aaf7db65b74142013478e66/watchdog-6.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:212ac9b8bf1161dc91bd09c048048a95ca3a4c4f5e5d4a7d1b1a7d5752a7f96f", size = 79077 }, - { url = "https://files.pythonhosted.org/packages/2c/3b/b8964e04ae1a025c44ba8e4291f86e97fac443bca31de8bd98d3263d2fcf/watchdog-6.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:e3df4cbb9a450c6d49318f6d14f4bbc80d763fa587ba46ec86f99f9e6876bb26", size = 79078 }, - { url = "https://files.pythonhosted.org/packages/62/ae/a696eb424bedff7407801c257d4b1afda455fe40821a2be430e173660e81/watchdog-6.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:2cce7cfc2008eb51feb6aab51251fd79b85d9894e98ba847408f662b3395ca3c", size = 79077 }, - { url = "https://files.pythonhosted.org/packages/b5/e8/dbf020b4d98251a9860752a094d09a65e1b436ad181faf929983f697048f/watchdog-6.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:20ffe5b202af80ab4266dcd3e91aae72bf2da48c0d33bdb15c66658e685e94e2", size = 79078 }, - { url = "https://files.pythonhosted.org/packages/07/f6/d0e5b343768e8bcb4cda79f0f2f55051bf26177ecd5651f84c07567461cf/watchdog-6.0.0-py3-none-win32.whl", hash = "sha256:07df1fdd701c5d4c8e55ef6cf55b8f0120fe1aef7ef39a1c6fc6bc2e606d517a", size = 79065 }, - { url = "https://files.pythonhosted.org/packages/db/d9/c495884c6e548fce18a8f40568ff120bc3a4b7b99813081c8ac0c936fa64/watchdog-6.0.0-py3-none-win_amd64.whl", hash = "sha256:cbafb470cf848d93b5d013e2ecb245d4aa1c8fd0504e863ccefa32445359d680", size = 79070 }, - { url = "https://files.pythonhosted.org/packages/33/e8/e40370e6d74ddba47f002a32919d91310d6074130fe4e17dabcafc15cbf1/watchdog-6.0.0-py3-none-win_ia64.whl", hash = "sha256:a1914259fa9e1454315171103c6a30961236f508b9b623eae470268bbcc6a22f", size = 79067 }, -] diff --git a/xtask/Cargo.toml b/xtask/Cargo.toml deleted file mode 100644 index fe2e0ec6..00000000 --- a/xtask/Cargo.toml +++ /dev/null @@ -1,17 +0,0 @@ -[package] -edition = "2021" -license = "MIT OR Apache-2.0" -name = "xtask" -publish = false -rust-version.workspace = true -version = "0.0.0" - -[dependencies] -anyhow = "1.0.62" -flate2 = "1.0.24" -time = { version = "0.3", default-features = false } -write-json = "0.1.2" -xflags = "0.3.0" -xshell = "0.2.2" -zip = { version = "0.6", default-features = false, features = ["deflate", "time"] } -# Avoid 
adding more dependencies to this crate diff --git a/xtask/codegen/Cargo.toml b/xtask/codegen/Cargo.toml deleted file mode 100644 index b5497b2c..00000000 --- a/xtask/codegen/Cargo.toml +++ /dev/null @@ -1,21 +0,0 @@ -[package] -edition = "2021" -name = "xtask_codegen" -publish = false -version = "0.0.0" - -[dependencies] -anyhow = { workspace = true } -biome_js_factory = { workspace = true } -biome_js_formatter = { workspace = true } -biome_js_syntax = { workspace = true } -biome_rowan = { workspace = true } -biome_string_case = { workspace = true } -bpaf = { workspace = true, features = ["derive"] } -pgt_analyse = { workspace = true } -pgt_analyser = { workspace = true } -pgt_workspace = { workspace = true, features = ["schema"] } -proc-macro2 = { workspace = true, features = ["span-locations"] } -pulldown-cmark = { version = "0.12.2" } -quote = "1.0.36" -xtask = { path = '../', version = "0.0" } diff --git a/xtask/codegen/src/generate_analyser.rs b/xtask/codegen/src/generate_analyser.rs deleted file mode 100644 index 398e6f3a..00000000 --- a/xtask/codegen/src/generate_analyser.rs +++ /dev/null @@ -1,234 +0,0 @@ -use std::path::PathBuf; -use std::{collections::BTreeMap, path::Path}; - -use anyhow::{Context, Ok, Result}; -use biome_string_case::Case; -use proc_macro2::TokenStream; -use quote::{format_ident, quote}; -use xtask::{glue::fs2, project_root}; - -pub fn generate_analyser() -> Result<()> { - generate_linter()?; - Ok(()) -} - -fn generate_linter() -> Result<()> { - let base_path = project_root().join("crates/pgt_analyser/src"); - let mut analysers = BTreeMap::new(); - generate_category("lint", &mut analysers, &base_path)?; - - generate_options(&base_path)?; - - update_linter_registry_builder(analysers) -} - -fn generate_options(base_path: &Path) -> Result<()> { - let mut rules_options = BTreeMap::new(); - let mut crates = vec![]; - for category in ["lint"] { - let category_path = base_path.join(category); - if !category_path.exists() { - continue; - } - let category_name = format_ident!("{}", filename(&category_path)?); - for group_path in list_entry_paths(&category_path)?.filter(|path| path.is_dir()) { - let group_name = format_ident!("{}", filename(&group_path)?.to_string()); - for rule_path in list_entry_paths(&group_path)?.filter(|path| !path.is_dir()) { - let rule_filename = filename(&rule_path)?; - let rule_name = Case::Pascal.convert(rule_filename); - let rule_module_name = format_ident!("{}", rule_filename); - let rule_name = format_ident!("{}", rule_name); - rules_options.insert(rule_filename.to_string(), quote! { - pub type #rule_name = <#category_name::#group_name::#rule_module_name::#rule_name as pgt_analyse::Rule>::Options; - }); - } - } - if category == "lint" { - crates.push(quote! { - use crate::lint; - }) - } - } - let rules_options = rules_options.values(); - let tokens = xtask::reformat(quote! { - #( #crates )* - - #( #rules_options )* - })?; - fs2::write(base_path.join("options.rs"), tokens)?; - - Ok(()) -} - -fn generate_category( - name: &'static str, - entries: &mut BTreeMap<&'static str, TokenStream>, - base_path: &Path, -) -> Result<()> { - let path = base_path.join(name); - - let mut groups = BTreeMap::new(); - for entry in fs2::read_dir(path)? { - let entry = entry?; - if !entry.file_type()?.is_dir() { - continue; - } - - let entry = entry.path(); - let file_name = entry - .file_stem() - .context("path has no file name")? 
- .to_str() - .context("could not convert file name to string")?; - - generate_group(name, file_name, base_path)?; - - let module_name = format_ident!("{}", file_name); - let group_name = format_ident!("{}", Case::Pascal.convert(file_name)); - - groups.insert( - file_name.to_string(), - ( - quote! { - pub mod #module_name; - }, - quote! { - self::#module_name::#group_name - }, - ), - ); - } - - let key = name; - let module_name = format_ident!("{name}"); - - let category_name = Case::Pascal.convert(name); - let category_name = format_ident!("{category_name}"); - - let kind = match name { - "lint" => format_ident!("Lint"), - _ => panic!("unimplemented analyser category {name:?}"), - }; - - entries.insert( - key, - quote! { - registry.record_category::(); - }, - ); - - let (modules, paths): (Vec<_>, Vec<_>) = groups.into_values().unzip(); - let tokens = xtask::reformat(quote! { - #( #modules )* - ::pgt_analyse::declare_category! { - pub #category_name { - kind: #kind, - groups: [ - #( #paths, )* - ] - } - } - })?; - - fs2::write(base_path.join(format!("{name}.rs")), tokens)?; - - Ok(()) -} - -fn generate_group(category: &'static str, group: &str, base_path: &Path) -> Result<()> { - let path = base_path.join(category).join(group); - - let mut rules = BTreeMap::new(); - for entry in fs2::read_dir(path)? { - let entry = entry?.path(); - let file_name = entry - .file_stem() - .context("path has no file name")? - .to_str() - .context("could not convert file name to string")?; - - let rule_type = Case::Pascal.convert(file_name); - - let key = rule_type.clone(); - let module_name = format_ident!("{}", file_name); - let rule_type = format_ident!("{}", rule_type); - - rules.insert( - key, - ( - quote! { - pub mod #module_name; - }, - quote! { - self::#module_name::#rule_type - }, - ), - ); - } - - let group_name = format_ident!("{}", Case::Pascal.convert(group)); - - let (rule_imports, rule_names): (Vec<_>, Vec<_>) = rules.into_values().unzip(); - - let (import_macro, use_macro) = match category { - "lint" => ( - quote!( - use pgt_analyse::declare_lint_group; - ), - quote!(declare_lint_group), - ), - _ => panic!("Category not supported: {category}"), - }; - let tokens = xtask::reformat(quote! { - #import_macro - - #(#rule_imports)* - - #use_macro! { - pub #group_name { - name: #group, - rules: [ - #(#rule_names,)* - ] - } - } - })?; - - fs2::write(base_path.join(category).join(format!("{group}.rs")), tokens)?; - - Ok(()) -} - -fn update_linter_registry_builder(rules: BTreeMap<&'static str, TokenStream>) -> Result<()> { - let path = project_root().join("crates/pgt_analyser/src/registry.rs"); - - let categories = rules.into_values(); - - let tokens = xtask::reformat(quote! { - use pgt_analyse::RegistryVisitor; - - pub fn visit_registry(registry: &mut V) { - #( #categories )* - } - })?; - - fs2::write(path, tokens)?; - - Ok(()) -} - -/// Returns file paths of the given directory. -fn list_entry_paths(dir: &Path) -> Result + use<>> { - Ok(fs2::read_dir(dir) - .context("A directory is expected")? - .filter_map(|entry| entry.ok()) - .map(|entry| entry.path())) -} - -/// Returns filename if any. -fn filename(file: &Path) -> Result<&str> { - file.file_stem() - .context("path has no file name")? 
- .to_str() - .context("could not convert file name to string") -} diff --git a/xtask/codegen/src/generate_bindings.rs b/xtask/codegen/src/generate_bindings.rs deleted file mode 100644 index 4b66fd00..00000000 --- a/xtask/codegen/src/generate_bindings.rs +++ /dev/null @@ -1,435 +0,0 @@ -use crate::update; -use biome_js_factory::make; -use biome_js_formatter::{context::JsFormatOptions, format_node}; -use biome_js_syntax::{ - AnyJsBinding, AnyJsBindingPattern, AnyJsCallArgument, AnyJsDeclaration, AnyJsDeclarationClause, - AnyJsExportClause, AnyJsExpression, AnyJsFormalParameter, AnyJsImportClause, - AnyJsLiteralExpression, AnyJsModuleItem, AnyJsName, AnyJsNamedImportSpecifier, - AnyJsObjectMember, AnyJsObjectMemberName, AnyJsParameter, AnyJsStatement, AnyTsName, - AnyTsReturnType, AnyTsType, AnyTsTypeMember, JsFileSource, TriviaPieceKind, T, -}; -use biome_rowan::AstNode; -use biome_string_case::Case; -use pgt_workspace::workspace_types::{generate_type, methods, ModuleQueue}; -use xtask::{project_root, Mode, Result}; - -pub fn generate_bindings(mode: Mode) -> Result<()> { - let bindings_path = - project_root().join("packages/@postgrestools/backend-jsonrpc/src/workspace.ts"); - let methods = methods(); - - let mut declarations = Vec::new(); - let mut member_definitions = Vec::with_capacity(methods.len()); - let mut member_declarations = Vec::with_capacity(methods.len()); - let mut queue = ModuleQueue::default(); - - for method in &methods { - let params = generate_type(&mut declarations, &mut queue, &method.params); - let result = generate_type(&mut declarations, &mut queue, &method.result); - - let camel_case = Case::Camel.convert(method.name); - - member_definitions.push(AnyTsTypeMember::TsMethodSignatureTypeMember( - make::ts_method_signature_type_member( - AnyJsObjectMemberName::JsLiteralMemberName(make::js_literal_member_name( - make::ident(&camel_case), - )), - make::js_parameters( - make::token(T!['(']), - make::js_parameter_list( - Some(AnyJsParameter::AnyJsFormalParameter( - AnyJsFormalParameter::JsFormalParameter( - make::js_formal_parameter( - make::js_decorator_list([]), - AnyJsBindingPattern::AnyJsBinding( - AnyJsBinding::JsIdentifierBinding( - make::js_identifier_binding(make::ident("params")), - ), - ), - ) - .with_type_annotation(make::ts_type_annotation( - make::token(T![:]), - params, - )) - .build(), - ), - )), - None, - ), - make::token(T![')']), - ), - ) - .with_return_type_annotation(make::ts_return_type_annotation( - make::token(T![:]), - AnyTsReturnType::AnyTsType(AnyTsType::TsReferenceType( - make::ts_reference_type(AnyTsName::JsReferenceIdentifier( - make::js_reference_identifier(make::ident("Promise")), - )) - .with_type_arguments(make::ts_type_arguments( - make::token(T![<]), - make::ts_type_argument_list(Some(result), None), - make::token(T![>]), - )) - .build(), - )), - )) - .build(), - )); - - member_declarations.push(AnyJsObjectMember::JsMethodObjectMember( - make::js_method_object_member( - AnyJsObjectMemberName::JsLiteralMemberName(make::js_literal_member_name( - make::ident(&camel_case), - )), - make::js_parameters( - make::token(T!['(']), - make::js_parameter_list( - Some(AnyJsParameter::AnyJsFormalParameter( - AnyJsFormalParameter::JsFormalParameter( - make::js_formal_parameter(make::js_decorator_list([]),AnyJsBindingPattern::AnyJsBinding( - AnyJsBinding::JsIdentifierBinding(make::js_identifier_binding( - make::ident("params"), - )), - )) - .build(), - ), - )), - None, - ), - make::token(T![')']), - ), - make::js_function_body( - make::token(T!['{']), - 
make::js_directive_list(None), - make::js_statement_list(Some(AnyJsStatement::JsReturnStatement( - make::js_return_statement(make::token(T![return])) - .with_argument(AnyJsExpression::JsCallExpression( - make::js_call_expression( - AnyJsExpression::JsStaticMemberExpression( - make::js_static_member_expression( - AnyJsExpression::JsIdentifierExpression( - make::js_identifier_expression( - make::js_reference_identifier(make::ident( - "transport", - )), - ), - ), - make::token(T![.]), - AnyJsName::JsName(make::js_name(make::ident( - "request", - ))), - ), - ), - make::js_call_arguments( - make::token(T!['(']), - make::js_call_argument_list( - [ - AnyJsCallArgument::AnyJsExpression( - AnyJsExpression::AnyJsLiteralExpression( - AnyJsLiteralExpression::JsStringLiteralExpression(make::js_string_literal_expression(make::js_string_literal(&format!("pgt/{}", method.name)))), - ), - ), - AnyJsCallArgument::AnyJsExpression( - AnyJsExpression::JsIdentifierExpression( - make::js_identifier_expression( - make::js_reference_identifier(make::ident( - "params", - )), - ), - ), - ), - ], - Some(make::token(T![,])), - ), - make::token(T![')']), - ), - ) - .build(), - )) - .build(), - ))), - make::token(T!['}']), - ), - ) - .build(), - )); - } - - let leading_comment = [ - ( - TriviaPieceKind::SingleLineComment, - "// Generated file, do not edit by hand, see `xtask/codegen`", - ), - (TriviaPieceKind::Newline, "\n"), - ]; - - let mut items = vec![AnyJsModuleItem::JsImport( - make::js_import( - make::token(T![import]).with_leading_trivia(leading_comment.into_iter()), - AnyJsImportClause::JsImportNamedClause( - make::js_import_named_clause( - make::js_named_import_specifiers( - make::token(T!['{']), - make::js_named_import_specifier_list( - Some(AnyJsNamedImportSpecifier::JsShorthandNamedImportSpecifier( - make::js_shorthand_named_import_specifier( - AnyJsBinding::JsIdentifierBinding(make::js_identifier_binding( - make::ident("Transport"), - )), - ) - .build(), - )), - None, - ), - make::token(T!['}']), - ), - make::token(T![from]), - make::js_module_source(make::js_string_literal("./transport")), - ) - .with_type_token(make::token(T![type])) - .build(), - ), - ) - .build(), - )]; - - items.extend(declarations.into_iter().map(|(decl, description)| { - let mut export = make::token(T![export]); - if let Some(description) = description { - let comment = format!("/**\n\t* {} \n\t */\n", description); - let trivia = vec![ - (TriviaPieceKind::Newline, "\n"), - (TriviaPieceKind::MultiLineComment, comment.as_str()), - (TriviaPieceKind::Newline, "\n"), - ]; - export = export.with_leading_trivia(trivia); - } - AnyJsModuleItem::JsExport(make::js_export( - make::js_decorator_list([]), - export, - AnyJsExportClause::AnyJsDeclarationClause(match decl { - AnyJsDeclaration::JsClassDeclaration(decl) => { - AnyJsDeclarationClause::JsClassDeclaration(decl) - } - AnyJsDeclaration::JsFunctionDeclaration(decl) => { - AnyJsDeclarationClause::JsFunctionDeclaration(decl) - } - AnyJsDeclaration::JsVariableDeclaration(decl) => { - AnyJsDeclarationClause::JsVariableDeclarationClause( - make::js_variable_declaration_clause(decl).build(), - ) - } - AnyJsDeclaration::TsDeclareFunctionDeclaration(decl) => { - AnyJsDeclarationClause::TsDeclareFunctionDeclaration(decl) - } - AnyJsDeclaration::TsEnumDeclaration(decl) => { - AnyJsDeclarationClause::TsEnumDeclaration(decl) - } - AnyJsDeclaration::TsExternalModuleDeclaration(decl) => { - AnyJsDeclarationClause::TsExternalModuleDeclaration(decl) - } - AnyJsDeclaration::TsGlobalDeclaration(decl) => { - 
AnyJsDeclarationClause::TsGlobalDeclaration(decl) - } - AnyJsDeclaration::TsImportEqualsDeclaration(decl) => { - AnyJsDeclarationClause::TsImportEqualsDeclaration(decl) - } - AnyJsDeclaration::TsInterfaceDeclaration(decl) => { - AnyJsDeclarationClause::TsInterfaceDeclaration(decl) - } - AnyJsDeclaration::TsModuleDeclaration(decl) => { - AnyJsDeclarationClause::TsModuleDeclaration(decl) - } - AnyJsDeclaration::TsTypeAliasDeclaration(decl) => { - AnyJsDeclarationClause::TsTypeAliasDeclaration(decl) - } - }), - )) - })); - - member_definitions.push(AnyTsTypeMember::TsMethodSignatureTypeMember( - make::ts_method_signature_type_member( - AnyJsObjectMemberName::JsLiteralMemberName(make::js_literal_member_name(make::ident( - "destroy", - ))), - make::js_parameters( - make::token(T!['(']), - make::js_parameter_list(None, None), - make::token(T![')']), - ), - ) - .with_return_type_annotation(make::ts_return_type_annotation( - make::token(T![:]), - AnyTsReturnType::AnyTsType(AnyTsType::TsVoidType(make::ts_void_type(make::token(T![ - void - ])))), - )) - .build(), - )); - - member_declarations.push(AnyJsObjectMember::JsMethodObjectMember( - make::js_method_object_member( - AnyJsObjectMemberName::JsLiteralMemberName(make::js_literal_member_name(make::ident( - "destroy", - ))), - make::js_parameters( - make::token(T!['(']), - make::js_parameter_list(None, None), - make::token(T![')']), - ), - make::js_function_body( - make::token(T!['{']), - make::js_directive_list(None), - make::js_statement_list(Some(AnyJsStatement::JsExpressionStatement( - make::js_expression_statement(AnyJsExpression::JsCallExpression( - make::js_call_expression( - AnyJsExpression::JsStaticMemberExpression( - make::js_static_member_expression( - AnyJsExpression::JsIdentifierExpression( - make::js_identifier_expression( - make::js_reference_identifier(make::ident("transport")), - ), - ), - make::token(T![.]), - AnyJsName::JsName(make::js_name(make::ident("destroy"))), - ), - ), - make::js_call_arguments( - make::token(T!['(']), - make::js_call_argument_list(None, None), - make::token(T![')']), - ), - ) - .build(), - )) - .build(), - ))), - make::token(T!['}']), - ), - ) - .build(), - )); - - // Export `PartialConfiguration` as `Configuration` for backwards compatibility. 
- items.push(AnyJsModuleItem::JsExport(make::js_export( - make::js_decorator_list([]), - make::token(T![export]), - AnyJsExportClause::AnyJsDeclarationClause(AnyJsDeclarationClause::TsTypeAliasDeclaration( - make::ts_type_alias_declaration( - make::token(T![type]), - make::ts_identifier_binding(make::ident("Configuration")), - make::token(T![=]), - AnyTsType::TsReferenceType( - make::ts_reference_type(AnyTsName::JsReferenceIdentifier( - make::js_reference_identifier(make::ident("PartialConfiguration")), - )) - .build(), - ), - ) - .build(), - )), - ))); - - items.push(AnyJsModuleItem::JsExport(make::js_export( - make::js_decorator_list([]), - make::token(T![export]), - AnyJsExportClause::AnyJsDeclarationClause(AnyJsDeclarationClause::TsInterfaceDeclaration( - make::ts_interface_declaration( - make::token(T![interface]), - make::ts_identifier_binding(make::ident("Workspace")), - make::token(T!['{']), - make::ts_type_member_list(member_definitions), - make::token(T!['}']), - ) - .build(), - )), - ))); - - let member_separators = (0..member_declarations.len()).map(|_| make::token(T![,])); - - items.push(AnyJsModuleItem::JsExport(make::js_export( - make::js_decorator_list([]), - make::token(T![export]), - AnyJsExportClause::AnyJsDeclarationClause(AnyJsDeclarationClause::JsFunctionDeclaration( - make::js_function_declaration( - make::token(T![function]), - AnyJsBinding::JsIdentifierBinding(make::js_identifier_binding(make::ident( - "createWorkspace", - ))), - make::js_parameters( - make::token(T!['(']), - make::js_parameter_list( - Some(AnyJsParameter::AnyJsFormalParameter( - AnyJsFormalParameter::JsFormalParameter( - make::js_formal_parameter( - make::js_decorator_list([]), - AnyJsBindingPattern::AnyJsBinding( - AnyJsBinding::JsIdentifierBinding( - make::js_identifier_binding(make::ident("transport")), - ), - ), - ) - .with_type_annotation(make::ts_type_annotation( - make::token(T![:]), - AnyTsType::TsReferenceType( - make::ts_reference_type(AnyTsName::JsReferenceIdentifier( - make::js_reference_identifier(make::ident("Transport")), - )) - .build(), - ), - )) - .build(), - ), - )), - None, - ), - make::token(T![')']), - ), - make::js_function_body( - make::token(T!['{']), - make::js_directive_list(None), - make::js_statement_list(Some(AnyJsStatement::JsReturnStatement( - make::js_return_statement(make::token(T![return])) - .with_argument(AnyJsExpression::JsObjectExpression( - make::js_object_expression( - make::token(T!['{']), - make::js_object_member_list( - member_declarations, - member_separators, - ), - make::token(T!['}']), - ), - )) - .build(), - ))), - make::token(T!['}']), - ), - ) - .with_return_type_annotation(make::ts_return_type_annotation( - make::token(T![:]), - AnyTsReturnType::AnyTsType(AnyTsType::TsReferenceType( - make::ts_reference_type(AnyTsName::JsReferenceIdentifier( - make::js_reference_identifier(make::ident("Workspace")), - )) - .build(), - )), - )) - .build(), - )), - ))); - - let module = make::js_module( - make::js_directive_list(None), - make::js_module_item_list(items), - make::eof(), - ) - .build(); - - let formatted = format_node(JsFormatOptions::new(JsFileSource::ts()), module.syntax()).unwrap(); - let printed = formatted.print().unwrap(); - let code = printed.into_code(); - - update(&bindings_path, &code, &mode)?; - - Ok(()) -} diff --git a/xtask/codegen/src/generate_configuration.rs b/xtask/codegen/src/generate_configuration.rs deleted file mode 100644 index 91ae304c..00000000 --- a/xtask/codegen/src/generate_configuration.rs +++ /dev/null @@ -1,732 +0,0 @@ 
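The generator above assembles `workspace.ts` node by node with `biome_js_factory`, and the shape of what it emits is easy to lose in the AST calls. Here is a minimal, string-templated sketch of the same output, not the real generator: the method names are invented stand-ins for what `pgt_workspace::workspace_types::methods()` returns, and the parameter/result types are collapsed to `unknown`.

```rust
// Sketch only: plain string templates instead of biome_js_factory AST building.
// The method names below are hypothetical; the real list comes from
// pgt_workspace::workspace_types::methods().

/// Convert a snake_case method name to camelCase, as the generator does
/// with biome_string_case's Case::Camel.
fn camel_case(snake: &str) -> String {
    let mut out = String::new();
    let mut upper_next = false;
    for c in snake.chars() {
        if c == '_' {
            upper_next = true;
        } else if upper_next {
            out.extend(c.to_uppercase());
            upper_next = false;
        } else {
            out.push(c);
        }
    }
    out
}

fn main() {
    for name in ["get_file_info", "pull_diagnostics"] {
        let camel = camel_case(name);
        // Interface member of the generated `Workspace` type.
        println!("\t{camel}(params: unknown): Promise<unknown>;");
        // Object member of `createWorkspace`, forwarding over JSON-RPC with
        // the same "pgt/{method}" route the AST version builds.
        println!("\t{camel}(params) {{ return transport.request(\"pgt/{name}\", params); }},");
    }
}
```

Building the bindings through Biome's factory instead of raw strings is what lets the generator format the result with `biome_js_formatter` and keep the emitted TypeScript syntactically valid by construction.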
-use crate::{to_capitalized, update}; -use biome_string_case::Case; -use pgt_analyse::{GroupCategory, RegistryVisitor, Rule, RuleCategory, RuleGroup, RuleMetadata}; -use proc_macro2::{Ident, Literal, Span, TokenStream}; -use pulldown_cmark::{Event, Parser, Tag, TagEnd}; -use quote::quote; -use std::collections::BTreeMap; -use std::path::Path; -use xtask::*; - -#[derive(Default)] -struct LintRulesVisitor { - groups: BTreeMap<&'static str, BTreeMap<&'static str, RuleMetadata>>, -} - -impl RegistryVisitor for LintRulesVisitor { - fn record_category(&mut self) { - if matches!(C::CATEGORY, RuleCategory::Lint) { - C::record_groups(self); - } - } - - fn record_rule(&mut self) - where - R: Rule + 'static, - { - self.groups - .entry(::NAME) - .or_default() - .insert(R::METADATA.name, R::METADATA); - } -} - -pub fn generate_rules_configuration(mode: Mode) -> Result<()> { - let linter_config_root = project_root().join("crates/pgt_configuration/src/analyser/linter"); - let push_rules_directory = project_root().join("crates/pgt_configuration/src/generated"); - - let mut lint_visitor = LintRulesVisitor::default(); - pgt_analyser::visit_registry(&mut lint_visitor); - - generate_for_groups( - lint_visitor.groups, - linter_config_root.as_path(), - push_rules_directory.as_path(), - &mode, - RuleCategory::Lint, - )?; - Ok(()) -} - -fn generate_for_groups( - groups: BTreeMap<&'static str, BTreeMap<&'static str, RuleMetadata>>, - root: &Path, - push_directory: &Path, - mode: &Mode, - kind: RuleCategory, -) -> Result<()> { - let mut struct_groups = Vec::with_capacity(groups.len()); - let mut group_pascal_idents = Vec::with_capacity(groups.len()); - let mut group_idents = Vec::with_capacity(groups.len()); - let mut group_strings = Vec::with_capacity(groups.len()); - let mut group_as_default_rules = Vec::with_capacity(groups.len()); - for (group, rules) in groups { - let group_pascal_ident = quote::format_ident!("{}", &Case::Pascal.convert(group)); - let group_ident = quote::format_ident!("{}", group); - - let (global_all, global_recommended) = { - ( - quote! { self.is_all_true() }, - quote! { !self.is_recommended_false() }, - ) - }; - group_as_default_rules.push(if kind == RuleCategory::Lint { - quote! { - if let Some(group) = self.#group_ident.as_ref() { - group.collect_preset_rules( - #global_all, - #global_recommended, - &mut enabled_rules, - ); - enabled_rules.extend(&group.get_enabled_rules()); - disabled_rules.extend(&group.get_disabled_rules()); - } else if #global_all { - enabled_rules.extend(#group_pascal_ident::all_rules_as_filters()); - } else if #global_recommended { - enabled_rules.extend(#group_pascal_ident::recommended_rules_as_filters()); - } - } - } else { - quote! { - if let Some(group) = self.#group_ident.as_ref() { - enabled_rules.extend(&group.get_enabled_rules()); - } - } - }); - - group_pascal_idents.push(group_pascal_ident); - group_idents.push(group_ident); - group_strings.push(Literal::string(group)); - struct_groups.push(generate_group_struct(group, &rules, kind)); - } - - let severity_fn = if kind == RuleCategory::Action { - quote! { - /// Given a category coming from [Diagnostic](pgt_diagnostics::Diagnostic), this function returns - /// the [Severity](pgt_diagnostics::Severity) associated to the rule, if the configuration changed it. - /// If the severity is off or not set, then the function returns the default severity of the rule: - /// [Severity::Error] for recommended rules and [Severity::Warning] for other rules. - /// - /// If not, the function returns [None]. 
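The doc comment above states the fallback contract, and the generated `get_severity_from_code` that implements it follows right after this aside. As a standalone illustration of the same lookup, assuming a category path of the `lint/{group}/{rule}` shape; the recommended set here is invented for the example:

```rust
// Standalone sketch of the severity lookup described above; not the generated code.
#[derive(Debug, PartialEq, Clone, Copy)]
enum Severity {
    Warning,
    Error,
}

fn severity_for(category: &str, recommended: &[&str]) -> Option<Severity> {
    let mut parts = category.split('/');
    let kind = parts.next()?; // "lint"
    let _group = parts.next()?; // e.g. "safety"
    let rule = parts.next()?; // e.g. "banDropColumn"
    if kind != "lint" {
        return None;
    }
    // No explicit configuration: fall back to the rule's default severity,
    // Error for recommended rules and Warning for everything else.
    Some(if recommended.contains(&rule) {
        Severity::Error
    } else {
        Severity::Warning
    })
}

fn main() {
    let recommended = ["banDropColumn"]; // hypothetical recommended set
    assert_eq!(
        severity_for("lint/safety/banDropColumn", &recommended),
        Some(Severity::Error)
    );
    assert_eq!(
        severity_for("lint/safety/banDropTable", &recommended),
        Some(Severity::Warning)
    );
}
```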
- pub fn get_severity_from_code(&self, category: &Category) -> Option { - let mut split_code = category.name().split('/'); - - let _lint = split_code.next(); - debug_assert_eq!(_lint, Some("assists")); - - let group = ::from_str(split_code.next()?).ok()?; - let rule_name = split_code.next()?; - let rule_name = Self::has_rule(group, rule_name)?; - match group { - #( - RuleGroup::#group_pascal_idents => self - .#group_idents - .as_ref() - .and_then(|group| group.get_rule_configuration(rule_name)) - .filter(|(level, _)| !matches!(level, RuleAssistPlainConfiguration::Off)) - .map(|(level, _)| level.into()) - )* - } - } - - } - } else { - quote! { - - /// Given a category coming from [Diagnostic](pgt_diagnostics::Diagnostic), this function returns - /// the [Severity](pgt_diagnostics::Severity) associated to the rule, if the configuration changed it. - /// If the severity is off or not set, then the function returns the default severity of the rule: - /// [Severity::Error] for recommended rules and [Severity::Warning] for other rules. - /// - /// If not, the function returns [None]. - pub fn get_severity_from_code(&self, category: &Category) -> Option { - let mut split_code = category.name().split('/'); - - let _lint = split_code.next(); - debug_assert_eq!(_lint, Some("lint")); - - let group = ::from_str(split_code.next()?).ok()?; - let rule_name = split_code.next()?; - let rule_name = Self::has_rule(group, rule_name)?; - let severity = match group { - #( - RuleGroup::#group_pascal_idents => self - .#group_idents - .as_ref() - .and_then(|group| group.get_rule_configuration(rule_name)) - .filter(|(level, _)| !matches!(level, RulePlainConfiguration::Off)) - .map_or_else(|| { - if #group_pascal_idents::is_recommended_rule(rule_name) { - Severity::Error - } else { - Severity::Warning - } - }, |(level, _)| level.into()), - )* - }; - Some(severity) - } - - } - }; - - let use_rule_configuration = if kind == RuleCategory::Action { - quote! { - use crate::analyser::{RuleAssistConfiguration, RuleAssistPlainConfiguration}; - use pgt_analyse::{RuleFilter, options::RuleOptions}; - } - } else { - quote! { - use crate::analyser::{RuleConfiguration, RulePlainConfiguration}; - use pgt_analyse::{RuleFilter, options::RuleOptions}; - } - }; - - let groups = if kind == RuleCategory::Action { - quote! 
{ - #use_rule_configuration - use biome_deserialize_macros::Merge; - use pgt_diagnostics::{Category, Severity}; - use rustc_hash::FxHashSet; - use serde::{Deserialize, Serialize}; - #[cfg(feature = "schema")] - use schemars::JsonSchema; - - #[derive(Clone, Copy, Debug, Eq, Hash, Merge, Ord, PartialEq, PartialOrd, serde::Deserialize, serde::Serialize)] - #[cfg_attr(feature = "schema", derive(JsonSchema))] - #[serde(rename_all = "camelCase")] - pub enum RuleGroup { - #( #group_pascal_idents ),* - } - impl RuleGroup { - pub const fn as_str(self) -> &'static str { - match self { - #( Self::#group_pascal_idents => #group_pascal_idents::GROUP_NAME, )* - } - } - } - impl std::str::FromStr for RuleGroup { - type Err = &'static str; - fn from_str(s: &str) -> Result { - match s { - #( #group_pascal_idents::GROUP_NAME => Ok(Self::#group_pascal_idents), )* - _ => Err("This rule group doesn't exist.") - } - } - } - - #[derive(Clone, Debug, Default, Deserialize, Eq, Merge, PartialEq, Serialize)] - #[cfg_attr(feature = "schema", derive(JsonSchema))] - #[serde(rename_all = "camelCase", deny_unknown_fields)] - pub struct Actions { - #( - #[serde(skip_serializing_if = "Option::is_none")] - pub #group_idents: Option<#group_pascal_idents>, - )* - } - - impl Actions { - /// Checks if the code coming from [pgt_diagnostics::Diagnostic] corresponds to a rule. - /// Usually the code is built like {group}/{rule_name} - pub fn has_rule( - group: RuleGroup, - rule_name: &str, - ) -> Option<&'static str> { - match group { - #( - RuleGroup::#group_pascal_idents => #group_pascal_idents::has_rule(rule_name), - )* - } - } - - #severity_fn - - /// It returns the enabled rules by default. - /// - /// The enabled rules are calculated from the difference with the disabled rules. - pub fn as_enabled_rules(&self) -> FxHashSet> { - let mut enabled_rules = FxHashSet::default(); - #( #group_as_default_rules )* - enabled_rules - } - } - - #( #struct_groups )* - - #[test] - fn test_order() { - #( - for items in #group_pascal_idents::GROUP_RULES.windows(2) { - assert!(items[0] < items[1], "{} < {}", items[0], items[1]); - } - )* - } - } - } else { - quote! { - #use_rule_configuration - use biome_deserialize_macros::Merge; - use pgt_diagnostics::{Category, Severity}; - use rustc_hash::FxHashSet; - use serde::{Deserialize, Serialize}; - #[cfg(feature = "schema")] - use schemars::JsonSchema; - - #[derive(Clone, Copy, Debug, Eq, Hash, Merge, Ord, PartialEq, PartialOrd, serde::Deserialize, serde::Serialize)] - #[cfg_attr(feature = "schema", derive(JsonSchema))] - #[serde(rename_all = "camelCase")] - pub enum RuleGroup { - #( #group_pascal_idents ),* - } - impl RuleGroup { - pub const fn as_str(self) -> &'static str { - match self { - #( Self::#group_pascal_idents => #group_pascal_idents::GROUP_NAME, )* - } - } - } - impl std::str::FromStr for RuleGroup { - type Err = &'static str; - fn from_str(s: &str) -> Result { - match s { - #( #group_pascal_idents::GROUP_NAME => Ok(Self::#group_pascal_idents), )* - _ => Err("This rule group doesn't exist.") - } - } - } - - #[derive(Clone, Debug, Default, Deserialize, Eq, Merge, PartialEq, Serialize)] - #[cfg_attr(feature = "schema", derive(JsonSchema))] - #[serde(rename_all = "camelCase", deny_unknown_fields)] - pub struct Rules { - /// It enables the lint rules recommended by Postgres Tools. `true` by default. - #[serde(skip_serializing_if = "Option::is_none")] - pub recommended: Option, - - /// It enables ALL rules. The rules that belong to `nursery` won't be enabled. 
- #[serde(skip_serializing_if = "Option::is_none")] - pub all: Option, - - #( - #[serde(skip_serializing_if = "Option::is_none")] - pub #group_idents: Option<#group_pascal_idents>, - )* - } - - impl Rules { - /// Checks if the code coming from [pgt_diagnostics::Diagnostic] corresponds to a rule. - /// Usually the code is built like {group}/{rule_name} - pub fn has_rule( - group: RuleGroup, - rule_name: &str, - ) -> Option<&'static str> { - match group { - #( - RuleGroup::#group_pascal_idents => #group_pascal_idents::has_rule(rule_name), - )* - } - } - - #severity_fn - - /// Ensure that `recommended` is set to `true` or implied. - pub fn set_recommended(&mut self) { - if self.all != Some(true) && self.recommended == Some(false) { - self.recommended = Some(true) - } - #( - if let Some(group) = &mut self.#group_idents { - group.recommended = None; - } - )* - } - - // Note: In top level, it is only considered _not_ recommended - // when the recommended option is false - pub(crate) const fn is_recommended_false(&self) -> bool { - matches!(self.recommended, Some(false)) - } - - pub(crate) const fn is_all_true(&self) -> bool { - matches!(self.all, Some(true)) - } - - /// It returns the enabled rules by default. - /// - /// The enabled rules are calculated from the difference with the disabled rules. - pub fn as_enabled_rules(&self) -> FxHashSet> { - let mut enabled_rules = FxHashSet::default(); - let mut disabled_rules = FxHashSet::default(); - #( #group_as_default_rules )* - - enabled_rules.difference(&disabled_rules).copied().collect() - } - } - - #( #struct_groups )* - - #[test] - fn test_order() { - #( - for items in #group_pascal_idents::GROUP_RULES.windows(2) { - assert!(items[0] < items[1], "{} < {}", items[0], items[1]); - } - )* - } - } - }; - - let push_rules = match kind { - RuleCategory::Lint => { - quote! { - use crate::analyser::linter::*; - use pgt_analyse::{AnalyserRules, MetadataRegistry}; - - pub fn push_to_analyser_rules( - rules: &Rules, - metadata: &MetadataRegistry, - analyser_rules: &mut AnalyserRules, - ) { - #( - if let Some(rules) = rules.#group_idents.as_ref() { - for rule_name in #group_pascal_idents::GROUP_RULES { - if let Some((_, Some(rule_options))) = rules.get_rule_configuration(rule_name) { - if let Some(rule_key) = metadata.find_rule(#group_strings, rule_name) { - analyser_rules.push_rule(rule_key, rule_options); - } - } - } - } - )* - } - } - } - RuleCategory::Action => { - quote! 
{ - use crate::analyser::assists::*; - use pgt_analyse::{AnalyserRules, MetadataRegistry}; - - pub fn push_to_analyser_assists( - rules: &Actions, - metadata: &MetadataRegistry, - analyser_rules: &mut AnalyserRules, - ) { - #( - if let Some(rules) = rules.#group_idents.as_ref() { - for rule_name in #group_pascal_idents::GROUP_RULES { - if let Some((_, Some(rule_options))) = rules.get_rule_configuration(rule_name) { - if let Some(rule_key) = metadata.find_rule(#group_strings, rule_name) { - analyser_rules.push_rule(rule_key, rule_options); - } - } - } - } - )* - } - } - } - RuleCategory::Transformation => unimplemented!(), - }; - - let configuration = groups.to_string(); - let push_rules = push_rules.to_string(); - - let file_name = match kind { - RuleCategory::Lint => &push_directory.join("linter.rs"), - RuleCategory::Action => &push_directory.join("assists.rs"), - RuleCategory::Transformation => unimplemented!(), - }; - - let path = if kind == RuleCategory::Action { - &root.join("actions.rs") - } else { - &root.join("rules.rs") - }; - update(path, &xtask::reformat(configuration)?, mode)?; - update(file_name, &xtask::reformat(push_rules)?, mode)?; - - Ok(()) -} - -fn generate_group_struct( - group: &str, - rules: &BTreeMap<&'static str, RuleMetadata>, - kind: RuleCategory, -) -> TokenStream { - let mut lines_recommended_rule = Vec::new(); - let mut lines_recommended_rule_as_filter = Vec::new(); - let mut lines_all_rule_as_filter = Vec::new(); - let mut lines_rule = Vec::new(); - let mut schema_lines_rules = Vec::new(); - let mut rule_enabled_check_line = Vec::new(); - let mut rule_disabled_check_line = Vec::new(); - let mut get_rule_configuration_line = Vec::new(); - - for (index, (rule, metadata)) in rules.iter().enumerate() { - let summary = { - let mut docs = String::new(); - let parser = Parser::new(metadata.docs); - for event in parser { - match event { - Event::Text(text) => { - docs.push_str(text.as_ref()); - } - Event::Code(text) => { - // Escape `[` and `<` to obtain valid Markdown - docs.push_str(text.replace('[', "\\[").replace('<', "\\<").as_ref()); - } - Event::SoftBreak => { - docs.push(' '); - } - - Event::Start(Tag::Paragraph) => {} - Event::End(TagEnd::Paragraph) => { - break; - } - - Event::Start(tag) => match tag { - Tag::Strong | Tag::Paragraph => { - continue; - } - - _ => panic!("Unimplemented tag {:?}", { tag }), - }, - - Event::End(tag) => match tag { - TagEnd::Strong | TagEnd::Paragraph => { - continue; - } - _ => panic!("Unimplemented tag {:?}", { tag }), - }, - - _ => { - panic!("Unimplemented event {:?}", { event }) - } - } - } - docs - }; - - let rule_position = Literal::u8_unsuffixed(index as u8); - let rule_identifier = quote::format_ident!("{}", Case::Snake.convert(rule)); - let rule_config_type = quote::format_ident!( - "{}", - if kind == RuleCategory::Action { - "RuleAssistConfiguration" - } else { - "RuleConfiguration" - } - ); - let rule_name = Ident::new(&to_capitalized(rule), Span::call_site()); - if metadata.recommended { - lines_recommended_rule_as_filter.push(quote! { - RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[#rule_position]) - }); - - lines_recommended_rule.push(quote! { - #rule - }); - } - lines_all_rule_as_filter.push(quote! { - RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[#rule_position]) - }); - lines_rule.push(quote! { - #rule - }); - let rule_option_type = quote! { - pgt_analyser::options::#rule_name - }; - let rule_option = quote! { Option<#rule_config_type<#rule_option_type>> }; - schema_lines_rules.push(quote! 
{ - #[doc = #summary] - #[serde(skip_serializing_if = "Option::is_none")] - pub #rule_identifier: #rule_option - }); - - rule_enabled_check_line.push(quote! { - if let Some(rule) = self.#rule_identifier.as_ref() { - if rule.is_enabled() { - index_set.insert(RuleFilter::Rule( - Self::GROUP_NAME, - Self::GROUP_RULES[#rule_position], - )); - } - } - }); - rule_disabled_check_line.push(quote! { - if let Some(rule) = self.#rule_identifier.as_ref() { - if rule.is_disabled() { - index_set.insert(RuleFilter::Rule( - Self::GROUP_NAME, - Self::GROUP_RULES[#rule_position], - )); - } - } - }); - - get_rule_configuration_line.push(quote! { - #rule => self.#rule_identifier.as_ref().map(|conf| (conf.level(), conf.get_options())) - }); - } - - let group_pascal_ident = Ident::new(&to_capitalized(group), Span::call_site()); - - let get_configuration_function = if kind == RuleCategory::Action { - quote! { - pub(crate) fn get_rule_configuration(&self, rule_name: &str) -> Option<(RuleAssistPlainConfiguration, Option)> { - match rule_name { - #( #get_rule_configuration_line ),*, - _ => None - } - } - } - } else { - quote! { - pub(crate) fn get_rule_configuration(&self, rule_name: &str) -> Option<(RulePlainConfiguration, Option)> { - match rule_name { - #( #get_rule_configuration_line ),*, - _ => None - } - } - } - }; - - if kind == RuleCategory::Action { - quote! { - #[derive(Clone, Debug, Default, Deserialize, Eq, Merge, PartialEq, Serialize)] - #[cfg_attr(feature = "schema", derive(JsonSchema))] - #[serde(rename_all = "camelCase", default, deny_unknown_fields)] - /// A list of rules that belong to this group - pub struct #group_pascal_ident { - - #( #schema_lines_rules ),* - } - - impl #group_pascal_ident { - - const GROUP_NAME: &'static str = #group; - pub(crate) const GROUP_RULES: &'static [&'static str] = &[ - #( #lines_rule ),* - ]; - - pub(crate) fn get_enabled_rules(&self) -> FxHashSet> { - let mut index_set = FxHashSet::default(); - #( #rule_enabled_check_line )* - index_set - } - - /// Checks if, given a rule name, matches one of the rules contained in this category - pub(crate) fn has_rule(rule_name: &str) -> Option<&'static str> { - Some(Self::GROUP_RULES[Self::GROUP_RULES.binary_search(&rule_name).ok()?]) - } - - #get_configuration_function - } - } - } else { - quote! { - #[derive(Clone, Debug, Default, Deserialize, Eq, Merge, PartialEq, Serialize)] - #[cfg_attr(feature = "schema", derive(JsonSchema))] - #[serde(rename_all = "camelCase", default, deny_unknown_fields)] - /// A list of rules that belong to this group - pub struct #group_pascal_ident { - /// It enables the recommended rules for this group - #[serde(skip_serializing_if = "Option::is_none")] - pub recommended: Option, - - /// It enables ALL rules for this group. 
- #[serde(skip_serializing_if = "Option::is_none")] - pub all: Option, - - #( #schema_lines_rules ),* - } - - impl #group_pascal_ident { - - const GROUP_NAME: &'static str = #group; - pub(crate) const GROUP_RULES: &'static [&'static str] = &[ - #( #lines_rule ),* - ]; - - const RECOMMENDED_RULES: &'static [&'static str] = &[ - #( #lines_recommended_rule ),* - ]; - - const RECOMMENDED_RULES_AS_FILTERS: &'static [RuleFilter<'static>] = &[ - #( #lines_recommended_rule_as_filter ),* - ]; - - const ALL_RULES_AS_FILTERS: &'static [RuleFilter<'static>] = &[ - #( #lines_all_rule_as_filter ),* - ]; - - /// Retrieves the recommended rules - pub(crate) fn is_recommended_true(&self) -> bool { - // we should inject recommended rules only when they are set to "true" - matches!(self.recommended, Some(true)) - } - - pub(crate) fn is_recommended_unset(&self) -> bool { - self.recommended.is_none() - } - - pub(crate) fn is_all_true(&self) -> bool { - matches!(self.all, Some(true)) - } - - pub(crate) fn is_all_unset(&self) -> bool { - self.all.is_none() - } - - pub(crate) fn get_enabled_rules(&self) -> FxHashSet> { - let mut index_set = FxHashSet::default(); - #( #rule_enabled_check_line )* - index_set - } - - pub(crate) fn get_disabled_rules(&self) -> FxHashSet> { - let mut index_set = FxHashSet::default(); - #( #rule_disabled_check_line )* - index_set - } - - /// Checks if, given a rule name, matches one of the rules contained in this category - pub(crate) fn has_rule(rule_name: &str) -> Option<&'static str> { - Some(Self::GROUP_RULES[Self::GROUP_RULES.binary_search(&rule_name).ok()?]) - } - - /// Checks if, given a rule name, it is marked as recommended - pub(crate) fn is_recommended_rule(rule_name: &str) -> bool { - Self::RECOMMENDED_RULES.contains(&rule_name) - } - - pub(crate) fn recommended_rules_as_filters() -> &'static [RuleFilter<'static>] { - Self::RECOMMENDED_RULES_AS_FILTERS - } - - pub(crate) fn all_rules_as_filters() -> &'static [RuleFilter<'static>] { - Self::ALL_RULES_AS_FILTERS - } - - /// Select preset rules - // Preset rules shouldn't populate disabled rules - // because that will make specific rules cannot be enabled later. - pub(crate) fn collect_preset_rules( - &self, - parent_is_all: bool, - parent_is_recommended: bool, - enabled_rules: &mut FxHashSet>, - ) { - // The order of the if-else branches MATTERS! 
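That warning is about real precedence semantics: an explicit or inherited `all` must win over `recommended`, and a group's own flags must override the top-level ones. A self-contained sketch of the decision encoded by the if-else that follows this aside (the flag names mirror the generated code; the string labels exist only for the demo):

```rust
// Sketch of the preset-selection precedence in `collect_preset_rules`.
// `all`/`recommended` are the group's own flags; `parent_*` are the top-level
// flags inherited from `Rules`.
fn preset(
    all: Option<bool>,
    recommended: Option<bool>,
    parent_is_all: bool,
    parent_is_recommended: bool,
) -> &'static str {
    let all_true = all == Some(true);
    let all_unset = all.is_none();
    let rec_true = recommended == Some(true);
    let rec_unset = recommended.is_none();
    // Same branch order as the generated code: `all` wins over `recommended`.
    if all_true || all_unset && parent_is_all {
        "all rules"
    } else if rec_true || rec_unset && all_unset && parent_is_recommended {
        "recommended rules"
    } else {
        "no preset"
    }
}

fn main() {
    // Group says nothing, top level enabled `all`: every rule is selected.
    assert_eq!(preset(None, None, true, true), "all rules");
    // Group opts into `recommended` even though the parent did not.
    assert_eq!(preset(None, Some(true), false, false), "recommended rules");
    // Explicit `all: true` on the group overrides a `recommended: false`.
    assert_eq!(preset(Some(true), Some(false), false, false), "all rules");
}
```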
- if self.is_all_true() || self.is_all_unset() && parent_is_all { - enabled_rules.extend(Self::all_rules_as_filters()); - } else if self.is_recommended_true() || self.is_recommended_unset() && self.is_all_unset() && parent_is_recommended { - enabled_rules.extend(Self::recommended_rules_as_filters()); - } - } - - #get_configuration_function - } - } - } -} diff --git a/xtask/codegen/src/generate_crate.rs b/xtask/codegen/src/generate_crate.rs deleted file mode 100644 index 0b92e82e..00000000 --- a/xtask/codegen/src/generate_crate.rs +++ /dev/null @@ -1,62 +0,0 @@ -use std::fs; -use xtask::*; - -fn cargo_template(name: &str) -> String { - format!( - r#" -[package] -authors.workspace = true -categories.workspace = true -description = "" -edition.workspace = true -homepage.workspace = true -keywords.workspace = true -license.workspace = true -name = "{name}" -repository.workspace = true -version = "0.0.0" - - -"# - ) -} - -// fn knope_template(name: &str) -> String { -// format!( -// r#" -// [packages.{name}] -// versioned_files = ["crates/{name}/Cargo.toml"] -// changelog = "crates/{name}/CHANGELOG.md" -// "# -// ) -// } - -pub fn generate_crate(crate_name: String) -> Result<()> { - let crate_root = project_root().join("crates").join(crate_name.as_str()); - let cargo_file = crate_root.join("Cargo.toml"); - // let knope_config = project_root().join("knope.toml"); - - // let mut knope_contents = fs::read_to_string(&knope_config)?; - fs::write(cargo_file, cargo_template(crate_name.as_str()))?; - // let start_content = "## Rust crates. DO NOT CHANGE!\n"; - // let end_content = "\n## End of crates. DO NOT CHANGE!"; - // debug_assert!( - // knope_contents.contains(start_content), - // "The file knope.toml must contains `{start_content}`" - // ); - // debug_assert!( - // knope_contents.contains(end_content), - // "The file knope.toml must contains `{end_content}`" - // ); - - // let file_start_index = knope_contents.find(start_content).unwrap() + start_content.len(); - // let file_end_index = knope_contents.find(end_content).unwrap(); - // let crates_text = &knope_contents[file_start_index..file_end_index]; - // let template = knope_template(crate_name.as_str()); - // let new_crates_text: Vec<_> = crates_text.lines().chain(Some(&template[..])).collect(); - // let new_crates_text = new_crates_text.join("\n"); - // - // knope_contents.replace_range(file_start_index..file_end_index, &new_crates_text); - // fs::write(knope_config, knope_contents)?; - Ok(()) -} diff --git a/xtask/codegen/src/generate_new_analyser_rule.rs b/xtask/codegen/src/generate_new_analyser_rule.rs deleted file mode 100644 index 6fecdff7..00000000 --- a/xtask/codegen/src/generate_new_analyser_rule.rs +++ /dev/null @@ -1,152 +0,0 @@ -use biome_string_case::Case; -use bpaf::Bpaf; -use std::str::FromStr; -use xtask::project_root; - -#[derive(Debug, Clone, Bpaf)] -pub enum Category { - /// Lint rules - Lint, -} - -impl FromStr for Category { - type Err = &'static str; - - fn from_str(s: &str) -> std::result::Result { - match s { - "lint" => Ok(Self::Lint), - _ => Err("Not supported"), - } - } -} - -fn generate_rule_template( - category: &Category, - rule_name_upper_camel: &str, - rule_name_lower_camel: &str, -) -> String { - let macro_name = match category { - Category::Lint => "declare_lint_rule", - }; - format!( - r#"use pgt_analyse::{{ - context::RuleContext, {macro_name}, Rule, RuleDiagnostic -}}; -use pgt_console::markup; - -{macro_name}! {{ - /// Succinct description of the rule. - /// - /// Put context and details about the rule. 
- /// - /// Try to stay consistent with the descriptions of implemented rules. - /// - /// ## Examples - /// - /// ### Invalid - /// - /// ```sql,expect_diagnostic - /// select 1; - /// ``` - /// - /// ### Valid - /// - /// ``sql` - /// select 2; - /// ``` - /// - pub {rule_name_upper_camel} {{ - version: "next", - name: "{rule_name_lower_camel}", - recommended: false, - }} -}} - -impl Rule for {rule_name_upper_camel} {{ - type Options = (); - - fn run(ctx: &RuleContext) -> Vec {{ - Vec::new() - }} -}} -"# - ) -} - -fn gen_sql(category_name: &str) -> String { - format!("-- expect_only_{category_name}\n-- select 1;") -} - -pub fn generate_new_analyser_rule(category: Category, rule_name: &str, group: &str) { - let rule_name_camel = Case::Camel.convert(rule_name); - let crate_folder = project_root().join("crates/pgt_analyser"); - let rule_folder = match &category { - Category::Lint => crate_folder.join(format!("src/lint/{group}")), - }; - if !rule_folder.exists() { - std::fs::create_dir(rule_folder.clone()).expect("To create the rule folder"); - } - - // Generate rule code - let code = generate_rule_template( - &category, - Case::Pascal.convert(rule_name).as_str(), - rule_name_camel.as_str(), - ); - let file_name = format!( - "{}/{}.rs", - rule_folder.display(), - Case::Snake.convert(rule_name) - ); - std::fs::write(file_name.clone(), code).unwrap_or_else(|_| panic!("To write {}", &file_name)); - - let categories_path = "crates/pgt_diagnostics_categories/src/categories.rs"; - let mut categories = std::fs::read_to_string(categories_path).unwrap(); - - if !categories.contains(&rule_name_camel) { - let kebab_case_rule = Case::Kebab.convert(&rule_name_camel); - // We sort rules to reduce conflicts between contributions made in parallel. - let rule_line = match category { - Category::Lint => format!( - r#" "lint/{group}/{rule_name_camel}": "https://pgtools.dev/linter/rules/{kebab_case_rule}","# - ), - }; - let lint_start = match category { - Category::Lint => "define_categories! 
{\n", - }; - let lint_end = match category { - Category::Lint => "\n // end lint rules\n", - }; - debug_assert!(categories.contains(lint_start), "{}", lint_start); - debug_assert!(categories.contains(lint_end), "{}", lint_end); - let lint_start_index = categories.find(lint_start).unwrap() + lint_start.len(); - let lint_end_index = categories.find(lint_end).unwrap(); - let lint_rule_text = &categories[lint_start_index..lint_end_index]; - let mut lint_rules: Vec<_> = lint_rule_text.lines().chain(Some(&rule_line[..])).collect(); - lint_rules.sort_unstable(); - let new_lint_rule_text = lint_rules.join("\n"); - categories.replace_range(lint_start_index..lint_end_index, &new_lint_rule_text); - std::fs::write(categories_path, categories).unwrap(); - } - - let test_group_folder = match &category { - Category::Lint => crate_folder.join(format!("tests/specs/{group}")), - }; - if !test_group_folder.exists() { - std::fs::create_dir(test_group_folder.clone()).expect("To create the test group folder"); - } - - let test_folder = match &category { - Category::Lint => crate_folder.join(format!("tests/specs/{group}/{rule_name_camel}")), - }; - if !test_folder.exists() { - std::fs::create_dir(test_folder.clone()).expect("To create the test rule folder"); - } - - let test_file_name = format!("{}/basic.sql", test_folder.display()); - std::fs::write( - test_file_name.clone(), - gen_sql(format!("lint/{group}/{rule_name_camel}").as_str()), - ) - .unwrap_or_else(|_| panic!("To write {}", &test_file_name)); -} diff --git a/xtask/codegen/src/lib.rs b/xtask/codegen/src/lib.rs deleted file mode 100644 index 61ae5e4f..00000000 --- a/xtask/codegen/src/lib.rs +++ /dev/null @@ -1,88 +0,0 @@ -//! Codegen tools. Derived from Biome's codegen - -mod generate_analyser; -mod generate_bindings; -mod generate_configuration; -mod generate_crate; -mod generate_new_analyser_rule; - -pub use self::generate_analyser::generate_analyser; -pub use self::generate_bindings::generate_bindings; -pub use self::generate_configuration::generate_rules_configuration; -pub use self::generate_crate::generate_crate; -pub use self::generate_new_analyser_rule::generate_new_analyser_rule; -use bpaf::Bpaf; -use generate_new_analyser_rule::Category; -use std::path::Path; -use xtask::{glue::fs2, Mode, Result}; - -pub enum UpdateResult { - NotUpdated, - Updated, -} - -/// A helper to update file on disk if it has changed. -/// With verify = false, the contents of the file will be updated to the passed in contents. -/// With verify = true, an Err will be returned if the contents of the file do not match the passed-in contents. 
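The helper itself follows; as a standalone sketch of the same check-then-write contract using only `std` (the scratch path is invented for the demo; the real helper additionally logs which file it touched and goes through `xtask`'s `fs2` wrappers):

```rust
use std::{fs, io, path::Path};

#[derive(PartialEq)]
enum Mode {
    Overwrite,
    Verify,
}

/// Check-then-write: skip identical contents, fail in Verify mode on drift.
fn update(path: &Path, contents: &str, mode: Mode) -> io::Result<bool> {
    if fs::read_to_string(path).map(|old| old == contents).unwrap_or(false) {
        return Ok(false); // already up to date
    }
    if mode == Mode::Verify {
        // In CI this is the "generated files are stale" failure.
        return Err(io::Error::other(format!(
            "`{}` is not up-to-date",
            path.display()
        )));
    }
    if let Some(parent) = path.parent() {
        fs::create_dir_all(parent)?; // idempotent
    }
    fs::write(path, contents)?;
    Ok(true)
}

fn main() -> io::Result<()> {
    let path = Path::new("target/codegen-demo.txt"); // scratch path for the demo
    update(path, "generated", Mode::Overwrite)?; // writes the file
    update(path, "generated", Mode::Verify)?; // passes: contents match
    Ok(())
}
```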
-pub fn update(path: &Path, contents: &str, mode: &Mode) -> Result { - if fs2::read_to_string(path).is_ok_and(|old_contents| old_contents == contents) { - return Ok(UpdateResult::NotUpdated); - } - - if *mode == Mode::Verify { - anyhow::bail!("`{}` is not up-to-date", path.display()); - } - - eprintln!("updating {}", path.display()); - if let Some(parent) = path.parent() { - if !parent.exists() { - fs2::create_dir_all(parent)?; - } - } - fs2::write(path, contents)?; - Ok(UpdateResult::Updated) -} - -pub fn to_capitalized(s: &str) -> String { - let mut c = s.chars(); - match c.next() { - None => String::new(), - Some(f) => f.to_uppercase().collect::() + c.as_str(), - } -} - -#[derive(Debug, Clone, Bpaf)] -#[bpaf(options)] -pub enum TaskCommand { - /// Generate TypeScript definitions for the JavaScript bindings to the Workspace API - #[bpaf(command)] - Bindings, - /// Generate factory functions for the analyser and the configuration of the analysers - #[bpaf(command)] - Analyser, - /// Generate the part of the configuration that depends on some metadata - #[bpaf(command)] - Configuration, - /// Creates a new crate - #[bpaf(command, long("new-crate"))] - NewCrate { - /// The name of the crate - #[bpaf(long("name"), argument("STRING"))] - name: String, - }, - /// Creates a new lint rule - #[bpaf(command, long("new-lintrule"))] - NewRule { - /// Name of the rule - #[bpaf(long("name"))] - name: String, - - /// Category of the rule - #[bpaf(long("category"))] - category: Category, - - /// Group of the rule - #[bpaf(long("group"))] - group: String, - }, -} diff --git a/xtask/codegen/src/main.rs b/xtask/codegen/src/main.rs deleted file mode 100644 index 8e0e6cd8..00000000 --- a/xtask/codegen/src/main.rs +++ /dev/null @@ -1,36 +0,0 @@ -use xtask::Mode::Overwrite; -use xtask::{project_root, pushd, Result}; - -use xtask_codegen::{ - generate_analyser, generate_bindings, generate_crate, generate_new_analyser_rule, - generate_rules_configuration, task_command, TaskCommand, -}; - -fn main() -> Result<()> { - let _d = pushd(project_root()); - let result = task_command().fallback_to_usage().run(); - - match result { - TaskCommand::Analyser => { - generate_analyser()?; - } - TaskCommand::NewCrate { name } => { - generate_crate(name)?; - } - TaskCommand::NewRule { - name, - category, - group, - } => { - generate_new_analyser_rule(category, &name, &group); - } - TaskCommand::Configuration => { - generate_rules_configuration(Overwrite)?; - } - TaskCommand::Bindings => { - generate_bindings(Overwrite)?; - } - } - - Ok(()) -} diff --git a/xtask/rules_check/Cargo.toml b/xtask/rules_check/Cargo.toml deleted file mode 100644 index 3f0198d1..00000000 --- a/xtask/rules_check/Cargo.toml +++ /dev/null @@ -1,17 +0,0 @@ -[package] -description = "Internal script to make sure that the metadata or the rules are correct" -edition = "2021" -name = "rules_check" -publish = false -version = "0.0.0" - -[dependencies] -anyhow = { workspace = true } -pgt_analyse = { workspace = true } -pgt_analyser = { workspace = true } -pgt_console = { workspace = true } -pgt_diagnostics = { workspace = true } -pgt_query_ext = { workspace = true } -pgt_statement_splitter = { workspace = true } -pgt_workspace = { workspace = true } -pulldown-cmark = "0.12.2" diff --git a/xtask/rules_check/src/lib.rs b/xtask/rules_check/src/lib.rs deleted file mode 100644 index 68a6d650..00000000 --- a/xtask/rules_check/src/lib.rs +++ /dev/null @@ -1,251 +0,0 @@ -use std::collections::BTreeMap; -use std::str::FromStr; -use std::{fmt::Write, slice}; - -use 
anyhow::bail; -use pgt_analyse::{ - AnalyserOptions, AnalysisFilter, GroupCategory, RegistryVisitor, Rule, RuleCategory, - RuleFilter, RuleGroup, RuleMetadata, -}; -use pgt_analyser::{Analyser, AnalyserConfig}; -use pgt_console::{markup, Console}; -use pgt_diagnostics::{Diagnostic, DiagnosticExt, PrintDiagnostic}; -use pgt_query_ext::diagnostics::SyntaxDiagnostic; -use pgt_workspace::settings::Settings; -use pulldown_cmark::{CodeBlockKind, Event, Parser, Tag, TagEnd}; - -pub fn check_rules() -> anyhow::Result<()> { - #[derive(Default)] - struct LintRulesVisitor { - groups: BTreeMap<&'static str, BTreeMap<&'static str, RuleMetadata>>, - } - - impl LintRulesVisitor { - fn push_rule(&mut self) - where - R: Rule + 'static, - { - self.groups - .entry(::NAME) - .or_default() - .insert(R::METADATA.name, R::METADATA); - } - } - - impl RegistryVisitor for LintRulesVisitor { - fn record_category(&mut self) { - if matches!(C::CATEGORY, RuleCategory::Lint) { - C::record_groups(self); - } - } - - fn record_rule(&mut self) - where - R: Rule + 'static, - { - self.push_rule::() - } - } - - let mut visitor = LintRulesVisitor::default(); - pgt_analyser::visit_registry(&mut visitor); - - let LintRulesVisitor { groups } = visitor; - - for (group, rules) in groups { - for (_, meta) in rules { - parse_documentation(group, meta.name, meta.docs)?; - } - } - - Ok(()) -} - -/// Parse and analyze the provided code block, and asserts that it emits -/// exactly zero or one diagnostic depending on the value of `expect_diagnostic`. -/// That diagnostic is then emitted as text into the `content` buffer -fn assert_lint( - group: &'static str, - rule: &'static str, - test: &CodeBlockTest, - code: &str, -) -> anyhow::Result<()> { - let file_path = format!("code-block.{}", test.tag); - let mut diagnostic_count = 0; - let mut all_diagnostics = vec![]; - let mut has_error = false; - let mut write_diagnostic = |code: &str, diag: pgt_diagnostics::Error| { - all_diagnostics.push(diag); - // Fail the test if the analysis returns more diagnostics than expected - if test.expect_diagnostic { - // Print all diagnostics to help the user - if all_diagnostics.len() > 1 { - let mut console = pgt_console::EnvConsole::default(); - for diag in all_diagnostics.iter() { - console.println( - pgt_console::LogLevel::Error, - markup! { - {PrintDiagnostic::verbose(diag)} - }, - ); - } - has_error = true; - bail!("Analysis of '{group}/{rule}' on the following code block returned multiple diagnostics.\n\n{code}"); - } - } else { - // Print all diagnostics to help the user - let mut console = pgt_console::EnvConsole::default(); - for diag in all_diagnostics.iter() { - console.println( - pgt_console::LogLevel::Error, - markup! 
{ - {PrintDiagnostic::verbose(diag)} - }, - ); - } - has_error = true; - bail!("Analysis of '{group}/{rule}' on the following code block returned an unexpected diagnostic.\n\n{code}"); - } - diagnostic_count += 1; - Ok(()) - }; - - if test.ignore { - return Ok(()); - } - - let rule_filter = RuleFilter::Rule(group, rule); - let filter = AnalysisFilter { - enabled_rules: Some(slice::from_ref(&rule_filter)), - ..AnalysisFilter::default() - }; - let settings = Settings::default(); - let options = AnalyserOptions::default(); - let analyser = Analyser::new(AnalyserConfig { - options: &options, - filter, - }); - - // split and parse each statement - match pgt_statement_splitter::split(code) { - Ok(stmts) => { - for stmt in stmts.ranges { - match pgt_query_ext::parse(&code[stmt]) { - Ok(ast) => { - for rule_diag in analyser.run(pgt_analyser::AnalyserContext { root: &ast }) - { - let diag = pgt_diagnostics::serde::Diagnostic::new(rule_diag); - - let category = diag.category().expect("linter diagnostic has no code"); - let severity = settings.get_severity_from_rule_code(category).expect( - "If you see this error, it means you need to run cargo codegen-configuration", - ); - - let error = diag - .with_severity(severity) - .with_file_path(&file_path) - .with_file_source_code(code); - - write_diagnostic(code, error)?; - } - } - Err(e) => { - let error = SyntaxDiagnostic::from(e) - .with_file_path(&file_path) - .with_file_source_code(code); - write_diagnostic(code, error)?; - } - }; - } - } - Err(errs) => { - // Print all diagnostics to help the user - let mut console = pgt_console::EnvConsole::default(); - for err in errs { - console.println( - pgt_console::LogLevel::Error, - markup! { - {PrintDiagnostic::verbose(&err)} - }, - ); - } - bail!("Analysis of '{group}/{rule}' on the following code block returned a scan diagnostic.\n\n{code}"); - } - }; - - Ok(()) -} - -struct CodeBlockTest { - tag: String, - expect_diagnostic: bool, - ignore: bool, -} - -impl FromStr for CodeBlockTest { - type Err = anyhow::Error; - - fn from_str(input: &str) -> anyhow::Result<Self> { - // This is based on the parsing logic for code block languages in `rustdoc`: - // https://github.com/rust-lang/rust/blob/6ac8adad1f7d733b5b97d1df4e7f96e73a46db42/src/librustdoc/html/markdown.rs#L873 - let tokens = input - .split([',', ' ', '\t']) - .map(str::trim) - .filter(|token| !token.is_empty()); - - let mut test = CodeBlockTest { - tag: String::new(), - expect_diagnostic: false, - ignore: false, - }; - - for token in tokens { - match token { - // Other attributes - "expect_diagnostic" => test.expect_diagnostic = true, - "ignore" => test.ignore = true, - // Regard as language tags, last one wins - _ => test.tag = token.to_string(), - } - } - - Ok(test) - } -} - -/// Parse the documentation fragment for a lint rule (in markdown) and lint the code blocks.
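The function that follows walks the Markdown event stream. This standalone sketch shows just the block-collection technique with the same `pulldown_cmark` events, minus the analysis step; the doc string in `main` is invented for the example:

```rust
use pulldown_cmark::{CodeBlockKind, Event, Parser, Tag, TagEnd};

/// Collect (tag, contents) pairs for every fenced code block in `docs`.
fn fenced_blocks(docs: &str) -> Vec<(String, String)> {
    let mut blocks = Vec::new();
    let mut current: Option<(String, String)> = None;
    for event in Parser::new(docs) {
        match event {
            // A fence opens: remember its info string ("sql,expect_diagnostic").
            Event::Start(Tag::CodeBlock(CodeBlockKind::Fenced(meta))) => {
                current = Some((meta.to_string(), String::new()));
            }
            // The fence closes: hand the accumulated block to the caller.
            Event::End(TagEnd::CodeBlock) => {
                if let Some(block) = current.take() {
                    blocks.push(block);
                }
            }
            // Text inside an open fence is part of the block body.
            Event::Text(text) => {
                if let Some((_, body)) = &mut current {
                    body.push_str(&text);
                }
            }
            _ => {}
        }
    }
    blocks
}

fn main() {
    let docs = "### Invalid\n\n```sql,expect_diagnostic\nselect 1;\n```\n";
    for (tag, body) in fenced_blocks(docs) {
        println!("{tag}: {body:?}"); // sql,expect_diagnostic: "select 1;\n"
    }
}
```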
-fn parse_documentation( - group: &'static str, - rule: &'static str, - docs: &'static str, -) -> anyhow::Result<()> { - let parser = Parser::new(docs); - - // Tracks the content of the current code block if it's using a - // language supported for analysis - let mut language = None; - for event in parser { - match event { - // CodeBlock-specific handling - Event::Start(Tag::CodeBlock(CodeBlockKind::Fenced(meta))) => { - // Track the content of code blocks to pass them through the analyser - let test = CodeBlockTest::from_str(meta.as_ref())?; - language = Some((test, String::new())); - } - Event::End(TagEnd::CodeBlock) => { - if let Some((test, block)) = language.take() { - assert_lint(group, rule, &test, &block)?; - } - } - Event::Text(text) => { - if let Some((_, block)) = &mut language { - write!(block, "{text}")?; - } - } - // We don't care other events - _ => {} - } - } - - Ok(()) -} diff --git a/xtask/rules_check/src/main.rs b/xtask/rules_check/src/main.rs deleted file mode 100644 index 1de34236..00000000 --- a/xtask/rules_check/src/main.rs +++ /dev/null @@ -1,5 +0,0 @@ -use rules_check::check_rules; - -fn main() -> anyhow::Result<()> { - check_rules() -} diff --git a/xtask/src/flags.rs b/xtask/src/flags.rs deleted file mode 100644 index 4ed3da40..00000000 --- a/xtask/src/flags.rs +++ /dev/null @@ -1,77 +0,0 @@ -#![allow(unreachable_pub)] - -use crate::install::ClientOpt; - -xflags::xflags! { - src "./src/flags.rs" - - /// Run custom build command. - cmd xtask { - - /// Install postgres_lsp server or editor plugin. - cmd install { - /// Install only VS Code plugin. - optional --client - /// One of 'code', 'code-exploration', 'code-insiders', 'codium', or 'code-oss'. - optional --code-bin name: String - - /// Install only the language server. - optional --server - } - } -} - -// generated start -// The following code is generated by `xflags` macro. -// Run `env UPDATE_XFLAGS=1 cargo build` to regenerate. -#[derive(Debug)] -pub struct Xtask { - pub subcommand: XtaskCmd, -} - -#[derive(Debug)] -pub enum XtaskCmd { - Install(Install), -} - -#[derive(Debug)] -pub struct Install { - pub client: bool, - pub code_bin: Option, - pub server: bool, -} - -impl Xtask { - #[allow(dead_code)] - pub fn from_env_or_exit() -> Self { - Self::from_env_or_exit_() - } - - #[allow(dead_code)] - pub fn from_env() -> xflags::Result { - Self::from_env_() - } - - #[allow(dead_code)] - pub fn from_vec(args: Vec) -> xflags::Result { - Self::from_vec_(args) - } -} -// generated end - -impl Install { - pub(crate) fn server(&self) -> Option<()> { - if self.client && !self.server { - return None; - } - Some(()) - } - pub(crate) fn client(&self) -> Option { - if !self.client && self.server { - return None; - } - Some(ClientOpt { - code_bin: self.code_bin.clone(), - }) - } -} diff --git a/xtask/src/glue.rs b/xtask/src/glue.rs deleted file mode 100644 index 3a4f8485..00000000 --- a/xtask/src/glue.rs +++ /dev/null @@ -1,216 +0,0 @@ -//! 
A shell but bad, some cross platform glue code - -use std::{ - cell::RefCell, - env, - ffi::OsString, - io::Write, - path::{Path, PathBuf}, - process::{Command, Stdio}, -}; - -use anyhow::{bail, Context, Result}; - -pub mod fs2 { - use std::{fs, path::Path}; - - use anyhow::{Context, Result}; - - pub fn read_dir>(path: P) -> Result { - let path = path.as_ref(); - fs::read_dir(path).with_context(|| format!("Failed to read {}", path.display())) - } - - pub fn read_to_string>(path: P) -> Result { - let path = path.as_ref(); - fs::read_to_string(path).with_context(|| format!("Failed to read {}", path.display())) - } - - pub fn write, C: AsRef<[u8]>>(path: P, contents: C) -> Result<()> { - let path = path.as_ref(); - fs::write(path, contents).with_context(|| format!("Failed to write {}", path.display())) - } - - pub fn copy, Q: AsRef>(from: P, to: Q) -> Result { - let from = from.as_ref(); - let to = to.as_ref(); - fs::copy(from, to) - .with_context(|| format!("Failed to copy {} to {}", from.display(), to.display())) - } - - pub fn remove_file>(path: P) -> Result<()> { - let path = path.as_ref(); - fs::remove_file(path).with_context(|| format!("Failed to remove file {}", path.display())) - } - - pub fn remove_dir_all>(path: P) -> Result<()> { - let path = path.as_ref(); - fs::remove_dir_all(path).with_context(|| format!("Failed to remove dir {}", path.display())) - } - - pub fn create_dir_all>(path: P) -> Result<()> { - let path = path.as_ref(); - fs::create_dir_all(path).with_context(|| format!("Failed to create dir {}", path.display())) - } -} - -#[macro_export] -macro_rules! run { - ($($expr:expr_2021),*) => { - run!($($expr),*; echo = true) - }; - ($($expr:expr_2021),* ; echo = $echo:expr_2021) => { - $crate::glue::run_process(format!($($expr),*), $echo, None) - }; - ($($expr:expr_2021),* ; <$stdin:expr_2021) => { - $crate::glue::run_process(format!($($expr),*), false, Some($stdin)) - }; -} -pub use crate::run; - -pub struct Pushd { - _p: (), -} - -pub fn pushd(path: impl Into) -> Pushd { - Env::with(|env| env.pushd(path.into())); - Pushd { _p: () } -} - -impl Drop for Pushd { - fn drop(&mut self) { - Env::with(|env| env.popd()) - } -} - -pub struct Pushenv { - _p: (), -} - -pub fn pushenv(var: &str, value: &str) -> Pushenv { - Env::with(|env| env.pushenv(var.into(), value.into())); - Pushenv { _p: () } -} - -impl Drop for Pushenv { - fn drop(&mut self) { - Env::with(|env| env.popenv()) - } -} - -pub fn rm_rf(path: impl AsRef) -> Result<()> { - let path = path.as_ref(); - if !path.exists() { - return Ok(()); - } - if path.is_file() { - fs2::remove_file(path) - } else { - fs2::remove_dir_all(path) - } -} - -#[doc(hidden)] -pub fn run_process(cmd: String, echo: bool, stdin: Option<&[u8]>) -> Result { - run_process_inner(&cmd, echo, stdin).with_context(|| format!("process `{cmd}` failed")) -} - -pub fn date_iso() -> Result { - run!("date --iso --utc") -} - -fn run_process_inner(cmd: &str, echo: bool, stdin: Option<&[u8]>) -> Result { - let mut args = shelx(cmd); - let binary = args.remove(0); - let current_dir = Env::with(|it| it.cwd().to_path_buf()); - - if echo { - println!("> {cmd}") - } - - let mut command = Command::new(binary); - command - .args(args) - .current_dir(current_dir) - .stderr(Stdio::inherit()); - let output = match stdin { - None => command.stdin(Stdio::null()).output(), - Some(stdin) => { - command.stdin(Stdio::piped()).stdout(Stdio::piped()); - let mut process = command.spawn()?; - process.stdin.take().unwrap().write_all(stdin)?; - process.wait_with_output() - } - }?; - 
-
-    if echo {
-        print!("{stdout}")
-    }
-
-    if !output.status.success() {
-        bail!("{}", output.status)
-    }
-
-    Ok(stdout.trim().to_string())
-}
-
-fn shelx(cmd: &str) -> Vec<String> {
-    let mut res = Vec::new();
-    for (string_piece, in_quotes) in cmd.split('\'').zip([false, true].iter().copied().cycle()) {
-        if in_quotes {
-            res.push(string_piece.to_string())
-        } else if !string_piece.is_empty() {
-            res.extend(
-                string_piece
-                    .split_ascii_whitespace()
-                    .map(|it| it.to_string()),
-            )
-        }
-    }
-    res
-}
-
-struct Env {
-    pushd_stack: Vec<PathBuf>,
-    pushenv_stack: Vec<(OsString, Option<OsString>)>,
-}
-
-impl Env {
-    fn with<F: FnOnce(&mut Env) -> T, T>(f: F) -> T {
-        thread_local! {
-            static ENV: RefCell<Env> = RefCell::new(Env {
-                pushd_stack: vec![env::current_dir().unwrap()],
-                pushenv_stack: vec![],
-            });
-        }
-        ENV.with(|it| f(&mut it.borrow_mut()))
-    }
-
-    fn pushd(&mut self, dir: PathBuf) {
-        let dir = self.cwd().join(dir);
-        self.pushd_stack.push(dir);
-        env::set_current_dir(self.cwd()).unwrap();
-    }
-    fn popd(&mut self) {
-        self.pushd_stack.pop().unwrap();
-        env::set_current_dir(self.cwd()).unwrap();
-    }
-    fn pushenv(&mut self, var: OsString, value: OsString) {
-        self.pushenv_stack.push((var.clone(), env::var_os(&var)));
-        // TODO: Audit that the environment access only happens in single-threaded code.
-        unsafe { env::set_var(var, value) }
-    }
-    fn popenv(&mut self) {
-        let (var, value) = self.pushenv_stack.pop().unwrap();
-        match value {
-            // TODO: Audit that the environment access only happens in single-threaded code.
-            None => unsafe { env::remove_var(var) },
-            // TODO: Audit that the environment access only happens in single-threaded code.
-            Some(value) => unsafe { env::set_var(var, value) },
-        }
-    }
-    fn cwd(&self) -> &Path {
-        self.pushd_stack.last().unwrap()
-    }
-}
diff --git a/xtask/src/install.rs b/xtask/src/install.rs
deleted file mode 100644
index faaf5c56..00000000
--- a/xtask/src/install.rs
+++ /dev/null
@@ -1,143 +0,0 @@
-//! Installs postgres_lsp language server and/or editor plugin.
-
-use std::{env, path::PathBuf, str};
-
-use anyhow::{bail, format_err, Context};
-use xshell::{cmd, Shell};
-
-use crate::flags;
-
-impl flags::Install {
-    pub(crate) fn run(self, sh: &Shell) -> anyhow::Result<()> {
-        if cfg!(target_os = "macos") {
-            fix_path_for_mac(sh).context("Fix path for mac")?;
-        }
-        if self.server().is_some() {
-            install_server(sh).context("install server")?;
-        }
-        if let Some(client) = self.client() {
-            install_client(sh, client).context("install client")?;
-        }
-        Ok(())
-    }
-}
-
-#[derive(Clone)]
-pub(crate) struct ClientOpt {
-    pub(crate) code_bin: Option<String>,
-}
-
-const VS_CODES: &[&str] = &[
-    "code",
-    "code-exploration",
-    "code-insiders",
-    "codium",
-    "code-oss",
-];
-
-fn fix_path_for_mac(sh: &Shell) -> anyhow::Result<()> {
-    let mut vscode_path: Vec<PathBuf> = {
-        const COMMON_APP_PATH: &str =
-            r"/Applications/Visual Studio Code.app/Contents/Resources/app/bin";
-        const ROOT_DIR: &str = "";
-        let home_dir = sh.var("HOME").map_err(|err| {
-            format_err!("Failed getting HOME from environment with error: {}.", err)
-        })?;
-
-        [ROOT_DIR, &home_dir]
-            .into_iter()
-            .map(|dir| dir.to_string() + COMMON_APP_PATH)
-            .map(PathBuf::from)
-            .filter(|path| path.exists())
-            .collect()
-    };
-
-    if !vscode_path.is_empty() {
-        let vars = sh
-            .var_os("PATH")
-            .context("Could not get PATH variable from env.")?;
-
-        let mut paths = env::split_paths(&vars).collect::<Vec<_>>();
-        paths.append(&mut vscode_path);
-        let new_paths = env::join_paths(paths).context("build env PATH")?;
-        sh.set_var("PATH", new_paths);
-    }
-
-    Ok(())
-}
-
-fn install_client(sh: &Shell, client_opt: ClientOpt) -> anyhow::Result<()> {
-    let _dir = sh.push_dir("./editors/code");
-
-    // Package extension.
-    if cfg!(unix) {
-        cmd!(sh, "npm --version")
-            .run()
-            .context("`npm` is required to build the VS Code plugin")?;
-        cmd!(sh, "npm ci").run()?;
-
-        cmd!(sh, "npm run package --scripts-prepend-node-path").run()?;
-    } else {
-        cmd!(sh, "cmd.exe /c npm --version")
-            .run()
-            .context("`npm` is required to build the VS Code plugin")?;
-        cmd!(sh, "cmd.exe /c npm ci").run()?;
-
-        cmd!(sh, "cmd.exe /c npm run package").run()?;
-    };
-
-    // Find the appropriate VS Code binary.
-    let lifetime_extender;
-    let candidates: &[&str] = match client_opt.code_bin.as_deref() {
-        Some(it) => {
-            lifetime_extender = [it];
-            &lifetime_extender[..]
-        }
-        None => VS_CODES,
-    };
-    let code = candidates
-        .iter()
-        .copied()
-        .find(|&bin| {
-            if cfg!(unix) {
-                cmd!(sh, "{bin} --version").read().is_ok()
-            } else {
-                cmd!(sh, "cmd.exe /c {bin}.cmd --version").read().is_ok()
-            }
-        })
-        .ok_or_else(|| {
-            format_err!(
-                "Can't execute `{} --version`. Perhaps it is not in $PATH?",
-                candidates[0]
-            )
-        })?;
-
-    // Install & verify.
-    let installed_extensions = if cfg!(unix) {
-        cmd!(sh, "{code} --install-extension postgres_lsp.vsix --force").run()?;
-        cmd!(sh, "{code} --list-extensions").read()?
-    } else {
-        cmd!(
-            sh,
-            "cmd.exe /c {code}.cmd --install-extension postgres_lsp.vsix --force"
-        )
-        .run()?;
-        cmd!(sh, "cmd.exe /c {code}.cmd --list-extensions").read()?
-    };
-
-    if !installed_extensions.contains("postgres-lsp") {
-        bail!(
-            "Could not install the Visual Studio Code extension. \
-            Please make sure you have at least NodeJS 12.x together with the latest version of VS Code installed and try again. \
-            Note that installing via xtask install does not work for VS Code Remote, instead you’ll need to install the .vsix manually."
-        );
-    }
-
-    Ok(())
-}
-
-fn install_server(sh: &Shell) -> anyhow::Result<()> {
-    let cmd = cmd!(sh, "cargo install --path crates/pgt_cli --locked --force");
-    cmd.run()?;
-    Ok(())
-}
diff --git a/xtask/src/lib.rs b/xtask/src/lib.rs
deleted file mode 100644
index 89f4cce3..00000000
--- a/xtask/src/lib.rs
+++ /dev/null
@@ -1,77 +0,0 @@
-//! Codegen tools mostly used to generate ast and syntax definitions. Adapted from rust analyzer's codegen
-
-pub mod glue;
-
-use std::{
-    env,
-    fmt::Display,
-    path::{Path, PathBuf},
-};
-
-pub use crate::glue::{pushd, pushenv};
-
-pub use anyhow::{anyhow, bail, ensure, Context as _, Error, Result};
-
-#[derive(Debug, PartialEq, Eq, Clone, Copy)]
-pub enum Mode {
-    Overwrite,
-    Verify,
-}
-
-pub fn project_root() -> PathBuf {
-    Path::new(
-        &env::var("CARGO_MANIFEST_DIR").unwrap_or_else(|_| env!("CARGO_MANIFEST_DIR").to_owned()),
-    )
-    .ancestors()
-    .nth(2)
-    .unwrap()
-    .to_path_buf()
-}
-
-pub fn run_rustfmt(mode: Mode) -> Result<()> {
-    let _dir = pushd(project_root());
-    let _e = pushenv("RUSTUP_TOOLCHAIN", "nightly");
-    ensure_rustfmt()?;
-    match mode {
-        Mode::Overwrite => run!("cargo fmt"),
-        Mode::Verify => run!("cargo fmt -- --check"),
-    }?;
-    Ok(())
-}
-
-pub fn reformat(text: impl Display) -> Result<String> {
-    reformat_without_preamble(text).map(prepend_generated_preamble)
-}
-
-pub fn reformat_with_command(text: impl Display, command: impl Display) -> Result<String> {
-    reformat_without_preamble(text).map(|formatted| {
-        format!("//! This is a generated file. Don't modify it by hand! Run '{command}' to re-generate the file.\n\n{formatted}")
-    })
-}
-
-pub const PREAMBLE: &str = "Generated file, do not edit by hand, see `xtask/codegen`";
-pub fn prepend_generated_preamble(content: impl Display) -> String {
-    format!("//! {PREAMBLE}\n\n{content}")
-}
-
-pub fn reformat_without_preamble(text: impl Display) -> Result<String> {
-    let _e = pushenv("RUSTUP_TOOLCHAIN", "nightly");
-    ensure_rustfmt()?;
-    let output = run!(
-        "rustfmt --config newline_style=Unix";
-        <text.to_string().as_bytes()
-    )?;
-    Ok(output)
-}
-
-fn ensure_rustfmt() -> Result<()> {
-    let out = run!("rustfmt --version")?;
-    if !out.contains("nightly") {
-        bail!(
-            "Failed to run rustfmt from toolchain 'nightly'. \
-             Please run `rustup component add rustfmt --toolchain nightly` to install it.",
-        )
-    }
-    Ok(())
-}
diff --git a/xtask/src/main.rs b/xtask/src/main.rs
deleted file mode 100644
index 28247084..00000000
--- a/xtask/src/main.rs
+++ /dev/null
@@ -1,46 +0,0 @@
-//! See <https://github.com/matklad/cargo-xtask/>.
-//!
-//! This binary defines various auxiliary build commands, which are not
-//! expressible with just `cargo`. Notably, it provides tests via `cargo test -p xtask`
-//! for code generation and `cargo xtask install` for installation of
-//! postgres_lsp server and client.
-//!
-//! This binary is integrated into the `cargo` command line by using an alias in
-//! `.cargo/config`.
-
-#![warn(
-    rust_2018_idioms,
-    unused_lifetimes,
-    semicolon_in_expressions_from_macros
-)]
-
-mod flags;
-
-mod install;
-
-use std::{
-    env,
-    path::{Path, PathBuf},
-};
-use xshell::Shell;
-
-fn main() -> anyhow::Result<()> {
-    let flags = flags::Xtask::from_env_or_exit();
-
-    let sh = &Shell::new()?;
-    sh.change_dir(project_root());
-
-    match flags.subcommand {
-        flags::XtaskCmd::Install(cmd) => cmd.run(sh),
-    }
-}
-
-fn project_root() -> PathBuf {
-    Path::new(
-        &env::var("CARGO_MANIFEST_DIR").unwrap_or_else(|_| env!("CARGO_MANIFEST_DIR").to_owned()),
-    )
-    .ancestors()
-    .nth(1)
-    .unwrap()
-    .to_path_buf()
-}
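As a side note on the `--client`/`--server` flag pair deleted above: a minimal standalone sketch of its selection semantics, where `Install` is a simplified stand-in for the `xflags`-generated struct rather than the real type.

```rust
/// Simplified stand-in for the xflags-generated `Install` struct above.
#[derive(Debug, Default)]
struct Install {
    client: bool,
    server: bool,
}

impl Install {
    /// Install the server unless the user asked for the client only.
    fn wants_server(&self) -> bool {
        !(self.client && !self.server)
    }
    /// Install the client unless the user asked for the server only.
    fn wants_client(&self) -> bool {
        !(!self.client && self.server)
    }
}

fn main() {
    // `cargo xtask install` with no flags installs both components.
    let both = Install::default();
    assert!(both.wants_server() && both.wants_client());

    // `cargo xtask install --client` skips the server.
    let client_only = Install { client: true, server: false };
    assert!(!client_only.wants_server() && client_only.wants_client());
}
```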

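The `pgt_cli` service code that follows frames JSON-RPC messages with LSP-style headers. As a rough, self-contained illustration of the `Content-Length` parsing that `TransportHeader::from_str` performs below (the function name and error strings here are illustrative, not the crate's API):

```rust
/// Rough sketch of LSP-style header parsing, mirroring the
/// `TransportHeader::from_str` logic below. Illustrative only.
fn parse_content_length(line: &str) -> Result<usize, String> {
    let (name, value) = line
        .split_once(':')
        .ok_or_else(|| format!("could not find colon token in {line:?}"))?;
    if name != "Content-Length" {
        return Err(format!("unexpected header {name:?}"));
    }
    value
        .trim()
        .parse()
        .map_err(|e| format!("could not parse Content-Length value: {e}"))
}

fn main() {
    assert_eq!(parse_content_length("Content-Length: 120"), Ok(120));
    assert!(parse_content_length("Content-Type: text/plain").is_err());
}
```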
{
-    jsonrpc: Cow<'static, str>,
-    id: u64,
-    method: Cow<'static, str>,
-    params: P,
-}
-
-#[derive(Debug, Deserialize)]
-#[serde(deny_unknown_fields)]
-struct JsonRpcResponse {
-    #[allow(dead_code)]
-    jsonrpc: Cow<'static, str>,
-    id: u64,
-    result: Option<Box<RawValue>>,
-    error: Option<JsonRpcError>,
-}
-
-#[derive(Debug, Deserialize)]
-struct JsonRpcError {
-    #[allow(dead_code)]
-    code: i64,
-    message: String,
-    #[allow(dead_code)]
-    data: Option<Box<RawValue>>,
-}
-
-enum TransportHeader {
-    ContentLength(usize),
-    ContentType,
-    Unknown(String),
-}
-
-impl FromStr for TransportHeader {
-    type Err = Error;
-
-    fn from_str(line: &str) -> Result<Self, Self::Err> {
-        let colon = line
-            .find(':')
-            .with_context(|| format!("could not find colon token in {line:?}"))?;
-
-        let (name, value) = line.split_at(colon);
-        let value = value[1..].trim();
-
-        match name {
-            "Content-Length" => {
-                let value = value.parse().with_context(|| {
-                    format!("could not parse Content-Length header value {value:?}")
-                })?;
-
-                Ok(TransportHeader::ContentLength(value))
-            }
-            "Content-Type" => {
-                ensure!(
-                    value.starts_with("application/vscode-jsonrpc"),
-                    "invalid value for Content-Type expected \"application/vscode-jsonrpc\", got {value:?}"
-                );
-
-                Ok(TransportHeader::ContentType)
-            }
-            _ => Ok(TransportHeader::Unknown(name.into())),
-        }
-    }
-}
diff --git a/crates/pgt_cli/src/service/unix.rs b/crates/pgt_cli/src/service/unix.rs
deleted file mode 100644
index 2ff7adb0..00000000
--- a/crates/pgt_cli/src/service/unix.rs
+++ /dev/null
@@ -1,232 +0,0 @@
-use std::{
-    convert::Infallible,
-    env, fs,
-    io::{self, ErrorKind},
-    path::PathBuf,
-    time::Duration,
-};
-
-use pgt_lsp::{ServerConnection, ServerFactory};
-use tokio::{
-    io::Interest,
-    net::{
-        UnixListener, UnixStream,
-        unix::{OwnedReadHalf, OwnedWriteHalf},
-    },
-    process::{Child, Command},
-    time,
-};
-use tracing::{Instrument, debug, info};
-
-/// Returns the filesystem path of the global socket used to communicate with
-/// the server daemon
-fn get_socket_name() -> PathBuf {
-    pgt_fs::ensure_cache_dir().join(format!("pgt-socket-{}", pgt_configuration::VERSION))
-}
-
-#[allow(dead_code)]
-pub(crate) fn enumerate_pipes() -> io::Result<impl Iterator<Item = String>> {
-    fs::read_dir(pgt_fs::ensure_cache_dir()).map(|iter| {
-        iter.filter_map(|entry| {
-            let entry = entry.ok()?.path();
-            let file_name = entry.file_name()?;
-            let file_name = file_name.to_str()?;
-
-            let version = file_name.strip_prefix("pgt-socket")?;
-            if version.is_empty() {
-                Some(String::new())
-            } else {
-                Some(version.strip_prefix('-')?.to_string())
-            }
-        })
-    })
-}
-
-/// Try to connect to the global socket and wait for the connection to become ready
-async fn try_connect() -> io::Result<UnixStream> {
-    let socket_name = get_socket_name();
-    info!("Trying to connect to socket {}", socket_name.display());
-    let stream = UnixStream::connect(socket_name).await?;
-    stream
-        .ready(Interest::READABLE | Interest::WRITABLE)
-        .await?;
-    Ok(stream)
-}
-
-/// Spawn the daemon server process in the background
-fn spawn_daemon(
-    stop_on_disconnect: bool,
-    config_path: Option<PathBuf>,
-    log_path: Option<PathBuf>,
-    log_file_name_prefix: Option<String>,
-) -> io::Result<Child> {
-    let binary = env::current_exe()?;
-
-    let mut cmd = Command::new(binary);
-    debug!("command {:?}", &cmd);
-    cmd.arg("__run_server");
-
-    if stop_on_disconnect {
-        cmd.arg("--stop-on-disconnect");
-    }
-    if let Some(config_path) = config_path {
-        cmd.arg(format!("--config-path={}", config_path.display()));
-    }
-    if let Some(log_path) = log_path {
-        cmd.arg(format!("--log-path={}", log_path.display()));
-    }
-
-    if let Some(log_file_name_prefix) = log_file_name_prefix {
-        cmd.arg(format!("--log-prefix-name={}", log_file_name_prefix));
-    }
-
-    // Create a new session for the process and make it the leader; this
-    // ensures that the child process is fully detached from its parent and will
-    // continue running in the background even after the parent process exits
-    //
-    // SAFETY: This closure runs in the forked child process before it starts
-    // executing, this is a highly unsafe environment because the process isn't
-    // running yet so seemingly innocuous operations like allocating memory may
-    // hang indefinitely.
-    // The only thing we do here is issuing a syscall, which is safe to do in
-    // this state but still "unsafe" in Rust semantics because it's technically
-    // mutating the shared global state of the process
-    unsafe {
-        cmd.pre_exec(|| {
-            libc::setsid();
-            Ok(())
-        });
-    }
-
-    let child = cmd.spawn()?;
-    Ok(child)
-}
-
-/// Open a connection to the daemon server process, returning [None] if the
-/// server is not running
-pub(crate) async fn open_socket() -> io::Result<Option<(OwnedReadHalf, OwnedWriteHalf)>> {
-    match try_connect().await {
-        Ok(socket) => Ok(Some(socket.into_split())),
-        Err(err)
-            // The OS will return `ConnectionRefused` if the socket file exists
-            // but no server process is listening on it
-            if matches!(
-                err.kind(),
-                ErrorKind::NotFound | ErrorKind::ConnectionRefused
-            ) =>
-        {
-            Ok(None)
-        }
-        Err(err) => Err(err),
-    }
-}
-
-/// Ensure the server daemon is running and ready to receive connections
-///
-/// Returns false if the daemon process was already running or true if it had
-/// to be started
-pub(crate) async fn ensure_daemon(
-    stop_on_disconnect: bool,
-    config_path: Option<PathBuf>,
-    log_path: Option<PathBuf>,
-    log_file_name_prefix: Option<String>,
-) -> io::Result<bool> {
-    let mut current_child: Option<Child> = None;
-    let mut last_error = None;
-
-    // Try to initialize the connection a few times
-    for _ in 0..10 {
-        // Try to open a connection on the global socket
-        match try_connect().await {
-            // The connection is open and ready
-            Ok(_) => {
-                return Ok(current_child.is_some());
-            }
-
-            // There's no process listening on the global socket
-            Err(err)
-                if matches!(
-                    err.kind(),
-                    ErrorKind::NotFound | ErrorKind::ConnectionRefused
-                ) =>
-            {
-                last_error = Some(err);
-
-                match &mut current_child {
-                    Some(current_child) => {
-                        // If we have a handle to the daemon process, wait for a few
-                        // milliseconds for it to exit, or retry the connection
-                        tokio::select! {
-                            result = current_child.wait() => {
-                                let _status = result?;
-                                return Err(io::Error::new(
-                                    io::ErrorKind::ConnectionReset,
-                                    "the server process exited before the connection could be established",
-                                ));
-                            }
-                            _ = time::sleep(Duration::from_millis(50)) => {}
-                        }
-                    }
-                    _ => {
-                        // Spawn the daemon process and wait a few milliseconds for
-                        // it to become ready then retry the connection
-                        current_child = Some(spawn_daemon(
-                            stop_on_disconnect,
-                            config_path.clone(),
-                            log_path.clone(),
-                            log_file_name_prefix.clone(),
-                        )?);
-                        time::sleep(Duration::from_millis(50)).await;
-                    }
-                }
-            }
-
-            Err(err) => return Err(err),
-        }
-    }
-
-    // If the connection couldn't be opened after 10 tries fail with the last
-    // error message from the OS, or a generic error message otherwise
-    Err(last_error.unwrap_or_else(|| io::Error::other("could not connect to the daemon socket")))
-}
-
-/// Ensure the server daemon is running and ready to receive connections and
-/// print the global socket name in the standard output
-pub(crate) async fn print_socket() -> io::Result<()> {
-    ensure_daemon(true, None, None, None).await?;
-    println!("{}", get_socket_name().display());
-    Ok(())
-}
-
-/// Start listening on the global socket and accepting connections with the
-/// provided [ServerFactory]
-pub(crate) async fn run_daemon(
-    factory: ServerFactory,
-    config_path: Option<PathBuf>,
-) -> io::Result<Infallible> {
-    let path = get_socket_name();
-
-    info!("Trying to connect to socket {}", path.display());
-
-    // Try to remove the socket file if it already exists
-    if path.exists() {
-        info!("Remove socket folder {}", path.display());
-        fs::remove_file(&path)?;
-    }
-
-    let listener = UnixListener::bind(path)?;
-
-    loop {
-        let (stream, _) = listener.accept().await?;
-        let connection = factory.create(config_path.clone());
-        let span = tracing::trace_span!("run_server");
-        info!("Accepted connection");
-        tokio::spawn(run_server(connection, stream).instrument(span.or_current()));
-    }
-}
-
-/// Async task driving a single client connection
-async fn run_server(connection: ServerConnection, stream: UnixStream) {
-    let (read, write) = stream.into_split();
-    connection.accept(read, write).await;
-}
diff --git a/crates/pgt_cli/src/service/windows.rs b/crates/pgt_cli/src/service/windows.rs
deleted file mode 100644
index e6fc48f4..00000000
--- a/crates/pgt_cli/src/service/windows.rs
+++ /dev/null
@@ -1,314 +0,0 @@
-use std::{
-    convert::Infallible,
-    env,
-    fs::read_dir,
-    io::{self, ErrorKind},
-    mem::swap,
-    os::windows::process::CommandExt,
-    path::PathBuf,
-    pin::Pin,
-    process::Command,
-    sync::Arc,
-    task::{Context, Poll},
-    time::Duration,
-};
-
-use pgt_lsp::{ServerConnection, ServerFactory};
-use tokio::{
-    io::{AsyncRead, AsyncWrite, ReadBuf},
-    net::windows::named_pipe::{ClientOptions, NamedPipeClient, NamedPipeServer, ServerOptions},
-    time,
-};
-use tracing::Instrument;
-
-/// Returns the name of the global named pipe used to communicate with the
-/// server daemon
-fn get_pipe_name() -> String {
-    format!(r"\\.\pipe\pgt-service-{}", pgt_configuration::VERSION)
-}
-
-#[allow(dead_code)]
-pub(crate) fn enumerate_pipes() -> io::Result<impl Iterator<Item = String>> {
-    read_dir(r"\\.\pipe").map(|iter| {
-        iter.filter_map(|entry| {
-            let entry = entry.ok()?.path();
-            let file_name = entry.file_name()?;
-            let file_name = file_name.to_str()?;
-
-            let version = file_name.strip_prefix("pgt-service")?;
-            if version.is_empty() {
-                Some(String::new())
-            } else {
-                Some(version.strip_prefix('-')?.to_string())
-            }
-        })
-    })
-}
-
-/// Error code from the Win32 API
-const ERROR_PIPE_BUSY: i32 = 231;
-
-/// Try to connect to the global pipe and wait for the connection to become ready
-async fn try_connect() -> io::Result<NamedPipeClient> {
-    loop {
-        match ClientOptions::new().open(get_pipe_name()) {
-            Ok(client) => return Ok(client),
-            // If the connection failed with ERROR_PIPE_BUSY, wait a few
-            // milliseconds then retry the connection (we should be using
-            // WaitNamedPipe here but that's not exposed by tokio / mio)
-            Err(e) if e.raw_os_error() == Some(ERROR_PIPE_BUSY) => {}
-            Err(e) => return Err(e),
-        }
-
-        time::sleep(Duration::from_millis(50)).await;
-    }
-}
-
-/// Process creation flag from the Win32 API, ensures the process is created
-/// in its own group and will not be killed when the parent process exits
-const CREATE_NEW_PROCESS_GROUP: u32 = 0x00000200;
-
-/// Spawn the daemon server process in the background
-fn spawn_daemon(
-    stop_on_disconnect: bool,
-    config_path: Option<PathBuf>,
-    log_path: Option<PathBuf>,
-    log_file_name_prefix: Option<String>,
-) -> io::Result<()> {
-    let binary = env::current_exe()?;
-
-    let mut cmd = Command::new(binary);
-    cmd.arg("__run_server");
-
-    if stop_on_disconnect {
-        cmd.arg("--stop-on-disconnect");
-    }
-
-    if let Some(config_path) = config_path {
-        cmd.arg(format!("--config-path={}", config_path.display()));
-    }
-    if let Some(log_path) = log_path {
-        cmd.arg(format!("--log-path={}", log_path.display()));
-    }
-    if let Some(log_file_name_prefix) = log_file_name_prefix {
-        cmd.arg(format!("--log-prefix-name={}", log_file_name_prefix));
-    }
-    cmd.creation_flags(CREATE_NEW_PROCESS_GROUP);
-
-    cmd.spawn()?;
-
-    Ok(())
-}
-
-/// Open a connection to the daemon server process, returning [None] if the
-/// server is not running
-pub(crate) async fn open_socket() -> io::Result<Option<(ClientReadHalf, ClientWriteHalf)>> {
-    match try_connect().await {
-        Ok(socket) => {
-            let inner = Arc::new(socket);
-            Ok(Some((
-                ClientReadHalf {
-                    inner: inner.clone(),
-                },
-                ClientWriteHalf { inner },
-            )))
-        }
-        Err(err) if err.kind() == ErrorKind::NotFound => Ok(None),
-        Err(err) => Err(err),
-    }
-}
-
-pub(crate) struct ClientReadHalf {
-    inner: Arc<NamedPipeClient>,
-}
-
-impl AsyncRead for ClientReadHalf {
-    fn poll_read(
-        self: Pin<&mut Self>,
-        cx: &mut Context<'_>,
-        buf: &mut ReadBuf<'_>,
-    ) -> Poll<io::Result<()>> {
-        loop {
-            match self.inner.poll_read_ready(cx) {
-                Poll::Ready(Ok(())) => match self.inner.try_read(buf.initialize_unfilled()) {
-                    Ok(count) => {
-                        buf.advance(count);
-                        return Poll::Ready(Ok(()));
-                    }
-
-                    Err(err) if err.kind() == io::ErrorKind::WouldBlock => continue,
-                    Err(err) => return Poll::Ready(Err(err)),
-                },
-
-                Poll::Ready(Err(err)) => return Poll::Ready(Err(err)),
-                Poll::Pending => return Poll::Pending,
-            };
-        }
-    }
-}
-
-pub(crate) struct ClientWriteHalf {
-    inner: Arc<NamedPipeClient>,
-}
-
-impl AsyncWrite for ClientWriteHalf {
-    fn poll_write(
-        self: Pin<&mut Self>,
-        cx: &mut Context<'_>,
-        buf: &[u8],
-    ) -> Poll<io::Result<usize>> {
-        loop {
-            match self.inner.poll_write_ready(cx) {
-                Poll::Ready(Ok(())) => match self.inner.try_write(buf) {
-                    Ok(count) => return Poll::Ready(Ok(count)),
-                    Err(err) if err.kind() == io::ErrorKind::WouldBlock => continue,
-                    Err(err) => return Poll::Ready(Err(err)),
-                },
-
-                Poll::Ready(Err(err)) => return Poll::Ready(Err(err)),
-                Poll::Pending => return Poll::Pending,
-            }
-        }
-    }
-
-    fn poll_flush(self: Pin<&mut Self>, _cx: &mut Context<'_>) -> Poll<io::Result<()>> {
-        Poll::Ready(Ok(()))
-    }
-
-    fn poll_shutdown(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<io::Result<()>> {
-        self.poll_flush(cx)
-    }
-}
-
-/// Ensure the server daemon is running and ready to receive connections
-///
-/// Returns false if the daemon process was already running or true if it had
-/// to be started
-pub(crate) async fn ensure_daemon(
-    stop_on_disconnect: bool,
-    config_path: Option<PathBuf>,
-    log_path: Option<PathBuf>,
-    log_file_name_prefix: Option<String>,
-) -> io::Result<bool> {
-    let mut did_spawn = false;
-
-    loop {
-        match open_socket().await {
-            Ok(Some(_)) => break,
-            Ok(None) => {
-                spawn_daemon(
-                    stop_on_disconnect,
-                    config_path.clone(),
-                    log_path.clone(),
-                    log_file_name_prefix.clone(),
-                )?;
-                did_spawn = true;
-                time::sleep(Duration::from_millis(50)).await;
-            }
-            Err(err) => return Err(err),
-        }
-    }
-
-    Ok(did_spawn)
-}
-
-/// Ensure the server daemon is running and ready to receive connections and
-/// print the global pipe name in the standard output
-pub(crate) async fn print_socket() -> io::Result<()> {
-    ensure_daemon(true, None, None, None).await?;
-    println!("{}", get_pipe_name());
-    Ok(())
-}
-
-/// Start listening on the global pipe and accepting connections with the
-/// provided [ServerFactory]
-pub(crate) async fn run_daemon(
-    factory: ServerFactory,
-    config_path: Option<PathBuf>,
-) -> io::Result<Infallible> {
-    let mut prev_server = ServerOptions::new()
-        .first_pipe_instance(true)
-        .create(get_pipe_name())?;
-
-    loop {
-        prev_server.connect().await?;
-        let mut next_server = ServerOptions::new().create(get_pipe_name())?;
-        swap(&mut prev_server, &mut next_server);
-
-        let connection = factory.create(config_path.clone());
-        let span = tracing::trace_span!("run_server");
-        tokio::spawn(run_server(connection, next_server).instrument(span.or_current()));
-    }
-}
-
-/// Async task driving a single client connection
-async fn run_server(connection: ServerConnection, stream: NamedPipeServer) {
-    let inner = Arc::new(stream);
-    let read = ServerReadHalf {
-        inner: inner.clone(),
-    };
-    let write = ServerWriteHalf { inner };
-    connection.accept(read, write).await;
-}
-
-struct ServerReadHalf {
-    inner: Arc<NamedPipeServer>,
-}
-
-impl AsyncRead for ServerReadHalf {
-    fn poll_read(
-        self: Pin<&mut Self>,
-        cx: &mut Context<'_>,
-        buf: &mut ReadBuf<'_>,
-    ) -> Poll<io::Result<()>> {
-        loop {
-            match self.inner.poll_read_ready(cx) {
-                Poll::Ready(Ok(())) => match self.inner.try_read(buf.initialize_unfilled()) {
-                    Ok(count) => {
-                        buf.advance(count);
-                        return Poll::Ready(Ok(()));
-                    }
-
-                    Err(err) if err.kind() == io::ErrorKind::WouldBlock => continue,
-                    Err(err) => return Poll::Ready(Err(err)),
-                },
-
-                Poll::Ready(Err(err)) => return Poll::Ready(Err(err)),
-                Poll::Pending => return Poll::Pending,
-            };
-        }
-    }
-}
-
-struct ServerWriteHalf {
-    inner: Arc<NamedPipeServer>,
-}
-
-impl AsyncWrite for ServerWriteHalf {
-    fn poll_write(
-        self: Pin<&mut Self>,
-        cx: &mut Context<'_>,
-        buf: &[u8],
-    ) -> Poll<io::Result<usize>> {
-        loop {
-            match self.inner.poll_write_ready(cx) {
-                Poll::Ready(Ok(())) => match self.inner.try_write(buf) {
-                    Ok(count) => return Poll::Ready(Ok(count)),
-                    Err(err) if err.kind() == io::ErrorKind::WouldBlock => continue,
-                    Err(err) => return Poll::Ready(Err(err)),
-                },
-
-                Poll::Ready(Err(err)) => return Poll::Ready(Err(err)),
-                Poll::Pending => return Poll::Pending,
-            }
-        }
-    }
-
-    fn poll_flush(self: Pin<&mut Self>, _cx: &mut Context<'_>) -> Poll<io::Result<()>> {
-        Poll::Ready(Ok(()))
-    }
-
-    fn poll_shutdown(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<io::Result<()>> {
-        self.poll_flush(cx)
-    }
-}
diff --git a/crates/pgt_cli/tests/assert_cmd.rs b/crates/pgt_cli/tests/assert_cmd.rs
deleted file mode 100644
index a7ddc17f..00000000
--- a/crates/pgt_cli/tests/assert_cmd.rs
+++ /dev/null
@@ -1,16 +0,0 @@
-use std::path::PathBuf;
-
-use assert_cmd::Command;
-use predicates::prelude::*;
-
-#[test]
-fn
test_cli_check_command() { - let mut cmd = Command::cargo_bin("postgrestools").unwrap(); - - let test_sql_path = PathBuf::from("tests/fixtures/test.sql"); - - cmd.args(["check", test_sql_path.to_str().unwrap()]) - .assert() - .failure() - .stdout(predicate::str::contains("Found 1 error")); -} diff --git a/crates/pgt_cli/tests/commands/check.rs b/crates/pgt_cli/tests/commands/check.rs deleted file mode 100644 index ce0de03d..00000000 --- a/crates/pgt_cli/tests/commands/check.rs +++ /dev/null @@ -1,24 +0,0 @@ -use bpaf::Args; -use std::path::Path; - -use crate::run_cli; -use pgt_console::BufferConsole; -use pgt_fs::MemoryFileSystem; -use pgt_workspace::DynRef; - -#[test] -fn syntax_error() { - let mut fs = MemoryFileSystem::default(); - let mut console = BufferConsole::default(); - - let file_path = Path::new("test.sql"); - fs.insert(file_path.into(), "select 1".as_bytes()); - - let result = run_cli( - DynRef::Borrowed(&mut fs), - &mut console, - Args::from([("check"), file_path.as_os_str().to_str().unwrap()].as_slice()), - ); - - assert!(result.is_ok(), "run_cli returned {result:?}"); -} diff --git a/crates/pgt_cli/tests/commands/mod.rs b/crates/pgt_cli/tests/commands/mod.rs deleted file mode 100644 index be0c6a3e..00000000 --- a/crates/pgt_cli/tests/commands/mod.rs +++ /dev/null @@ -1 +0,0 @@ -mod check; diff --git a/crates/pgt_cli/tests/fixtures/test.sql b/crates/pgt_cli/tests/fixtures/test.sql deleted file mode 100644 index de3b52a5..00000000 --- a/crates/pgt_cli/tests/fixtures/test.sql +++ /dev/null @@ -1,2 +0,0 @@ -alter tqjable test drop column id; - diff --git a/crates/pgt_cli/tests/main.rs b/crates/pgt_cli/tests/main.rs deleted file mode 100644 index 4ab06172..00000000 --- a/crates/pgt_cli/tests/main.rs +++ /dev/null @@ -1,53 +0,0 @@ -mod commands; - -use bpaf::ParseFailure; -use pgt_cli::{CliDiagnostic, CliSession, pgt_command}; -use pgt_console::{Console, ConsoleExt, markup}; -use pgt_fs::FileSystem; -use pgt_workspace::{App, DynRef}; - -/// Create an [App] instance using the provided [FileSystem] and [Console] -/// instance, and using an in-process "remote" instance of the workspace -pub(crate) fn run_cli<'app>( - fs: DynRef<'app, dyn FileSystem>, - console: &'app mut dyn Console, - args: bpaf::Args, -) -> Result<(), CliDiagnostic> { - use pgt_cli::SocketTransport; - use pgt_lsp::ServerFactory; - use pgt_workspace::{WorkspaceRef, workspace}; - use tokio::{ - io::{duplex, split}, - runtime::Runtime, - }; - - let factory = ServerFactory::default(); - let connection = factory.create(None); - - let runtime = Runtime::new().expect("failed to create runtime"); - - let (client, server) = duplex(4096); - let (stdin, stdout) = split(server); - runtime.spawn(connection.accept(stdin, stdout)); - - let (client_read, client_write) = split(client); - let transport = SocketTransport::open(runtime, client_read, client_write); - - let workspace = workspace::client(transport).unwrap(); - let app = App::new(fs, console, WorkspaceRef::Owned(workspace)); - - let mut session = CliSession { app }; - let command = pgt_command().run_inner(args); - match command { - Ok(command) => session.run(command), - Err(failure) => { - if let ParseFailure::Stdout(help, _) = &failure { - let console = &mut session.app.console; - console.log(markup! 
{{help.to_string()}});
-                Ok(())
-            } else {
-                Err(CliDiagnostic::parse_error_bpaf(failure))
-            }
-        }
-    }
-}
diff --git a/crates/pgt_completions/Cargo.toml b/crates/pgt_completions/Cargo.toml
deleted file mode 100644
index 916a0020..00000000
--- a/crates/pgt_completions/Cargo.toml
+++ /dev/null
@@ -1,46 +0,0 @@
-[package]
-authors.workspace = true
-categories.workspace = true
-description = ""
-edition.workspace = true
-homepage.workspace = true
-keywords.workspace = true
-license.workspace = true
-name = "pgt_completions"
-repository.workspace = true
-version = "0.0.0"
-
-
-[dependencies]
-async-std = "1.12.0"
-
-pgt_text_size.workspace = true
-
-
-fuzzy-matcher = "0.3.7"
-pgt_schema_cache.workspace = true
-pgt_treesitter_queries.workspace = true
-schemars = { workspace = true, optional = true }
-serde = { workspace = true, features = ["derive"] }
-serde_json = { workspace = true }
-tracing = { workspace = true }
-tree-sitter.workspace = true
-tree_sitter_sql.workspace = true
-
-sqlx.workspace = true
-
-tokio = { version = "1.41.1", features = ["full"] }
-
-[dev-dependencies]
-criterion = "0.5.1"
-pgt_test_utils.workspace = true
-
-[lib]
-doctest = false
-
-[features]
-schema = ["dep:schemars"]
-
-[[bench]]
-harness = false
-name = "sanitization"
diff --git a/crates/pgt_completions/README.md b/crates/pgt_completions/README.md
deleted file mode 100644
index bb52bc0b..00000000
--- a/crates/pgt_completions/README.md
+++ /dev/null
@@ -1,15 +0,0 @@
-# Auto-Completions
-
-## What does this crate do?
-
-The `pgt_completions` crate identifies and ranks autocompletion items that can be displayed in your code editor.
-Its main export is the `complete` function. The function takes a PostgreSQL statement, a cursor position, and a data structure representing the underlying database schema. It returns a list of completion items.
-
-Postgres's statement-parsing engine, `libpg_query`, which is used in other parts of this LSP, is only capable of parsing _complete and valid_ statements. Since autocompletion should work for incomplete statements, we rely heavily on tree-sitter – an incremental parsing library.
-
-### Working with TreeSitter
-
-In the `pgt_test_utils` crate, there's a binary that parses an SQL file and prints out the matching tree-sitter tree.
-This makes writing tree-sitter queries for this crate easy.
-
-To print a tree, run `cargo run --bin tree_print -- -f <file_name>`.
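To make the README's description concrete, here is a rough end-to-end sketch of driving `complete`. The setup mirrors the crate's own benchmarks and the `CompletionParams` struct in `complete.rs` below; treat any detail beyond those (e.g. the exact cursor offset or printable fields) as an assumption.

```rust
// Hedged sketch: call pgt_completions::complete on an incomplete statement.
use pgt_completions::{complete, CompletionParams};
use pgt_schema_cache::SchemaCache;
use pgt_text_size::TextSize;

fn main() {
    let sql = "select  from users;".to_string();

    // Parse with tree-sitter, exactly as the benchmarks do.
    let mut parser = tree_sitter::Parser::new();
    parser.set_language(tree_sitter_sql::language()).unwrap();
    let tree = parser.parse(&sql, None).unwrap();

    // An empty cache suffices for a smoke test; a real editor would pass
    // the connected database's schema here.
    let cache = SchemaCache::default();

    let items = complete(CompletionParams {
        position: TextSize::new(7), // cursor right after "select "
        schema: &cache,
        text: sql,
        tree: &tree,
    });

    for item in items {
        println!("{}", item.label);
    }
}
```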
diff --git a/crates/pgt_completions/benches/sanitization.rs b/crates/pgt_completions/benches/sanitization.rs deleted file mode 100644 index 50c2a0e3..00000000 --- a/crates/pgt_completions/benches/sanitization.rs +++ /dev/null @@ -1,249 +0,0 @@ -use criterion::{Criterion, black_box, criterion_group, criterion_main}; -use pgt_completions::{CompletionParams, benchmark_sanitization}; -use pgt_schema_cache::SchemaCache; -use pgt_text_size::TextSize; - -static CURSOR_POS: &str = "€"; - -fn sql_and_pos(sql: &str) -> (String, usize) { - let pos = sql.find(CURSOR_POS).unwrap(); - (sql.replace(CURSOR_POS, ""), pos) -} - -fn get_tree(sql: &str) -> tree_sitter::Tree { - let mut parser = tree_sitter::Parser::new(); - parser.set_language(tree_sitter_sql::language()).unwrap(); - parser.parse(sql, None).unwrap() -} - -fn to_params<'a>( - text: String, - tree: &'a tree_sitter::Tree, - pos: usize, - cache: &'a SchemaCache, -) -> CompletionParams<'a> { - let pos: u32 = pos.try_into().unwrap(); - CompletionParams { - position: TextSize::new(pos), - schema: cache, - text, - tree, - } -} - -pub fn criterion_benchmark(c: &mut Criterion) { - c.bench_function("small sql, adjusted", |b| { - let content = format!("select {} from users;", CURSOR_POS); - - let cache = SchemaCache::default(); - let (sql, pos) = sql_and_pos(content.as_str()); - let tree = get_tree(sql.as_str()); - - b.iter(|| benchmark_sanitization(black_box(to_params(sql.clone(), &tree, pos, &cache)))); - }); - - c.bench_function("mid sql, adjusted", |b| { - let content = format!( - r#"select - n.oid :: int8 as "id!", - n.nspname as name, - u.rolname as "owner!" -from - pg_namespace n, - {} -where - n.nspowner = u.oid - and ( - pg_has_role(n.nspowner, 'USAGE') - or has_schema_privilege(n.oid, 'CREATE, USAGE') - ) - and not pg_catalog.starts_with(n.nspname, 'pg_temp_') - and not pg_catalog.starts_with(n.nspname, 'pg_toast_temp_');"#, - CURSOR_POS - ); - - let cache = SchemaCache::default(); - let (sql, pos) = sql_and_pos(content.as_str()); - let tree = get_tree(sql.as_str()); - - b.iter(|| benchmark_sanitization(black_box(to_params(sql.clone(), &tree, pos, &cache)))); - }); - - c.bench_function("large sql, adjusted", |b| { - let content = format!( - r#"with - available_tables as ( - select - c.relname as table_name, - c.oid as table_oid, - c.relkind as class_kind, - n.nspname as schema_name - from - pg_catalog.pg_class c - join pg_catalog.pg_namespace n on n.oid = c.relnamespace - where - -- r: normal tables - -- v: views - -- m: materialized views - -- f: foreign tables - -- p: partitioned tables - c.relkind in ('r', 'v', 'm', 'f', 'p') - ), - available_indexes as ( - select - unnest (ix.indkey) as attnum, - ix.indisprimary as is_primary, - ix.indisunique as is_unique, - ix.indrelid as table_oid - from - {} - where - c.relkind = 'i' - ) -select - atts.attname as name, - ts.table_name, - ts.table_oid :: int8 as "table_oid!", - ts.class_kind :: char as "class_kind!", - ts.schema_name, - atts.atttypid :: int8 as "type_id!", - not atts.attnotnull as "is_nullable!", - nullif( - information_schema._pg_char_max_length (atts.atttypid, atts.atttypmod), - -1 - ) as varchar_length, - pg_get_expr (def.adbin, def.adrelid) as default_expr, - coalesce(ix.is_primary, false) as "is_primary_key!", - coalesce(ix.is_unique, false) as "is_unique!", - pg_catalog.col_description (ts.table_oid, atts.attnum) as comment -from - pg_catalog.pg_attribute atts - join available_tables ts on atts.attrelid = ts.table_oid - left join available_indexes ix on atts.attrelid = ix.table_oid 
- and atts.attnum = ix.attnum - left join pg_catalog.pg_attrdef def on atts.attrelid = def.adrelid - and atts.attnum = def.adnum -where - -- system columns, such as `cmax` or `tableoid`, have negative `attnum`s - atts.attnum >= 0; -"#, - CURSOR_POS - ); - - let cache = SchemaCache::default(); - let (sql, pos) = sql_and_pos(content.as_str()); - let tree = get_tree(sql.as_str()); - - b.iter(|| benchmark_sanitization(black_box(to_params(sql.clone(), &tree, pos, &cache)))); - }); - - c.bench_function("small sql, unadjusted", |b| { - let content = format!("select e{} from users;", CURSOR_POS); - - let cache = SchemaCache::default(); - let (sql, pos) = sql_and_pos(content.as_str()); - let tree = get_tree(sql.as_str()); - - b.iter(|| benchmark_sanitization(black_box(to_params(sql.clone(), &tree, pos, &cache)))); - }); - - c.bench_function("mid sql, unadjusted", |b| { - let content = format!( - r#"select - n.oid :: int8 as "id!", - n.nspname as name, - u.rolname as "owner!" -from - pg_namespace n, - pg_r{} -where - n.nspowner = u.oid - and ( - pg_has_role(n.nspowner, 'USAGE') - or has_schema_privilege(n.oid, 'CREATE, USAGE') - ) - and not pg_catalog.starts_with(n.nspname, 'pg_temp_') - and not pg_catalog.starts_with(n.nspname, 'pg_toast_temp_');"#, - CURSOR_POS - ); - - let cache = SchemaCache::default(); - let (sql, pos) = sql_and_pos(content.as_str()); - let tree = get_tree(sql.as_str()); - - b.iter(|| benchmark_sanitization(black_box(to_params(sql.clone(), &tree, pos, &cache)))); - }); - - c.bench_function("large sql, unadjusted", |b| { - let content = format!( - r#"with - available_tables as ( - select - c.relname as table_name, - c.oid as table_oid, - c.relkind as class_kind, - n.nspname as schema_name - from - pg_catalog.pg_class c - join pg_catalog.pg_namespace n on n.oid = c.relnamespace - where - -- r: normal tables - -- v: views - -- m: materialized views - -- f: foreign tables - -- p: partitioned tables - c.relkind in ('r', 'v', 'm', 'f', 'p') - ), - available_indexes as ( - select - unnest (ix.indkey) as attnum, - ix.indisprimary as is_primary, - ix.indisunique as is_unique, - ix.indrelid as table_oid - from - pg_catalog.pg_class c - join pg_catalog.pg_index ix on c.oid = ix.indexrelid - where - c.relkind = 'i' - ) -select - atts.attname as name, - ts.table_name, - ts.table_oid :: int8 as "table_oid!", - ts.class_kind :: char as "class_kind!", - ts.schema_name, - atts.atttypid :: int8 as "type_id!", - not atts.attnotnull as "is_nullable!", - nullif( - information_schema._pg_char_max_length (atts.atttypid, atts.atttypmod), - -1 - ) as varchar_length, - pg_get_expr (def.adbin, def.adrelid) as default_expr, - coalesce(ix.is_primary, false) as "is_primary_key!", - coalesce(ix.is_unique, false) as "is_unique!", - pg_catalog.col_description (ts.table_oid, atts.attnum) as comment -from - pg_catalog.pg_attribute atts - join available_tables ts on atts.attrelid = ts.table_oid - left join available_indexes ix on atts.attrelid = ix.table_oid - and atts.attnum = ix.attnum - left join pg_catalog.pg_attrdef def on atts.attrelid = def.adrelid - and atts.attnum = def.adnum -where - -- system columns, such as `cmax` or `tableoid`, have negative `attnum`s - atts.attnum >= 0 -order by - sch{} "#, - CURSOR_POS - ); - - let cache = SchemaCache::default(); - let (sql, pos) = sql_and_pos(content.as_str()); - let tree = get_tree(sql.as_str()); - - b.iter(|| benchmark_sanitization(black_box(to_params(sql.clone(), &tree, pos, &cache)))); - }); -} - -criterion_group!(benches, criterion_benchmark); 
-criterion_main!(benches);
diff --git a/crates/pgt_completions/src/builder.rs b/crates/pgt_completions/src/builder.rs
deleted file mode 100644
index 96576053..00000000
--- a/crates/pgt_completions/src/builder.rs
+++ /dev/null
@@ -1,94 +0,0 @@
-use crate::{
-    CompletionItemKind, CompletionText,
-    context::CompletionContext,
-    item::CompletionItem,
-    relevance::{filtering::CompletionFilter, scoring::CompletionScore},
-};
-
-pub(crate) struct PossibleCompletionItem<'a> {
-    pub label: String,
-    pub description: String,
-    pub kind: CompletionItemKind,
-    pub score: CompletionScore<'a>,
-    pub filter: CompletionFilter<'a>,
-    pub completion_text: Option<CompletionText>,
-    pub detail: Option<String>,
-}
-
-pub(crate) struct CompletionBuilder<'a> {
-    items: Vec<PossibleCompletionItem<'a>>,
-    ctx: &'a CompletionContext<'a>,
-}
-
-impl<'a> CompletionBuilder<'a> {
-    pub fn new(ctx: &'a CompletionContext) -> Self {
-        CompletionBuilder { items: vec![], ctx }
-    }
-
-    pub fn add_item(&mut self, item: PossibleCompletionItem<'a>) {
-        self.items.push(item);
-    }
-
-    pub fn finish(self) -> Vec<CompletionItem> {
-        let mut items: Vec<PossibleCompletionItem<'a>> = self
-            .items
-            .into_iter()
-            .filter(|i| i.filter.is_relevant(self.ctx).is_some())
-            .collect();
-
-        for item in items.iter_mut() {
-            item.score.calc_score(self.ctx);
-        }
-
-        items.sort_by(|a, b| {
-            b.score
-                .get_score()
-                .cmp(&a.score.get_score())
-                .then_with(|| a.label.cmp(&b.label))
-        });
-
-        items.dedup_by(|a, b| a.label == b.label);
-        items.truncate(crate::LIMIT);
-
-        let should_preselect_first_item = should_preselect_first_item(&items);
-
-        /*
-         * LSP Clients themselves sort the completion items.
-         * They'll use the `sort_text` property if present (or fall back to the `label`).
-         * Since our items are already sorted, we're 'hijacking' the sort_text.
-         * We're simply adding the index of the item, padded by zeroes to the max length.
-         */
-        let max_padding = items.len().to_string().len();
-
-        items
-            .into_iter()
-            .enumerate()
-            .map(|(idx, item)| {
-                let preselected = idx == 0 && should_preselect_first_item;
-
-                CompletionItem {
-                    description: item.description,
-                    kind: item.kind,
-                    label: item.label,
-                    preselected,
-                    detail: item.detail,
-
-                    // wondrous Rust syntax ftw
-                    sort_text: format!("{:0>padding$}", idx, padding = max_padding),
-                    completion_text: item.completion_text,
-                }
-            })
-            .collect()
-    }
-}
-
-fn should_preselect_first_item(items: &Vec<PossibleCompletionItem>) -> bool {
-    let mut items_iter = items.iter();
-    let first = items_iter.next();
-    let second = items_iter.next();
-
-    first.is_some_and(|f| match second {
-        Some(s) => (f.score.get_score() - s.score.get_score()) > 15,
-        None => true,
-    }) && items.len() >= 10
-}
diff --git a/crates/pgt_completions/src/complete.rs b/crates/pgt_completions/src/complete.rs
deleted file mode 100644
index 5bc5d41c..00000000
--- a/crates/pgt_completions/src/complete.rs
+++ /dev/null
@@ -1,41 +0,0 @@
-use pgt_text_size::TextSize;
-
-use crate::{
-    builder::CompletionBuilder,
-    context::CompletionContext,
-    item::CompletionItem,
-    providers::{
-        complete_columns, complete_functions, complete_policies, complete_schemas, complete_tables,
-    },
-    sanitization::SanitizedCompletionParams,
-};
-
-pub const LIMIT: usize = 50;
-
-#[derive(Debug)]
-pub struct CompletionParams<'a> {
-    pub position: TextSize,
-    pub schema: &'a pgt_schema_cache::SchemaCache,
-    pub text: String,
-    pub tree: &'a tree_sitter::Tree,
-}
-
-#[tracing::instrument(level = "debug", skip_all, fields(
-    text = params.text,
-    position = params.position.to_string()
-))]
-pub fn complete(params: CompletionParams) -> Vec<CompletionItem> {
-    let sanitized_params = SanitizedCompletionParams::from(params);
-
-    let ctx = CompletionContext::new(&sanitized_params);
-
-    let mut builder = CompletionBuilder::new(&ctx);
-
-    complete_tables(&ctx, &mut builder);
-    complete_functions(&ctx, &mut builder);
-    complete_columns(&ctx, &mut builder);
-    complete_schemas(&ctx, &mut builder);
-    complete_policies(&ctx, &mut builder);
-
-    builder.finish()
-}
diff --git a/crates/pgt_completions/src/context/mod.rs b/crates/pgt_completions/src/context/mod.rs
deleted file mode 100644
index 0bb190a9..00000000
--- a/crates/pgt_completions/src/context/mod.rs
+++ /dev/null
@@ -1,965 +0,0 @@
-use std::{
-    cmp,
-    collections::{HashMap, HashSet},
-};
-mod policy_parser;
-
-use pgt_schema_cache::SchemaCache;
-use pgt_text_size::TextRange;
-use pgt_treesitter_queries::{
-    TreeSitterQueriesExecutor,
-    queries::{self, QueryResult},
-};
-
-use crate::{
-    NodeText,
-    context::policy_parser::{PolicyParser, PolicyStmtKind},
-    sanitization::SanitizedCompletionParams,
-};
-
-#[derive(Debug, PartialEq, Eq, Hash, Clone)]
-pub enum WrappingClause<'a> {
-    Select,
-    Where,
-    From,
-    Join {
-        on_node: Option<tree_sitter::Node<'a>>,
-    },
-    Update,
-    Delete,
-    ColumnDefinitions,
-    Insert,
-    AlterTable,
-    DropTable,
-    PolicyName,
-    ToRoleAssignment,
-}
-
-#[derive(PartialEq, Eq, Hash, Debug, Clone)]
-pub(crate) struct MentionedColumn {
-    pub(crate) column: String,
-    pub(crate) alias: Option<String>,
-}
-
-/// We can map a few nodes, such as the "update" node, to actual SQL clauses.
-/// That gives us a lot of insight for completions.
-/// Other nodes, such as the "relation" node, give us less but still
-/// relevant information.
-/// `WrappingNode` maps to such nodes.
-///
-/// Note: This is not the direct parent of the `node_under_cursor`, but the closest
-/// *relevant* parent.
-#[derive(Debug, PartialEq, Eq)]
-pub enum WrappingNode {
-    Relation,
-    BinaryExpression,
-    Assignment,
-    List,
-}
-
-#[derive(Debug)]
-pub(crate) enum NodeUnderCursor<'a> {
-    TsNode(tree_sitter::Node<'a>),
-    CustomNode {
-        text: NodeText,
-        range: TextRange,
-        kind: String,
-    },
-}
-
-impl NodeUnderCursor<'_> {
-    pub fn start_byte(&self) -> usize {
-        match self {
-            NodeUnderCursor::TsNode(node) => node.start_byte(),
-            NodeUnderCursor::CustomNode { range, .. } => range.start().into(),
-        }
-    }
-
-    pub fn end_byte(&self) -> usize {
-        match self {
-            NodeUnderCursor::TsNode(node) => node.end_byte(),
-            NodeUnderCursor::CustomNode { range, .. } => range.end().into(),
-        }
-    }
-
-    pub fn kind(&self) -> &str {
-        match self {
-            NodeUnderCursor::TsNode(node) => node.kind(),
-            NodeUnderCursor::CustomNode { kind, .. } => kind.as_str(),
-        }
-    }
-}
-
-impl<'a> From<tree_sitter::Node<'a>> for NodeUnderCursor<'a> {
-    fn from(node: tree_sitter::Node<'a>) -> Self {
-        NodeUnderCursor::TsNode(node)
-    }
-}
-
-impl TryFrom<&str> for WrappingNode {
-    type Error = String;
-
-    fn try_from(value: &str) -> Result<Self, Self::Error> {
-        match value {
-            "relation" => Ok(Self::Relation),
-            "assignment" => Ok(Self::Assignment),
-            "binary_expression" => Ok(Self::BinaryExpression),
-            "list" => Ok(Self::List),
-            _ => {
-                let message = format!("Unimplemented Relation: {}", value);
-
-                // Err on tests, so we notice that we're lacking an implementation immediately.
-                if cfg!(test) {
-                    panic!("{}", message);
-                }
-
-                Err(message)
-            }
-        }
-    }
-}
-
-impl TryFrom<String> for WrappingNode {
-    type Error = String;
-    fn try_from(value: String) -> Result<Self, Self::Error> {
-        Self::try_from(value.as_str())
-    }
-}
-
-#[derive(Debug)]
-pub(crate) struct CompletionContext<'a> {
-    pub node_under_cursor: Option<NodeUnderCursor<'a>>,
-
-    pub tree: &'a tree_sitter::Tree,
-    pub text: &'a str,
-    pub schema_cache: &'a SchemaCache,
-    pub position: usize,
-
-    /// If the cursor is on a node that uses dot notation
-    /// to specify an alias or schema, this will hold the schema's or
-    /// alias's name.
-    ///
-    /// Here, `auth` is a schema name:
-    /// ```sql
-    /// select * from auth.users;
-    /// ```
-    ///
-    /// Here, `u` is an alias name:
-    /// ```sql
-    /// select
-    ///     *
-    /// from
-    ///     auth.users u
-    ///     left join identities i
-    ///     on u.id = i.user_id;
-    /// ```
-    pub schema_or_alias_name: Option<String>,
-    pub wrapping_clause_type: Option<WrappingClause<'a>>,
-
-    pub wrapping_node_kind: Option<WrappingNode>,
-
-    pub is_invocation: bool,
-    pub wrapping_statement_range: Option<tree_sitter::Range>,
-
-    pub mentioned_relations: HashMap<Option<String>, HashSet<String>>,
-    pub mentioned_table_aliases: HashMap<String, String>,
-    pub mentioned_columns: HashMap<Option<WrappingClause<'a>>, HashSet<MentionedColumn>>,
-}
-
-impl<'a> CompletionContext<'a> {
-    pub fn new(params: &'a SanitizedCompletionParams) -> Self {
-        let mut ctx = Self {
-            tree: params.tree.as_ref(),
-            text: &params.text,
-            schema_cache: params.schema,
-            position: usize::from(params.position),
-            node_under_cursor: None,
-            schema_or_alias_name: None,
-            wrapping_clause_type: None,
-            wrapping_node_kind: None,
-            wrapping_statement_range: None,
-            is_invocation: false,
-            mentioned_relations: HashMap::new(),
-            mentioned_table_aliases: HashMap::new(),
-            mentioned_columns: HashMap::new(),
-        };
-
-        // Policy handling is important to Supabase, but policies are a PostgreSQL-specific
-        // extension, so the tree_sitter_sql language does not support them.
-        // We infer the context manually.
-        if PolicyParser::looks_like_policy_stmt(&params.text) {
-            ctx.gather_policy_context();
-        } else {
-            ctx.gather_tree_context();
-            ctx.gather_info_from_ts_queries();
-        }
-
-        ctx
-    }
-
-    fn gather_policy_context(&mut self) {
-        let policy_context = PolicyParser::get_context(self.text, self.position);
-
-        self.node_under_cursor = Some(NodeUnderCursor::CustomNode {
-            text: policy_context.node_text.into(),
-            range: policy_context.node_range,
-            kind: policy_context.node_kind.clone(),
-        });
-
-        if policy_context.node_kind == "policy_table" {
-            self.schema_or_alias_name = policy_context.schema_name.clone();
-        }
-
-        if policy_context.table_name.is_some() {
-            let mut new = HashSet::new();
-            new.insert(policy_context.table_name.unwrap());
-            self.mentioned_relations
-                .insert(policy_context.schema_name, new);
-        }
-
-        self.wrapping_clause_type = match policy_context.node_kind.as_str() {
-            "policy_name" if policy_context.statement_kind != PolicyStmtKind::Create => {
-                Some(WrappingClause::PolicyName)
-            }
-            "policy_role" => Some(WrappingClause::ToRoleAssignment),
-            "policy_table" => Some(WrappingClause::From),
-            _ => None,
-        };
-    }
-
-    fn gather_info_from_ts_queries(&mut self) {
-        let stmt_range = self.wrapping_statement_range.as_ref();
-        let sql = self.text;
-
-        let mut executor = TreeSitterQueriesExecutor::new(self.tree.root_node(), sql);
-
-        executor.add_query_results::();
-        executor.add_query_results::();
-        executor.add_query_results::();
-        executor.add_query_results::();
-        executor.add_query_results::();
-
-        for relation_match in executor.get_iter(stmt_range) {
-            match relation_match {
-                QueryResult::Relation(r) => {
-                    let schema_name = r.get_schema(sql);
-                    let table_name = r.get_table(sql);
-
-                    self.mentioned_relations
-                        .entry(schema_name)
-                        .and_modify(|s| {
-                            s.insert(table_name.clone());
-                        })
-                        .or_insert(HashSet::from([table_name]));
-                }
-
-                QueryResult::TableAliases(table_alias_match) => {
-                    self.mentioned_table_aliases.insert(
-                        table_alias_match.get_alias(sql),
-                        table_alias_match.get_table(sql),
-                    );
-                }
-
-                QueryResult::SelectClauseColumns(c) => {
-                    let mentioned = MentionedColumn {
-                        column: c.get_column(sql),
-                        alias: c.get_alias(sql),
-                    };
-
-                    self.mentioned_columns
-                        .entry(Some(WrappingClause::Select))
-                        .and_modify(|s| {
-                            s.insert(mentioned.clone());
-                        })
-                        .or_insert(HashSet::from([mentioned]));
-                }
-
-                QueryResult::WhereClauseColumns(c) => {
-                    let mentioned = MentionedColumn {
-                        column: c.get_column(sql),
-                        alias: c.get_alias(sql),
-                    };
-
-                    self.mentioned_columns
-                        .entry(Some(WrappingClause::Where))
-                        .and_modify(|s| {
-                            s.insert(mentioned.clone());
-                        })
-                        .or_insert(HashSet::from([mentioned]));
-                }
-
-                QueryResult::InsertClauseColumns(c) => {
-                    let mentioned = MentionedColumn {
-                        column: c.get_column(sql),
-                        alias: None,
-                    };
-
-                    self.mentioned_columns
-                        .entry(Some(WrappingClause::Insert))
-                        .and_modify(|s| {
-                            s.insert(mentioned.clone());
-                        })
-                        .or_insert(HashSet::from([mentioned]));
-                }
-                _ => {}
-            };
-        }
-    }
-
-    fn get_ts_node_content(&self, ts_node: &tree_sitter::Node<'a>) -> Option<NodeText> {
-        let source = self.text;
-        ts_node.utf8_text(source.as_bytes()).ok().map(|txt| {
-            if SanitizedCompletionParams::is_sanitized_token(txt) {
-                NodeText::Replaced
-            } else {
-                NodeText::Original(txt.into())
-            }
-        })
-    }
-
-    pub fn get_node_under_cursor_content(&self) -> Option<String> {
-        match self.node_under_cursor.as_ref()?
{ - NodeUnderCursor::TsNode(node) => { - self.get_ts_node_content(node).and_then(|nt| match nt { - NodeText::Replaced => None, - NodeText::Original(c) => Some(c.to_string()), - }) - } - NodeUnderCursor::CustomNode { text, .. } => match text { - NodeText::Replaced => None, - NodeText::Original(c) => Some(c.to_string()), - }, - } - } - - fn gather_tree_context(&mut self) { - let mut cursor = self.tree.root_node().walk(); - - /* - * The head node of any treesitter tree is always the "PROGRAM" node. - * - * We want to enter the next layer and focus on the child node that matches the user's cursor position. - * If there is no node under the users position, however, the cursor won't enter the next level – it - * will stay on the Program node. - * - * This might lead to an unexpected context or infinite recursion. - * - * We'll therefore adjust the cursor position such that it meets the last node of the AST. - * `select * from use {}` becomes `select * from use{}`. - */ - let current_node = cursor.node(); - - let mut chars = self.text.chars(); - - if chars - .nth(self.position) - .is_some_and(|c| !c.is_ascii_whitespace() && !&[';', ')'].contains(&c)) - { - self.position = cmp::min(self.position + 1, self.text.len()); - } else { - self.position = cmp::min(self.position, self.text.len()); - } - - cursor.goto_first_child_for_byte(self.position); - - self.gather_context_from_node(cursor, current_node); - } - - fn gather_context_from_node( - &mut self, - mut cursor: tree_sitter::TreeCursor<'a>, - parent_node: tree_sitter::Node<'a>, - ) { - let current_node = cursor.node(); - - let parent_node_kind = parent_node.kind(); - let current_node_kind = current_node.kind(); - - // prevent infinite recursion – this can happen with ERROR nodes - if current_node_kind == parent_node_kind && ["ERROR", "program"].contains(&parent_node_kind) - { - self.node_under_cursor = Some(NodeUnderCursor::from(current_node)); - return; - } - - match parent_node_kind { - "statement" | "subquery" => { - self.wrapping_clause_type = - self.get_wrapping_clause_from_current_node(current_node, &mut cursor); - - self.wrapping_statement_range = Some(parent_node.range()); - } - "invocation" => self.is_invocation = true, - _ => {} - } - - // try to gather context from the siblings if we're within an error node. 
-        if parent_node_kind == "ERROR" {
-            if let Some(clause_type) = self.get_wrapping_clause_from_error_node_child(current_node)
-            {
-                self.wrapping_clause_type = Some(clause_type);
-            }
-            if let Some(wrapping_node) = self.get_wrapping_node_from_error_node_child(current_node)
-            {
-                self.wrapping_node_kind = Some(wrapping_node)
-            }
-
-            self.get_info_from_error_node_child(current_node);
-        }
-
-        match current_node_kind {
-            "object_reference" | "field" => {
-                let content = self.get_ts_node_content(&current_node);
-                if let Some(node_txt) = content {
-                    match node_txt {
-                        NodeText::Original(txt) => {
-                            let parts: Vec<&str> = txt.split('.').collect();
-                            if parts.len() == 2 {
-                                self.schema_or_alias_name = Some(parts[0].to_string());
-                            }
-                        }
-                        NodeText::Replaced => {}
-                    }
-                }
-            }
-
-            "where" | "update" | "select" | "delete" | "from" | "join" | "column_definitions"
-            | "drop_table" | "alter_table" => {
-                self.wrapping_clause_type =
-                    self.get_wrapping_clause_from_current_node(current_node, &mut cursor);
-            }
-
-            "relation" | "binary_expression" | "assignment" => {
-                self.wrapping_node_kind = current_node_kind.try_into().ok();
-            }
-
-            "list" => {
-                if current_node
-                    .prev_sibling()
-                    .is_none_or(|n| n.kind() != "keyword_values")
-                {
-                    self.wrapping_node_kind = current_node_kind.try_into().ok();
-                }
-            }
-
-            _ => {}
-        }
-
-        // We have arrived at the leaf node
-        if current_node.child_count() == 0 {
-            self.node_under_cursor = Some(NodeUnderCursor::from(current_node));
-            return;
-        }
-
-        cursor.goto_first_child_for_byte(self.position);
-        self.gather_context_from_node(cursor, current_node);
-    }
-
-    fn get_first_sibling(&self, node: tree_sitter::Node<'a>) -> tree_sitter::Node<'a> {
-        let mut first_sibling = node;
-        while let Some(n) = first_sibling.prev_sibling() {
-            first_sibling = n;
-        }
-        first_sibling
-    }
-
-    fn get_wrapping_node_from_error_node_child(
-        &self,
-        node: tree_sitter::Node<'a>,
-    ) -> Option<WrappingNode> {
-        self.wrapping_clause_type
-            .as_ref()
-            .and_then(|clause| match clause {
-                WrappingClause::Insert => {
-                    let mut first_sib = self.get_first_sibling(node);
-
-                    let mut after_opening_bracket = false;
-                    let mut before_closing_bracket = false;
-
-                    while let Some(next_sib) = first_sib.next_sibling() {
-                        if next_sib.kind() == "("
-                            && next_sib.end_position() <= node.start_position()
-                        {
-                            after_opening_bracket = true;
-                        }
-
-                        if next_sib.kind() == ")"
-                            && next_sib.start_position() >= node.end_position()
-                        {
-                            before_closing_bracket = true;
-                        }
-
-                        first_sib = next_sib;
-                    }
-
-                    if after_opening_bracket && before_closing_bracket {
-                        Some(WrappingNode::List)
-                    } else {
-                        None
-                    }
-                }
-                _ => None,
-            })
-    }
-
-    fn get_wrapping_clause_from_error_node_child(
-        &self,
-        node: tree_sitter::Node<'a>,
-    ) -> Option<WrappingClause<'a>> {
-        let clause_combinations: Vec<(WrappingClause, &[&'static str])> = vec![
-            (WrappingClause::Where, &["where"]),
-            (WrappingClause::Update, &["update"]),
-            (WrappingClause::Select, &["select"]),
-            (WrappingClause::Delete, &["delete"]),
-            (WrappingClause::Insert, &["insert", "into"]),
-            (WrappingClause::From, &["from"]),
-            (WrappingClause::Join { on_node: None }, &["join"]),
-            (WrappingClause::AlterTable, &["alter", "table"]),
-            (
-                WrappingClause::AlterTable,
-                &["alter", "table", "if", "exists"],
-            ),
-            (WrappingClause::DropTable, &["drop", "table"]),
-            (
-                WrappingClause::DropTable,
-                &["drop", "table", "if", "exists"],
-            ),
-        ];
-
-        let first_sibling = self.get_first_sibling(node);
-
-        /*
-         * For each clause, we'll iterate from first_sibling to the next ones,
-         * either until the end or until we land on the node under the cursor.
-         * We'll score the `WrappingClause` by how many tokens it matches in order.
-         */
-        let mut clauses_with_score: Vec<(WrappingClause, usize)> = clause_combinations
-            .into_iter()
-            .map(|(clause, tokens)| {
-                let mut idx = 0;
-
-                let mut sibling = Some(first_sibling);
-                while let Some(sib) = sibling {
-                    if sib.end_byte() >= node.end_byte() || idx >= tokens.len() {
-                        break;
-                    }
-
-                    if let Some(sibling_content) =
-                        self.get_ts_node_content(&sib).and_then(|txt| match txt {
-                            NodeText::Original(txt) => Some(txt),
-                            NodeText::Replaced => None,
-                        })
-                    {
-                        if sibling_content == tokens[idx] {
-                            idx += 1;
-                        }
-                    } else {
-                        break;
-                    }
-
-                    sibling = sib.next_sibling();
-                }
-
-                (clause, idx)
-            })
-            .collect();
-
-        clauses_with_score.sort_by(|(_, score_a), (_, score_b)| score_b.cmp(score_a));
-        clauses_with_score
-            .iter()
-            .find(|(_, score)| *score > 0)
-            .map(|c| c.0.clone())
-    }
-
-    fn get_info_from_error_node_child(&mut self, node: tree_sitter::Node<'a>) {
-        let mut first_sibling = self.get_first_sibling(node);
-
-        if let Some(clause) = self.wrapping_clause_type.as_ref() {
-            if clause == &WrappingClause::Insert {
-                while let Some(sib) = first_sibling.next_sibling() {
-                    match sib.kind() {
-                        "object_reference" => {
-                            if let Some(NodeText::Original(txt)) = self.get_ts_node_content(&sib) {
-                                let mut iter = txt.split('.').rev();
-                                let table = iter.next().unwrap().to_string();
-                                let schema = iter.next().map(|s| s.to_string());
-                                self.mentioned_relations
-                                    .entry(schema)
-                                    .and_modify(|s| {
-                                        s.insert(table.clone());
-                                    })
-                                    .or_insert(HashSet::from([table]));
-                            }
-                        }
-                        "column" => {
-                            if let Some(NodeText::Original(txt)) = self.get_ts_node_content(&sib) {
-                                let entry = MentionedColumn {
-                                    column: txt,
-                                    alias: None,
-                                };
-
-                                self.mentioned_columns
-                                    .entry(Some(WrappingClause::Insert))
-                                    .and_modify(|s| {
-                                        s.insert(entry.clone());
-                                    })
-                                    .or_insert(HashSet::from([entry]));
-                            }
-                        }
-
-                        _ => {}
-                    }
-
-                    first_sibling = sib;
-                }
-            }
-        }
-    }
-
-    fn get_wrapping_clause_from_current_node(
-        &self,
-        node: tree_sitter::Node<'a>,
-        cursor: &mut tree_sitter::TreeCursor<'a>,
-    ) -> Option<WrappingClause<'a>> {
-        match node.kind() {
-            "where" => Some(WrappingClause::Where),
-            "update" => Some(WrappingClause::Update),
-            "select" => Some(WrappingClause::Select),
-            "delete" => Some(WrappingClause::Delete),
-            "from" => Some(WrappingClause::From),
-            "drop_table" => Some(WrappingClause::DropTable),
-            "alter_table" => Some(WrappingClause::AlterTable),
-            "column_definitions" => Some(WrappingClause::ColumnDefinitions),
-            "insert" => Some(WrappingClause::Insert),
-            "join" => {
-                // sadly, we need to manually iterate over the children –
-                // `node.child_by_field_id(..)` does not work as expected
-                let mut on_node = None;
-                for child in node.children(cursor) {
-                    // 28 is the id for "keyword_on"
-                    if child.kind_id() == 28 {
-                        on_node = Some(child);
-                    }
-                }
-                cursor.goto_parent();
-                Some(WrappingClause::Join { on_node })
-            }
-            _ => None,
-        }
-    }
-
-    pub(crate) fn parent_matches_one_of_kind(&self, kinds: &[&'static str]) -> bool {
-        self.node_under_cursor
-            .as_ref()
-            .is_some_and(|under_cursor| match under_cursor {
-                NodeUnderCursor::TsNode(node) => node
-                    .parent()
-                    .is_some_and(|parent| kinds.contains(&parent.kind())),
-
-                NodeUnderCursor::CustomNode { ..
} => false, - }) - } - pub(crate) fn before_cursor_matches_kind(&self, kinds: &[&'static str]) -> bool { - self.node_under_cursor.as_ref().is_some_and(|under_cursor| { - match under_cursor { - NodeUnderCursor::TsNode(node) => { - let mut current = *node; - - // move up to the parent until we're at top OR we have a prev sibling - while current.prev_sibling().is_none() && current.parent().is_some() { - current = current.parent().unwrap(); - } - - current - .prev_sibling() - .is_some_and(|sib| kinds.contains(&sib.kind())) - } - - NodeUnderCursor::CustomNode { .. } => false, - } - }) - } -} - -#[cfg(test)] -mod tests { - use crate::{ - NodeText, - context::{CompletionContext, WrappingClause}, - sanitization::SanitizedCompletionParams, - test_helper::{CURSOR_POS, get_text_and_position}, - }; - - use super::NodeUnderCursor; - - fn get_tree(input: &str) -> tree_sitter::Tree { - let mut parser = tree_sitter::Parser::new(); - parser - .set_language(tree_sitter_sql::language()) - .expect("Couldn't set language"); - - parser.parse(input, None).expect("Unable to parse tree") - } - - #[test] - fn identifies_clauses() { - let test_cases = vec![ - ( - format!("Select {}* from users;", CURSOR_POS), - WrappingClause::Select, - ), - ( - format!("Select * from u{};", CURSOR_POS), - WrappingClause::From, - ), - ( - format!("Select {}* from users where n = 1;", CURSOR_POS), - WrappingClause::Select, - ), - ( - format!("Select * from users where {}n = 1;", CURSOR_POS), - WrappingClause::Where, - ), - ( - format!("update users set u{} = 1 where n = 2;", CURSOR_POS), - WrappingClause::Update, - ), - ( - format!("update users set u = 1 where n{} = 2;", CURSOR_POS), - WrappingClause::Where, - ), - ( - format!("delete{} from users;", CURSOR_POS), - WrappingClause::Delete, - ), - ( - format!("delete from {}users;", CURSOR_POS), - WrappingClause::From, - ), - ( - format!("select name, age, location from public.u{}sers", CURSOR_POS), - WrappingClause::From, - ), - ]; - - for (query, expected_clause) in test_cases { - let (position, text) = get_text_and_position(query.as_str().into()); - - let tree = get_tree(text.as_str()); - - let params = SanitizedCompletionParams { - position: (position as u32).into(), - text, - tree: std::borrow::Cow::Owned(tree), - schema: &pgt_schema_cache::SchemaCache::default(), - }; - - let ctx = CompletionContext::new(&params); - - assert_eq!(ctx.wrapping_clause_type, Some(expected_clause)); - } - } - - #[test] - fn identifies_schema() { - let test_cases = vec![ - ( - format!("Select * from private.u{}", CURSOR_POS), - Some("private"), - ), - ( - format!("Select * from private.u{}sers()", CURSOR_POS), - Some("private"), - ), - (format!("Select * from u{}sers", CURSOR_POS), None), - (format!("Select * from u{}sers()", CURSOR_POS), None), - ]; - - for (query, expected_schema) in test_cases { - let (position, text) = get_text_and_position(query.as_str().into()); - - let tree = get_tree(text.as_str()); - let params = SanitizedCompletionParams { - position: (position as u32).into(), - text, - tree: std::borrow::Cow::Owned(tree), - schema: &pgt_schema_cache::SchemaCache::default(), - }; - - let ctx = CompletionContext::new(&params); - - assert_eq!( - ctx.schema_or_alias_name, - expected_schema.map(|f| f.to_string()) - ); - } - } - - #[test] - fn identifies_invocation() { - let test_cases = vec![ - (format!("Select * from u{}sers", CURSOR_POS), false), - (format!("Select * from u{}sers()", CURSOR_POS), true), - (format!("Select cool{};", CURSOR_POS), false), - (format!("Select cool{}();", CURSOR_POS), 
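- // the parentheses directly after the cursor token are what mark this case as an invocation 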
true), - ( - format!("Select upp{}ercase as title from users;", CURSOR_POS), - false, - ), - ( - format!("Select upp{}ercase(name) as title from users;", CURSOR_POS), - true, - ), - ]; - - for (query, is_invocation) in test_cases { - let (position, text) = get_text_and_position(query.as_str().into()); - - let tree = get_tree(text.as_str()); - let params = SanitizedCompletionParams { - position: (position as u32).into(), - text, - tree: std::borrow::Cow::Owned(tree), - schema: &pgt_schema_cache::SchemaCache::default(), - }; - - let ctx = CompletionContext::new(&params); - - assert_eq!(ctx.is_invocation, is_invocation); - } - } - - #[test] - fn does_not_fail_on_leading_whitespace() { - let cases = vec![ - format!("{} select * from", CURSOR_POS), - format!(" {} select * from", CURSOR_POS), - ]; - - for query in cases { - let (position, text) = get_text_and_position(query.as_str().into()); - - let tree = get_tree(text.as_str()); - - let params = SanitizedCompletionParams { - position: (position as u32).into(), - text, - tree: std::borrow::Cow::Owned(tree), - schema: &pgt_schema_cache::SchemaCache::default(), - }; - - let ctx = CompletionContext::new(&params); - - let node = ctx.node_under_cursor.as_ref().unwrap(); - - match node { - NodeUnderCursor::TsNode(node) => { - assert_eq!( - ctx.get_ts_node_content(node), - Some(NodeText::Original("select".into())) - ); - - assert_eq!( - ctx.wrapping_clause_type, - Some(crate::context::WrappingClause::Select) - ); - } - _ => unreachable!(), - } - } - } - - #[test] - fn does_not_fail_on_trailing_whitespace() { - let query = format!("select * from {}", CURSOR_POS); - - let (position, text) = get_text_and_position(query.as_str().into()); - - let tree = get_tree(text.as_str()); - - let params = SanitizedCompletionParams { - position: (position as u32).into(), - text, - tree: std::borrow::Cow::Owned(tree), - schema: &pgt_schema_cache::SchemaCache::default(), - }; - - let ctx = CompletionContext::new(&params); - - let node = ctx.node_under_cursor.as_ref().unwrap(); - - match node { - NodeUnderCursor::TsNode(node) => { - assert_eq!( - ctx.get_ts_node_content(node), - Some(NodeText::Original("from".into())) - ); - } - _ => unreachable!(), - } - } - - #[test] - fn does_not_fail_with_empty_statements() { - let query = format!("{}", CURSOR_POS); - - let (position, text) = get_text_and_position(query.as_str().into()); - - let tree = get_tree(text.as_str()); - - let params = SanitizedCompletionParams { - position: (position as u32).into(), - text, - tree: std::borrow::Cow::Owned(tree), - schema: &pgt_schema_cache::SchemaCache::default(), - }; - - let ctx = CompletionContext::new(&params); - - let node = ctx.node_under_cursor.as_ref().unwrap(); - - match node { - NodeUnderCursor::TsNode(node) => { - assert_eq!( - ctx.get_ts_node_content(node), - Some(NodeText::Original("".into())) - ); - assert_eq!(ctx.wrapping_clause_type, None); - } - _ => unreachable!(), - } - } - - #[test] - fn does_not_fail_on_incomplete_keywords() { - // Instead of autocompleting "FROM", we'll assume that the user - // is selecting a certain column name, such as `frozen_account`. 
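- // The assertions below pin that behavior down: the token "fro" stays the node under - // the cursor and the wrapping clause remains `Select`, so column suggestions are - // offered instead of the FROM keyword. 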
- let query = format!("select * fro{}", CURSOR_POS); - - let (position, text) = get_text_and_position(query.as_str().into()); - - let tree = get_tree(text.as_str()); - - let params = SanitizedCompletionParams { - position: (position as u32).into(), - text, - tree: std::borrow::Cow::Owned(tree), - schema: &pgt_schema_cache::SchemaCache::default(), - }; - - let ctx = CompletionContext::new(¶ms); - - let node = ctx.node_under_cursor.as_ref().unwrap(); - - match node { - NodeUnderCursor::TsNode(node) => { - assert_eq!( - ctx.get_ts_node_content(node), - Some(NodeText::Original("fro".into())) - ); - assert_eq!(ctx.wrapping_clause_type, Some(WrappingClause::Select)); - } - _ => unreachable!(), - } - } -} diff --git a/crates/pgt_completions/src/context/policy_parser.rs b/crates/pgt_completions/src/context/policy_parser.rs deleted file mode 100644 index db37a13f..00000000 --- a/crates/pgt_completions/src/context/policy_parser.rs +++ /dev/null @@ -1,617 +0,0 @@ -use std::iter::Peekable; - -use pgt_text_size::{TextRange, TextSize}; - -#[derive(Default, Debug, PartialEq, Eq)] -pub(crate) enum PolicyStmtKind { - #[default] - Create, - - Alter, - Drop, -} - -#[derive(Clone, Debug, PartialEq, Eq)] -struct WordWithIndex { - word: String, - start: usize, - end: usize, -} - -impl WordWithIndex { - fn is_under_cursor(&self, cursor_pos: usize) -> bool { - self.start <= cursor_pos && self.end > cursor_pos - } - - fn get_range(&self) -> TextRange { - let start: u32 = self.start.try_into().expect("Text too long"); - let end: u32 = self.end.try_into().expect("Text too long"); - TextRange::new(TextSize::from(start), TextSize::from(end)) - } -} - -/// Note: A policy name within quotation marks will be considered a single word. -fn sql_to_words(sql: &str) -> Result, String> { - let mut words = vec![]; - - let mut start_of_word: Option = None; - let mut current_word = String::new(); - let mut in_quotation_marks = false; - - for (current_position, current_char) in sql.char_indices() { - if (current_char.is_ascii_whitespace() || current_char == ';') - && !current_word.is_empty() - && start_of_word.is_some() - && !in_quotation_marks - { - words.push(WordWithIndex { - word: current_word, - start: start_of_word.unwrap(), - end: current_position, - }); - - current_word = String::new(); - start_of_word = None; - } else if (current_char.is_ascii_whitespace() || current_char == ';') - && current_word.is_empty() - { - // do nothing - } else if current_char == '"' && start_of_word.is_none() { - in_quotation_marks = true; - current_word.push(current_char); - start_of_word = Some(current_position); - } else if current_char == '"' && start_of_word.is_some() { - current_word.push(current_char); - words.push(WordWithIndex { - word: current_word, - start: start_of_word.unwrap(), - end: current_position + 1, - }); - in_quotation_marks = false; - start_of_word = None; - current_word = String::new() - } else if start_of_word.is_some() { - current_word.push(current_char) - } else { - start_of_word = Some(current_position); - current_word.push(current_char); - } - } - - if let Some(start_of_word) = start_of_word { - if !current_word.is_empty() { - words.push(WordWithIndex { - word: current_word, - start: start_of_word, - end: sql.len(), - }); - } - } - - if in_quotation_marks { - Err("String was not closed properly.".into()) - } else { - Ok(words) - } -} - -#[derive(Default, Debug, PartialEq, Eq)] -pub(crate) struct PolicyContext { - pub policy_name: Option, - pub table_name: Option, - pub schema_name: Option, - pub statement_kind: 
PolicyStmtKind, - pub node_text: String, - pub node_range: TextRange, - pub node_kind: String, -} - -/// Simple parser that'll turn a policy-related statement into a context object required for -/// completions. -/// The parser will only work if the (trimmed) sql starts with `create policy`, `drop policy`, or `alter policy`. -/// It can only parse policy statements. -pub(crate) struct PolicyParser { - tokens: Peekable<std::vec::IntoIter<WordWithIndex>>, - previous_token: Option<WordWithIndex>, - current_token: Option<WordWithIndex>, - context: PolicyContext, - cursor_position: usize, -} - -impl PolicyParser { - pub(crate) fn looks_like_policy_stmt(sql: &str) -> bool { - let lowercased = sql.to_ascii_lowercase(); - let trimmed = lowercased.trim(); - trimmed.starts_with("create policy") - || trimmed.starts_with("drop policy") - || trimmed.starts_with("alter policy") - } - - pub(crate) fn get_context(sql: &str, cursor_position: usize) -> PolicyContext { - assert!( - Self::looks_like_policy_stmt(sql), - "PolicyParser should only be used for policy statements. Developer error!" - ); - - match sql_to_words(sql) { - Ok(tokens) => { - let parser = PolicyParser { - tokens: tokens.into_iter().peekable(), - context: PolicyContext::default(), - previous_token: None, - current_token: None, - cursor_position, - }; - - parser.parse() - } - Err(_) => PolicyContext::default(), - } - } - - fn parse(mut self) -> PolicyContext { - while let Some(token) = self.advance() { - if token.is_under_cursor(self.cursor_position) { - self.handle_token_under_cursor(token); - } else { - self.handle_token(token); - } - } - - self.context - } - - fn handle_token_under_cursor(&mut self, token: WordWithIndex) { - if self.previous_token.is_none() { - return; - } - - let previous = self.previous_token.take().unwrap(); - - match previous.word.to_ascii_lowercase().as_str() { - "policy" => { - self.context.node_range = token.get_range(); - self.context.node_kind = "policy_name".into(); - self.context.node_text = token.word; - } - "on" => { - if token.word.contains('.') { - let (schema_name, table_name) = self.schema_and_table_name(&token); - - let schema_name_len = schema_name.len(); - self.context.schema_name = Some(schema_name); - - let offset: u32 = schema_name_len.try_into().expect("Text too long"); - let range_without_schema = token - .get_range() - .checked_expand_start( - TextSize::new(offset + 1), // kill the dot as well - ) - .expect("Text too long"); - - self.context.node_range = range_without_schema; - self.context.node_kind = "policy_table".into(); - - // In practice, we should always have a table name. - // The completion sanitization will add a word after a `.` if nothing follows it; - // the token_text will then look like `schema.REPLACED_TOKEN`. 
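- // Hypothetical walkthrough: with the cursor right after `on auth.`, sanitization - // yields the token `auth.REPLACED_TOKEN`, so the split above always produces a - // table part. 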
- self.context.node_text = table_name.unwrap_or_default(); - } else { - self.context.node_range = token.get_range(); - self.context.node_text = token.word; - self.context.node_kind = "policy_table".into(); - } - } - "to" => { - self.context.node_range = token.get_range(); - self.context.node_kind = "policy_role".into(); - self.context.node_text = token.word; - } - _ => { - self.context.node_range = token.get_range(); - self.context.node_text = token.word; - } - } - } - - fn handle_token(&mut self, token: WordWithIndex) { - match token.word.to_ascii_lowercase().as_str() { - "create" if self.next_matches("policy") => { - self.context.statement_kind = PolicyStmtKind::Create; - } - "alter" if self.next_matches("policy") => { - self.context.statement_kind = PolicyStmtKind::Alter; - } - "drop" if self.next_matches("policy") => { - self.context.statement_kind = PolicyStmtKind::Drop; - } - "on" => self.table_with_schema(), - - // skip the "to" so we don't parse it as the TO rolename when it's under the cursor - "rename" if self.next_matches("to") => { - self.advance(); - } - - _ => { - if self.prev_matches("policy") { - self.context.policy_name = Some(token.word); - } - } - } - } - - fn next_matches(&mut self, it: &str) -> bool { - self.tokens.peek().is_some_and(|c| c.word.as_str() == it) - } - - fn prev_matches(&self, it: &str) -> bool { - self.previous_token.as_ref().is_some_and(|t| t.word == it) - } - - fn advance(&mut self) -> Option<WordWithIndex> { - // we can't peek back in an iterator, so we'll have to keep track manually. - self.previous_token = self.current_token.take(); - self.current_token = self.tokens.next(); - self.current_token.clone() - } - - fn table_with_schema(&mut self) { - if let Some(token) = self.advance() { - if token.is_under_cursor(self.cursor_position) { - self.handle_token_under_cursor(token); - } else if token.word.contains('.') { - let (schema, maybe_table) = self.schema_and_table_name(&token); - self.context.schema_name = Some(schema); - self.context.table_name = maybe_table; - } else { - self.context.table_name = Some(token.word); - } - }; - } - - fn schema_and_table_name(&self, token: &WordWithIndex) -> (String, Option<String>) { - let mut parts = token.word.split('.'); - - ( - parts.next().unwrap().into(), - parts.next().map(|tb| tb.into()), - ) - } -} - -#[cfg(test)] -mod tests { - use pgt_text_size::{TextRange, TextSize}; - - use crate::{ - context::policy_parser::{PolicyContext, PolicyStmtKind, WordWithIndex}, - test_helper::CURSOR_POS, - }; - - use super::{PolicyParser, sql_to_words}; - - fn with_pos(query: String) -> (usize, String) { - let mut pos: Option<usize> = None; - - for (p, c) in query.char_indices() { - if c == CURSOR_POS { - pos = Some(p); - break; - } - } - - ( - pos.expect("Please add cursor position!"), - query.replace(CURSOR_POS, "REPLACED_TOKEN").to_string(), - ) - } - - #[test] - fn infers_progressively() { - let (pos, query) = with_pos(format!( - r#" - create policy {} - "#, - CURSOR_POS - )); - - let context = PolicyParser::get_context(query.as_str(), pos); - - assert_eq!( - context, - PolicyContext { - policy_name: None, - table_name: None, - schema_name: None, - statement_kind: PolicyStmtKind::Create, - node_text: "REPLACED_TOKEN".into(), - node_range: TextRange::new(TextSize::new(25), TextSize::new(39)), - node_kind: "policy_name".into() - } - ); - - let (pos, query) = with_pos(format!( - r#" - create policy "my cool policy" {} - "#, - CURSOR_POS - )); - - let context = PolicyParser::get_context(query.as_str(), pos); - - assert_eq!( - context, - PolicyContext { - 
policy_name: Some("\"my cool policy\"".into()), - table_name: None, - schema_name: None, - statement_kind: PolicyStmtKind::Create, - node_text: "REPLACED_TOKEN".into(), - node_kind: "".into(), - node_range: TextRange::new(TextSize::new(42), TextSize::new(56)), - } - ); - - let (pos, query) = with_pos(format!( - r#" - create policy "my cool policy" on {} - "#, - CURSOR_POS - )); - - let context = PolicyParser::get_context(query.as_str(), pos); - - assert_eq!( - context, - PolicyContext { - policy_name: Some("\"my cool policy\"".into()), - table_name: None, - schema_name: None, - statement_kind: PolicyStmtKind::Create, - node_text: "REPLACED_TOKEN".into(), - node_kind: "policy_table".into(), - node_range: TextRange::new(TextSize::new(45), TextSize::new(59)), - } - ); - - let (pos, query) = with_pos(format!( - r#" - create policy "my cool policy" on auth.{} - "#, - CURSOR_POS - )); - - let context = PolicyParser::get_context(query.as_str(), pos); - - assert_eq!( - context, - PolicyContext { - policy_name: Some("\"my cool policy\"".into()), - table_name: None, - schema_name: Some("auth".into()), - statement_kind: PolicyStmtKind::Create, - node_text: "REPLACED_TOKEN".into(), - node_kind: "policy_table".into(), - node_range: TextRange::new(TextSize::new(50), TextSize::new(64)), - } - ); - - let (pos, query) = with_pos(format!( - r#" - create policy "my cool policy" on auth.users - as {} - "#, - CURSOR_POS - )); - - let context = PolicyParser::get_context(query.as_str(), pos); - - assert_eq!( - context, - PolicyContext { - policy_name: Some("\"my cool policy\"".into()), - table_name: Some("users".into()), - schema_name: Some("auth".into()), - statement_kind: PolicyStmtKind::Create, - node_text: "REPLACED_TOKEN".into(), - node_kind: "".into(), - node_range: TextRange::new(TextSize::new(72), TextSize::new(86)), - } - ); - - let (pos, query) = with_pos(format!( - r#" - create policy "my cool policy" on auth.users - as permissive - {} - "#, - CURSOR_POS - )); - - let context = PolicyParser::get_context(query.as_str(), pos); - - assert_eq!( - context, - PolicyContext { - policy_name: Some("\"my cool policy\"".into()), - table_name: Some("users".into()), - schema_name: Some("auth".into()), - statement_kind: PolicyStmtKind::Create, - node_text: "REPLACED_TOKEN".into(), - node_kind: "".into(), - node_range: TextRange::new(TextSize::new(95), TextSize::new(109)), - } - ); - - let (pos, query) = with_pos(format!( - r#" - create policy "my cool policy" on auth.users - as permissive - to {} - "#, - CURSOR_POS - )); - - let context = PolicyParser::get_context(query.as_str(), pos); - - assert_eq!( - context, - PolicyContext { - policy_name: Some("\"my cool policy\"".into()), - table_name: Some("users".into()), - schema_name: Some("auth".into()), - statement_kind: PolicyStmtKind::Create, - node_text: "REPLACED_TOKEN".into(), - node_kind: "policy_role".into(), - node_range: TextRange::new(TextSize::new(98), TextSize::new(112)), - } - ); - } - - #[test] - fn determines_on_table_node() { - let (pos, query) = with_pos(format!( - r#" - create policy "my cool policy" - on {} - to all - using (true); - "#, - CURSOR_POS - )); - - let context = PolicyParser::get_context(query.as_str(), pos); - - assert_eq!( - context, - PolicyContext { - policy_name: Some(r#""my cool policy""#.into()), - table_name: None, - schema_name: None, - statement_kind: PolicyStmtKind::Create, - node_text: "REPLACED_TOKEN".into(), - node_range: TextRange::new(TextSize::new(57), TextSize::new(71)), - node_kind: "policy_table".into() - } - ) - } - - 
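- // A minimal sketch of the tokenizer guarantee the surrounding tests rely on: - // `sql_to_words` returns a quoted identifier as a single word whose offsets span - // both quotes. This test is an editorial addition, not part of the original suite; - // the offsets assume the exact literal below. - #[test] - fn sketch_quoted_identifier_is_one_word() { - let words = sql_to_words(r#"drop policy "my pol" on t;"#).unwrap(); - assert_eq!(words[2], to_word(r#""my pol""#, 12, 20)); - } - 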
#[test] - fn determines_on_table_node_after_schema() { - let (pos, query) = with_pos(format!( - r#" - create policy "my cool policy" - on auth.{} - to all - using (true); - "#, - CURSOR_POS - )); - - let context = PolicyParser::get_context(query.as_str(), pos); - - assert_eq!( - context, - PolicyContext { - policy_name: Some(r#""my cool policy""#.into()), - table_name: None, - schema_name: Some("auth".into()), - statement_kind: PolicyStmtKind::Create, - node_text: "REPLACED_TOKEN".into(), - node_range: TextRange::new(TextSize::new(62), TextSize::new(76)), - node_kind: "policy_table".into() - } - ) - } - - #[test] - fn determines_we_are_on_column_name() { - let (pos, query) = with_pos(format!( - r#" - drop policy {} on auth.users; - "#, - CURSOR_POS - )); - - let context = PolicyParser::get_context(query.as_str(), pos); - - assert_eq!( - context, - PolicyContext { - policy_name: None, - table_name: Some("users".into()), - schema_name: Some("auth".into()), - statement_kind: PolicyStmtKind::Drop, - node_text: "REPLACED_TOKEN".into(), - node_range: TextRange::new(TextSize::new(23), TextSize::new(37)), - node_kind: "policy_name".into() - } - ); - - // cursor within quotation marks. - let (pos, query) = with_pos(format!( - r#" - drop policy "{}" on auth.users; - "#, - CURSOR_POS - )); - - let context = PolicyParser::get_context(query.as_str(), pos); - - assert_eq!( - context, - PolicyContext { - policy_name: None, - table_name: Some("users".into()), - schema_name: Some("auth".into()), - statement_kind: PolicyStmtKind::Drop, - node_text: "\"REPLACED_TOKEN\"".into(), - node_range: TextRange::new(TextSize::new(23), TextSize::new(39)), - node_kind: "policy_name".into() - } - ); - } - - #[test] - fn single_quotation_mark_does_not_fail() { - let (pos, query) = with_pos(format!( - r#" - drop policy "{} on auth.users; - "#, - CURSOR_POS - )); - - let context = PolicyParser::get_context(query.as_str(), pos); - - assert_eq!(context, PolicyContext::default()); - } - - fn to_word(word: &str, start: usize, end: usize) -> WordWithIndex { - WordWithIndex { - word: word.into(), - start, - end, - } - } - - #[test] - fn determines_positions_correctly() { - let query = "\ncreate policy \"my cool pol\"\n\ton auth.users\n\tas permissive\n\tfor select\n\t\tto public\n\t\tusing (true);".to_string(); - - let words = sql_to_words(query.as_str()).unwrap(); - - assert_eq!(words[0], to_word("create", 1, 7)); - assert_eq!(words[1], to_word("policy", 8, 14)); - assert_eq!(words[2], to_word("\"my cool pol\"", 15, 28)); - assert_eq!(words[3], to_word("on", 30, 32)); - assert_eq!(words[4], to_word("auth.users", 33, 43)); - assert_eq!(words[5], to_word("as", 45, 47)); - assert_eq!(words[6], to_word("permissive", 48, 58)); - assert_eq!(words[7], to_word("for", 60, 63)); - assert_eq!(words[8], to_word("select", 64, 70)); - assert_eq!(words[9], to_word("to", 73, 75)); - assert_eq!(words[10], to_word("public", 78, 84)); - assert_eq!(words[11], to_word("using", 87, 92)); - assert_eq!(words[12], to_word("(true)", 93, 99)); - } -} diff --git a/crates/pgt_completions/src/item.rs b/crates/pgt_completions/src/item.rs deleted file mode 100644 index 73e08cc0..00000000 --- a/crates/pgt_completions/src/item.rs +++ /dev/null @@ -1,60 +0,0 @@ -use std::fmt::Display; - -use pgt_text_size::TextRange; -use serde::{Deserialize, Serialize}; - -#[derive(Debug, PartialEq, Eq, Serialize, Deserialize)] -#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] -#[serde(rename_all = "camelCase")] -pub enum CompletionItemKind { - Table, - Function, - 
Column, - Schema, - Policy, -} - -impl Display for CompletionItemKind { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - let txt = match self { - CompletionItemKind::Table => "Table", - CompletionItemKind::Function => "Function", - CompletionItemKind::Column => "Column", - CompletionItemKind::Schema => "Schema", - CompletionItemKind::Policy => "Policy", - }; - - write!(f, "{txt}") - } -} - -#[derive(Debug, Serialize, Deserialize)] -#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] -/// The text that the editor should fill in. -/// If `None`, the `label` should be used. -/// Tables, for example, might have different completion_texts: -/// -/// label: "users", description: "Schema: auth", completion_text: "auth.users". -pub struct CompletionText { - pub text: String, - /// A `range` is required because some editors replace the current token, - /// others naively insert the text. - /// Having a range where start == end makes it an insertion. - pub range: TextRange, - - pub is_snippet: bool, -} - -#[derive(Debug, Serialize, Deserialize)] -#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] -pub struct CompletionItem { - pub label: String, - pub description: String, - pub preselected: bool, - pub kind: CompletionItemKind, - /// String used for sorting by LSP clients. - pub sort_text: String, - pub detail: Option<String>, - - pub completion_text: Option<CompletionText>, -} diff --git a/crates/pgt_completions/src/lib.rs b/crates/pgt_completions/src/lib.rs deleted file mode 100644 index f8ca1a55..00000000 --- a/crates/pgt_completions/src/lib.rs +++ /dev/null @@ -1,14 +0,0 @@ -mod builder; -mod complete; -mod context; -mod item; -mod providers; -mod relevance; -mod sanitization; - -#[cfg(test)] -mod test_helper; - -pub use complete::*; -pub use item::*; -pub use sanitization::*; diff --git a/crates/pgt_completions/src/providers/columns.rs b/crates/pgt_completions/src/providers/columns.rs deleted file mode 100644 index da6d23bc..00000000 --- a/crates/pgt_completions/src/providers/columns.rs +++ /dev/null @@ -1,728 +0,0 @@ -use crate::{ - CompletionItemKind, - builder::{CompletionBuilder, PossibleCompletionItem}, - context::{CompletionContext, WrappingClause}, - relevance::{CompletionRelevanceData, filtering::CompletionFilter, scoring::CompletionScore}, -}; - -use super::helper::{find_matching_alias_for_table, get_completion_text_with_schema_or_alias}; - -pub fn complete_columns<'a>(ctx: &CompletionContext<'a>, builder: &mut CompletionBuilder<'a>) { - let available_columns = &ctx.schema_cache.columns; - - for col in available_columns { - let relevance = CompletionRelevanceData::Column(col); - - let mut item = PossibleCompletionItem { - label: col.name.clone(), - score: CompletionScore::from(relevance.clone()), - filter: CompletionFilter::from(relevance), - description: format!("{}.{}", col.schema_name, col.table_name), - kind: CompletionItemKind::Column, - completion_text: None, - detail: col.type_name.as_ref().map(|t| t.to_string()), - }; - - // autocomplete with the alias in a join clause if we find one - if matches!( - ctx.wrapping_clause_type, - Some(WrappingClause::Join { .. 
}) - | Some(WrappingClause::Where) - | Some(WrappingClause::Select) - ) { - item.completion_text = find_matching_alias_for_table(ctx, col.table_name.as_str()) - .and_then(|alias| { - get_completion_text_with_schema_or_alias(ctx, col.name.as_str(), alias.as_str()) - }); - } - - builder.add_item(item); - } -} - -#[cfg(test)] -mod tests { - use std::vec; - - use crate::{ - CompletionItem, CompletionItemKind, complete, - test_helper::{ - CURSOR_POS, CompletionAssertion, InputQuery, assert_complete_results, - assert_no_complete_results, get_test_deps, get_test_params, - }, - }; - - struct TestCase { - query: String, - message: &'static str, - label: &'static str, - description: &'static str, - } - - impl TestCase { - fn get_input_query(&self) -> InputQuery { - let strs: Vec<&str> = self.query.split_whitespace().collect(); - strs.join(" ").as_str().into() - } - } - - #[tokio::test] - async fn completes_columns() { - let setup = r#" - create schema private; - - create table public.users ( - id serial primary key, - name text - ); - - create table public.audio_books ( - id serial primary key, - narrator text - ); - - create table private.audio_books ( - id serial primary key, - narrator_id text - ); - "#; - - let queries: Vec = vec![ - TestCase { - message: "correctly prefers the columns of present tables", - query: format!(r#"select na{} from public.audio_books;"#, CURSOR_POS), - label: "narrator", - description: "public.audio_books", - }, - TestCase { - message: "correctly handles nested queries", - query: format!( - r#" - select - * - from ( - select id, na{} - from private.audio_books - ) as subquery - join public.users u - on u.id = subquery.id; - "#, - CURSOR_POS - ), - label: "narrator_id", - description: "private.audio_books", - }, - TestCase { - message: "works without a schema", - query: format!(r#"select na{} from users;"#, CURSOR_POS), - label: "name", - description: "public.users", - }, - ]; - - for q in queries { - let (tree, cache) = get_test_deps(setup, q.get_input_query()).await; - let params = get_test_params(&tree, &cache, q.get_input_query()); - let results = complete(params); - - let CompletionItem { - label, description, .. 
- } = results - .into_iter() - .next() - .expect("Should return at least one completion item"); - - assert_eq!(label, q.label, "{}", q.message); - assert_eq!(description, q.description, "{}", q.message); - } - } - - #[tokio::test] - async fn shows_multiple_columns_if_no_relation_specified() { - let setup = r#" - create schema private; - - create table public.users ( - id serial primary key, - name text - ); - - create table public.audio_books ( - id serial primary key, - narrator text - ); - - create table private.audio_books ( - id serial primary key, - narrator_id text - ); - "#; - - let case = TestCase { - query: format!(r#"select n{};"#, CURSOR_POS), - description: "", - label: "", - message: "", - }; - - let (tree, cache) = get_test_deps(setup, case.get_input_query()).await; - let params = get_test_params(&tree, &cache, case.get_input_query()); - let mut items = complete(params); - - let _ = items.split_off(6); - - #[derive(Eq, PartialEq, Debug)] - struct LabelAndDesc { - label: String, - desc: String, - } - - let labels: Vec = items - .into_iter() - .map(|c| LabelAndDesc { - label: c.label, - desc: c.description, - }) - .collect(); - - let expected = vec![ - ("name", "public.users"), - ("narrator", "public.audio_books"), - ("narrator_id", "private.audio_books"), - ("id", "public.audio_books"), - ("name", "Schema: pg_catalog"), - ("nameconcatoid", "Schema: pg_catalog"), - ] - .into_iter() - .map(|(label, schema)| LabelAndDesc { - label: label.into(), - desc: schema.into(), - }) - .collect::>(); - - assert_eq!(labels, expected); - } - - #[tokio::test] - async fn suggests_relevant_columns_without_letters() { - let setup = r#" - create table users ( - id serial primary key, - name text, - address text, - email text - ); - "#; - - let test_case = TestCase { - message: "suggests user created tables first", - query: format!(r#"select {} from users"#, CURSOR_POS), - label: "", - description: "", - }; - - let (tree, cache) = get_test_deps(setup, test_case.get_input_query()).await; - let params = get_test_params(&tree, &cache, test_case.get_input_query()); - let results = complete(params); - - let (first_four, _rest) = results.split_at(4); - - let has_column_in_first_four = |col: &'static str| { - first_four - .iter() - .any(|compl_item| compl_item.label.as_str() == col) - }; - - assert!( - has_column_in_first_four("id"), - "`id` not present in first four completion items." - ); - assert!( - has_column_in_first_four("name"), - "`name` not present in first four completion items." - ); - assert!( - has_column_in_first_four("address"), - "`address` not present in first four completion items." - ); - assert!( - has_column_in_first_four("email"), - "`email` not present in first four completion items." 
- ); - } - - #[tokio::test] - async fn ignores_cols_in_from_clause() { - let setup = r#" - create schema private; - - create table private.users ( - id serial primary key, - name text, - address text, - email text - ); - "#; - - let test_case = TestCase { - message: "suggests user created tables first", - query: format!(r#"select * from private.{}"#, CURSOR_POS), - label: "", - description: "", - }; - - let (tree, cache) = get_test_deps(setup, test_case.get_input_query()).await; - let params = get_test_params(&tree, &cache, test_case.get_input_query()); - let results = complete(params); - - assert!( - !results - .into_iter() - .any(|item| item.kind == CompletionItemKind::Column) - ); - } - - #[tokio::test] - async fn prefers_columns_of_mentioned_tables() { - let setup = r#" - create schema private; - - create table private.users ( - id1 serial primary key, - name1 text, - address1 text, - email1 text, - user_settings jsonb - ); - - create table public.users ( - id2 serial primary key, - name2 text, - address2 text, - email2 text, - settings jsonb - ); - "#; - - assert_complete_results( - format!(r#"select {} from users"#, CURSOR_POS).as_str(), - vec![ - CompletionAssertion::Label("address2".into()), - CompletionAssertion::Label("email2".into()), - CompletionAssertion::Label("id2".into()), - CompletionAssertion::Label("name2".into()), - ], - setup, - ) - .await; - - assert_complete_results( - format!(r#"select {} from private.users"#, CURSOR_POS).as_str(), - vec![ - CompletionAssertion::Label("address1".into()), - CompletionAssertion::Label("email1".into()), - CompletionAssertion::Label("id1".into()), - CompletionAssertion::Label("name1".into()), - ], - setup, - ) - .await; - - // asserts fuzzy finding for "settings" - assert_complete_results( - format!(r#"select sett{} from private.users"#, CURSOR_POS).as_str(), - vec![CompletionAssertion::Label("user_settings".into())], - setup, - ) - .await; - } - - #[tokio::test] - async fn filters_out_by_aliases() { - let setup = r#" - create schema auth; - - create table auth.users ( - uid serial primary key, - name text not null, - email text unique not null - ); - - create table auth.posts ( - pid serial primary key, - user_id int not null references auth.users(uid), - title text not null, - content text, - created_at timestamp default now() - ); - "#; - - // test in SELECT clause - assert_complete_results( - format!( - "select u.id, p.{} from auth.users u join auth.posts p on u.id = p.user_id;", - CURSOR_POS - ) - .as_str(), - vec![ - CompletionAssertion::LabelNotExists("uid".to_string()), - CompletionAssertion::LabelNotExists("name".to_string()), - CompletionAssertion::LabelNotExists("email".to_string()), - CompletionAssertion::Label("content".to_string()), - CompletionAssertion::Label("created_at".to_string()), - CompletionAssertion::Label("pid".to_string()), - CompletionAssertion::Label("title".to_string()), - CompletionAssertion::Label("user_id".to_string()), - ], - setup, - ) - .await; - - // test in JOIN clause - assert_complete_results( - format!( - "select u.id, p.content from auth.users u join auth.posts p on u.id = p.{};", - CURSOR_POS - ) - .as_str(), - vec![ - CompletionAssertion::LabelNotExists("uid".to_string()), - CompletionAssertion::LabelNotExists("name".to_string()), - CompletionAssertion::LabelNotExists("email".to_string()), - // primary keys are preferred - CompletionAssertion::Label("pid".to_string()), - CompletionAssertion::Label("content".to_string()), - CompletionAssertion::Label("created_at".to_string()), - 
CompletionAssertion::Label("title".to_string()), - CompletionAssertion::Label("user_id".to_string()), - ], - setup, - ) - .await; - } - - #[tokio::test] - async fn does_not_complete_cols_in_join_clauses() { - let setup = r#" - create schema auth; - - create table auth.users ( - uid serial primary key, - name text not null, - email text unique not null - ); - - create table auth.posts ( - pid serial primary key, - user_id int not null references auth.users(uid), - title text not null, - content text, - created_at timestamp default now() - ); - "#; - - /* - * We are not in the "ON" part of the JOIN clause, so we should not complete columns. - */ - assert_complete_results( - format!( - "select u.id, p.content from auth.users u join auth.{}", - CURSOR_POS - ) - .as_str(), - vec![ - CompletionAssertion::KindNotExists(CompletionItemKind::Column), - CompletionAssertion::LabelAndKind("posts".to_string(), CompletionItemKind::Table), - CompletionAssertion::LabelAndKind("users".to_string(), CompletionItemKind::Table), - ], - setup, - ) - .await; - } - - #[tokio::test] - async fn completes_in_join_on_clause() { - let setup = r#" - create schema auth; - - create table auth.users ( - uid serial primary key, - name text not null, - email text unique not null - ); - - create table auth.posts ( - pid serial primary key, - user_id int not null references auth.users(uid), - title text not null, - content text, - created_at timestamp default now() - ); - "#; - - assert_complete_results( - format!( - "select u.id, auth.posts.content from auth.users u join auth.posts on u.{}", - CURSOR_POS - ) - .as_str(), - vec![ - CompletionAssertion::KindNotExists(CompletionItemKind::Table), - CompletionAssertion::LabelAndKind("uid".to_string(), CompletionItemKind::Column), - CompletionAssertion::LabelAndKind("email".to_string(), CompletionItemKind::Column), - CompletionAssertion::LabelAndKind("name".to_string(), CompletionItemKind::Column), - ], - setup, - ) - .await; - - assert_complete_results( - format!( - "select u.id, p.content from auth.users u join auth.posts p on p.user_id = u.{}", - CURSOR_POS - ) - .as_str(), - vec![ - CompletionAssertion::KindNotExists(CompletionItemKind::Table), - CompletionAssertion::LabelAndKind("uid".to_string(), CompletionItemKind::Column), - CompletionAssertion::LabelAndKind("email".to_string(), CompletionItemKind::Column), - CompletionAssertion::LabelAndKind("name".to_string(), CompletionItemKind::Column), - ], - setup, - ) - .await; - } - - #[tokio::test] - async fn prefers_not_mentioned_columns() { - let setup = r#" - create schema auth; - - create table public.one ( - id serial primary key, - a text, - b text, - z text - ); - - create table public.two ( - id serial primary key, - c text, - d text, - e text - ); - "#; - - assert_complete_results( - format!( - "select {} from public.one o join public.two on o.id = t.id;", - CURSOR_POS - ) - .as_str(), - vec![ - CompletionAssertion::Label("a".to_string()), - CompletionAssertion::Label("b".to_string()), - CompletionAssertion::Label("c".to_string()), - CompletionAssertion::Label("d".to_string()), - CompletionAssertion::Label("e".to_string()), - ], - setup, - ) - .await; - - // "a" is already mentioned, so it jumps down - assert_complete_results( - format!( - "select a, {} from public.one o join public.two on o.id = t.id;", - CURSOR_POS - ) - .as_str(), - vec![ - CompletionAssertion::Label("b".to_string()), - CompletionAssertion::Label("c".to_string()), - CompletionAssertion::Label("d".to_string()), - 
CompletionAssertion::Label("e".to_string()), - CompletionAssertion::Label("id".to_string()), - CompletionAssertion::Label("z".to_string()), - CompletionAssertion::Label("a".to_string()), - ], - setup, - ) - .await; - - // "id" of table one is mentioned, but table two isn't – - // its priority stays up - assert_complete_results( - format!( - "select o.id, a, b, c, d, e, {} from public.one o join public.two on o.id = t.id;", - CURSOR_POS - ) - .as_str(), - vec![ - CompletionAssertion::LabelAndDesc("id".to_string(), "public.two".to_string()), - CompletionAssertion::Label("z".to_string()), - ], - setup, - ) - .await; - - // "id" is ambiguous, so both "id" columns are lowered in priority - assert_complete_results( - format!( - "select id, a, b, c, d, e, {} from public.one o join public.two on o.id = t.id;", - CURSOR_POS - ) - .as_str(), - vec![CompletionAssertion::Label("z".to_string())], - setup, - ) - .await; - } - - #[tokio::test] - async fn suggests_columns_in_insert_clause() { - let setup = r#" - create table instruments ( - id bigint primary key generated always as identity, - name text not null, - z text - ); - - create table others ( - id serial primary key, - a text, - b text - ); - "#; - - // We should prefer the instrument columns, even though they - // are lower in the alphabet - - assert_complete_results( - format!("insert into instruments ({})", CURSOR_POS).as_str(), - vec![ - CompletionAssertion::Label("id".to_string()), - CompletionAssertion::Label("name".to_string()), - CompletionAssertion::Label("z".to_string()), - ], - setup, - ) - .await; - - assert_complete_results( - format!("insert into instruments (id, {})", CURSOR_POS).as_str(), - vec![ - CompletionAssertion::Label("name".to_string()), - CompletionAssertion::Label("z".to_string()), - ], - setup, - ) - .await; - - assert_complete_results( - format!("insert into instruments (id, {}, name)", CURSOR_POS).as_str(), - vec![CompletionAssertion::Label("z".to_string())], - setup, - ) - .await; - - // works with completed statement - assert_complete_results( - format!( - "insert into instruments (name, {}) values ('my_bass');", - CURSOR_POS - ) - .as_str(), - vec![ - CompletionAssertion::Label("id".to_string()), - CompletionAssertion::Label("z".to_string()), - ], - setup, - ) - .await; - - // no completions in the values list! - assert_no_complete_results( - format!("insert into instruments (id, name) values ({})", CURSOR_POS).as_str(), - setup, - ) - .await; - } - - #[tokio::test] - async fn suggests_columns_in_where_clause() { - let setup = r#" - create table instruments ( - id bigint primary key generated always as identity, - name text not null, - z text, - created_at timestamp with time zone default now() - ); - - create table others ( - a text, - b text, - c text - ); - "#; - - assert_complete_results( - format!("select name from instruments where {} ", CURSOR_POS).as_str(), - vec![ - CompletionAssertion::Label("created_at".into()), - CompletionAssertion::Label("id".into()), - CompletionAssertion::Label("name".into()), - CompletionAssertion::Label("z".into()), - ], - setup, - ) - .await; - - assert_complete_results( - format!( - "select name from instruments where z = 'something' and created_at > {}", - CURSOR_POS - ) - .as_str(), - // simply do not complete columns + schemas; functions etc. 
are ok - vec![ - CompletionAssertion::KindNotExists(CompletionItemKind::Column), - CompletionAssertion::KindNotExists(CompletionItemKind::Schema), - ], - setup, - ) - .await; - - // prefers not mentioned columns - assert_complete_results( - format!( - "select name from instruments where id = 'something' and {}", - CURSOR_POS - ) - .as_str(), - vec![ - CompletionAssertion::Label("created_at".into()), - CompletionAssertion::Label("name".into()), - CompletionAssertion::Label("z".into()), - ], - setup, - ) - .await; - - // // uses aliases - assert_complete_results( - format!( - "select name from instruments i join others o on i.z = o.a where i.{}", - CURSOR_POS - ) - .as_str(), - vec![ - CompletionAssertion::Label("created_at".into()), - CompletionAssertion::Label("id".into()), - CompletionAssertion::Label("name".into()), - ], - setup, - ) - .await; - } -} diff --git a/crates/pgt_completions/src/providers/functions.rs b/crates/pgt_completions/src/providers/functions.rs deleted file mode 100644 index f1b57e8c..00000000 --- a/crates/pgt_completions/src/providers/functions.rs +++ /dev/null @@ -1,202 +0,0 @@ -use pgt_schema_cache::Function; - -use crate::{ - CompletionItemKind, CompletionText, - builder::{CompletionBuilder, PossibleCompletionItem}, - context::CompletionContext, - providers::helper::get_range_to_replace, - relevance::{CompletionRelevanceData, filtering::CompletionFilter, scoring::CompletionScore}, -}; - -use super::helper::get_completion_text_with_schema_or_alias; - -pub fn complete_functions<'a>(ctx: &'a CompletionContext, builder: &mut CompletionBuilder<'a>) { - let available_functions = &ctx.schema_cache.functions; - - for func in available_functions { - let relevance = CompletionRelevanceData::Function(func); - - let item = PossibleCompletionItem { - label: func.name.clone(), - score: CompletionScore::from(relevance.clone()), - filter: CompletionFilter::from(relevance), - description: format!("Schema: {}", func.schema), - kind: CompletionItemKind::Function, - detail: None, - completion_text: Some(get_completion_text(ctx, func)), - }; - - builder.add_item(item); - } -} - -fn get_completion_text(ctx: &CompletionContext, func: &Function) -> CompletionText { - let range = get_range_to_replace(ctx); - let mut text = get_completion_text_with_schema_or_alias(ctx, &func.name, &func.schema) - .map(|ct| ct.text) - .unwrap_or(func.name.to_string()); - - if ctx.is_invocation { - CompletionText { - text, - range, - is_snippet: false, - } - } else { - text.push('('); - - let num_args = func.args.args.len(); - for (idx, arg) in func.args.args.iter().enumerate() { - text.push_str(format!(r#"${{{}:{}}}"#, idx + 1, arg.name).as_str()); - if idx < num_args - 1 { - text.push_str(", "); - } - } - - text.push(')'); - - CompletionText { - text, - range, - is_snippet: num_args > 0, - } - } -} - -#[cfg(test)] -mod tests { - use crate::{ - CompletionItem, CompletionItemKind, complete, - test_helper::{CURSOR_POS, get_test_deps, get_test_params}, - }; - - #[tokio::test] - async fn completes_fn() { - let setup = r#" - create or replace function cool() - returns trigger - language plpgsql - security invoker - as $$ - begin - raise exception 'dont matter'; - end; - $$; - "#; - - let query = format!("select coo{}", CURSOR_POS); - - let (tree, cache) = get_test_deps(setup, query.as_str().into()).await; - let params = get_test_params(&tree, &cache, query.as_str().into()); - let results = complete(params); - - let CompletionItem { label, .. 
} = results - .into_iter() - .next() - .expect("Should return at least one completion item"); - - assert_eq!(label, "cool"); - } - - #[tokio::test] - async fn prefers_fn_if_invocation() { - let setup = r#" - create table coos ( - id serial primary key, - name text - ); - - create or replace function cool() - returns trigger - language plpgsql - security invoker - as $$ - begin - raise exception 'dont matter'; - end; - $$; - "#; - - let query = format!(r#"select * from coo{}()"#, CURSOR_POS); - - let (tree, cache) = get_test_deps(setup, query.as_str().into()).await; - let params = get_test_params(&tree, &cache, query.as_str().into()); - let results = complete(params); - - let CompletionItem { label, kind, .. } = results - .into_iter() - .next() - .expect("Should return at least one completion item"); - - assert_eq!(label, "cool"); - assert_eq!(kind, CompletionItemKind::Function); - } - - #[tokio::test] - async fn prefers_fn_in_select_clause() { - let setup = r#" - create table coos ( - id serial primary key, - name text - ); - - create or replace function cool() - returns trigger - language plpgsql - security invoker - as $$ - begin - raise exception 'dont matter'; - end; - $$; - "#; - - let query = format!(r#"select coo{}"#, CURSOR_POS); - - let (tree, cache) = get_test_deps(setup, query.as_str().into()).await; - let params = get_test_params(&tree, &cache, query.as_str().into()); - let results = complete(params); - - let CompletionItem { label, kind, .. } = results - .into_iter() - .next() - .expect("Should return at least one completion item"); - - assert_eq!(label, "cool"); - assert_eq!(kind, CompletionItemKind::Function); - } - - #[tokio::test] - async fn prefers_function_in_from_clause_if_invocation() { - let setup = r#" - create table coos ( - id serial primary key, - name text - ); - - create or replace function cool() - returns trigger - language plpgsql - security invoker - as $$ - begin - raise exception 'dont matter'; - end; - $$; - "#; - - let query = format!(r#"select * from coo{}()"#, CURSOR_POS); - - let (tree, cache) = get_test_deps(setup, query.as_str().into()).await; - let params = get_test_params(&tree, &cache, query.as_str().into()); - let results = complete(params); - - let CompletionItem { label, kind, .. 
} = results - .into_iter() - .next() - .expect("Should return at least one completion item"); - - assert_eq!(label, "cool"); - assert_eq!(kind, CompletionItemKind::Function); - } -} diff --git a/crates/pgt_completions/src/providers/helper.rs b/crates/pgt_completions/src/providers/helper.rs deleted file mode 100644 index 811125bd..00000000 --- a/crates/pgt_completions/src/providers/helper.rs +++ /dev/null @@ -1,50 +0,0 @@ -use pgt_text_size::{TextRange, TextSize}; - -use crate::{CompletionText, context::CompletionContext, remove_sanitized_token}; - -pub(crate) fn find_matching_alias_for_table( - ctx: &CompletionContext, - table_name: &str, -) -> Option<String> { - for (alias, table) in ctx.mentioned_table_aliases.iter() { - if table == table_name { - return Some(alias.to_string()); - } - } - None -} - -pub(crate) fn get_range_to_replace(ctx: &CompletionContext) -> TextRange { - match ctx.node_under_cursor.as_ref() { - Some(node) => { - let content = ctx.get_node_under_cursor_content().unwrap_or("".into()); - let length = remove_sanitized_token(content.as_str()).len(); - - let start = node.start_byte(); - let end = start + length; - - TextRange::new(start.try_into().unwrap(), end.try_into().unwrap()) - } - None => TextRange::empty(TextSize::new(0)), - } -} - -pub(crate) fn get_completion_text_with_schema_or_alias( - ctx: &CompletionContext, - item_name: &str, - schema_or_alias_name: &str, -) -> Option<CompletionText> { - let is_already_prefixed_with_schema_name = ctx.schema_or_alias_name.is_some(); - - if schema_or_alias_name == "public" || is_already_prefixed_with_schema_name { - None - } else { - let range = get_range_to_replace(ctx); - - Some(CompletionText { - text: format!("{}.{}", schema_or_alias_name, item_name), - range, - is_snippet: false, - }) - } -} diff --git a/crates/pgt_completions/src/providers/mod.rs b/crates/pgt_completions/src/providers/mod.rs deleted file mode 100644 index 7b07cee8..00000000 --- a/crates/pgt_completions/src/providers/mod.rs +++ /dev/null @@ -1,12 +0,0 @@ -mod columns; -mod functions; -mod helper; -mod policies; -mod schemas; -mod tables; - -pub use columns::*; -pub use functions::*; -pub use policies::*; -pub use schemas::*; -pub use tables::*; diff --git a/crates/pgt_completions/src/providers/policies.rs b/crates/pgt_completions/src/providers/policies.rs deleted file mode 100644 index a4d3a9bb..00000000 --- a/crates/pgt_completions/src/providers/policies.rs +++ /dev/null @@ -1,106 +0,0 @@ -use pgt_text_size::{TextRange, TextSize}; - -use crate::{ - CompletionItemKind, CompletionText, - builder::{CompletionBuilder, PossibleCompletionItem}, - context::CompletionContext, - relevance::{CompletionRelevanceData, filtering::CompletionFilter, scoring::CompletionScore}, -}; - -use super::helper::get_range_to_replace; - -pub fn complete_policies<'a>(ctx: &CompletionContext<'a>, builder: &mut CompletionBuilder<'a>) { - let available_policies = &ctx.schema_cache.policies; - - let surrounded_by_quotes = ctx - .get_node_under_cursor_content() - .is_some_and(|c| c.starts_with('"') && c.ends_with('"') && c != "\"\""); - - for pol in available_policies { - let completion_text = if surrounded_by_quotes { - // If we're within quotes, we want to change the content - // *within* the quotes. - // If we attempt to replace outside the quotes, the VSCode - client won't show the suggestions. 
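- // Hypothetical example: for `alter policy "sel" on t` with the cursor inside the - // quotes, the node content is `"sel"` including both quote characters, so below we - // shrink the replacement range by one character on each side and only swap the - // name between the quotes. 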
- let range = get_range_to_replace(ctx); - Some(CompletionText { - text: pol.name.clone(), - is_snippet: false, - range: TextRange::new( - range.start() + TextSize::new(1), - range.end() - TextSize::new(1), - ), - }) - } else { - // If we aren't within quotes, we want to complete the - // full policy including quotation marks. - Some(CompletionText { - is_snippet: false, - text: format!("\"{}\"", pol.name), - range: get_range_to_replace(ctx), - }) - }; - - let relevance = CompletionRelevanceData::Policy(pol); - - let item = PossibleCompletionItem { - label: pol.name.chars().take(35).collect::(), - score: CompletionScore::from(relevance.clone()), - filter: CompletionFilter::from(relevance), - description: pol.table_name.to_string(), - kind: CompletionItemKind::Policy, - completion_text, - detail: None, - }; - - builder.add_item(item); - } -} - -#[cfg(test)] -mod tests { - use crate::test_helper::{CURSOR_POS, CompletionAssertion, assert_complete_results}; - - #[tokio::test] - async fn completes_within_quotation_marks() { - let setup = r#" - create schema private; - - create table private.users ( - id serial primary key, - email text - ); - - create policy "read for public users disallowed" on private.users - as restrictive - for select - to public - using (false); - - create policy "write for public users allowed" on private.users - as restrictive - for insert - to public - with check (true); - "#; - - assert_complete_results( - format!("alter policy \"{}\" on private.users;", CURSOR_POS).as_str(), - vec![ - CompletionAssertion::Label("read for public users disallowed".into()), - CompletionAssertion::Label("write for public users allowed".into()), - ], - setup, - ) - .await; - - assert_complete_results( - format!("alter policy \"w{}\" on private.users;", CURSOR_POS).as_str(), - vec![CompletionAssertion::Label( - "write for public users allowed".into(), - )], - setup, - ) - .await; - } -} diff --git a/crates/pgt_completions/src/providers/schemas.rs b/crates/pgt_completions/src/providers/schemas.rs deleted file mode 100644 index 02d2fd0c..00000000 --- a/crates/pgt_completions/src/providers/schemas.rs +++ /dev/null @@ -1,106 +0,0 @@ -use crate::{ - builder::{CompletionBuilder, PossibleCompletionItem}, - context::CompletionContext, - relevance::{CompletionRelevanceData, filtering::CompletionFilter, scoring::CompletionScore}, -}; - -pub fn complete_schemas<'a>(ctx: &'a CompletionContext, builder: &mut CompletionBuilder<'a>) { - let available_schemas = &ctx.schema_cache.schemas; - - for schema in available_schemas { - let relevance = CompletionRelevanceData::Schema(schema); - - let item = PossibleCompletionItem { - label: schema.name.clone(), - description: "Schema".into(), - kind: crate::CompletionItemKind::Schema, - score: CompletionScore::from(relevance.clone()), - filter: CompletionFilter::from(relevance), - detail: None, - completion_text: None, - }; - - builder.add_item(item); - } -} - -#[cfg(test)] -mod tests { - - use crate::{ - CompletionItemKind, - test_helper::{CURSOR_POS, CompletionAssertion, assert_complete_results}, - }; - - #[tokio::test] - async fn autocompletes_schemas() { - let setup = r#" - create schema private; - create schema auth; - create schema internal; - - -- add a table to compete against schemas - create table users ( - id serial primary key, - name text, - password text - ); - "#; - - assert_complete_results( - format!("select * from {}", CURSOR_POS).as_str(), - vec![ - CompletionAssertion::LabelAndKind("public".to_string(), CompletionItemKind::Schema), - 
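- // user-created schemas follow alphabetically, ranked above tables and system schemas 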
CompletionAssertion::LabelAndKind("auth".to_string(), CompletionItemKind::Schema), - CompletionAssertion::LabelAndKind( - "internal".to_string(), - CompletionItemKind::Schema, - ), - CompletionAssertion::LabelAndKind( - "private".to_string(), - CompletionItemKind::Schema, - ), - // users table still preferred over system schemas - CompletionAssertion::LabelAndKind("users".to_string(), CompletionItemKind::Table), - CompletionAssertion::LabelAndKind( - "information_schema".to_string(), - CompletionItemKind::Schema, - ), - CompletionAssertion::LabelAndKind( - "pg_catalog".to_string(), - CompletionItemKind::Schema, - ), - CompletionAssertion::LabelAndKind( - "pg_toast".to_string(), - CompletionItemKind::Schema, - ), - ], - setup, - ) - .await; - } - - #[tokio::test] - async fn suggests_tables_and_schemas_with_matching_keys() { - let setup = r#" - create schema ultimate; - - -- add a table to compete against schemas - create table users ( - id serial primary key, - name text, - password text - ); - "#; - - assert_complete_results( - format!("select * from u{}", CURSOR_POS).as_str(), - vec![ - CompletionAssertion::LabelAndKind("users".into(), CompletionItemKind::Table), - CompletionAssertion::LabelAndKind("ultimate".into(), CompletionItemKind::Schema), - ], - setup, - ) - .await; - } -} diff --git a/crates/pgt_completions/src/providers/tables.rs b/crates/pgt_completions/src/providers/tables.rs deleted file mode 100644 index 2102d41c..00000000 --- a/crates/pgt_completions/src/providers/tables.rs +++ /dev/null @@ -1,441 +0,0 @@ -use crate::{ - builder::{CompletionBuilder, PossibleCompletionItem}, - context::CompletionContext, - item::CompletionItemKind, - relevance::{CompletionRelevanceData, filtering::CompletionFilter, scoring::CompletionScore}, -}; - -use super::helper::get_completion_text_with_schema_or_alias; - -pub fn complete_tables<'a>(ctx: &'a CompletionContext, builder: &mut CompletionBuilder<'a>) { - let available_tables = &ctx.schema_cache.tables; - - for table in available_tables { - let relevance = CompletionRelevanceData::Table(table); - - let detail: Option = match table.table_kind { - pgt_schema_cache::TableKind::Ordinary | pgt_schema_cache::TableKind::Partitioned => { - None - } - pgt_schema_cache::TableKind::View => Some("View".into()), - pgt_schema_cache::TableKind::MaterializedView => Some("MView".into()), - }; - - let item = PossibleCompletionItem { - label: table.name.clone(), - score: CompletionScore::from(relevance.clone()), - filter: CompletionFilter::from(relevance), - description: table.schema.to_string(), - kind: CompletionItemKind::Table, - detail, - completion_text: get_completion_text_with_schema_or_alias( - ctx, - &table.name, - &table.schema, - ), - }; - - builder.add_item(item); - } -} - -#[cfg(test)] -mod tests { - - use crate::{ - CompletionItem, CompletionItemKind, complete, - test_helper::{ - CURSOR_POS, CompletionAssertion, assert_complete_results, assert_no_complete_results, - get_test_deps, get_test_params, - }, - }; - - #[tokio::test] - async fn autocompletes_simple_table() { - let setup = r#" - create table users ( - id serial primary key, - name text, - password text - ); - "#; - - let query = format!("select * from u{}", CURSOR_POS); - - let (tree, cache) = get_test_deps(setup, query.as_str().into()).await; - let params = get_test_params(&tree, &cache, query.as_str().into()); - let items = complete(params); - - assert!(!items.is_empty()); - - let best_match = &items[0]; - - assert_eq!( - best_match.label, "users", - "Does not return the expected table 
to autocomplete: {}", - best_match.label - ) - } - - #[tokio::test] - async fn autocompletes_table_alphanumerically() { - let setup = r#" - create table addresses ( - id serial primary key - ); - - create table users ( - id serial primary key - ); - - create table emails ( - id serial primary key - ); - "#; - - let test_cases = vec![ - (format!("select * from u{}", CURSOR_POS), "users"), - (format!("select * from e{}", CURSOR_POS), "emails"), - (format!("select * from a{}", CURSOR_POS), "addresses"), - ]; - - for (query, expected_label) in test_cases { - let (tree, cache) = get_test_deps(setup, query.as_str().into()).await; - let params = get_test_params(&tree, &cache, query.as_str().into()); - let items = complete(params); - - assert!(!items.is_empty()); - - let best_match = &items[0]; - - assert_eq!( - best_match.label, expected_label, - "Does not return the expected table to autocomplete: {}", - best_match.label - ) - } - } - - #[tokio::test] - async fn autocompletes_table_with_schema() { - let setup = r#" - create schema customer_support; - create schema private; - - create table private.user_z ( - id serial primary key, - name text, - password text - ); - - create table customer_support.user_y ( - id serial primary key, - request text, - send_at timestamp with time zone - ); - "#; - - let test_cases = vec![ - (format!("select * from u{}", CURSOR_POS), "user_y"), // user_y is preferred alphanumerically - (format!("select * from private.u{}", CURSOR_POS), "user_z"), - ( - format!("select * from customer_support.u{}", CURSOR_POS), - "user_y", - ), - ]; - - for (query, expected_label) in test_cases { - let (tree, cache) = get_test_deps(setup, query.as_str().into()).await; - let params = get_test_params(&tree, &cache, query.as_str().into()); - let items = complete(params); - - assert!(!items.is_empty()); - - let best_match = &items[0]; - - assert_eq!( - best_match.label, expected_label, - "Does not return the expected table to autocomplete: {}", - best_match.label - ) - } - } - - #[tokio::test] - async fn prefers_table_in_from_clause() { - let setup = r#" - create table coos ( - id serial primary key, - name text - ); - - create or replace function cool() - returns trigger - language plpgsql - security invoker - as $$ - begin - raise exception 'dont matter'; - end; - $$; - "#; - - let query = format!(r#"select * from coo{}"#, CURSOR_POS); - - let (tree, cache) = get_test_deps(setup, query.as_str().into()).await; - let params = get_test_params(&tree, &cache, query.as_str().into()); - let items = complete(params); - - let CompletionItem { label, kind, .. 
} = items - .into_iter() - .next() - .expect("Should return at least one completion item"); - - assert_eq!(label, "coos"); - assert_eq!(kind, CompletionItemKind::Table); - } - - #[tokio::test] - async fn suggests_tables_in_update() { - let setup = r#" - create table coos ( - id serial primary key, - name text - ); - "#; - - assert_complete_results( - format!("update {}", CURSOR_POS).as_str(), - vec![CompletionAssertion::LabelAndKind( - "public".into(), - CompletionItemKind::Schema, - )], - setup, - ) - .await; - - assert_complete_results( - format!("update public.{}", CURSOR_POS).as_str(), - vec![CompletionAssertion::LabelAndKind( - "coos".into(), - CompletionItemKind::Table, - )], - setup, - ) - .await; - - assert_no_complete_results(format!("update public.coos {}", CURSOR_POS).as_str(), setup) - .await; - - assert_complete_results( - format!("update coos set {}", CURSOR_POS).as_str(), - vec![ - CompletionAssertion::Label("id".into()), - CompletionAssertion::Label("name".into()), - ], - setup, - ) - .await; - - assert_complete_results( - format!("update coos set name = 'cool' where {}", CURSOR_POS).as_str(), - vec![ - CompletionAssertion::Label("id".into()), - CompletionAssertion::Label("name".into()), - ], - setup, - ) - .await; - } - - #[tokio::test] - async fn suggests_tables_in_delete() { - let setup = r#" - create table coos ( - id serial primary key, - name text - ); - "#; - - assert_no_complete_results(format!("delete {}", CURSOR_POS).as_str(), setup).await; - - assert_complete_results( - format!("delete from {}", CURSOR_POS).as_str(), - vec![ - CompletionAssertion::LabelAndKind("public".into(), CompletionItemKind::Schema), - CompletionAssertion::LabelAndKind("coos".into(), CompletionItemKind::Table), - ], - setup, - ) - .await; - - assert_complete_results( - format!("delete from public.{}", CURSOR_POS).as_str(), - vec![CompletionAssertion::Label("coos".into())], - setup, - ) - .await; - - assert_complete_results( - format!("delete from public.coos where {}", CURSOR_POS).as_str(), - vec![ - CompletionAssertion::Label("id".into()), - CompletionAssertion::Label("name".into()), - ], - setup, - ) - .await; - } - - #[tokio::test] - async fn suggests_tables_in_join() { - let setup = r#" - create schema auth; - - create table auth.users ( - uid serial primary key, - name text not null, - email text unique not null - ); - - create table auth.posts ( - pid serial primary key, - user_id int not null references auth.users(uid), - title text not null, - content text, - created_at timestamp default now() - ); - "#; - - assert_complete_results( - format!("select * from auth.users u join {}", CURSOR_POS).as_str(), - vec![ - CompletionAssertion::LabelAndKind("public".into(), CompletionItemKind::Schema), - CompletionAssertion::LabelAndKind("auth".into(), CompletionItemKind::Schema), - CompletionAssertion::LabelAndKind("posts".into(), CompletionItemKind::Table), // self-join - CompletionAssertion::LabelAndKind("users".into(), CompletionItemKind::Table), - ], - setup, - ) - .await; - } - - #[tokio::test] - async fn suggests_tables_in_alter_and_drop_statements() { - let setup = r#" - create schema auth; - - create table auth.users ( - uid serial primary key, - name text not null, - email text unique not null - ); - - create table auth.posts ( - pid serial primary key, - user_id int not null references auth.users(uid), - title text not null, - content text, - created_at timestamp default now() - ); - "#; - - assert_complete_results( - format!("alter table {}", CURSOR_POS).as_str(), - vec![ - 
CompletionAssertion::LabelAndKind("public".into(), CompletionItemKind::Schema), - CompletionAssertion::LabelAndKind("auth".into(), CompletionItemKind::Schema), - CompletionAssertion::LabelAndKind("posts".into(), CompletionItemKind::Table), - CompletionAssertion::LabelAndKind("users".into(), CompletionItemKind::Table), - ], - setup, - ) - .await; - - assert_complete_results( - format!("alter table if exists {}", CURSOR_POS).as_str(), - vec![ - CompletionAssertion::LabelAndKind("public".into(), CompletionItemKind::Schema), - CompletionAssertion::LabelAndKind("auth".into(), CompletionItemKind::Schema), - CompletionAssertion::LabelAndKind("posts".into(), CompletionItemKind::Table), - CompletionAssertion::LabelAndKind("users".into(), CompletionItemKind::Table), - ], - setup, - ) - .await; - - assert_complete_results( - format!("drop table {}", CURSOR_POS).as_str(), - vec![ - CompletionAssertion::LabelAndKind("public".into(), CompletionItemKind::Schema), - CompletionAssertion::LabelAndKind("auth".into(), CompletionItemKind::Schema), - CompletionAssertion::LabelAndKind("posts".into(), CompletionItemKind::Table), - CompletionAssertion::LabelAndKind("users".into(), CompletionItemKind::Table), - ], - setup, - ) - .await; - - assert_complete_results( - format!("drop table if exists {}", CURSOR_POS).as_str(), - vec![ - CompletionAssertion::LabelAndKind("public".into(), CompletionItemKind::Schema), - CompletionAssertion::LabelAndKind("auth".into(), CompletionItemKind::Schema), - CompletionAssertion::LabelAndKind("posts".into(), CompletionItemKind::Table), // self-join - CompletionAssertion::LabelAndKind("users".into(), CompletionItemKind::Table), - ], - setup, - ) - .await; - } - - #[tokio::test] - async fn suggests_tables_in_insert_into() { - let setup = r#" - create schema auth; - - create table auth.users ( - uid serial primary key, - name text not null, - email text unique not null - ); - "#; - - assert_complete_results( - format!("insert into {}", CURSOR_POS).as_str(), - vec![ - CompletionAssertion::LabelAndKind("public".into(), CompletionItemKind::Schema), - CompletionAssertion::LabelAndKind("auth".into(), CompletionItemKind::Schema), - CompletionAssertion::LabelAndKind("users".into(), CompletionItemKind::Table), - ], - setup, - ) - .await; - - assert_complete_results( - format!("insert into auth.{}", CURSOR_POS).as_str(), - vec![CompletionAssertion::LabelAndKind( - "users".into(), - CompletionItemKind::Table, - )], - setup, - ) - .await; - - // works with complete statement. 
- assert_complete_results( - format!( - "insert into {} (name, email) values ('jules', 'a@b.com');", - CURSOR_POS - ) - .as_str(), - vec![ - CompletionAssertion::LabelAndKind("public".into(), CompletionItemKind::Schema), - CompletionAssertion::LabelAndKind("auth".into(), CompletionItemKind::Schema), - CompletionAssertion::LabelAndKind("users".into(), CompletionItemKind::Table), - ], - setup, - ) - .await; - } -} diff --git a/crates/pgt_completions/src/providers/triggers.rs b/crates/pgt_completions/src/providers/triggers.rs deleted file mode 100644 index 6bc04deb..00000000 --- a/crates/pgt_completions/src/providers/triggers.rs +++ /dev/null @@ -1,169 +0,0 @@ -use crate::{ - CompletionItemKind, - builder::{CompletionBuilder, PossibleCompletionItem}, - context::CompletionContext, - relevance::{CompletionRelevanceData, filtering::CompletionFilter, scoring::CompletionScore}, -}; - -use super::helper::get_completion_text_with_schema_or_alias; - -pub fn complete_functions<'a>(ctx: &'a CompletionContext, builder: &mut CompletionBuilder<'a>) { - let available_functions = &ctx.schema_cache.functions; - - for func in available_functions { - let relevance = CompletionRelevanceData::Function(func); - - let item = PossibleCompletionItem { - label: func.name.clone(), - score: CompletionScore::from(relevance.clone()), - filter: CompletionFilter::from(relevance), - description: format!("Schema: {}", func.schema), - kind: CompletionItemKind::Function, - completion_text: get_completion_text_with_schema_or_alias( - ctx, - &func.name, - &func.schema, - ), - }; - - builder.add_item(item); - } -} - -#[cfg(test)] -mod tests { - use crate::{ - CompletionItem, CompletionItemKind, complete, - test_helper::{CURSOR_POS, get_test_deps, get_test_params}, - }; - - #[tokio::test] - async fn completes_fn() { - let setup = r#" - create or replace function cool() - returns trigger - language plpgsql - security invoker - as $$ - begin - raise exception 'dont matter'; - end; - $$; - "#; - - let query = format!("select coo{}", CURSOR_POS); - - let (tree, cache) = get_test_deps(setup, query.as_str().into()).await; - let params = get_test_params(&tree, &cache, query.as_str().into()); - let results = complete(params); - - let CompletionItem { label, .. } = results - .into_iter() - .next() - .expect("Should return at least one completion item"); - - assert_eq!(label, "cool"); - } - - #[tokio::test] - async fn prefers_fn_if_invocation() { - let setup = r#" - create table coos ( - id serial primary key, - name text - ); - - create or replace function cool() - returns trigger - language plpgsql - security invoker - as $$ - begin - raise exception 'dont matter'; - end; - $$; - "#; - - let query = format!(r#"select * from coo{}()"#, CURSOR_POS); - - let (tree, cache) = get_test_deps(setup, query.as_str().into()).await; - let params = get_test_params(&tree, &cache, query.as_str().into()); - let results = complete(params); - - let CompletionItem { label, kind, .. 
} = results
-            .into_iter()
-            .next()
-            .expect("Should return at least one completion item");
-
-        assert_eq!(label, "cool");
-        assert_eq!(kind, CompletionItemKind::Function);
-    }
-
-    #[tokio::test]
-    async fn prefers_fn_in_select_clause() {
-        let setup = r#"
-            create table coos (
-                id serial primary key,
-                name text
-            );
-
-            create or replace function cool()
-            returns trigger
-            language plpgsql
-            security invoker
-            as $$
-            begin
-                raise exception 'dont matter';
-            end;
-            $$;
-        "#;
-
-        let query = format!(r#"select coo{}"#, CURSOR_POS);
-
-        let (tree, cache) = get_test_deps(setup, query.as_str().into()).await;
-        let params = get_test_params(&tree, &cache, query.as_str().into());
-        let results = complete(params);
-
-        let CompletionItem { label, kind, .. } = results
-            .into_iter()
-            .next()
-            .expect("Should return at least one completion item");
-
-        assert_eq!(label, "cool");
-        assert_eq!(kind, CompletionItemKind::Function);
-    }
-
-    #[tokio::test]
-    async fn prefers_function_in_from_clause_if_invocation() {
-        let setup = r#"
-            create table coos (
-                id serial primary key,
-                name text
-            );
-
-            create or replace function cool()
-            returns trigger
-            language plpgsql
-            security invoker
-            as $$
-            begin
-                raise exception 'dont matter';
-            end;
-            $$;
-        "#;
-
-        let query = format!(r#"select * from coo{}()"#, CURSOR_POS);
-
-        let (tree, cache) = get_test_deps(setup, query.as_str().into()).await;
-        let params = get_test_params(&tree, &cache, query.as_str().into());
-        let results = complete(params);
-
-        let CompletionItem { label, kind, .. } = results
-            .into_iter()
-            .next()
-            .expect("Should return at least one completion item");
-
-        assert_eq!(label, "cool");
-        assert_eq!(kind, CompletionItemKind::Function);
-    }
-}
diff --git a/crates/pgt_completions/src/relevance.rs b/crates/pgt_completions/src/relevance.rs
deleted file mode 100644
index f51c3c52..00000000
--- a/crates/pgt_completions/src/relevance.rs
+++ /dev/null
@@ -1,11 +0,0 @@
-pub(crate) mod filtering;
-pub(crate) mod scoring;
-
-#[derive(Debug, Clone)]
-pub(crate) enum CompletionRelevanceData<'a> {
-    Table(&'a pgt_schema_cache::Table),
-    Function(&'a pgt_schema_cache::Function),
-    Column(&'a pgt_schema_cache::Column),
-    Schema(&'a pgt_schema_cache::Schema),
-    Policy(&'a pgt_schema_cache::Policy),
-}
diff --git a/crates/pgt_completions/src/relevance/filtering.rs b/crates/pgt_completions/src/relevance/filtering.rs
deleted file mode 100644
index 5323e2bc..00000000
--- a/crates/pgt_completions/src/relevance/filtering.rs
+++ /dev/null
@@ -1,260 +0,0 @@
-use crate::context::{CompletionContext, NodeUnderCursor, WrappingClause, WrappingNode};
-
-use super::CompletionRelevanceData;
-
-#[derive(Debug)]
-pub(crate) struct CompletionFilter<'a> {
-    data: CompletionRelevanceData<'a>,
-}
-
-impl<'a> From<CompletionRelevanceData<'a>> for CompletionFilter<'a> {
-    fn from(value: CompletionRelevanceData<'a>) -> Self {
-        Self { data: value }
-    }
-}
-
-impl CompletionFilter<'_> {
-    pub fn is_relevant(&self, ctx: &CompletionContext) -> Option<()> {
-        self.completable_context(ctx)?;
-        self.check_clause(ctx)?;
-        self.check_invocation(ctx)?;
-        self.check_mentioned_schema_or_alias(ctx)?;
-
-        Some(())
-    }
-
-    fn completable_context(&self, ctx: &CompletionContext) -> Option<()> {
-        if ctx.wrapping_node_kind.is_none() && ctx.wrapping_clause_type.is_none() {
-            return None;
-        }
-
-        let current_node_kind = ctx
-            .node_under_cursor
-            .as_ref()
-            .map(|n| n.kind())
-            .unwrap_or("");
-
-        if current_node_kind.starts_with("keyword_")
-            || current_node_kind == "="
-            || current_node_kind == ","
-            || current_node_kind == "literal"
-            || current_node_kind == "ERROR"
-        {
-            return None;
-        }
-
-        // No autocompletions if there are two identifiers without a separator.
-        if ctx.node_under_cursor.as_ref().is_some_and(|n| match n {
-            NodeUnderCursor::TsNode(node) => node.prev_sibling().is_some_and(|p| {
-                (p.kind() == "identifier" || p.kind() == "object_reference")
-                    && n.kind() == "identifier"
-            }),
-            NodeUnderCursor::CustomNode { .. } => false,
-        }) {
-            return None;
-        }
-
-        // no completions if we're right after an asterisk:
-        // `select * {}`
-        if ctx.node_under_cursor.as_ref().is_some_and(|n| match n {
-            NodeUnderCursor::TsNode(node) => node
-                .prev_sibling()
-                .is_some_and(|p| (p.kind() == "all_fields") && n.kind() == "identifier"),
-            NodeUnderCursor::CustomNode { .. } => false,
-        }) {
-            return None;
-        }
-
-        Some(())
-    }
-
-    fn check_clause(&self, ctx: &CompletionContext) -> Option<()> {
-        ctx.wrapping_clause_type
-            .as_ref()
-            .map(|clause| {
-                match self.data {
-                    CompletionRelevanceData::Table(_) => match clause {
-                        WrappingClause::Select
-                        | WrappingClause::Where
-                        | WrappingClause::ColumnDefinitions => false,
-
-                        WrappingClause::Insert => {
-                            ctx.wrapping_node_kind
-                                .as_ref()
-                                .is_none_or(|n| n != &WrappingNode::List)
-                                && (ctx.before_cursor_matches_kind(&["keyword_into"])
-                                    || (ctx.before_cursor_matches_kind(&["."])
-                                        && ctx.parent_matches_one_of_kind(&["object_reference"])))
-                        }
-
-                        WrappingClause::DropTable | WrappingClause::AlterTable => ctx
-                            .before_cursor_matches_kind(&[
-                                "keyword_exists",
-                                "keyword_only",
-                                "keyword_table",
-                            ]),
-
-                        _ => true,
-                    },
-
-                    CompletionRelevanceData::Column(_) => {
-                        match clause {
-                            WrappingClause::From
-                            | WrappingClause::ColumnDefinitions
-                            | WrappingClause::AlterTable
-                            | WrappingClause::DropTable => false,
-
-                            // We can complete columns in JOIN clauses, but only if we are after the
-                            // ON node in the "ON u.id = posts.user_id" part.
-                            WrappingClause::Join { on_node: Some(on) } => ctx
-                                .node_under_cursor
-                                .as_ref()
-                                .is_some_and(|cn| cn.start_byte() >= on.end_byte()),
-
-                            // we are in a JOIN, but definitely not after an ON
-                            WrappingClause::Join { on_node: None } => false,
-
-                            WrappingClause::Insert => ctx
-                                .wrapping_node_kind
-                                .as_ref()
-                                .is_some_and(|n| n == &WrappingNode::List),
-
-                            // only autocomplete left side of binary expression
-                            WrappingClause::Where => {
-                                ctx.before_cursor_matches_kind(&["keyword_and", "keyword_where"])
-                                    || (ctx.before_cursor_matches_kind(&["."])
-                                        && ctx.parent_matches_one_of_kind(&["field"]))
-                            }
-
-                            _ => true,
-                        }
-                    }
-
-                    CompletionRelevanceData::Function(_) => matches!(
-                        clause,
-                        WrappingClause::From
-                            | WrappingClause::Select
-                            | WrappingClause::Where
-                            | WrappingClause::Join { .. }
-                    ),
-
-                    CompletionRelevanceData::Schema(_) => match clause {
-                        WrappingClause::Select
-                        | WrappingClause::From
-                        | WrappingClause::Join { .. }
-                        | WrappingClause::Update
-                        | WrappingClause::Delete => true,
-
-                        WrappingClause::Where => {
-                            ctx.before_cursor_matches_kind(&["keyword_and", "keyword_where"])
-                        }
-
-                        WrappingClause::DropTable | WrappingClause::AlterTable => ctx
-                            .before_cursor_matches_kind(&[
-                                "keyword_exists",
-                                "keyword_only",
-                                "keyword_table",
-                            ]),
-
-                        WrappingClause::Insert => {
-                            ctx.wrapping_node_kind
-                                .as_ref()
-                                .is_none_or(|n| n != &WrappingNode::List)
-                                && ctx.before_cursor_matches_kind(&["keyword_into"])
-                        }
-
-                        _ => false,
-                    },
-
-                    CompletionRelevanceData::Policy(_) => {
-                        matches!(clause, WrappingClause::PolicyName)
-                    }
-                }
-            })
-            .and_then(|is_ok| if is_ok { Some(()) } else { None })
-    }
-
-    fn check_invocation(&self, ctx: &CompletionContext) -> Option<()> {
-        if !ctx.is_invocation {
-            return Some(());
-        }
-
-        match self.data {
-            CompletionRelevanceData::Table(_) | CompletionRelevanceData::Column(_) => return None,
-            _ => {}
-        }
-
-        Some(())
-    }
-
-    fn check_mentioned_schema_or_alias(&self, ctx: &CompletionContext) -> Option<()> {
-        if ctx.schema_or_alias_name.is_none() {
-            return Some(());
-        }
-
-        let schema_or_alias = ctx.schema_or_alias_name.as_ref().unwrap();
-
-        let matches = match self.data {
-            CompletionRelevanceData::Table(table) => &table.schema == schema_or_alias,
-            CompletionRelevanceData::Function(f) => &f.schema == schema_or_alias,
-            CompletionRelevanceData::Column(col) => ctx
-                .mentioned_table_aliases
-                .get(schema_or_alias)
-                .is_some_and(|t| t == &col.table_name),
-
-            // we should never allow schema suggestions if there already was one.
-            CompletionRelevanceData::Schema(_) => false,
-            // no policy completion if user typed a schema node first.
-            CompletionRelevanceData::Policy(_) => false,
-        };
-
-        if !matches {
-            return None;
-        }
-
-        Some(())
-    }
-}
-
-#[cfg(test)]
-mod tests {
-    use crate::test_helper::{
-        CURSOR_POS, CompletionAssertion, assert_complete_results, assert_no_complete_results,
-    };
-
-    #[tokio::test]
-    async fn completion_after_asterisk() {
-        let setup = r#"
-            create table users (
-                id serial primary key,
-                email text,
-                address text
-            );
-        "#;
-
-        assert_no_complete_results(format!("select * {}", CURSOR_POS).as_str(), setup).await;
-
-        // if there's a COMMA after the asterisk, we're good
-        assert_complete_results(
-            format!("select *, {}", CURSOR_POS).as_str(),
-            vec![
-                CompletionAssertion::Label("address".into()),
-                CompletionAssertion::Label("email".into()),
-                CompletionAssertion::Label("id".into()),
-            ],
-            setup,
-        )
-        .await;
-    }
-
-    #[tokio::test]
-    async fn completion_after_create_table() {
-        assert_no_complete_results(format!("create table {}", CURSOR_POS).as_str(), "").await;
-    }
-
-    #[tokio::test]
-    async fn completion_in_column_definitions() {
-        let query = format!(r#"create table instruments ( {} )"#, CURSOR_POS);
-        assert_no_complete_results(query.as_str(), "").await;
-    }
-}
diff --git a/crates/pgt_completions/src/relevance/scoring.rs b/crates/pgt_completions/src/relevance/scoring.rs
deleted file mode 100644
index 2fe12511..00000000
--- a/crates/pgt_completions/src/relevance/scoring.rs
+++ /dev/null
@@ -1,287 +0,0 @@
-use fuzzy_matcher::{FuzzyMatcher, skim::SkimMatcherV2};
-
-use crate::context::{CompletionContext, WrappingClause, WrappingNode};
-
-use super::CompletionRelevanceData;
-
-#[derive(Debug)]
-pub(crate) struct CompletionScore<'a> {
-    score: i32,
-    data: CompletionRelevanceData<'a>,
-}
-
-impl<'a> From<CompletionRelevanceData<'a>> for CompletionScore<'a> {
-    fn from(value: CompletionRelevanceData<'a>) -> Self {
-        Self {
-            score: 0,
-            data: value,
-        }
-    }
-}
-
-impl
CompletionScore<'_> { - pub fn get_score(&self) -> i32 { - self.score - } - - pub fn calc_score(&mut self, ctx: &CompletionContext) { - self.check_is_user_defined(); - self.check_matches_schema(ctx); - self.check_matches_query_input(ctx); - self.check_is_invocation(ctx); - self.check_matching_clause_type(ctx); - self.check_matching_wrapping_node(ctx); - self.check_relations_in_stmt(ctx); - self.check_columns_in_stmt(ctx); - } - - fn check_matches_query_input(&mut self, ctx: &CompletionContext) { - let content = match ctx.get_node_under_cursor_content() { - Some(c) => c.replace('"', ""), - None => return, - }; - - let name = match self.data { - CompletionRelevanceData::Function(f) => f.name.as_str().to_ascii_lowercase(), - CompletionRelevanceData::Table(t) => t.name.as_str().to_ascii_lowercase(), - CompletionRelevanceData::Column(c) => c.name.as_str().to_ascii_lowercase(), - CompletionRelevanceData::Schema(s) => s.name.as_str().to_ascii_lowercase(), - CompletionRelevanceData::Policy(p) => p.name.as_str().to_ascii_lowercase(), - }; - - let fz_matcher = SkimMatcherV2::default(); - - if let Some(score) = - fz_matcher.fuzzy_match(name.as_str(), content.to_ascii_lowercase().as_str()) - { - let scorei32: i32 = score - .try_into() - .expect("The length of the input exceeds i32 capacity"); - - // the scoring value isn't linear. - // here are a couple of samples: - // - item: bytea_string_agg_transfn, input: n, score: 15 - // - item: numeric_uplus, input: n, score: 31 - // - item: settings, input: sett, score: 91 - // - item: user_settings, input: sett, score: 82 - self.score += scorei32 / 2; - } - } - - fn check_matching_clause_type(&mut self, ctx: &CompletionContext) { - let clause_type = match ctx.wrapping_clause_type.as_ref() { - None => return, - Some(ct) => ct, - }; - - let has_mentioned_tables = !ctx.mentioned_relations.is_empty(); - let has_mentioned_schema = ctx.schema_or_alias_name.is_some(); - - self.score += match self.data { - CompletionRelevanceData::Table(_) => match clause_type { - WrappingClause::Update => 10, - WrappingClause::Delete => 10, - WrappingClause::From => 5, - WrappingClause::Join { on_node } - if on_node.is_none_or(|on| { - ctx.node_under_cursor - .as_ref() - .is_none_or(|n| n.end_byte() < on.start_byte()) - }) => - { - 5 - } - _ => -50, - }, - CompletionRelevanceData::Function(_) => match clause_type { - WrappingClause::Select if !has_mentioned_tables => 15, - WrappingClause::Select if has_mentioned_tables => 0, - WrappingClause::From => 0, - _ => -50, - }, - CompletionRelevanceData::Column(col) => match clause_type { - WrappingClause::Select if has_mentioned_tables => 10, - WrappingClause::Select if !has_mentioned_tables => 0, - WrappingClause::Where => 10, - WrappingClause::Join { on_node } - if on_node.is_some_and(|on| { - ctx.node_under_cursor - .as_ref() - .is_some_and(|n| n.start_byte() > on.end_byte()) - }) => - { - // Users will probably join on primary keys - if col.is_primary_key { 20 } else { 10 } - } - _ => -15, - }, - CompletionRelevanceData::Schema(_) => match clause_type { - WrappingClause::From if !has_mentioned_schema => 15, - WrappingClause::Join { .. 
} if !has_mentioned_schema => 15, - WrappingClause::Update if !has_mentioned_schema => 15, - WrappingClause::Delete if !has_mentioned_schema => 15, - _ => -50, - }, - CompletionRelevanceData::Policy(_) => match clause_type { - WrappingClause::PolicyName => 25, - _ => -50, - }, - } - } - - fn check_matching_wrapping_node(&mut self, ctx: &CompletionContext) { - let wrapping_node = match ctx.wrapping_node_kind.as_ref() { - None => return, - Some(wn) => wn, - }; - - let has_mentioned_schema = ctx.schema_or_alias_name.is_some(); - let has_node_text = ctx.get_node_under_cursor_content().is_some(); - - self.score += match self.data { - CompletionRelevanceData::Table(_) => match wrapping_node { - WrappingNode::Relation if has_mentioned_schema => 15, - WrappingNode::Relation if !has_mentioned_schema => 10, - WrappingNode::BinaryExpression => 5, - _ => -50, - }, - CompletionRelevanceData::Function(_) => match wrapping_node { - WrappingNode::Relation => 10, - _ => -50, - }, - CompletionRelevanceData::Column(_) => match wrapping_node { - WrappingNode::BinaryExpression => 15, - WrappingNode::Assignment => 15, - _ => -15, - }, - CompletionRelevanceData::Schema(_) => match wrapping_node { - WrappingNode::Relation if !has_mentioned_schema && !has_node_text => 15, - WrappingNode::Relation if !has_mentioned_schema && has_node_text => 0, - _ => -50, - }, - CompletionRelevanceData::Policy(_) => 0, - } - } - - fn check_is_invocation(&mut self, ctx: &CompletionContext) { - self.score += match self.data { - CompletionRelevanceData::Function(_) if ctx.is_invocation => 30, - CompletionRelevanceData::Function(_) if !ctx.is_invocation => -10, - _ if ctx.is_invocation => -10, - _ => 0, - }; - } - - fn check_matches_schema(&mut self, ctx: &CompletionContext) { - let schema_name = match ctx.schema_or_alias_name.as_ref() { - None => return, - Some(n) => n, - }; - - let data_schema = self.get_schema_name(); - - if schema_name == data_schema { - self.score += 25; - } else { - self.score -= 10; - } - } - - fn get_schema_name(&self) -> &str { - match self.data { - CompletionRelevanceData::Function(f) => f.schema.as_str(), - CompletionRelevanceData::Table(t) => t.schema.as_str(), - CompletionRelevanceData::Column(c) => c.schema_name.as_str(), - CompletionRelevanceData::Schema(s) => s.name.as_str(), - CompletionRelevanceData::Policy(p) => p.schema_name.as_str(), - } - } - - fn get_table_name(&self) -> Option<&str> { - match self.data { - CompletionRelevanceData::Column(c) => Some(c.table_name.as_str()), - CompletionRelevanceData::Table(t) => Some(t.name.as_str()), - CompletionRelevanceData::Policy(p) => Some(p.table_name.as_str()), - _ => None, - } - } - - fn check_relations_in_stmt(&mut self, ctx: &CompletionContext) { - match self.data { - CompletionRelevanceData::Table(_) | CompletionRelevanceData::Function(_) => return, - _ => {} - } - - let schema = self.get_schema_name().to_string(); - let table_name = match self.get_table_name() { - Some(t) => t, - None => return, - }; - - if ctx - .mentioned_relations - .get(&Some(schema.to_string())) - .is_some_and(|tables| tables.contains(table_name)) - { - self.score += 45; - } else if ctx - .mentioned_relations - .get(&None) - .is_some_and(|tables| tables.contains(table_name)) - { - self.score += 30; - } - } - - fn check_is_user_defined(&mut self) { - let schema = self.get_schema_name().to_string(); - - let system_schemas = ["pg_catalog", "information_schema", "pg_toast"]; - - if system_schemas.contains(&schema.as_str()) { - self.score -= 20; - } - - // "public" is the default 
postgres schema where users - // create objects. Prefer it by a slight bit. - if schema.as_str() == "public" { - self.score += 2; - } - } - - fn check_columns_in_stmt(&mut self, ctx: &CompletionContext) { - if let CompletionRelevanceData::Column(column) = self.data { - /* - * Columns can be mentioned in one of two ways: - * - * 1) With an alias: `select u.id`. - * If the currently investigated suggestion item is "id" of the "users" table, - * we want to check - * a) whether the name of the column matches. - * b) whether we know which table is aliased by "u" (if we don't, we ignore the alias). - * c) whether the aliased table matches the currently investigated suggestion item's table. - * - * 2) Without an alias: `select id`. - * In that case, we only check whether the mentioned column fits our currently investigated - * suggestion item's name. - * - */ - if ctx - .mentioned_columns - .get(&ctx.wrapping_clause_type) - .is_some_and(|set| { - set.iter().any(|mentioned| match mentioned.alias.as_ref() { - Some(als) => { - let aliased_table = ctx.mentioned_table_aliases.get(als.as_str()); - column.name == mentioned.column - && aliased_table.is_none_or(|t| t == &column.table_name) - } - None => mentioned.column == column.name, - }) - }) - { - self.score -= 10; - } - } - } -} diff --git a/crates/pgt_completions/src/sanitization.rs b/crates/pgt_completions/src/sanitization.rs deleted file mode 100644 index 40dea7e6..00000000 --- a/crates/pgt_completions/src/sanitization.rs +++ /dev/null @@ -1,416 +0,0 @@ -use std::{borrow::Cow, cmp::max}; - -use pgt_text_size::TextSize; - -use crate::CompletionParams; - -static SANITIZED_TOKEN: &str = "REPLACED_TOKEN"; - -pub(crate) struct SanitizedCompletionParams<'a> { - pub position: TextSize, - pub text: String, - pub schema: &'a pgt_schema_cache::SchemaCache, - pub tree: Cow<'a, tree_sitter::Tree>, -} - -pub fn benchmark_sanitization(params: CompletionParams) -> String { - let params: SanitizedCompletionParams = params.into(); - params.text -} - -pub(crate) fn remove_sanitized_token(it: &str) -> String { - it.replace(SANITIZED_TOKEN, "") -} - -#[derive(PartialEq, Eq, Debug)] -pub(crate) enum NodeText { - Replaced, - Original(String), -} - -impl From<&str> for NodeText { - fn from(value: &str) -> Self { - if value == SANITIZED_TOKEN { - NodeText::Replaced - } else { - NodeText::Original(value.into()) - } - } -} - -impl From for NodeText { - fn from(value: String) -> Self { - NodeText::from(value.as_str()) - } -} - -impl<'larger, 'smaller> From> for SanitizedCompletionParams<'smaller> -where - 'larger: 'smaller, -{ - fn from(params: CompletionParams<'larger>) -> Self { - if cursor_inbetween_nodes(¶ms.text, params.position) - || cursor_prepared_to_write_token_after_last_node(¶ms.text, params.position) - || cursor_before_semicolon(params.tree, params.position) - || cursor_on_a_dot(¶ms.text, params.position) - || cursor_between_parentheses(¶ms.text, params.position) - { - SanitizedCompletionParams::with_adjusted_sql(params) - } else { - SanitizedCompletionParams::unadjusted(params) - } - } -} - -impl<'larger, 'smaller> SanitizedCompletionParams<'smaller> -where - 'larger: 'smaller, -{ - fn with_adjusted_sql(params: CompletionParams<'larger>) -> Self { - let cursor_pos: usize = params.position.into(); - let mut sql = String::new(); - - let mut sql_iter = params.text.chars(); - - let max = max(cursor_pos + 1, params.text.len()); - - for idx in 0..max { - match sql_iter.next() { - Some(c) => { - if idx == cursor_pos { - sql.push_str(SANITIZED_TOKEN); - } - 
sql.push(c); - } - None => { - // the cursor is outside the statement, - // we want to push spaces until we arrive at the cursor position. - // we'll then add the SANITIZED_TOKEN - if idx == cursor_pos { - sql.push_str(SANITIZED_TOKEN); - } else { - sql.push(' '); - } - } - } - } - - let mut parser = tree_sitter::Parser::new(); - parser - .set_language(tree_sitter_sql::language()) - .expect("Error loading sql language"); - let tree = parser.parse(sql.clone(), None).unwrap(); - - Self { - position: params.position, - text: sql, - schema: params.schema, - tree: Cow::Owned(tree), - } - } - fn unadjusted(params: CompletionParams<'larger>) -> Self { - Self { - position: params.position, - text: params.text.clone(), - schema: params.schema, - tree: Cow::Borrowed(params.tree), - } - } - - pub fn is_sanitized_token(txt: &str) -> bool { - txt == SANITIZED_TOKEN - } -} - -/// Checks if the cursor is positioned inbetween two SQL nodes. -/// -/// ```sql -/// select| from users; -- cursor "touches" select node. returns false. -/// select |from users; -- cursor "touches" from node. returns false. -/// select | from users; -- cursor is between select and from nodes. returns true. -/// ``` -fn cursor_inbetween_nodes(sql: &str, position: TextSize) -> bool { - let position: usize = position.into(); - let mut chars = sql.chars(); - - let previous_whitespace = chars - .nth(position - 1) - .is_some_and(|c| c.is_ascii_whitespace()); - - let current_whitespace = chars.next().is_some_and(|c| c.is_ascii_whitespace()); - - previous_whitespace && current_whitespace -} - -/// Checks if the cursor is positioned after the last node, -/// ready to write the next token: -/// -/// ```sql -/// select * from | -- ready to write! -/// select * from| -- user still needs to type a space -/// select * from | -- too far off. -/// ``` -fn cursor_prepared_to_write_token_after_last_node(sql: &str, position: TextSize) -> bool { - let cursor_pos: usize = position.into(); - cursor_pos == sql.len() + 1 -} - -fn cursor_on_a_dot(sql: &str, position: TextSize) -> bool { - let position: usize = position.into(); - sql.chars().nth(position - 1).is_some_and(|c| c == '.') -} - -fn cursor_before_semicolon(tree: &tree_sitter::Tree, position: TextSize) -> bool { - let mut cursor = tree.walk(); - let mut leaf_node = tree.root_node(); - - let byte: usize = position.into(); - - // if the cursor escapes the root node, it can't be between nodes. - if byte < leaf_node.start_byte() || byte >= leaf_node.end_byte() { - return false; - } - - loop { - let child_idx = cursor.goto_first_child_for_byte(position.into()); - if child_idx.is_none() { - break; - } - leaf_node = cursor.node(); - } - - // The semicolon node is on the same level as the statement: - // - // program [0..26] - // statement [0..19] - // ; [25..26] - // - // However, if we search for position 21, we'll still land on the semi node. - // We must manually verify that the cursor is between the statement and the semi nodes. - - // if the last node is not a semi, the statement is not completed. 
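-    // To make the geometry concrete (a sketch based on the unit test below;
-    // offsets are byte positions):
-    //
-    //   select * from     ;
-    //   ^0           ^13  ^18
-    //
-    // The statement's last named node has exclusive end byte 13 and the ";"
-    // leaf sits at byte 18, so any cursor position in 14..=18 returns true.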
-    if leaf_node.kind() != ";" {
-        return false;
-    }
-
-    leaf_node
-        .prev_named_sibling()
-        .map(|n| n.end_byte() < byte)
-        .unwrap_or(false)
-}
-
-fn cursor_between_parentheses(sql: &str, position: TextSize) -> bool {
-    let position: usize = position.into();
-
-    let mut level = 0;
-    let mut tracking_open_idx = None;
-
-    let mut matching_open_idx = None;
-    let mut matching_close_idx = None;
-
-    for (idx, char) in sql.chars().enumerate() {
-        if char == '(' {
-            tracking_open_idx = Some(idx);
-            level += 1;
-        }
-
-        if char == ')' {
-            level -= 1;
-
-            if tracking_open_idx.is_some_and(|it| it < position) && idx >= position {
-                matching_open_idx = tracking_open_idx;
-                matching_close_idx = Some(idx)
-            }
-        }
-    }
-
-    // invalid statement
-    if level != 0 {
-        return false;
-    }
-
-    // early check: '(|)'
-    // however, we want to check this after the level nesting.
-    let mut chars = sql.chars();
-    if chars.nth(position - 1).is_some_and(|c| c == '(') && chars.next().is_some_and(|c| c == ')') {
-        return true;
-    }
-
-    // not *within* parentheses
-    if matching_open_idx.is_none() || matching_close_idx.is_none() {
-        return false;
-    }
-
-    // use string indexing, because we can't `.rev()` after `.take()`
-    let before = sql[..position]
-        .to_string()
-        .chars()
-        .rev()
-        .find(|c| !c.is_whitespace())
-        .unwrap_or_default();
-
-    let after = sql
-        .chars()
-        .skip(position)
-        .find(|c| !c.is_whitespace())
-        .unwrap_or_default();
-
-    let before_matches = before == ',' || before == '(';
-    let after_matches = after == ',' || after == ')';
-
-    before_matches && after_matches
-}
-
-#[cfg(test)]
-mod tests {
-    use pgt_text_size::TextSize;
-
-    use crate::sanitization::{
-        cursor_before_semicolon, cursor_between_parentheses, cursor_inbetween_nodes,
-        cursor_on_a_dot, cursor_prepared_to_write_token_after_last_node,
-    };
-
-    #[test]
-    fn test_cursor_inbetween_nodes() {
-        // note: two spaces between select and from.
-        let input = "select  from users;";
-
-        // select | from users; <-- just right, one space after select token, one space before from
-        assert!(cursor_inbetween_nodes(input, TextSize::new(7)));
-
-        // select| from users; <-- still on select token
-        assert!(!cursor_inbetween_nodes(input, TextSize::new(6)));
-
-        // select |from users; <-- already on from token
-        assert!(!cursor_inbetween_nodes(input, TextSize::new(8)));
-
-        // select from users;|
-        assert!(!cursor_inbetween_nodes(input, TextSize::new(19)));
-    }
-
-    #[test]
-    fn test_cursor_after_nodes() {
-        let input = "select * from";
-
-        // select * from| <-- still on previous token
-        assert!(!cursor_prepared_to_write_token_after_last_node(
-            input,
-            TextSize::new(13)
-        ));
-
-        // select * from  | <-- too far off, two spaces afterward
-        assert!(!cursor_prepared_to_write_token_after_last_node(
-            input,
-            TextSize::new(15)
-        ));
-
-        // select * |from <-- it's within
-        assert!(!cursor_prepared_to_write_token_after_last_node(
-            input,
-            TextSize::new(9)
-        ));
-
-        // select * from | <-- just right
-        assert!(cursor_prepared_to_write_token_after_last_node(
-            input,
-            TextSize::new(14)
-        ));
-    }
-
-    #[test]
-    fn on_a_dot() {
-        let input = "select * from private.";
-
-        // select * from private.| <-- on a dot
-        assert!(cursor_on_a_dot(input, TextSize::new(22)));
-
-        // select * from private|. <-- before the dot
-        assert!(!cursor_on_a_dot(input, TextSize::new(21)));
-
-        // select * from private. | <-- too far off the dot
-        assert!(!cursor_on_a_dot(input, TextSize::new(23)));
-    }
-
-    #[test]
-    fn test_cursor_before_semicolon() {
-        // Idx "13" is the exclusive end of `select * from` (first space after from)
-        // Idx "18" is right where the semi is
-        let input = "select * from     ;";
-
-        let mut parser = tree_sitter::Parser::new();
-        parser
-            .set_language(tree_sitter_sql::language())
-            .expect("Error loading sql language");
-
-        let tree = parser.parse(input, None).unwrap();
-
-        // select * from     ;| <-- it's after the statement
-        assert!(!cursor_before_semicolon(&tree, TextSize::new(19)));
-
-        // select * from|     ; <-- still touches the from
-        assert!(!cursor_before_semicolon(&tree, TextSize::new(13)));
-
-        // anything is fine here
-        // select * from |    ;
-        // select * from  |   ;
-        // select * from   |  ;
-        // select * from    | ;
-        // select * from     |;
-        assert!(cursor_before_semicolon(&tree, TextSize::new(14)));
-        assert!(cursor_before_semicolon(&tree, TextSize::new(15)));
-        assert!(cursor_before_semicolon(&tree, TextSize::new(16)));
-        assert!(cursor_before_semicolon(&tree, TextSize::new(17)));
-        assert!(cursor_before_semicolon(&tree, TextSize::new(18)));
-    }
-
-    #[test]
-    fn between_parentheses() {
-        let input = "insert into instruments ()";
-
-        // insert into (|) <- right in the parentheses
-        assert!(cursor_between_parentheses(input, TextSize::new(25)));
-
-        // insert into ()| <- too late
-        assert!(!cursor_between_parentheses(input, TextSize::new(26)));
-
-        // insert into |() <- too early
-        assert!(!cursor_between_parentheses(input, TextSize::new(24)));
-
-        let input = "insert into instruments (name, id, )";
-
-        // insert into instruments (name, id, |) <-- we should sanitize the next column
-        assert!(cursor_between_parentheses(input, TextSize::new(35)));
-
-        // insert into instruments (name, id|, ) <-- we are still on the previous token.
-        assert!(!cursor_between_parentheses(input, TextSize::new(33)));
-
-        let input = "insert into instruments (name, , id)";
-
-        // insert into instruments (name, |, id) <-- we can sanitize!
-        assert!(cursor_between_parentheses(input, TextSize::new(31)));
-
-        // insert into instruments (name, ,| id) <-- we are already on the next token
-        assert!(!cursor_between_parentheses(input, TextSize::new(32)));
-
-        let input = "insert into instruments (, name, id)";
-
-        // insert into instruments (|, name, id) <-- we can sanitize!
- assert!(cursor_between_parentheses(input, TextSize::new(25))); - - // insert into instruments (,| name, id) <-- already on next token - assert!(!cursor_between_parentheses(input, TextSize::new(26))); - - // bails on invalidly nested statements - assert!(!cursor_between_parentheses( - "insert into (instruments ()", - TextSize::new(26) - )); - - // can find its position in nested statements - // "insert into instruments (name) values (a_function(name, |))", - assert!(cursor_between_parentheses( - "insert into instruments (name) values (a_function(name, ))", - TextSize::new(56) - )); - } -} diff --git a/crates/pgt_completions/src/test_helper.rs b/crates/pgt_completions/src/test_helper.rs deleted file mode 100644 index 937c11af..00000000 --- a/crates/pgt_completions/src/test_helper.rs +++ /dev/null @@ -1,250 +0,0 @@ -use std::fmt::Display; - -use pgt_schema_cache::SchemaCache; -use pgt_test_utils::test_database::get_new_test_db; -use sqlx::Executor; - -use crate::{CompletionItem, CompletionItemKind, CompletionParams, complete}; - -pub static CURSOR_POS: char = '€'; - -#[derive(Clone)] -pub struct InputQuery { - sql: String, - position: usize, -} - -impl From<&str> for InputQuery { - fn from(value: &str) -> Self { - let position = value - .find(CURSOR_POS) - .expect("Insert Cursor Position into your Query."); - - InputQuery { - sql: value.replace(CURSOR_POS, "").trim().to_string(), - position, - } - } -} - -impl Display for InputQuery { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "{}", self.sql) - } -} - -pub(crate) async fn get_test_deps( - setup: &str, - input: InputQuery, -) -> (tree_sitter::Tree, pgt_schema_cache::SchemaCache) { - let test_db = get_new_test_db().await; - - test_db - .execute(setup) - .await - .expect("Failed to execute setup query"); - - let schema_cache = SchemaCache::load(&test_db) - .await - .expect("Failed to load Schema Cache"); - - let mut parser = tree_sitter::Parser::new(); - parser - .set_language(tree_sitter_sql::language()) - .expect("Error loading sql language"); - - let tree = parser.parse(input.to_string(), None).unwrap(); - - (tree, schema_cache) -} - -/// Careful: This will connect against the passed database. -/// Use this only to debug issues. Do not commit to version control. 
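-/// A minimal usage sketch (the connection string below is a placeholder, not a
-/// real credential):
-///
-/// ```ignore
-/// let (tree, cache) = test_against_connection_string(
-///     "postgresql://user:password@localhost:5432/mydb",
-///     "select * from u€".into(),
-/// )
-/// .await;
-/// ```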
-#[allow(dead_code)] -pub(crate) async fn test_against_connection_string( - conn_str: &str, - input: InputQuery, -) -> (tree_sitter::Tree, pgt_schema_cache::SchemaCache) { - let pool = sqlx::PgPool::connect(conn_str) - .await - .expect("Unable to connect to database."); - - let schema_cache = SchemaCache::load(&pool) - .await - .expect("Failed to load Schema Cache"); - - let mut parser = tree_sitter::Parser::new(); - parser - .set_language(tree_sitter_sql::language()) - .expect("Error loading sql language"); - - let tree = parser.parse(input.to_string(), None).unwrap(); - - (tree, schema_cache) -} - -pub(crate) fn get_text_and_position(q: InputQuery) -> (usize, String) { - (q.position, q.sql) -} - -pub(crate) fn get_test_params<'a>( - tree: &'a tree_sitter::Tree, - schema_cache: &'a pgt_schema_cache::SchemaCache, - sql: InputQuery, -) -> CompletionParams<'a> { - let (position, text) = get_text_and_position(sql); - - CompletionParams { - position: (position as u32).into(), - schema: schema_cache, - tree, - text, - } -} - -#[cfg(test)] -mod tests { - use crate::test_helper::CURSOR_POS; - - use super::InputQuery; - - #[test] - fn input_query_should_extract_correct_position() { - struct TestCase { - query: String, - expected_pos: usize, - expected_sql_len: usize, - } - - let cases = vec![ - TestCase { - query: format!("select * from{}", CURSOR_POS), - expected_pos: 13, - expected_sql_len: 13, - }, - TestCase { - query: format!("{}select * from", CURSOR_POS), - expected_pos: 0, - expected_sql_len: 13, - }, - TestCase { - query: format!("select {} from", CURSOR_POS), - expected_pos: 7, - expected_sql_len: 12, - }, - ]; - - for case in cases { - let query = InputQuery::from(case.query.as_str()); - assert_eq!(query.position, case.expected_pos); - assert_eq!(query.sql.len(), case.expected_sql_len); - } - } -} - -#[derive(Debug, PartialEq, Eq)] -pub(crate) enum CompletionAssertion { - Label(String), - LabelAndKind(String, CompletionItemKind), - LabelAndDesc(String, String), - LabelNotExists(String), - KindNotExists(CompletionItemKind), -} - -impl CompletionAssertion { - fn assert(&self, item: &CompletionItem) { - match self { - CompletionAssertion::Label(label) => { - assert_eq!( - &item.label, label, - "Expected label to be {}, but got {}", - label, &item.label - ); - } - CompletionAssertion::LabelAndKind(label, kind) => { - assert_eq!( - &item.label, label, - "Expected label to be {}, but got {}", - label, &item.label - ); - assert_eq!( - &item.kind, kind, - "Expected kind to be {:?}, but got {:?}", - kind, &item.kind - ); - } - CompletionAssertion::LabelNotExists(label) => { - assert_ne!( - &item.label, label, - "Expected label {} not to exist, but found it", - label - ); - } - CompletionAssertion::KindNotExists(kind) => { - assert_ne!( - &item.kind, kind, - "Expected kind {:?} not to exist, but found it", - kind - ); - } - CompletionAssertion::LabelAndDesc(label, desc) => { - assert_eq!( - &item.label, label, - "Expected label to be {}, but got {}", - label, &item.label - ); - assert_eq!( - &item.description, desc, - "Expected desc to be {}, but got {}", - desc, &item.description - ); - } - } - } -} - -pub(crate) async fn assert_complete_results( - query: &str, - assertions: Vec, - setup: &str, -) { - let (tree, cache) = get_test_deps(setup, query.into()).await; - let params = get_test_params(&tree, &cache, query.into()); - let items = complete(params); - - let (not_existing, existing): (Vec, Vec) = - assertions.into_iter().partition(|a| match a { - CompletionAssertion::LabelNotExists(_) | 
CompletionAssertion::KindNotExists(_) => true, - CompletionAssertion::Label(_) - | CompletionAssertion::LabelAndKind(_, _) - | CompletionAssertion::LabelAndDesc(_, _) => false, - }); - - assert!( - items.len() >= existing.len(), - "Not enough items returned. Expected at least {} items, but got {}", - existing.len(), - items.len() - ); - - for item in &items { - for assertion in ¬_existing { - assertion.assert(item); - } - } - - existing - .into_iter() - .zip(items.into_iter()) - .for_each(|(assertion, result)| { - assertion.assert(&result); - }); -} - -pub(crate) async fn assert_no_complete_results(query: &str, setup: &str) { - let (tree, cache) = get_test_deps(setup, query.into()).await; - let params = get_test_params(&tree, &cache, query.into()); - let items = complete(params); - - assert_eq!(items.len(), 0) -} diff --git a/crates/pgt_configuration/Cargo.toml b/crates/pgt_configuration/Cargo.toml deleted file mode 100644 index 61da458b..00000000 --- a/crates/pgt_configuration/Cargo.toml +++ /dev/null @@ -1,33 +0,0 @@ -[package] -authors.workspace = true -categories.workspace = true -description = "" -edition.workspace = true -homepage.workspace = true -keywords.workspace = true -license.workspace = true -name = "pgt_configuration" -repository.workspace = true -version = "0.0.0" - - -[dependencies] -biome_deserialize = { workspace = true, features = ["schema"] } -biome_deserialize_macros = { workspace = true } -bpaf = { workspace = true } -indexmap = { workspace = true } -pgt_analyse = { workspace = true } -pgt_analyser = { workspace = true } -pgt_console = { workspace = true } -pgt_diagnostics = { workspace = true } -pgt_text_size = { workspace = true } -rustc-hash = { workspace = true } -schemars = { workspace = true, features = ["indexmap1"], optional = true } -serde = { workspace = true, features = ["derive"] } -serde_json = { workspace = true, features = ["raw_value"] } - -[lib] -doctest = false - -[features] -schema = ["dep:schemars", "schemars/indexmap"] diff --git a/crates/pgt_configuration/src/analyser/linter/mod.rs b/crates/pgt_configuration/src/analyser/linter/mod.rs deleted file mode 100644 index 20535a2e..00000000 --- a/crates/pgt_configuration/src/analyser/linter/mod.rs +++ /dev/null @@ -1,58 +0,0 @@ -mod rules; - -use biome_deserialize::StringSet; -use biome_deserialize_macros::{Merge, Partial}; -use bpaf::Bpaf; -pub use rules::*; -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Debug, Deserialize, Eq, Partial, PartialEq, Serialize)] -#[partial(derive(Bpaf, Clone, Eq, Merge, PartialEq))] -#[partial(cfg_attr(feature = "schema", derive(schemars::JsonSchema)))] -#[partial(serde(rename_all = "camelCase", default, deny_unknown_fields))] -pub struct LinterConfiguration { - /// if `false`, it disables the feature and the linter won't be executed. `true` by default - #[partial(bpaf(hide))] - pub enabled: bool, - - /// List of rules - #[partial(bpaf(pure(Default::default()), optional, hide))] - pub rules: Rules, - - /// A list of Unix shell style patterns. The formatter will ignore files/folders that will - /// match these patterns. - #[partial(bpaf(hide))] - pub ignore: StringSet, - - /// A list of Unix shell style patterns. The formatter will include files/folders that will - /// match these patterns. 
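-    /// Example (illustrative patterns, not defaults): `["db/**/*.sql", "migrations/**"]`.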
- #[partial(bpaf(hide))] - pub include: StringSet, -} - -impl LinterConfiguration { - pub const fn is_disabled(&self) -> bool { - !self.enabled - } -} - -impl Default for LinterConfiguration { - fn default() -> Self { - Self { - enabled: true, - rules: Default::default(), - ignore: Default::default(), - include: Default::default(), - } - } -} - -impl PartialLinterConfiguration { - pub const fn is_disabled(&self) -> bool { - matches!(self.enabled, Some(false)) - } - - pub fn get_rules(&self) -> Rules { - self.rules.clone().unwrap_or_default() - } -} diff --git a/crates/pgt_configuration/src/analyser/linter/rules.rs b/crates/pgt_configuration/src/analyser/linter/rules.rs deleted file mode 100644 index 14d796bf..00000000 --- a/crates/pgt_configuration/src/analyser/linter/rules.rs +++ /dev/null @@ -1,301 +0,0 @@ -//! Generated file, do not edit by hand, see `xtask/codegen` - -use crate::analyser::{RuleConfiguration, RulePlainConfiguration}; -use biome_deserialize_macros::Merge; -use pgt_analyse::{RuleFilter, options::RuleOptions}; -use pgt_diagnostics::{Category, Severity}; -use rustc_hash::FxHashSet; -#[cfg(feature = "schema")] -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; -#[derive( - Clone, - Copy, - Debug, - Eq, - Hash, - Merge, - Ord, - PartialEq, - PartialOrd, - serde :: Deserialize, - serde :: Serialize, -)] -#[cfg_attr(feature = "schema", derive(JsonSchema))] -#[serde(rename_all = "camelCase")] -pub enum RuleGroup { - Safety, -} -impl RuleGroup { - pub const fn as_str(self) -> &'static str { - match self { - Self::Safety => Safety::GROUP_NAME, - } - } -} -impl std::str::FromStr for RuleGroup { - type Err = &'static str; - fn from_str(s: &str) -> Result { - match s { - Safety::GROUP_NAME => Ok(Self::Safety), - _ => Err("This rule group doesn't exist."), - } - } -} -#[derive(Clone, Debug, Default, Deserialize, Eq, Merge, PartialEq, Serialize)] -#[cfg_attr(feature = "schema", derive(JsonSchema))] -#[serde(rename_all = "camelCase", deny_unknown_fields)] -pub struct Rules { - #[doc = r" It enables the lint rules recommended by Postgres Tools. `true` by default."] - #[serde(skip_serializing_if = "Option::is_none")] - pub recommended: Option, - #[doc = r" It enables ALL rules. 
The rules that belong to `nursery` won't be enabled."]
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub all: Option<bool>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub safety: Option<Safety>,
-}
-impl Rules {
-    #[doc = r" Checks if the code coming from [pgt_diagnostics::Diagnostic] corresponds to a rule."]
-    #[doc = r" Usually the code is built like {group}/{rule_name}"]
-    pub fn has_rule(group: RuleGroup, rule_name: &str) -> Option<&'static str> {
-        match group {
-            RuleGroup::Safety => Safety::has_rule(rule_name),
-        }
-    }
-    #[doc = r" Given a category coming from [Diagnostic](pgt_diagnostics::Diagnostic), this function returns"]
-    #[doc = r" the [Severity](pgt_diagnostics::Severity) associated to the rule, if the configuration changed it."]
-    #[doc = r" If the severity is off or not set, then the function returns the default severity of the rule:"]
-    #[doc = r" [Severity::Error] for recommended rules and [Severity::Warning] for other rules."]
-    #[doc = r""]
-    #[doc = r" If not, the function returns [None]."]
-    pub fn get_severity_from_code(&self, category: &Category) -> Option<Severity> {
-        let mut split_code = category.name().split('/');
-        let _lint = split_code.next();
-        debug_assert_eq!(_lint, Some("lint"));
-        let group = <RuleGroup as std::str::FromStr>::from_str(split_code.next()?).ok()?;
-        let rule_name = split_code.next()?;
-        let rule_name = Self::has_rule(group, rule_name)?;
-        let severity = match group {
-            RuleGroup::Safety => self
-                .safety
-                .as_ref()
-                .and_then(|group| group.get_rule_configuration(rule_name))
-                .filter(|(level, _)| !matches!(level, RulePlainConfiguration::Off))
-                .map_or_else(
-                    || {
-                        if Safety::is_recommended_rule(rule_name) {
-                            Severity::Error
-                        } else {
-                            Severity::Warning
-                        }
-                    },
-                    |(level, _)| level.into(),
-                ),
-        };
-        Some(severity)
-    }
-    #[doc = r" Ensure that `recommended` is set to `true` or implied."]
-    pub fn set_recommended(&mut self) {
-        if self.all != Some(true) && self.recommended == Some(false) {
-            self.recommended = Some(true)
-        }
-        if let Some(group) = &mut self.safety {
-            group.recommended = None;
-        }
-    }
-    pub(crate) const fn is_recommended_false(&self) -> bool {
-        matches!(self.recommended, Some(false))
-    }
-    pub(crate) const fn is_all_true(&self) -> bool {
-        matches!(self.all, Some(true))
-    }
-    #[doc = r" It returns the enabled rules by default."]
-    #[doc = r""]
-    #[doc = r" The enabled rules are calculated from the difference with the disabled rules."]
-    pub fn as_enabled_rules(&self) -> FxHashSet<RuleFilter<'static>> {
-        let mut enabled_rules = FxHashSet::default();
-        let mut disabled_rules = FxHashSet::default();
-        if let Some(group) = self.safety.as_ref() {
-            group.collect_preset_rules(
-                self.is_all_true(),
-                !self.is_recommended_false(),
-                &mut enabled_rules,
-            );
-            enabled_rules.extend(&group.get_enabled_rules());
-            disabled_rules.extend(&group.get_disabled_rules());
-        } else if self.is_all_true() {
-            enabled_rules.extend(Safety::all_rules_as_filters());
-        } else if !self.is_recommended_false() {
-            enabled_rules.extend(Safety::recommended_rules_as_filters());
-        }
-        enabled_rules.difference(&disabled_rules).copied().collect()
-    }
-}
-#[derive(Clone, Debug, Default, Deserialize, Eq, Merge, PartialEq, Serialize)]
-#[cfg_attr(feature = "schema", derive(JsonSchema))]
-#[serde(rename_all = "camelCase", default, deny_unknown_fields)]
-#[doc = r" A list of rules that belong to this group"]
-pub struct Safety {
-    #[doc = r" It enables the recommended rules for this group"]
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub recommended: Option<bool>,
-    #[doc = r" It enables ALL rules for this group."]
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub all: Option<bool>,
-    #[doc = "Adding a new column that is NOT NULL and has no default value to an existing table effectively makes it required."]
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub adding_required_field:
-        Option<RuleConfiguration<pgt_analyser::options::AddingRequiredField>>,
-    #[doc = "Dropping a column may break existing clients."]
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub ban_drop_column: Option<RuleConfiguration<pgt_analyser::options::BanDropColumn>>,
-    #[doc = "Dropping a NOT NULL constraint may break existing clients."]
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub ban_drop_not_null: Option<RuleConfiguration<pgt_analyser::options::BanDropNotNull>>,
-    #[doc = "Dropping a table may break existing clients."]
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub ban_drop_table: Option<RuleConfiguration<pgt_analyser::options::BanDropTable>>,
-}
-impl Safety {
-    const GROUP_NAME: &'static str = "safety";
-    pub(crate) const GROUP_RULES: &'static [&'static str] = &[
-        "addingRequiredField",
-        "banDropColumn",
-        "banDropNotNull",
-        "banDropTable",
-    ];
-    const RECOMMENDED_RULES: &'static [&'static str] =
-        &["banDropColumn", "banDropNotNull", "banDropTable"];
-    const RECOMMENDED_RULES_AS_FILTERS: &'static [RuleFilter<'static>] = &[
-        RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[1]),
-        RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[2]),
-        RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[3]),
-    ];
-    const ALL_RULES_AS_FILTERS: &'static [RuleFilter<'static>] = &[
-        RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[0]),
-        RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[1]),
-        RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[2]),
-        RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[3]),
-    ];
-    #[doc = r" Retrieves the recommended rules"]
-    pub(crate) fn is_recommended_true(&self) -> bool {
-        matches!(self.recommended, Some(true))
-    }
-    pub(crate) fn is_recommended_unset(&self) -> bool {
-        self.recommended.is_none()
-    }
-    pub(crate) fn is_all_true(&self) -> bool {
-        matches!(self.all, Some(true))
-    }
-    pub(crate) fn is_all_unset(&self) -> bool {
-        self.all.is_none()
-    }
-    pub(crate) fn get_enabled_rules(&self) -> FxHashSet<RuleFilter<'static>> {
-        let mut index_set = FxHashSet::default();
-        if let Some(rule) = self.adding_required_field.as_ref() {
-            if rule.is_enabled() {
-                index_set.insert(RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[0]));
-            }
-        }
-        if let Some(rule) = self.ban_drop_column.as_ref() {
-            if rule.is_enabled() {
-                index_set.insert(RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[1]));
-            }
-        }
-        if let Some(rule) = self.ban_drop_not_null.as_ref() {
-            if rule.is_enabled() {
-                index_set.insert(RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[2]));
-            }
-        }
-        if let Some(rule) = self.ban_drop_table.as_ref() {
-            if rule.is_enabled() {
-                index_set.insert(RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[3]));
-            }
-        }
-        index_set
-    }
-    pub(crate) fn get_disabled_rules(&self) -> FxHashSet<RuleFilter<'static>> {
-        let mut index_set = FxHashSet::default();
-        if let Some(rule) = self.adding_required_field.as_ref() {
-            if rule.is_disabled() {
-                index_set.insert(RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[0]));
-            }
-        }
-        if let Some(rule) = self.ban_drop_column.as_ref() {
-            if rule.is_disabled() {
-                index_set.insert(RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[1]));
-            }
-        }
-        if let Some(rule) = self.ban_drop_not_null.as_ref() {
-            if rule.is_disabled() {
-                index_set.insert(RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[2]));
-            }
-        }
-        if let Some(rule) = self.ban_drop_table.as_ref() {
-            if rule.is_disabled() {
-                index_set.insert(RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[3]));
-            }
-        }
-        index_set
-    }
-    #[doc = r" Checks if, given a rule name, matches one of the rules contained in this category"]
-    pub(crate) fn has_rule(rule_name: &str) -> Option<&'static str> {
-        Some(Self::GROUP_RULES[Self::GROUP_RULES.binary_search(&rule_name).ok()?])
-    }
-    #[doc = r" Checks if, given a rule name, it is marked as recommended"]
-    pub(crate) fn is_recommended_rule(rule_name: &str) -> bool {
-        Self::RECOMMENDED_RULES.contains(&rule_name)
-    }
-    pub(crate) fn recommended_rules_as_filters() -> &'static [RuleFilter<'static>] {
-        Self::RECOMMENDED_RULES_AS_FILTERS
-    }
-    pub(crate) fn all_rules_as_filters() -> &'static [RuleFilter<'static>] {
-        Self::ALL_RULES_AS_FILTERS
-    }
-    #[doc = r" Select preset rules"]
-    pub(crate) fn collect_preset_rules(
-        &self,
-        parent_is_all: bool,
-        parent_is_recommended: bool,
-        enabled_rules: &mut FxHashSet<RuleFilter<'static>>,
-    ) {
-        if self.is_all_true() || self.is_all_unset() && parent_is_all {
-            enabled_rules.extend(Self::all_rules_as_filters());
-        } else if self.is_recommended_true()
-            || self.is_recommended_unset() && self.is_all_unset() && parent_is_recommended
-        {
-            enabled_rules.extend(Self::recommended_rules_as_filters());
-        }
-    }
-    pub(crate) fn get_rule_configuration(
-        &self,
-        rule_name: &str,
-    ) -> Option<(RulePlainConfiguration, Option<RuleOptions>)> {
-        match rule_name {
-            "addingRequiredField" => self
-                .adding_required_field
-                .as_ref()
-                .map(|conf| (conf.level(), conf.get_options())),
-            "banDropColumn" => self
-                .ban_drop_column
-                .as_ref()
-                .map(|conf| (conf.level(), conf.get_options())),
-            "banDropNotNull" => self
-                .ban_drop_not_null
-                .as_ref()
-                .map(|conf| (conf.level(), conf.get_options())),
-            "banDropTable" => self
-                .ban_drop_table
-                .as_ref()
-                .map(|conf| (conf.level(), conf.get_options())),
-            _ => None,
-        }
-    }
-}
-#[test]
-fn test_order() {
-    for items in Safety::GROUP_RULES.windows(2) {
-        assert!(items[0] < items[1], "{} < {}", items[0], items[1]);
-    }
-}
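The preset logic deleted above (`collect_preset_rules` plus the enabled/disabled difference in `as_enabled_rules`) is easiest to see with a concrete value. Below is a minimal sketch of the intended resolution; it assumes `recommended`, `all`, and `safety` are the only fields of `Rules` and uses the crate's public re-exports. It is illustrative only and was never part of the deleted file.

```rust
use pgt_configuration::analyser::{Rules, Safety};
use pgt_configuration::{RuleConfiguration, RulePlainConfiguration};

fn main() {
    // Recommended preset on, with banDropColumn explicitly turned off.
    let rules = Rules {
        recommended: Some(true),
        all: None,
        safety: Some(Safety {
            ban_drop_column: Some(RuleConfiguration::Plain(RulePlainConfiguration::Off)),
            ..Default::default()
        }),
    };

    // collect_preset_rules first enables the recommended set (banDropColumn,
    // banDropNotNull, banDropTable); the explicit Off entry then lands in the
    // disabled set and is subtracted again by as_enabled_rules.
    let enabled = rules.as_enabled_rules();
    assert_eq!(enabled.len(), 2); // banDropNotNull and banDropTable remain
}
```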
diff --git a/crates/pgt_configuration/src/analyser/mod.rs b/crates/pgt_configuration/src/analyser/mod.rs
deleted file mode 100644
index 2d43fca4..00000000
--- a/crates/pgt_configuration/src/analyser/mod.rs
+++ /dev/null
@@ -1,389 +0,0 @@
-pub mod linter;
-
-pub use crate::analyser::linter::*;
-use biome_deserialize::Merge;
-use biome_deserialize_macros::Deserializable;
-use pgt_analyse::RuleFilter;
-use pgt_analyse::options::RuleOptions;
-use pgt_diagnostics::Severity;
-#[cfg(feature = "schema")]
-use schemars::JsonSchema;
-use serde::{Deserialize, Serialize};
-use std::str::FromStr;
-
-#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
-#[cfg_attr(feature = "schema", derive(JsonSchema))]
-#[serde(rename_all = "camelCase", deny_unknown_fields, untagged)]
-pub enum RuleConfiguration<T: Default> {
-    Plain(RulePlainConfiguration),
-    WithOptions(RuleWithOptions<T>),
-}
-impl<T: Default> RuleConfiguration<T> {
-    pub fn is_disabled(&self) -> bool {
-        matches!(self.level(), RulePlainConfiguration::Off)
-    }
-    pub fn is_enabled(&self) -> bool {
-        !self.is_disabled()
-    }
-    pub fn level(&self) -> RulePlainConfiguration {
-        match self {
-            Self::Plain(plain) => *plain,
-            Self::WithOptions(options) => options.level,
-        }
-    }
-    pub fn set_level(&mut self, level: RulePlainConfiguration) {
-        match self {
-            Self::Plain(plain) => *plain = level,
-            Self::WithOptions(options) => options.level = level,
-        }
-    }
-}
-// Rule configuration has a custom [Merge] implementation so that overriding the
-// severity doesn't override the options.
-impl<T: Clone + Default> Merge for RuleConfiguration<T> {
-    fn merge_with(&mut self, other: Self) {
-        match self {
-            Self::Plain(_) => *self = other,
-            Self::WithOptions(this) => match other {
-                Self::Plain(level) => {
-                    this.level = level;
-                }
-                Self::WithOptions(other) => {
-                    this.merge_with(other);
-                }
-            },
-        }
-    }
-}
-impl<T: Clone + Default + 'static> RuleConfiguration<T> {
-    pub fn get_options(&self) -> Option<RuleOptions> {
-        match self {
-            Self::Plain(_) => None,
-            Self::WithOptions(options) => Some(RuleOptions::new(options.options.clone())),
-        }
-    }
-}
-impl<T: Default> Default for RuleConfiguration<T> {
-    fn default() -> Self {
-        Self::Plain(RulePlainConfiguration::Error)
-    }
-}
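Because `RuleConfiguration` is `#[serde(untagged)]`, a rule entry in `postgrestools.jsonc` can be either a bare severity string or an object carrying options. A quick sketch of the two accepted shapes, assuming a `serde_json`-style deserialization and a placeholder options type (not one of the crate's real option structs):

```rust
use pgt_configuration::{RuleConfiguration, RulePlainConfiguration};

// Stand-in for a rule's option struct; real rules define their own types.
#[derive(Clone, Debug, Default, Eq, PartialEq, serde::Deserialize, serde::Serialize)]
struct NoOptions {}

fn main() -> Result<(), serde_json::Error> {
    // Shape 1: a plain severity level.
    let plain: RuleConfiguration<NoOptions> = serde_json::from_str("\"error\"")?;
    assert!(matches!(plain.level(), RulePlainConfiguration::Error));

    // Shape 2: a severity level plus rule options.
    let with_options: RuleConfiguration<NoOptions> =
        serde_json::from_str(r#"{ "level": "warn", "options": {} }"#)?;
    assert!(matches!(with_options.level(), RulePlainConfiguration::Warn));
    Ok(())
}
```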
-
-#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
-#[cfg_attr(feature = "schema", derive(JsonSchema))]
-#[serde(rename_all = "camelCase", deny_unknown_fields, untagged)]
-pub enum RuleFixConfiguration<T: Default> {
-    Plain(RulePlainConfiguration),
-    WithOptions(RuleWithFixOptions<T>),
-}
-impl<T: Default> Default for RuleFixConfiguration<T> {
-    fn default() -> Self {
-        Self::Plain(RulePlainConfiguration::Error)
-    }
-}
-impl<T: Default> RuleFixConfiguration<T> {
-    pub fn is_disabled(&self) -> bool {
-        matches!(self.level(), RulePlainConfiguration::Off)
-    }
-    pub fn is_enabled(&self) -> bool {
-        !self.is_disabled()
-    }
-    pub fn level(&self) -> RulePlainConfiguration {
-        match self {
-            Self::Plain(plain) => *plain,
-            Self::WithOptions(options) => options.level,
-        }
-    }
-    pub fn set_level(&mut self, level: RulePlainConfiguration) {
-        match self {
-            Self::Plain(plain) => *plain = level,
-            Self::WithOptions(options) => options.level = level,
-        }
-    }
-}
-// Rule configuration has a custom [Merge] implementation so that overriding the
-// severity doesn't override the options.
-impl<T: Clone + Default> Merge for RuleFixConfiguration<T> {
-    fn merge_with(&mut self, other: Self) {
-        match self {
-            Self::Plain(_) => *self = other,
-            Self::WithOptions(this) => match other {
-                Self::Plain(level) => {
-                    this.level = level;
-                }
-                Self::WithOptions(other) => {
-                    this.merge_with(other);
-                }
-            },
-        }
-    }
-}
-impl<T: Clone + Default + 'static> RuleFixConfiguration<T> {
-    pub fn get_options(&self) -> Option<RuleOptions> {
-        match self {
-            Self::Plain(_) => None,
-            Self::WithOptions(options) => Some(RuleOptions::new(options.options.clone())),
-        }
-    }
-}
-impl<T: Default> From<&RuleConfiguration<T>> for Severity {
-    fn from(conf: &RuleConfiguration<T>) -> Self {
-        match conf {
-            RuleConfiguration::Plain(p) => (*p).into(),
-            RuleConfiguration::WithOptions(conf) => {
-                let level = &conf.level;
-                (*level).into()
-            }
-        }
-    }
-}
-impl From<RulePlainConfiguration> for Severity {
-    fn from(conf: RulePlainConfiguration) -> Self {
-        match conf {
-            RulePlainConfiguration::Warn => Severity::Warning,
-            RulePlainConfiguration::Error => Severity::Error,
-            RulePlainConfiguration::Info => Severity::Information,
-            RulePlainConfiguration::Off => {
-                unreachable!("the rule is turned off, it should not step in here")
-            }
-        }
-    }
-}
-impl From<RuleAssistPlainConfiguration> for Severity {
-    fn from(conf: RuleAssistPlainConfiguration) -> Self {
-        match conf {
-            RuleAssistPlainConfiguration::On => Severity::Hint,
-            RuleAssistPlainConfiguration::Off => {
-                unreachable!("the rule is turned off, it should not step in here")
-            }
-        }
-    }
-}
-
-#[derive(Clone, Copy, Debug, Default, Deserialize, Deserializable, Eq, PartialEq, Serialize)]
-#[cfg_attr(feature = "schema", derive(JsonSchema))]
-#[serde(rename_all = "camelCase")]
-pub enum RulePlainConfiguration {
-    #[default]
-    Warn,
-    Error,
-    Info,
-    Off,
-}
-
-#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
-#[cfg_attr(feature = "schema", derive(JsonSchema))]
-#[serde(rename_all = "camelCase", deny_unknown_fields, untagged)]
-pub enum RuleAssistConfiguration<T: Default> {
-    Plain(RuleAssistPlainConfiguration),
-    WithOptions(RuleAssistWithOptions<T>),
-}
-impl<T: Default> RuleAssistConfiguration<T> {
-    pub fn is_disabled(&self) -> bool {
-        matches!(self.level(), RuleAssistPlainConfiguration::Off)
-    }
-    pub fn is_enabled(&self) -> bool {
-        !self.is_disabled()
-    }
-    pub fn level(&self) -> RuleAssistPlainConfiguration {
-        match self {
-            Self::Plain(plain) => *plain,
-            Self::WithOptions(options) => options.level,
-        }
-    }
-    pub fn set_level(&mut self, level: RuleAssistPlainConfiguration) {
-        match self {
-            Self::Plain(plain) => *plain = level,
-            Self::WithOptions(options) => options.level = level,
-        }
-    }
-}
-// Rule configuration has a custom [Merge] implementation so that overriding the
-// severity doesn't override the options.
-impl<T: Clone + Default> Merge for RuleAssistConfiguration<T> {
-    fn merge_with(&mut self, other: Self) {
-        match self {
-            Self::Plain(_) => *self = other,
-            Self::WithOptions(this) => match other {
-                Self::Plain(level) => {
-                    this.level = level;
-                }
-                Self::WithOptions(other) => {
-                    this.merge_with(other);
-                }
-            },
-        }
-    }
-}
-impl<T: Clone + Default + 'static> RuleAssistConfiguration<T> {
-    pub fn get_options(&self) -> Option<RuleOptions> {
-        match self {
-            Self::Plain(_) => None,
-            Self::WithOptions(options) => Some(RuleOptions::new(options.options.clone())),
-        }
-    }
-}
-impl<T: Default> Default for RuleAssistConfiguration<T> {
-    fn default() -> Self {
-        Self::Plain(RuleAssistPlainConfiguration::Off)
-    }
-}
-
-#[derive(Clone, Copy, Debug, Default, Deserialize, Deserializable, Eq, PartialEq, Serialize)]
-#[cfg_attr(feature = "schema", derive(JsonSchema))]
-#[serde(rename_all = "camelCase")]
-pub enum RuleAssistPlainConfiguration {
-    #[default]
-    On,
-    Off,
-}
-impl RuleAssistPlainConfiguration {
-    pub const fn is_enabled(&self) -> bool {
-        matches!(self, Self::On)
-    }
-
-    pub const fn is_disabled(&self) -> bool {
-        matches!(self, Self::Off)
-    }
-}
-impl Merge for RuleAssistPlainConfiguration {
-    fn merge_with(&mut self, other: Self) {
-        *self = other;
-    }
-}
-
-#[derive(Clone, Debug, Default, Deserialize, Deserializable, Eq, PartialEq, Serialize)]
-#[cfg_attr(feature = "schema", derive(JsonSchema))]
-#[serde(rename_all = "camelCase", deny_unknown_fields)]
-pub struct RuleAssistWithOptions<T: Default> {
-    /// The severity of the emitted diagnostics by the rule
-    pub level: RuleAssistPlainConfiguration,
-    /// Rule's options
-    pub options: T,
-}
-impl<T: Default> Merge for RuleAssistWithOptions<T> {
-    fn merge_with(&mut self, other: Self) {
-        self.level = other.level;
-        self.options = other.options;
-    }
-}
-
-#[derive(Clone, Debug, Default, Deserialize, Deserializable, Eq, PartialEq, Serialize)]
-#[cfg_attr(feature = "schema", derive(JsonSchema))]
-#[serde(rename_all = "camelCase", deny_unknown_fields)]
-pub struct RuleWithOptions<T: Default> {
-    /// The severity of the emitted diagnostics by the rule
-    pub level: RulePlainConfiguration,
-    /// Rule's options
-    pub options: T,
-}
-impl<T: Default> Merge for RuleWithOptions<T> {
-    fn merge_with(&mut self, other: Self) {
-        self.level = other.level;
-        self.options = other.options;
-    }
-}
-
-#[derive(Clone, Debug, Default, Deserialize, Deserializable, Eq, PartialEq, Serialize)]
-#[cfg_attr(feature = "schema", derive(JsonSchema))]
-#[serde(rename_all = "camelCase", deny_unknown_fields)]
-pub struct RuleWithFixOptions<T: Default> {
-    /// The severity of the emitted diagnostics by the rule
-    pub level: RulePlainConfiguration,
-    /// Rule's options
-    pub options: T,
-}
-
-impl<T: Default> Merge for RuleWithFixOptions<T> {
-    fn merge_with(&mut self, other: Self) {
-        self.level = other.level;
-        self.options = other.options;
-    }
-}
-
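The custom `Merge` implementations above are what keep a severity-only override (for example, from the CLI) from clobbering options set in the configuration file. A small sketch of the intended behavior, using a placeholder options type and the trait bounds as reconstructed above:

```rust
use biome_deserialize::Merge;
use pgt_configuration::{RuleConfiguration, RulePlainConfiguration, RuleWithOptions};

// Hypothetical options struct, purely for illustration.
#[derive(Clone, Debug, Default)]
struct Opts {
    max: u8,
}

fn main() {
    // From the config file: warn + options.
    let mut base = RuleConfiguration::WithOptions(RuleWithOptions {
        level: RulePlainConfiguration::Warn,
        options: Opts { max: 3 },
    });

    // An override that only specifies a severity...
    base.merge_with(RuleConfiguration::Plain(RulePlainConfiguration::Error));

    // ...updates the level but leaves the options intact.
    assert!(matches!(base.level(), RulePlainConfiguration::Error));
    assert!(base.get_options().is_some());
}
```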
-#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)]
-pub enum RuleSelector {
-    Group(linter::RuleGroup),
-    Rule(linter::RuleGroup, &'static str),
-}
-
-impl From<RuleSelector> for RuleFilter<'static> {
-    fn from(value: RuleSelector) -> Self {
-        match value {
-            RuleSelector::Group(group) => RuleFilter::Group(group.as_str()),
-            RuleSelector::Rule(group, name) => RuleFilter::Rule(group.as_str(), name),
-        }
-    }
-}
-
-impl<'a> From<&'a RuleSelector> for RuleFilter<'static> {
-    fn from(value: &'a RuleSelector) -> Self {
-        match value {
-            RuleSelector::Group(group) => RuleFilter::Group(group.as_str()),
-            RuleSelector::Rule(group, name) => RuleFilter::Rule(group.as_str(), name),
-        }
-    }
-}
-
-impl FromStr for RuleSelector {
-    type Err = &'static str;
-    fn from_str(selector: &str) -> Result<Self, Self::Err> {
-        let selector = selector.strip_prefix("lint/").unwrap_or(selector);
-        if let Some((group_name, rule_name)) = selector.split_once('/') {
-            let group = linter::RuleGroup::from_str(group_name)?;
-            if let Some(rule_name) = Rules::has_rule(group, rule_name) {
-                Ok(RuleSelector::Rule(group, rule_name))
-            } else {
-                Err("This rule doesn't exist.")
-            }
-        } else {
-            match linter::RuleGroup::from_str(selector) {
-                Ok(group) => Ok(RuleSelector::Group(group)),
-                Err(_) => Err(
-                    "This group doesn't exist. Use the syntax `<group>/<rule>` to specify a rule.",
-                ),
-            }
-        }
-    }
-}
-
-impl serde::Serialize for RuleSelector {
-    fn serialize<S: serde::Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
-        match self {
-            RuleSelector::Group(group) => serializer.serialize_str(group.as_str()),
-            RuleSelector::Rule(group, rule_name) => {
-                let group_name = group.as_str();
-                serializer.serialize_str(&format!("{group_name}/{rule_name}"))
-            }
-        }
-    }
-}
-
-impl<'de> serde::Deserialize<'de> for RuleSelector {
-    fn deserialize<D: serde::Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
-        struct Visitor;
-        impl serde::de::Visitor<'_> for Visitor {
-            type Value = RuleSelector;
-            fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
-                formatter.write_str("<group>/<rule_name>")
-            }
-            fn visit_str<E: serde::de::Error>(self, v: &str) -> Result<Self::Value, E> {
-                match RuleSelector::from_str(v) {
-                    Ok(result) => Ok(result),
-                    Err(error) => Err(serde::de::Error::custom(error)),
-                }
-            }
-        }
-        deserializer.deserialize_str(Visitor)
-    }
-}
-
-#[cfg(feature = "schema")]
-impl schemars::JsonSchema for RuleSelector {
-    fn schema_name() -> String {
-        "RuleCode".to_string()
-    }
-    fn json_schema(r#gen: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema {
-        String::json_schema(r#gen)
-    }
-}
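The selector syntax accepted by the `FromStr` implementation above is the same `group/rule` form used in diagnostic categories, with an optional `lint/` prefix. A short sketch of the expected parses, using the crate's public re-export of `RuleSelector`:

```rust
use std::str::FromStr;

use pgt_configuration::RuleSelector;

fn main() {
    // A fully qualified rule, with or without the `lint/` prefix.
    assert!(RuleSelector::from_str("safety/banDropColumn").is_ok());
    assert!(RuleSelector::from_str("lint/safety/banDropColumn").is_ok());

    // A bare group name selects the whole group.
    assert!(matches!(
        RuleSelector::from_str("safety"),
        Ok(RuleSelector::Group(_))
    ));

    // Unknown rules are rejected with a static error message.
    assert!(RuleSelector::from_str("safety/doesNotExist").is_err());
}
```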
- #[partial(bpaf(long("username")))] - pub username: String, - - /// The password to connect to the database. - #[partial(bpaf(long("password")))] - pub password: String, - - /// The name of the database. - #[partial(bpaf(long("database")))] - pub database: String, - - #[partial(bpaf(long("allow_statement_executions_against")))] - pub allow_statement_executions_against: StringSet, - - /// The connection timeout in seconds. - #[partial(bpaf(long("conn_timeout_secs"), fallback(Some(10)), debug_fallback))] - pub conn_timeout_secs: u16, - - /// Actively disable all database-related features. - #[partial(bpaf(long("disable-db"), switch, fallback(Some(false))))] - #[partial(cfg_attr(feature = "schema", schemars(skip)))] - pub disable_connection: bool, -} - -impl Default for DatabaseConfiguration { - fn default() -> Self { - Self { - disable_connection: false, - host: "127.0.0.1".to_string(), - port: 5432, - username: "postgres".to_string(), - password: "postgres".to_string(), - database: "postgres".to_string(), - allow_statement_executions_against: Default::default(), - conn_timeout_secs: 10, - } - } -} diff --git a/crates/pgt_configuration/src/diagnostics.rs b/crates/pgt_configuration/src/diagnostics.rs deleted file mode 100644 index dc835ed7..00000000 --- a/crates/pgt_configuration/src/diagnostics.rs +++ /dev/null @@ -1,170 +0,0 @@ -use pgt_console::fmt::Display; -use pgt_console::{MarkupBuf, markup}; -use pgt_diagnostics::{Advices, Diagnostic, Error, LogCategory, MessageAndDescription, Visit}; -use serde::{Deserialize, Serialize}; -use std::fmt::{Debug, Formatter}; - -/// Series of errors that can be thrown while computing the configuration. -#[derive(Deserialize, Diagnostic, Serialize)] -pub enum ConfigurationDiagnostic { - /// Thrown when the program can't serialize the configuration, while saving it - SerializationError(SerializationError), - - /// Error thrown when de-serialising the configuration from file - DeserializationError(DeserializationError), - - /// Thrown when trying to **create** a new configuration file, but it exists already - ConfigAlreadyExists(ConfigAlreadyExists), - - /// When something is wrong with the configuration - InvalidConfiguration(InvalidConfiguration), - - /// Thrown when the pattern inside the `ignore` field errors - InvalidIgnorePattern(InvalidIgnorePattern), -} - -impl ConfigurationDiagnostic { - pub fn new_deserialization_error(error: serde_json::Error) -> Self { - Self::DeserializationError(DeserializationError { - message: error.to_string(), - }) - } - - pub fn new_serialization_error() -> Self { - Self::SerializationError(SerializationError) - } - - pub fn new_invalid_ignore_pattern( - pattern: impl Into, - reason: impl Into, - ) -> Self { - Self::InvalidIgnorePattern(InvalidIgnorePattern { - message: format!( - "Couldn't parse the pattern \"{}\". Reason: {}", - pattern.into(), - reason.into() - ), - file_path: None, - }) - } - - pub fn new_invalid_ignore_pattern_with_path( - pattern: impl Into, - reason: impl Into, - file_path: Option>, - ) -> Self { - Self::InvalidIgnorePattern(InvalidIgnorePattern { - message: format!( - "Couldn't parse the pattern \"{}\". Reason: {}", - pattern.into(), - reason.into() - ), - file_path: file_path.map(|f| f.into()), - }) - } - - pub fn new_already_exists() -> Self { - Self::ConfigAlreadyExists(ConfigAlreadyExists {}) - } - - pub fn invalid_configuration(message: impl Display) -> Self { - Self::InvalidConfiguration(InvalidConfiguration { - message: MessageAndDescription::from(markup! 
diff --git a/crates/pgt_configuration/src/diagnostics.rs b/crates/pgt_configuration/src/diagnostics.rs
deleted file mode 100644
index dc835ed7..00000000
--- a/crates/pgt_configuration/src/diagnostics.rs
+++ /dev/null
@@ -1,170 +0,0 @@
-use pgt_console::fmt::Display;
-use pgt_console::{MarkupBuf, markup};
-use pgt_diagnostics::{Advices, Diagnostic, Error, LogCategory, MessageAndDescription, Visit};
-use serde::{Deserialize, Serialize};
-use std::fmt::{Debug, Formatter};
-
-/// Series of errors that can be thrown while computing the configuration.
-#[derive(Deserialize, Diagnostic, Serialize)]
-pub enum ConfigurationDiagnostic {
-    /// Thrown when the program can't serialize the configuration, while saving it
-    SerializationError(SerializationError),
-
-    /// Error thrown when de-serialising the configuration from file
-    DeserializationError(DeserializationError),
-
-    /// Thrown when trying to **create** a new configuration file, but it exists already
-    ConfigAlreadyExists(ConfigAlreadyExists),
-
-    /// When something is wrong with the configuration
-    InvalidConfiguration(InvalidConfiguration),
-
-    /// Thrown when the pattern inside the `ignore` field errors
-    InvalidIgnorePattern(InvalidIgnorePattern),
-}
-
-impl ConfigurationDiagnostic {
-    pub fn new_deserialization_error(error: serde_json::Error) -> Self {
-        Self::DeserializationError(DeserializationError {
-            message: error.to_string(),
-        })
-    }
-
-    pub fn new_serialization_error() -> Self {
-        Self::SerializationError(SerializationError)
-    }
-
-    pub fn new_invalid_ignore_pattern(
-        pattern: impl Into<String>,
-        reason: impl Into<String>,
-    ) -> Self {
-        Self::InvalidIgnorePattern(InvalidIgnorePattern {
-            message: format!(
-                "Couldn't parse the pattern \"{}\". Reason: {}",
-                pattern.into(),
-                reason.into()
-            ),
-            file_path: None,
-        })
-    }
-
-    pub fn new_invalid_ignore_pattern_with_path(
-        pattern: impl Into<String>,
-        reason: impl Into<String>,
-        file_path: Option<impl Into<String>>,
-    ) -> Self {
-        Self::InvalidIgnorePattern(InvalidIgnorePattern {
-            message: format!(
-                "Couldn't parse the pattern \"{}\". Reason: {}",
-                pattern.into(),
-                reason.into()
-            ),
-            file_path: file_path.map(|f| f.into()),
-        })
-    }
-
-    pub fn new_already_exists() -> Self {
-        Self::ConfigAlreadyExists(ConfigAlreadyExists {})
-    }
-
-    pub fn invalid_configuration(message: impl Display) -> Self {
-        Self::InvalidConfiguration(InvalidConfiguration {
-            message: MessageAndDescription::from(markup! {{message}}.to_owned()),
-        })
-    }
-}
-
-impl Debug for ConfigurationDiagnostic {
-    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
-        std::fmt::Display::fmt(self, f)
-    }
-}
-
-impl std::fmt::Display for ConfigurationDiagnostic {
-    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
-        self.description(f)
-    }
-}
-
-#[derive(Debug, Serialize, Deserialize, Default)]
-pub struct ConfigurationAdvices {
-    messages: Vec<MarkupBuf>,
-}
-
-impl Advices for ConfigurationAdvices {
-    fn record(&self, visitor: &mut dyn Visit) -> std::io::Result<()> {
-        for message in &self.messages {
-            visitor.record_log(LogCategory::Info, message)?;
-        }
-
-        Ok(())
-    }
-}
-
-#[derive(Debug, Serialize, Deserialize, Diagnostic)]
-#[diagnostic(
-    message = "Failed to deserialize",
-    category = "configuration",
-    severity = Error
-)]
-pub struct DeserializationError {
-    #[message]
-    #[description]
-    pub message: String,
-}
-
-#[derive(Debug, Serialize, Deserialize, Diagnostic)]
-#[diagnostic(
-    message = "Failed to serialize",
-    category = "configuration",
-    severity = Error
-)]
-pub struct SerializationError;
-
-#[derive(Debug, Serialize, Deserialize, Diagnostic)]
-#[diagnostic(
-    message = "It seems that a configuration file already exists",
-    category = "configuration",
-    severity = Error
-)]
-pub struct ConfigAlreadyExists {}
-
-#[derive(Debug, Serialize, Deserialize, Diagnostic)]
-#[diagnostic(
-    category = "configuration",
-    severity = Error,
-)]
-pub struct InvalidIgnorePattern {
-    #[message]
-    #[description]
-    pub message: String,
-
-    #[location(resource)]
-    pub file_path: Option<String>,
-}
-
-#[derive(Debug, Serialize, Deserialize, Diagnostic)]
-#[diagnostic(
-    category = "configuration",
-    severity = Error,
-)]
-pub struct InvalidConfiguration {
-    #[message]
-    #[description]
-    message: MessageAndDescription,
-}
-
-#[derive(Debug, Serialize, Deserialize, Diagnostic)]
-#[diagnostic(
-    category = "configuration",
-    severity = Error,
-)]
-pub struct CantResolve {
-    #[message]
-    #[description]
-    message: MessageAndDescription,
-
-    #[serde(skip)]
-    #[source]
-    source: Option<Error>,
-}
diff --git a/crates/pgt_configuration/src/files.rs b/crates/pgt_configuration/src/files.rs
deleted file mode 100644
index c8e3cde2..00000000
--- a/crates/pgt_configuration/src/files.rs
+++ /dev/null
@@ -1,43 +0,0 @@
-use std::num::NonZeroU64;
-
-use biome_deserialize::StringSet;
-use biome_deserialize_macros::{Merge, Partial};
-use bpaf::Bpaf;
-use serde::{Deserialize, Serialize};
-
-/// Limit the size of files to 1.0 MiB by default
-pub const DEFAULT_FILE_SIZE_LIMIT: NonZeroU64 =
-    // SAFETY: This constant is initialized with a non-zero value
-    NonZeroU64::new(1024 * 1024).unwrap();
-
-/// The configuration of the filesystem
-#[derive(Clone, Debug, Deserialize, Eq, Partial, PartialEq, Serialize)]
-#[partial(derive(Bpaf, Clone, Eq, PartialEq, Merge))]
-#[partial(cfg_attr(feature = "schema", derive(schemars::JsonSchema)))]
-#[partial(serde(rename_all = "camelCase", default, deny_unknown_fields))]
-pub struct FilesConfiguration {
-    /// The maximum allowed size for source code files in bytes. Files above
-    /// this limit will be ignored for performance reasons. Defaults to 1 MiB
-    #[partial(bpaf(long("files-max-size"), argument("NUMBER")))]
-    pub max_size: NonZeroU64,
-
-    /// A list of Unix shell style patterns. Will ignore files/folders that will
-    /// match these patterns.
-    #[partial(bpaf(hide))]
-    pub ignore: StringSet,
-
-    /// A list of Unix shell style patterns. Will handle only those files/folders that will
-    /// match these patterns.
- #[partial(bpaf(hide))] - pub include: StringSet, -} - -impl Default for FilesConfiguration { - fn default() -> Self { - Self { - max_size: DEFAULT_FILE_SIZE_LIMIT, - ignore: Default::default(), - include: Default::default(), - } - } -} diff --git a/crates/pgt_configuration/src/generated.rs b/crates/pgt_configuration/src/generated.rs deleted file mode 100644 index 3bae7b80..00000000 --- a/crates/pgt_configuration/src/generated.rs +++ /dev/null @@ -1,3 +0,0 @@ -mod linter; - -pub use linter::push_to_analyser_rules; diff --git a/crates/pgt_configuration/src/generated/linter.rs b/crates/pgt_configuration/src/generated/linter.rs deleted file mode 100644 index 142284f6..00000000 --- a/crates/pgt_configuration/src/generated/linter.rs +++ /dev/null @@ -1,19 +0,0 @@ -//! Generated file, do not edit by hand, see `xtask/codegen` - -use crate::analyser::linter::*; -use pgt_analyse::{AnalyserRules, MetadataRegistry}; -pub fn push_to_analyser_rules( - rules: &Rules, - metadata: &MetadataRegistry, - analyser_rules: &mut AnalyserRules, -) { - if let Some(rules) = rules.safety.as_ref() { - for rule_name in Safety::GROUP_RULES { - if let Some((_, Some(rule_options))) = rules.get_rule_configuration(rule_name) { - if let Some(rule_key) = metadata.find_rule("safety", rule_name) { - analyser_rules.push_rule(rule_key, rule_options); - } - } - } - } -} diff --git a/crates/pgt_configuration/src/lib.rs b/crates/pgt_configuration/src/lib.rs deleted file mode 100644 index fcf0b5c6..00000000 --- a/crates/pgt_configuration/src/lib.rs +++ /dev/null @@ -1,158 +0,0 @@ -//! This module contains the configuration of `postgrestools.jsonc` -//! -//! The configuration is divided by "tool", and then it's possible to further customise it -//! by language. The language might further options divided by tool. - -pub mod analyser; -pub mod database; -pub mod diagnostics; -pub mod files; -pub mod generated; -pub mod migrations; -pub mod vcs; - -pub use crate::diagnostics::ConfigurationDiagnostic; - -use std::path::PathBuf; - -pub use crate::generated::push_to_analyser_rules; -use crate::vcs::{PartialVcsConfiguration, VcsConfiguration, partial_vcs_configuration}; -pub use analyser::{ - LinterConfiguration, PartialLinterConfiguration, RuleConfiguration, RuleFixConfiguration, - RulePlainConfiguration, RuleSelector, RuleWithFixOptions, RuleWithOptions, Rules, - partial_linter_configuration, -}; -use biome_deserialize_macros::{Merge, Partial}; -use bpaf::Bpaf; -use database::{ - DatabaseConfiguration, PartialDatabaseConfiguration, partial_database_configuration, -}; -use files::{FilesConfiguration, PartialFilesConfiguration, partial_files_configuration}; -use migrations::{ - MigrationsConfiguration, PartialMigrationsConfiguration, partial_migrations_configuration, -}; -use serde::{Deserialize, Serialize}; -use vcs::VcsClientKind; - -pub const VERSION: &str = match option_env!("PGT_VERSION") { - Some(version) => version, - None => "0.0.0", -}; - -/// The configuration that is contained inside the configuration file. 
-#[derive(Clone, Debug, Default, Deserialize, Eq, Partial, PartialEq, Serialize)] -#[partial(derive(Bpaf, Clone, Eq, PartialEq, Merge))] -#[partial(cfg_attr(feature = "schema", derive(schemars::JsonSchema)))] -#[partial(serde(deny_unknown_fields, rename_all = "camelCase"))] -pub struct Configuration { - /// A field for the [JSON schema](https://json-schema.org/) specification - #[partial(serde(rename = "$schema"))] - #[partial(bpaf(hide))] - pub schema: String, - - /// The configuration of the VCS integration - #[partial(type, bpaf(external(partial_vcs_configuration), optional, hide_usage))] - pub vcs: VcsConfiguration, - - /// The configuration of the filesystem - #[partial( - type, - bpaf(external(partial_files_configuration), optional, hide_usage) - )] - pub files: FilesConfiguration, - - /// Configure migrations - #[partial( - type, - bpaf(external(partial_migrations_configuration), optional, hide_usage) - )] - pub migrations: MigrationsConfiguration, - - /// The configuration for the linter - #[partial(type, bpaf(external(partial_linter_configuration), optional))] - pub linter: LinterConfiguration, - - /// The configuration of the database connection - #[partial( - type, - bpaf(external(partial_database_configuration), optional, hide_usage) - )] - pub db: DatabaseConfiguration, -} - -impl PartialConfiguration { - /// Returns the initial configuration. - pub fn init() -> Self { - Self { - schema: Some(format!("https://pgtools.dev/schemas/{VERSION}/schema.json")), - files: Some(PartialFilesConfiguration { - ignore: Some(Default::default()), - ..Default::default() - }), - migrations: None, - vcs: Some(PartialVcsConfiguration { - enabled: Some(false), - client_kind: Some(VcsClientKind::Git), - use_ignore_file: Some(false), - ..Default::default() - }), - linter: Some(PartialLinterConfiguration { - enabled: Some(true), - rules: Some(Rules { - recommended: Some(true), - ..Default::default() - }), - ..Default::default() - }), - db: Some(PartialDatabaseConfiguration { - host: Some("127.0.0.1".to_string()), - port: Some(5432), - username: Some("postgres".to_string()), - password: Some("postgres".to_string()), - database: Some("postgres".to_string()), - allow_statement_executions_against: Default::default(), - conn_timeout_secs: Some(10), - disable_connection: Some(false), - }), - } - } -} - -pub struct ConfigurationPayload { - /// The result of the deserialization - pub deserialized: PartialConfiguration, - /// The path of where the configuration file that was found. This contains the file name. - pub configuration_file_path: PathBuf, - /// The base path where the external configuration in a package should be resolved from - pub external_resolution_base_path: PathBuf, -} - -#[derive(Debug, Default, PartialEq, Clone)] -pub enum ConfigurationPathHint { - /// The default mode, not having a configuration file is not an error. - /// The path will be filled with the working directory if it is not filled at the time of usage. - #[default] - None, - - /// Very similar to [ConfigurationPathHint::None]. However, the path provided by this variant - /// will be used as **working directory**, which means that all globs defined in the configuration - /// will use **this path** as base path. - FromWorkspace(PathBuf), - - /// The configuration path provided by the LSP, not having a configuration file is not an error. - /// The path will always be a directory path. - FromLsp(PathBuf), - /// The configuration path provided by the user, not having a configuration file is an error. 
- /// The path can either be a directory path or a file path. - /// Throws any kind of I/O errors. - FromUser(PathBuf), -} - -impl ConfigurationPathHint { - pub const fn is_from_user(&self) -> bool { - matches!(self, Self::FromUser(_)) - } - pub const fn is_from_lsp(&self) -> bool { - matches!(self, Self::FromLsp(_)) - } -} diff --git a/crates/pgt_configuration/src/migrations.rs b/crates/pgt_configuration/src/migrations.rs deleted file mode 100644 index c134e1a4..00000000 --- a/crates/pgt_configuration/src/migrations.rs +++ /dev/null @@ -1,18 +0,0 @@ -use biome_deserialize_macros::{Merge, Partial}; -use bpaf::Bpaf; -use serde::{Deserialize, Serialize}; - -/// The configuration of the filesystem -#[derive(Clone, Debug, Deserialize, Eq, Partial, PartialEq, Serialize, Default)] -#[partial(derive(Bpaf, Clone, Eq, PartialEq, Merge))] -#[partial(serde(rename_all = "camelCase", default, deny_unknown_fields))] -#[partial(cfg_attr(feature = "schema", derive(schemars::JsonSchema)))] -pub struct MigrationsConfiguration { - /// The directory where the migration files are stored - #[partial(bpaf(long("migrations-dir")))] - pub migrations_dir: String, - - /// Ignore any migrations before this timestamp - #[partial(bpaf(long("after")))] - pub after: u64, -} diff --git a/crates/pgt_configuration/src/vcs.rs b/crates/pgt_configuration/src/vcs.rs deleted file mode 100644 index b665a36f..00000000 --- a/crates/pgt_configuration/src/vcs.rs +++ /dev/null @@ -1,118 +0,0 @@ -use biome_deserialize::{DeserializableValidator, DeserializationDiagnostic}; -use biome_deserialize_macros::{Deserializable, Merge, Partial}; -use bpaf::Bpaf; -use serde::{Deserialize, Serialize}; -use std::str::FromStr; - -const GIT_IGNORE_FILE_NAME: &str = ".gitignore"; - -/// Set of properties to integrate with a VCS software. -#[derive(Clone, Debug, Deserialize, Eq, Partial, PartialEq, Serialize)] -#[partial(derive(Bpaf, Clone, Deserializable, Eq, Merge, PartialEq))] -#[partial(deserializable(with_validator))] -#[partial(cfg_attr(feature = "schema", derive(schemars::JsonSchema)))] -#[partial(serde(deny_unknown_fields, rename_all = "camelCase"))] -pub struct VcsConfiguration { - /// Whether we should integrate itself with the VCS client - #[partial(bpaf(long("vcs-enabled"), argument("true|false")))] - pub enabled: bool, - - /// The kind of client. - #[partial(bpaf(long("vcs-client-kind"), argument("git"), optional))] - #[partial(deserializable(bail_on_error))] - pub client_kind: VcsClientKind, - - /// Whether we should use the VCS ignore file. When [true], we will ignore the files - /// specified in the ignore file. - #[partial(bpaf(long("vcs-use-ignore-file"), argument("true|false")))] - pub use_ignore_file: bool, - - /// The folder where we should check for VCS files. By default, we will use the same - /// folder where `postgrestools.jsonc` was found. - /// - /// If we can't find the configuration, it will attempt to use the current working directory. 
- /// If no current working directory can't be found, we won't use the VCS integration, and a diagnostic - /// will be emitted - #[partial(bpaf(long("vcs-root"), argument("PATH"), optional))] - pub root: String, - - /// The main branch of the project - #[partial(bpaf(long("vcs-default-branch"), argument("BRANCH"), optional))] - pub default_branch: String, -} - -impl Default for VcsConfiguration { - fn default() -> Self { - Self { - client_kind: VcsClientKind::Git, - enabled: false, - use_ignore_file: true, - root: Default::default(), - default_branch: Default::default(), - } - } -} - -impl PartialVcsConfiguration { - pub const fn is_enabled(&self) -> bool { - matches!(self.enabled, Some(true)) - } - pub const fn is_disabled(&self) -> bool { - !self.is_enabled() - } - pub const fn ignore_file_disabled(&self) -> bool { - matches!(self.use_ignore_file, Some(false)) - } -} - -impl DeserializableValidator for PartialVcsConfiguration { - fn validate( - &mut self, - _name: &str, - range: biome_deserialize::TextRange, - diagnostics: &mut Vec, - ) -> bool { - if self.client_kind.is_none() && self.is_enabled() { - diagnostics.push( - DeserializationDiagnostic::new( - "You enabled the VCS integration, but you didn't specify a client.", - ) - .with_range(range) - .with_note("We will disable the VCS integration until the issue is fixed."), - ); - return false; - } - - true - } -} - -#[derive( - Clone, Copy, Debug, Default, Deserialize, Deserializable, Eq, Merge, PartialEq, Serialize, -)] -#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] -#[serde(rename_all = "camelCase")] -pub enum VcsClientKind { - #[default] - /// Integration with the git client as VCS - Git, -} - -impl VcsClientKind { - pub const fn ignore_file(&self) -> &'static str { - match self { - VcsClientKind::Git => GIT_IGNORE_FILE_NAME, - } - } -} - -impl FromStr for VcsClientKind { - type Err = &'static str; - - fn from_str(s: &str) -> Result { - match s { - "git" => Ok(Self::Git), - _ => Err("Value not supported for VcsClientKind"), - } - } -} diff --git a/crates/pgt_console/Cargo.toml b/crates/pgt_console/Cargo.toml deleted file mode 100644 index 174a70ea..00000000 --- a/crates/pgt_console/Cargo.toml +++ /dev/null @@ -1,32 +0,0 @@ -[package] -authors.workspace = true -categories.workspace = true -description = "" -edition.workspace = true -homepage.workspace = true -keywords.workspace = true -license.workspace = true -name = "pgt_console" -repository.workspace = true -version = "0.0.0" - - -[dependencies] -pgt_markup = { workspace = true } -pgt_text_size = { workspace = true } - -schemars = { workspace = true, optional = true } -serde = { workspace = true, optional = true, features = ["derive"] } -termcolor = { workspace = true } -unicode-segmentation = "1.12.0" -unicode-width = { workspace = true } - -[dev-dependencies] -trybuild = "1.0.99" - -[features] -schema = ["dep:schemars", "pgt_text_size/schema"] -serde = ["dep:serde"] - -[lib] -doctest = false diff --git a/crates/pgt_console/README.md b/crates/pgt_console/README.md deleted file mode 100644 index bfaca8ce..00000000 --- a/crates/pgt_console/README.md +++ /dev/null @@ -1,8 +0,0 @@ -# `pgt_console` - -The crate contains a general abstraction over printing messages (formatted with markup) and diagnostics to a console. - -## Acknowledgement - -This crate was initially forked from [biome](https://github.com/biomejs/biome). 
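With the crate's public pieces in view below (the `markup!` re-export, `EnvConsole`, and the `ConsoleExt` helpers), here is a minimal usage sketch. The tag syntax inside `markup!` is assumed from the crate's biome heritage, and the messages are invented for illustration:

```rust
use pgt_console::{markup, ColorMode, ConsoleExt, EnvConsole};

fn main() {
    // EnvConsole writes Log-level output to stdout and Error-level output
    // to stderr (see lib.rs below).
    let mut console = EnvConsole::new(ColorMode::Auto);

    console.log(markup! {
        <Info>"checked "</Info><Emphasis>"42"</Emphasis><Info>" files"</Info>
    });
    console.error(markup! {
        <Error>"connection to the database failed"</Error>
    });
}
```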
- diff --git a/crates/pgt_console/src/fmt.rs b/crates/pgt_console/src/fmt.rs deleted file mode 100644 index 82e82157..00000000 --- a/crates/pgt_console/src/fmt.rs +++ /dev/null @@ -1,299 +0,0 @@ -use std::{borrow::Cow, fmt, io, time::Duration}; - -pub use crate::write::{HTML, Termcolor, Write}; -use crate::{Markup, MarkupElement, markup}; - -/// A stack-allocated linked-list of [MarkupElement] slices -#[derive(Clone, Copy)] -pub enum MarkupElements<'a> { - Root, - Node(&'a Self, &'a [MarkupElement<'a>]), -} - -impl<'a> MarkupElements<'a> { - /// Iterates on all the element slices depth-first - pub fn for_each( - &self, - func: &mut impl FnMut(&'a [MarkupElement]) -> io::Result<()>, - ) -> io::Result<()> { - if let Self::Node(parent, elem) = self { - parent.for_each(func)?; - func(elem)?; - } - - Ok(()) - } - - /// Iterates on all the element slices breadth-first - pub fn for_each_rev( - &self, - func: &mut impl FnMut(&'a [MarkupElement]) -> io::Result<()>, - ) -> io::Result<()> { - if let Self::Node(parent, elem) = self { - func(elem)?; - parent.for_each(func)?; - } - - Ok(()) - } -} - -/// The [Formatter] is the `pgt_console` equivalent to [std::fmt::Formatter]: -/// it's never constructed directly by consumers, and can only be used through -/// the mutable reference passed to implementations of the [Display] trait). -/// It manages the state of the markup to print, and implementations of -/// [Display] can call into its methods to append content into the current -/// printing session -pub struct Formatter<'fmt> { - /// Stack of markup elements currently applied to the text being printed - state: MarkupElements<'fmt>, - /// Inner IO writer this [Formatter] will print text into - writer: &'fmt mut dyn Write, -} - -impl<'fmt> Formatter<'fmt> { - /// Create a new instance of the [Formatter] using the provided `writer` for printing - pub fn new(writer: &'fmt mut dyn Write) -> Self { - Self { - state: MarkupElements::Root, - writer, - } - } - - pub fn wrap_writer<'b: 'c, 'c>( - &'b mut self, - wrap: impl FnOnce(&'b mut dyn Write) -> &'c mut dyn Write, - ) -> Formatter<'c> { - Formatter { - state: self.state, - writer: wrap(self.writer), - } - } - - /// Return a new instance of the [Formatter] with `elements` appended to its element stack - fn with_elements<'b>(&'b mut self, elements: &'b [MarkupElement]) -> Formatter<'b> { - Formatter { - state: MarkupElements::Node(&self.state, elements), - writer: self.writer, - } - } - - /// Write a piece of markup into this formatter - pub fn write_markup(&mut self, markup: Markup) -> io::Result<()> { - for node in markup.0 { - let mut fmt = self.with_elements(node.elements); - node.content.fmt(&mut fmt)?; - } - - Ok(()) - } - - /// Write a slice of text into this formatter - pub fn write_str(&mut self, content: &str) -> io::Result<()> { - self.writer.write_str(&self.state, content) - } - - /// Write formatted text into this formatter - pub fn write_fmt(&mut self, content: fmt::Arguments) -> io::Result<()> { - self.writer.write_fmt(&self.state, content) - } -} - -/// Formatting trait for types to be displayed as markup, the `pgt_console` -/// equivalent to [std::fmt::Display] -/// -/// # Example -/// Implementing `Display` on a custom struct -/// ``` -/// use pgt_console::{ -/// fmt::{Display, Formatter}, -/// markup, -/// }; -/// use std::io; -/// -/// struct Warning(String); -/// -/// impl Display for Warning { -/// fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> { -/// fmt.write_markup(markup! 
{ -/// {self.0} -/// }) -/// } -/// } -/// -/// let warning = Warning(String::from("content")); -/// markup! { -/// {warning} -/// }; -/// ``` -pub trait Display { - fn fmt(&self, fmt: &mut Formatter) -> io::Result<()>; -} - -// Blanket implementations of Display for reference types -impl Display for &T -where - T: Display + ?Sized, -{ - fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> { - T::fmt(self, fmt) - } -} - -impl Display for Cow<'_, T> -where - T: Display + ToOwned + ?Sized, -{ - fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> { - T::fmt(self, fmt) - } -} - -// Simple implementations of Display calling through to write_str for types -// that implement Deref -impl Display for str { - fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> { - fmt.write_str(self) - } -} - -impl Display for String { - fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> { - fmt.write_str(self) - } -} - -// Implement Display for Markup and Rust format Arguments -impl Display for Markup<'_> { - fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> { - fmt.write_markup(*self) - } -} - -impl Display for std::fmt::Arguments<'_> { - fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> { - fmt.write_fmt(*self) - } -} - -/// Implement [Display] for types that implement [std::fmt::Display] by calling -/// through to [Formatter::write_fmt] -macro_rules! impl_std_display { - ($ty:ty) => { - impl Display for $ty { - fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> { - write!(fmt, "{self}") - } - } - }; -} - -impl_std_display!(char); -impl_std_display!(i8); -impl_std_display!(i16); -impl_std_display!(i32); -impl_std_display!(i64); -impl_std_display!(i128); -impl_std_display!(isize); -impl_std_display!(u8); -impl_std_display!(u16); -impl_std_display!(u32); -impl_std_display!(u64); -impl_std_display!(u128); -impl_std_display!(usize); - -impl Display for Duration { - fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> { - use crate as pgt_console; - - let secs = self.as_secs(); - if secs > 1 { - return fmt.write_markup(markup! { - {secs}"s" - }); - } - - let millis = self.as_millis(); - if millis > 1 { - return fmt.write_markup(markup! { - {millis}"ms" - }); - } - - let micros = self.as_micros(); - if micros > 1 { - return fmt.write_markup(markup! { - {micros}"µs" - }); - } - - let nanos = self.as_nanos(); - fmt.write_markup(markup! 
{ - {nanos}"ns" - }) - } -} - -#[repr(transparent)] -#[derive(Clone, Copy, Debug)] -pub struct Bytes(pub usize); - -impl std::fmt::Display for Bytes { - fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { - let Self(mut value) = *self; - - if value < 1024 { - return write!(fmt, "{value} B"); - } - - const PREFIX: [char; 4] = ['K', 'M', 'G', 'T']; - let prefix = PREFIX - .into_iter() - .find(|_| { - let next_value = value / 1024; - if next_value < 1024 { - return true; - } - - value = next_value; - false - }) - .unwrap_or('T'); - - write!(fmt, "{:.1} {prefix}iB", value as f32 / 1024.0) - } -} - -impl Display for Bytes { - fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> { - write!(fmt, "{self}") - } -} - -#[cfg(test)] -mod tests { - use crate::fmt::Bytes; - - #[test] - fn display_bytes() { - // Examples taken from https://stackoverflow.com/a/3758880 - assert_eq!(Bytes(0).to_string(), "0 B"); - assert_eq!(Bytes(27).to_string(), "27 B"); - assert_eq!(Bytes(999).to_string(), "999 B"); - assert_eq!(Bytes(1_000).to_string(), "1000 B"); - assert_eq!(Bytes(1_023).to_string(), "1023 B"); - assert_eq!(Bytes(1_024).to_string(), "1.0 KiB"); - assert_eq!(Bytes(1_728).to_string(), "1.7 KiB"); - assert_eq!(Bytes(110_592).to_string(), "108.0 KiB"); - assert_eq!(Bytes(999_999).to_string(), "976.6 KiB"); - assert_eq!(Bytes(7_077_888).to_string(), "6.8 MiB"); - assert_eq!(Bytes(452_984_832).to_string(), "432.0 MiB"); - assert_eq!(Bytes(28_991_029_248).to_string(), "27.0 GiB"); - assert_eq!(Bytes(1_855_425_871_872).to_string(), "1.7 TiB"); - - #[cfg(target_pointer_width = "32")] - assert_eq!(Bytes(usize::MAX).to_string(), "4.0 GiB"); - #[cfg(target_pointer_width = "64")] - assert_eq!(Bytes(usize::MAX).to_string(), "16384.0 TiB"); - } -} diff --git a/crates/pgt_console/src/lib.rs b/crates/pgt_console/src/lib.rs deleted file mode 100644 index 50498a03..00000000 --- a/crates/pgt_console/src/lib.rs +++ /dev/null @@ -1,230 +0,0 @@ -//! # pgt_console - -use std::io; -use std::io::{IsTerminal, Read, Write}; -use std::panic::RefUnwindSafe; -use termcolor::{ColorChoice, StandardStream}; -use write::Termcolor; - -pub mod fmt; -mod markup; -mod utils; -mod write; - -pub use self::markup::{Markup, MarkupBuf, MarkupElement, MarkupNode}; -pub use pgt_markup::markup; -pub use utils::*; - -/// Determines the "output stream" a message should get printed to -#[derive(Clone, Copy, Debug, PartialEq, Eq)] -pub enum LogLevel { - /// Print the message to the `Error` stream of the console, for instance - /// "stderr" for the [EnvConsole] - Error, - /// Print the message to the `Log` stream of the console, for instance - /// "stdout" for the [EnvConsole] - Log, -} - -/// Generic abstraction over printing markup and diagnostics to an output, -/// which can be a terminal, a file, a memory buffer ... -pub trait Console: Send + Sync + RefUnwindSafe { - /// Prints a message (formatted using [markup!]) to the console. - /// - /// It adds a new line at the end. - fn println(&mut self, level: LogLevel, args: Markup); - - /// Prints a message (formatted using [markup!]) to the console. 
-    fn print(&mut self, level: LogLevel, args: Markup);
-
-    /// It reads from a source, and if this source contains something, it's converted into a [String]
-    fn read(&mut self) -> Option<String>;
-}
-
-/// Extension trait for [Console] providing convenience printing methods
-pub trait ConsoleExt: Console {
-    /// Prints a piece of markup with level [LogLevel::Error]
-    fn error(&mut self, args: Markup);
-
-    /// Prints a piece of markup with level [LogLevel::Log]
-    ///
-    /// Logs a message, adds a new line at the end.
-    fn log(&mut self, args: Markup);
-
-    /// Prints a piece of markup with level [LogLevel::Log]
-    ///
-    /// It doesn't add any line
-    fn append(&mut self, args: Markup);
-}
-
-impl<T: Console + ?Sized> ConsoleExt for T {
-    fn error(&mut self, args: Markup) {
-        self.println(LogLevel::Error, args);
-    }
-
-    fn log(&mut self, args: Markup) {
-        self.println(LogLevel::Log, args);
-    }
-
-    fn append(&mut self, args: Markup) {
-        self.print(LogLevel::Log, args);
-    }
-}
-
-/// Implementation of [Console] printing messages to the standard output and standard error
-pub struct EnvConsole {
-    /// Channel to print messages
-    out: StandardStream,
-    /// Channel to print errors
-    err: StandardStream,
-    /// Channel to read arbitrary input
-    r#in: io::Stdin,
-}
-
-#[derive(Debug, Clone)]
-pub enum ColorMode {
-    /// Always print color using either ANSI or the Windows Console API
-    Enabled,
-    /// Never print colors
-    Disabled,
-    /// Print colors if stdout / stderr are determined to be TTY / Console
-    /// streams, and the `TERM=dumb` and `NO_COLOR` environment variables are
-    /// not set
-    Auto,
-}
-
-impl EnvConsole {
-    fn compute_color(colors: ColorMode) -> (ColorChoice, ColorChoice) {
-        match colors {
-            ColorMode::Enabled => (ColorChoice::Always, ColorChoice::Always),
-            ColorMode::Disabled => (ColorChoice::Never, ColorChoice::Never),
-            ColorMode::Auto => {
-                let stdout = if io::stdout().is_terminal() {
-                    ColorChoice::Auto
-                } else {
-                    ColorChoice::Never
-                };
-
-                let stderr = if io::stderr().is_terminal() {
-                    ColorChoice::Auto
-                } else {
-                    ColorChoice::Never
-                };
-
-                (stdout, stderr)
-            }
-        }
-    }
-
-    pub fn new(colors: ColorMode) -> Self {
-        let (out_mode, err_mode) = Self::compute_color(colors);
-
-        Self {
-            out: StandardStream::stdout(out_mode),
-            err: StandardStream::stderr(err_mode),
-            r#in: io::stdin(),
-        }
-    }
-
-    pub fn set_color(&mut self, colors: ColorMode) {
-        let (out_mode, err_mode) = Self::compute_color(colors);
-        self.out = StandardStream::stdout(out_mode);
-        self.err = StandardStream::stderr(err_mode);
-    }
-}
-
-impl Default for EnvConsole {
-    fn default() -> Self {
-        Self::new(ColorMode::Auto)
-    }
-}
-
-impl Console for EnvConsole {
-    fn println(&mut self, level: LogLevel, args: Markup) {
-        let mut out = match level {
-            LogLevel::Error => self.err.lock(),
-            LogLevel::Log => self.out.lock(),
-        };
-
-        fmt::Formatter::new(&mut Termcolor(&mut out))
-            .write_markup(args)
-            .unwrap();
-
-        writeln!(out).unwrap();
-    }
-
-    fn print(&mut self, level: LogLevel, args: Markup) {
-        let mut out = match level {
-            LogLevel::Error => self.err.lock(),
-            LogLevel::Log => self.out.lock(),
-        };
-
-        fmt::Formatter::new(&mut Termcolor(&mut out))
-            .write_markup(args)
-            .unwrap();
-
-        write!(out, "").unwrap();
-    }
-
-    fn read(&mut self) -> Option<String> {
-        // Here we check if stdin is redirected. If not, we bail.
-        //
-        // Doing this check allows us to pipe stdin to rome, without expecting
-        // user content when we call `read_to_string`
-        if io::stdin().is_terminal() {
-            return None;
-        }
-        let mut handle = self.r#in.lock();
-        let mut buffer = String::new();
-        let result = handle.read_to_string(&mut buffer);
-        // Skipping the error for now
-        if result.is_ok() { Some(buffer) } else { None }
-    }
-}
-
-/// Implementation of [Console] storing all printed messages to a memory buffer
-#[derive(Default, Debug)]
-pub struct BufferConsole {
-    pub out_buffer: Vec<Message>,
-    pub in_buffer: Vec<String>,
-    pub print_json: bool,
-}
-
-impl BufferConsole {
-    pub fn with_json(mut self) -> Self {
-        self.print_json = true;
-        self
-    }
-}
-
-/// Individual message entry printed to a [BufferConsole]
-#[derive(Debug)]
-pub struct Message {
-    pub level: LogLevel,
-    pub content: MarkupBuf,
-}
-
-impl Console for BufferConsole {
-    fn println(&mut self, level: LogLevel, args: Markup) {
-        self.out_buffer.push(Message {
-            level,
-            content: args.to_owned(),
-        });
-    }
-
-    fn print(&mut self, level: LogLevel, args: Markup) {
-        self.out_buffer.push(Message {
-            level,
-            content: args.to_owned(),
-        });
-    }
-    fn read(&mut self) -> Option<String> {
-        if self.in_buffer.is_empty() {
-            None
-        } else {
-            // For the time being we simply return the first message, as we
-            // don't have a particular use case for multiple prompts.
-            Some(self.in_buffer[0].clone())
-        }
-    }
-}
diff --git a/crates/pgt_console/src/markup.rs b/crates/pgt_console/src/markup.rs
deleted file mode 100644
index 59cfb6f0..00000000
--- a/crates/pgt_console/src/markup.rs
+++ /dev/null
@@ -1,267 +0,0 @@
-use std::{
-    borrow::Cow,
-    fmt::{self, Debug},
-    io,
-};
-
-use pgt_text_size::TextSize;
-use termcolor::{Color, ColorSpec};
-
-use crate::fmt::{Display, Formatter, MarkupElements, Write};
-
-/// Enumeration of all the supported markup elements
-#[derive(Clone, Debug, PartialEq, Eq, Hash)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
-pub enum MarkupElement<'fmt> {
-    Emphasis,
-    Dim,
-    Italic,
-    Underline,
-    Error,
-    Success,
-    Warn,
-    Info,
-    Debug,
-    Trace,
-    Inverse,
-    Hyperlink { href: Cow<'fmt, str> },
-}
-
-impl fmt::Display for MarkupElement<'_> {
-    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
-        if let Self::Hyperlink { href } = self {
-            if fmt.alternate() {
-                write!(fmt, "Hyperlink href={:?}", href.as_ref())
-            } else {
-                fmt.write_str("Hyperlink")
-            }
-        } else {
-            write!(fmt, "{self:?}")
-        }
-    }
-}
-
-impl MarkupElement<'_> {
-    /// Mutate a [ColorSpec] object in place to apply this element's associated
-    /// style to it
-    pub(crate) fn update_color(&self, color: &mut ColorSpec) {
-        match self {
-            // Text Styles
-            MarkupElement::Emphasis => {
-                color.set_bold(true);
-            }
-            MarkupElement::Dim => {
-                color.set_dimmed(true);
-            }
-            MarkupElement::Italic => {
-                color.set_italic(true);
-            }
-            MarkupElement::Underline => {
-                color.set_underline(true);
-            }
-
-            // Text Colors
-            MarkupElement::Error => {
-                color.set_fg(Some(Color::Red));
-            }
-            MarkupElement::Success => {
-                color.set_fg(Some(Color::Green));
-            }
-            MarkupElement::Warn => {
-                color.set_fg(Some(Color::Yellow));
-            }
-            MarkupElement::Trace => {
-                color.set_fg(Some(Color::Magenta));
-            }
-            MarkupElement::Info | MarkupElement::Debug => {
-                // Blue is really difficult to see on the standard windows command line
-                #[cfg(windows)]
-                const BLUE: Color = Color::Cyan;
-                #[cfg(not(windows))]
-                const BLUE: Color = Color::Blue;
-
                color.set_fg(Some(BLUE));
-            }
-
-            MarkupElement::Inverse | MarkupElement::Hyperlink { .. } => {}
-        }
-    }
-
-    fn to_owned(&self) -> MarkupElement<'static> {
-        match self {
-            MarkupElement::Emphasis => MarkupElement::Emphasis,
-            MarkupElement::Dim => MarkupElement::Dim,
-            MarkupElement::Italic => MarkupElement::Italic,
-            MarkupElement::Underline => MarkupElement::Underline,
-            MarkupElement::Error => MarkupElement::Error,
-            MarkupElement::Success => MarkupElement::Success,
-            MarkupElement::Warn => MarkupElement::Warn,
-            MarkupElement::Info => MarkupElement::Info,
-            MarkupElement::Debug => MarkupElement::Debug,
-            MarkupElement::Trace => MarkupElement::Trace,
-            MarkupElement::Inverse => MarkupElement::Inverse,
-            MarkupElement::Hyperlink { href } => MarkupElement::Hyperlink {
-                href: Cow::Owned(match href {
-                    Cow::Borrowed(href) => (*href).to_string(),
-                    Cow::Owned(href) => href.clone(),
-                }),
-            },
-        }
-    }
-}
-
-/// Implementation of a single "markup node": a piece of text with a number of
-/// associated styles applied to it
-#[derive(Copy, Clone)]
-pub struct MarkupNode<'fmt> {
-    pub elements: &'fmt [MarkupElement<'fmt>],
-    pub content: &'fmt dyn Display,
-}
-
-#[derive(Clone, PartialEq, Eq, Hash)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
-pub struct MarkupNodeBuf {
-    pub elements: Vec<MarkupElement<'static>>,
-    pub content: String,
-}
-
-impl Debug for MarkupNodeBuf {
-    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
-        for element in &self.elements {
-            write!(fmt, "<{element:#}>")?;
-        }
-
-        if fmt.alternate() {
-            let mut content = self.content.as_str();
-            while let Some(index) = content.find('\n') {
-                let (before, after) = content.split_at(index + 1);
-                if !before.is_empty() {
-                    writeln!(fmt, "{before:?}")?;
-                }
-                content = after;
-            }
-
-            if !content.is_empty() {
-                write!(fmt, "{content:?}")?;
-            }
-        } else {
-            write!(fmt, "{:?}", self.content)?;
-        }
-
-        for element in self.elements.iter().rev() {
-            write!(fmt, "</{element}>")?;
-        }
-
-        Ok(())
-    }
-}
-
-/// Root type returned by the `markup` macro: this is simply a container for a
-/// list of markup nodes
-///
-/// Text nodes are formatted lazily by storing an [fmt::Arguments] struct, this
-/// means [Markup] shares the same restriction as the values returned by
-/// [format_args] and can't be stored in a `let` binding for instance
-#[derive(Copy, Clone)]
-pub struct Markup<'fmt>(pub &'fmt [MarkupNode<'fmt>]);
-
-impl Markup<'_> {
-    pub fn to_owned(&self) -> MarkupBuf {
-        let mut result = MarkupBuf(Vec::new());
-        // SAFETY: The implementation of Write for MarkupBuf below always returns Ok
-        Formatter::new(&mut result).write_markup(*self).unwrap();
-        result
-    }
-}
-
-#[derive(Clone, Default, PartialEq, Eq, Hash)]
-#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
-#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
-pub struct MarkupBuf(pub Vec<MarkupNodeBuf>);
-
-impl MarkupBuf {
-    pub fn is_empty(&self) -> bool {
-        self.0.iter().all(|node| node.content.is_empty())
-    }
-
-    pub fn len(&self) -> TextSize {
-        self.0.iter().map(|node| TextSize::of(&node.content)).sum()
-    }
-
-    pub fn text_len(&self) -> usize {
-        self.0
-            .iter()
-            .fold(0, |acc, string| acc + string.content.len())
-    }
-}
-
-impl Write for MarkupBuf {
-    fn write_str(&mut self, elements: &MarkupElements, content: &str) -> io::Result<()> {
-        let mut styles = Vec::new();
-        elements.for_each(&mut |elements| {
-            styles.extend(elements.iter().map(MarkupElement::to_owned));
-            Ok(())
-        })?;
-
-        if let Some(last) = self.0.last_mut() {
-            if last.elements == styles {
-                last.content.push_str(content);
-                return Ok(());
-            }
-        }
-
-        self.0.push(MarkupNodeBuf {
-            elements: styles,
-            content: content.into(),
-        });
-
-        Ok(())
-    }
-
-    fn write_fmt(&mut self, elements: &MarkupElements, content: fmt::Arguments) -> io::Result<()> {
-        let mut styles = Vec::new();
-        elements.for_each(&mut |elements| {
-            styles.extend(elements.iter().map(MarkupElement::to_owned));
-            Ok(())
-        })?;
-
-        if let Some(last) = self.0.last_mut() {
-            if last.elements == styles {
-                last.content.push_str(&content.to_string());
-                return Ok(());
-            }
-        }
-
-        self.0.push(MarkupNodeBuf {
-            elements: styles,
-            content: content.to_string(),
-        });
-        Ok(())
-    }
-}
-
-impl Display for MarkupBuf {
-    fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> {
-        let nodes: Vec<_> = self
-            .0
-            .iter()
-            .map(|node| MarkupNode {
-                elements: &node.elements,
-                content: &node.content,
-            })
-            .collect();
-
-        fmt.write_markup(Markup(&nodes))
-    }
-}
-
-impl Debug for MarkupBuf {
-    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
-        for node in &self.0 {
-            Debug::fmt(node, fmt)?;
-        }
-        Ok(())
-    }
-}
diff --git a/crates/pgt_console/src/utils.rs b/crates/pgt_console/src/utils.rs
deleted file mode 100644
index 95756516..00000000
--- a/crates/pgt_console/src/utils.rs
+++ /dev/null
@@ -1,139 +0,0 @@
-use termcolor::NoColor;
-
-use crate::fmt::{Display, Formatter, Termcolor};
-use crate::{Markup, markup};
-use std::io;
-
-/// Adapter type providing a std::fmt::Display implementation for any type that
-/// implements pgt_console::fmt::Display.
-pub struct StdDisplay<T: Display>(pub T);
-
-impl<T> std::fmt::Display for StdDisplay<T>
-where
-    T: Display,
-{
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        let mut buffer: Vec<u8> = Vec::new();
-        let mut termcolor = Termcolor(NoColor::new(&mut buffer));
-        let mut formatter = Formatter::new(&mut termcolor);
-
-        self.0.fmt(&mut formatter).map_err(|_| std::fmt::Error)?;
-
-        let content = String::from_utf8(buffer).map_err(|_| std::fmt::Error)?;
-
-        f.write_str(content.as_str())
-    }
-}
-
-/// It displays a type that implements [std::fmt::Display]
-pub struct DebugDisplay<T>(pub T);
-
-impl<T> Display for DebugDisplay<T>
-where
-    T: std::fmt::Debug,
-{
-    fn fmt(&self, f: &mut Formatter<'_>) -> io::Result<()> {
-        write!(f, "{:?}", self.0)
-    }
-}
-
-/// It displays an `Option<T>`, where `T` implements [std::fmt::Display]
-pub struct DebugDisplayOption<T>(pub Option<T>);
-
-impl<T> Display for DebugDisplayOption<T>
-where
-    T: std::fmt::Debug,
-{
-    fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> {
-        use crate as pgt_console;
-
-        match &self.0 {
-            Some(value) => {
-                markup!({ DebugDisplay(value) }).fmt(fmt)?;
-            }
-            _ => {
-                markup!("unset").fmt(fmt)?;
-            }
-        }
-        Ok(())
-    }
-}
-
-/// A horizontal line with the given print width
-pub struct HorizontalLine {
-    width: usize,
-}
-
-impl HorizontalLine {
-    pub fn new(width: usize) -> Self {
-        Self { width }
-    }
-}
-
-impl Display for HorizontalLine {
-    fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> {
-        fmt.write_str(&"\u{2501}".repeat(self.width))
-    }
-}
-
-// It prints `\n`
-pub struct Softline;
-
-pub const SOFT_LINE: Softline = Softline;
-
-impl Display for Softline {
-    fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> {
-        fmt.write_str("\n")
-    }
-}
-
-// It prints `\n\n`
-pub struct Hardline;
-
-pub const HARD_LINE: Hardline = Hardline;
-
-impl Display for Hardline {
-    fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> {
-        fmt.write_str("\n\n")
-    }
-}
-
-/// It prints N 
whitespaces, where N is the `width` provided by [Padding::new] -pub struct Padding { - width: usize, -} - -impl Padding { - pub fn new(width: usize) -> Self { - Self { width } - } -} - -impl Display for Padding { - fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> { - for _ in 0..self.width { - fmt.write_str(" ")?; - } - Ok(()) - } -} - -/// It writes a pair of key-value, with the given padding -pub struct KeyValuePair<'a>(pub &'a str, pub Markup<'a>); - -impl Display for KeyValuePair<'_> { - fn fmt(&self, fmt: &mut Formatter) -> io::Result<()> { - let KeyValuePair(key, value) = self; - write!(fmt, " {key}:")?; - - let padding_width = 30usize.saturating_sub(key.len() + 1); - - for _ in 0..padding_width { - fmt.write_str(" ")?; - } - - value.fmt(fmt)?; - - fmt.write_str("\n") - } -} diff --git a/crates/pgt_console/src/write.rs b/crates/pgt_console/src/write.rs deleted file mode 100644 index a11d79d5..00000000 --- a/crates/pgt_console/src/write.rs +++ /dev/null @@ -1,13 +0,0 @@ -mod html; -mod termcolor; - -use std::{fmt, io}; - -use crate::fmt::MarkupElements; - -pub use self::{html::HTML, termcolor::Termcolor}; - -pub trait Write { - fn write_str(&mut self, elements: &MarkupElements, content: &str) -> io::Result<()>; - fn write_fmt(&mut self, elements: &MarkupElements, content: fmt::Arguments) -> io::Result<()>; -} diff --git a/crates/pgt_console/src/write/html.rs b/crates/pgt_console/src/write/html.rs deleted file mode 100644 index 88825e9f..00000000 --- a/crates/pgt_console/src/write/html.rs +++ /dev/null @@ -1,268 +0,0 @@ -use std::{ - fmt, - io::{self, Write as _}, -}; - -use crate::{MarkupElement, fmt::MarkupElements}; - -use super::Write; - -/// Adapter struct implementing [Write] over types implementing [io::Write], -/// renders markup as UTF-8 strings of HTML code -pub struct HTML(pub W, bool); - -impl HTML { - pub fn new(writer: W) -> Self { - Self(writer, false) - } - - pub fn with_mdx(mut self) -> Self { - self.1 = true; - self - } -} - -impl Write for HTML -where - W: io::Write, -{ - fn write_str(&mut self, elements: &MarkupElements, content: &str) -> io::Result<()> { - push_styles(&mut self.0, elements)?; - HtmlAdapter(&mut self.0, self.1).write_all(content.as_bytes())?; - pop_styles(&mut self.0, elements) - } - - fn write_fmt(&mut self, elements: &MarkupElements, content: fmt::Arguments) -> io::Result<()> { - push_styles(&mut self.0, elements)?; - HtmlAdapter(&mut self.0, self.1).write_fmt(content)?; - pop_styles(&mut self.0, elements) - } -} - -fn push_styles(fmt: &mut W, elements: &MarkupElements) -> io::Result<()> { - elements.for_each(&mut |styles| { - for style in styles { - match style { - MarkupElement::Emphasis => write!(fmt, "")?, - MarkupElement::Dim => write!(fmt, "")?, - MarkupElement::Italic => write!(fmt, "")?, - MarkupElement::Underline => write!(fmt, "")?, - MarkupElement::Error => write!(fmt, "")?, - MarkupElement::Success => write!(fmt, "")?, - MarkupElement::Warn => write!(fmt, "")?, - MarkupElement::Debug => write!(fmt, "")?, - MarkupElement::Info => write!(fmt, "")?, - MarkupElement::Trace => write!(fmt, "")?, - MarkupElement::Inverse => { - write!(fmt, "")? 
- } - - MarkupElement::Hyperlink { href } => write!(fmt, "<a href=\"{href}\">")?, - } - } - - Ok(()) - }) -} - -fn pop_styles<W: io::Write>(fmt: &mut W, elements: &MarkupElements) -> io::Result<()> { - elements.for_each_rev(&mut |styles| { - for style in styles.iter().rev() { - match style { - MarkupElement::Emphasis => write!(fmt, "</em>")?, - MarkupElement::Italic => write!(fmt, "</i>")?, - MarkupElement::Underline => write!(fmt, "</u>")?, - MarkupElement::Dim - | MarkupElement::Error - | MarkupElement::Success - | MarkupElement::Warn - | MarkupElement::Debug - | MarkupElement::Trace - | MarkupElement::Info - | MarkupElement::Inverse => write!(fmt, "</span>")?, - MarkupElement::Hyperlink { .. } => write!(fmt, "</a>")?, - } - } - - Ok(()) - }) -} - -/// Adapter wrapping a type implementing [io::Write]. It's responsible for: -/// - escaping HTML special characters in the written byte sequence -/// - emitting HTML line breaks (`<br />`) for newline characters -struct HtmlAdapter<W>(W, bool); - -impl<W: io::Write> HtmlAdapter<W> { - fn write_escapes(&mut self, current_byte: &u8) -> io::Result<bool> { - match *current_byte { - b'"' => self.0.write_all(b"&quot;")?, - b'&' => self.0.write_all(b"&amp;")?, - b'<' => self.0.write_all(b"&lt;")?, - b'>' => self.0.write_all(b"&gt;")?, - _ => return Ok(false), - }; - - Ok(true) - } - - fn write_mdx_escapes(&mut self, current_byte: &u8) -> io::Result<bool> { - if !self.1 { - return Ok(false); - } else { - match current_byte { - b'\n' => self.0.write_all(b"<br />")?, - b'\r' => self.0.write_all(b"<br />")?, - b'{' => self.0.write_all(b"&#123;")?, - b'}' => self.0.write_all(b"&#125;")?, - b'*' => self.0.write_all(b"&#42;")?, - b'_' => self.0.write_all(b"&#95;")?, - b'\\' => self.0.write_all(b"&#92;")?, - _ => return Ok(false), - } - } - - Ok(true) - } -} - -impl<W: io::Write> io::Write for HtmlAdapter<W> { - fn write(&mut self, buf: &[u8]) -> io::Result<usize> { - for byte in buf { - let escaped = self.write_escapes(byte)?; - let mdx_escaped = self.write_mdx_escapes(byte)?; - if !escaped && !mdx_escaped { - self.0.write_all(&[*byte])?; - } - } - Ok(buf.len()) - } - - fn flush(&mut self) -> io::Result<()> { - self.0.flush() - } -} - -#[cfg(test)] -mod test { - use crate as pgt_console; - use crate::fmt::Formatter; - use pgt_markup::markup; - - #[test] - fn test_mdx_new_lines() { - let mut buf = Vec::new(); - let mut writer = super::HTML(&mut buf, true); - let mut formatter = Formatter::new(&mut writer); - - formatter - .write_markup(markup! { - "Hello" - }) - .unwrap(); - - formatter - .write_markup(markup! { - "\n" - }) - .unwrap(); - - formatter - .write_markup(markup! { - "World" - }) - .unwrap(); - - assert_eq!(String::from_utf8(buf).unwrap(), "Hello<br />World"); - } - - #[test] - fn test_escapes() { - let mut buf = Vec::new(); - let mut writer = super::HTML(&mut buf, false); - let mut formatter = Formatter::new(&mut writer); - - formatter - .write_markup(markup! { - "\"" - }) - .unwrap(); - formatter - .write_markup(markup! { - "\"" - }) - .unwrap(); - - assert_eq!(String::from_utf8(buf).unwrap(), "&quot;&quot;"); - } - - #[test] - fn test_escapes_and_new_lines() { - let mut buf = Vec::new(); - let mut writer = super::HTML(&mut buf, true); - let mut formatter = Formatter::new(&mut writer); - - formatter - .write_markup(markup! { - "New rules that are still under development.\n\n." - }) - .unwrap(); - - assert_eq!( - String::from_utf8(buf).unwrap(), - "New rules that are still under development.<br /><br />." - ); - } - - #[test] - fn does_not_escape_curly_braces() { - let mut buf = Vec::new(); - let mut writer = super::HTML(&mut buf, false); - let mut formatter = Formatter::new(&mut writer); - - formatter - .write_markup(markup! { - "New rules that are {still} under development." - }) - .unwrap(); - - assert_eq!( - String::from_utf8(buf).unwrap(), - "New rules that are {still} under development." - ); - } - - #[test] - fn escape_curly_braces() { - let mut buf = Vec::new(); - let mut writer = super::HTML(&mut buf, false).with_mdx(); - let mut formatter = Formatter::new(&mut writer); - - formatter - .write_markup(markup! { - "New rules that are {still} under development.\n\n." - }) - .unwrap(); - - assert_eq!( - String::from_utf8(buf).unwrap(), - "New rules that are &#123;still&#125; under development.<br /><br />
." - ); - } - #[test] - fn test_from_website() { - let mut buf = Vec::new(); - let mut writer = super::HTML(&mut buf, false).with_mdx(); - let mut formatter = Formatter::new(&mut writer); - - formatter - .write_markup(markup! { - "Rules focused on preventing accessibility problems." - }) - .unwrap(); - - assert_eq!( - String::from_utf8(buf).unwrap(), - "Rules focused on preventing accessibility problems." - ); - } -} diff --git a/crates/pgt_console/src/write/termcolor.rs b/crates/pgt_console/src/write/termcolor.rs deleted file mode 100644 index 045ce769..00000000 --- a/crates/pgt_console/src/write/termcolor.rs +++ /dev/null @@ -1,244 +0,0 @@ -use std::{ - fmt::{self, Write as _}, - io, -}; - -use termcolor::{Color, ColorSpec, WriteColor}; -use unicode_segmentation::UnicodeSegmentation; -use unicode_width::UnicodeWidthStr; - -use crate::{MarkupElement, fmt::MarkupElements}; - -use super::Write; - -/// Adapter struct implementing [Write] over types implementing [WriteColor] -pub struct Termcolor(pub W); - -impl Write for Termcolor -where - W: WriteColor, -{ - fn write_str(&mut self, elements: &MarkupElements, content: &str) -> io::Result<()> { - with_format(&mut self.0, elements, |writer| { - let mut adapter = SanitizeAdapter { - writer, - error: Ok(()), - }; - - match adapter.write_str(content) { - Ok(()) => Ok(()), - Err(..) => { - if adapter.error.is_err() { - adapter.error - } else { - // SanitizeAdapter can only fail if the underlying - // writer returns an error - unreachable!() - } - } - } - }) - } - - fn write_fmt(&mut self, elements: &MarkupElements, content: fmt::Arguments) -> io::Result<()> { - with_format(&mut self.0, elements, |writer| { - let mut adapter = SanitizeAdapter { - writer, - error: Ok(()), - }; - - match adapter.write_fmt(content) { - Ok(()) => Ok(()), - Err(..) => { - if adapter.error.is_err() { - adapter.error - } else { - Err(io::Error::other("a Display formatter returned an error")) - } - } - } - }) - } -} - -/// Applies the current format in `state` to `writer`, calls `func` to -/// print a piece of text, then reset the printing format -fn with_format( - writer: &mut W, - state: &MarkupElements, - func: impl FnOnce(&mut W) -> io::Result<()>, -) -> io::Result<()> -where - W: WriteColor, -{ - let mut color = ColorSpec::new(); - let mut link = None; - let mut inverse = false; - - state.for_each(&mut |elements| { - for element in elements { - match element { - MarkupElement::Inverse => { - inverse = !inverse; - } - MarkupElement::Hyperlink { href } => { - link = Some(href); - } - _ => { - element.update_color(&mut color); - } - } - } - - Ok(()) - })?; - - if inverse { - let fg = color.fg().map_or(Color::White, |c| *c); - let bg = color.bg().map_or(Color::Black, |c| *c); - color.set_bg(Some(fg)); - color.set_fg(Some(bg)); - } - - if let Err(err) = writer.set_color(&color) { - writer.reset()?; - return Err(err); - } - - let mut reset_link = false; - if let Some(href) = link { - // `is_synchronous` is used to check if the underlying writer - // is using the Windows Console API, that does not support ANSI - // escape codes. 
Generally this would only be true when running - // in the legacy `cmd.exe` terminal emulator, since in modern - // clients like the Windows Terminal ANSI is used instead - if writer.supports_color() && !writer.is_synchronous() { - write!(writer, "\x1b]8;;{href}\x1b\\")?; - reset_link = true; - } - } - - let result = func(writer); - - if reset_link { - write!(writer, "\x1b]8;;\x1b\\")?; - } - - writer.reset()?; - result -} - -/// Adapter [fmt::Write] calls to [io::Write] with sanitization, -/// implemented as an internal struct to avoid exposing [fmt::Write] on -/// [Termcolor] -struct SanitizeAdapter { - writer: W, - error: io::Result<()>, -} - -impl fmt::Write for SanitizeAdapter -where - W: WriteColor, -{ - fn write_str(&mut self, content: &str) -> fmt::Result { - let mut buffer = [0; 4]; - - for grapheme in content.graphemes(true) { - let width = UnicodeWidthStr::width(grapheme); - let is_whitespace = grapheme_is_whitespace(grapheme); - - if !is_whitespace && width == 0 { - let char_to_write = char::REPLACEMENT_CHARACTER; - char_to_write.encode_utf8(&mut buffer); - - if let Err(err) = self.writer.write_all(&buffer[..char_to_write.len_utf8()]) { - self.error = Err(err); - return Err(fmt::Error); - } - - continue; - } - - // Unicode is currently poorly supported on most Windows - // terminal clients, so we always strip emojis in Windows - if cfg!(windows) || !self.writer.supports_color() { - let is_ascii = grapheme.is_ascii(); - - if !is_ascii { - let replacement = unicode_to_ascii(grapheme.chars().nth(0).unwrap()); - - replacement.encode_utf8(&mut buffer); - - if let Err(err) = self.writer.write_all(&buffer[..replacement.len_utf8()]) { - self.error = Err(err); - return Err(fmt::Error); - } - - continue; - } - }; - - for char in grapheme.chars() { - char.encode_utf8(&mut buffer); - - if let Err(err) = self.writer.write_all(&buffer[..char.len_utf8()]) { - self.error = Err(err); - return Err(fmt::Error); - } - } - } - - Ok(()) - } -} - -/// Determines if a unicode grapheme consists only of code points -/// which are considered whitespace characters in ASCII -fn grapheme_is_whitespace(grapheme: &str) -> bool { - grapheme.chars().all(|c| c.is_whitespace()) -} - -/// Replace emoji characters with similar but more widely supported ASCII -/// characters -fn unicode_to_ascii(c: char) -> char { - match c { - '\u{2714}' => '\u{221a}', - '\u{2139}' => 'i', - '\u{26a0}' => '!', - '\u{2716}' => '\u{00d7}', - _ => c, - } -} - -#[cfg(test)] -mod tests { - use std::{fmt::Write, str::from_utf8}; - - use super::SanitizeAdapter; - - #[test] - fn test_printing_complex_emojis() { - const INPUT: &str = "⚠️1️⃣ℹ️"; - const OUTPUT: &str = "⚠️1️⃣ℹ️"; - const WINDOWS_OUTPUT: &str = "!1i"; - - let mut buffer = Vec::new(); - - { - let writer = termcolor::Ansi::new(&mut buffer); - let mut adapter = SanitizeAdapter { - writer, - error: Ok(()), - }; - - adapter.write_str(INPUT).unwrap(); - adapter.error.unwrap(); - } - - if cfg!(windows) { - assert_eq!(from_utf8(&buffer).unwrap(), WINDOWS_OUTPUT); - } else { - assert_eq!(from_utf8(&buffer).unwrap(), OUTPUT); - } - } -} diff --git a/crates/pgt_console/tests/macro.rs b/crates/pgt_console/tests/macro.rs deleted file mode 100644 index 177a20e3..00000000 --- a/crates/pgt_console/tests/macro.rs +++ /dev/null @@ -1,37 +0,0 @@ -use pgt_console::{Markup, MarkupElement}; - -#[test] -fn test_macro() { - let category = "test"; - - match - // Due to how MarkupNode is implemented, the result of the markup macro - // cannot be stored in a binding and must be matched upon immediately 
- pgt_markup::markup! { - {category}" Commands" - } - { - Markup(markup) => { - let node_0 = &markup[0]; - assert_eq!(&node_0.elements, &[MarkupElement::Info, MarkupElement::Emphasis]); - // assert_eq!(node_0.content.to_string(), category.to_string()); - - let node_1 = &markup[1]; - assert_eq!(&node_1.elements, &[MarkupElement::Info]); - // assert_eq!(node_1.content.to_string(), " Commands".to_string()); - } - } -} - -#[test] -fn test_macro_attributes() { - pgt_markup::markup! { - "link" - }; -} - -#[test] -fn test_macro_errors() { - let t = trybuild::TestCases::new(); - t.compile_fail("tests/markup/*.rs"); -} diff --git a/crates/pgt_console/tests/markup/closing_element_standalone.rs b/crates/pgt_console/tests/markup/closing_element_standalone.rs deleted file mode 100644 index 815d2c4a..00000000 --- a/crates/pgt_console/tests/markup/closing_element_standalone.rs +++ /dev/null @@ -1,5 +0,0 @@ -fn main() { - pgt_console::markup! { - - } -} diff --git a/crates/pgt_console/tests/markup/closing_element_standalone.stderr b/crates/pgt_console/tests/markup/closing_element_standalone.stderr deleted file mode 100644 index 00506089..00000000 --- a/crates/pgt_console/tests/markup/closing_element_standalone.stderr +++ /dev/null @@ -1,5 +0,0 @@ -error: unexpected closing element - --> tests/markup/closing_element_standalone.rs:3:11 - | -3 | - | ^^^^^^^^ diff --git a/crates/pgt_console/tests/markup/element_non_ident_name.rs b/crates/pgt_console/tests/markup/element_non_ident_name.rs deleted file mode 100644 index 84aa0bca..00000000 --- a/crates/pgt_console/tests/markup/element_non_ident_name.rs +++ /dev/null @@ -1,5 +0,0 @@ -fn main() { - pgt_console::markup! { - <"Literal" /> - } -} diff --git a/crates/pgt_console/tests/markup/element_non_ident_name.stderr b/crates/pgt_console/tests/markup/element_non_ident_name.stderr deleted file mode 100644 index b3fbf301..00000000 --- a/crates/pgt_console/tests/markup/element_non_ident_name.stderr +++ /dev/null @@ -1,5 +0,0 @@ -error: unexpected token - --> tests/markup/element_non_ident_name.rs:3:10 - | -3 | <"Literal" /> - | ^^^^^^^^^ diff --git a/crates/pgt_console/tests/markup/invalid_group.rs b/crates/pgt_console/tests/markup/invalid_group.rs deleted file mode 100644 index c53c4494..00000000 --- a/crates/pgt_console/tests/markup/invalid_group.rs +++ /dev/null @@ -1,5 +0,0 @@ -fn main() { - pgt_console::markup! { - [] - } -} diff --git a/crates/pgt_console/tests/markup/invalid_group.stderr b/crates/pgt_console/tests/markup/invalid_group.stderr deleted file mode 100644 index f37bbe52..00000000 --- a/crates/pgt_console/tests/markup/invalid_group.stderr +++ /dev/null @@ -1,5 +0,0 @@ -error: unexpected token - --> tests/markup/invalid_group.rs:3:9 - | -3 | [] - | ^^ diff --git a/crates/pgt_console/tests/markup/invalid_punct.rs b/crates/pgt_console/tests/markup/invalid_punct.rs deleted file mode 100644 index 311bab6c..00000000 --- a/crates/pgt_console/tests/markup/invalid_punct.rs +++ /dev/null @@ -1,5 +0,0 @@ -fn main() { - pgt_console::markup! { - ! - } -} diff --git a/crates/pgt_console/tests/markup/invalid_punct.stderr b/crates/pgt_console/tests/markup/invalid_punct.stderr deleted file mode 100644 index 29b6be34..00000000 --- a/crates/pgt_console/tests/markup/invalid_punct.stderr +++ /dev/null @@ -1,5 +0,0 @@ -error: unexpected token - --> tests/markup/invalid_punct.rs:3:9 - | -3 | ! 
- | ^ diff --git a/crates/pgt_console/tests/markup/open_element_improper_close_1.rs b/crates/pgt_console/tests/markup/open_element_improper_close_1.rs deleted file mode 100644 index a9567457..00000000 --- a/crates/pgt_console/tests/markup/open_element_improper_close_1.rs +++ /dev/null @@ -1,5 +0,0 @@ -fn main() { - pgt_console::markup! { - tests/markup/open_element_improper_close_1.rs:3:20 - | -3 | tests/markup/open_element_improper_close_2.rs:3:20 - | -3 | - } -} diff --git a/crates/pgt_console/tests/markup/open_element_improper_prop_value.stderr b/crates/pgt_console/tests/markup/open_element_improper_prop_value.stderr deleted file mode 100644 index ceecc446..00000000 --- a/crates/pgt_console/tests/markup/open_element_improper_prop_value.stderr +++ /dev/null @@ -1,5 +0,0 @@ -error: unexpected token - --> tests/markup/open_element_improper_prop_value.rs:3:28 - | -3 | - | ^^^^^ diff --git a/crates/pgt_console/tests/markup/open_element_missing_prop_value.rs b/crates/pgt_console/tests/markup/open_element_missing_prop_value.rs deleted file mode 100644 index 73423ffd..00000000 --- a/crates/pgt_console/tests/markup/open_element_missing_prop_value.rs +++ /dev/null @@ -1,5 +0,0 @@ -fn main() { - pgt_console::markup! { - - } -} diff --git a/crates/pgt_console/tests/markup/open_element_missing_prop_value.stderr b/crates/pgt_console/tests/markup/open_element_missing_prop_value.stderr deleted file mode 100644 index 3b96a306..00000000 --- a/crates/pgt_console/tests/markup/open_element_missing_prop_value.stderr +++ /dev/null @@ -1,5 +0,0 @@ -error: unexpected token - --> tests/markup/open_element_missing_prop_value.rs:3:28 - | -3 | - | ^ diff --git a/crates/pgt_console/tests/markup/open_element_unfinished_1.rs b/crates/pgt_console/tests/markup/open_element_unfinished_1.rs deleted file mode 100644 index 0c155fc7..00000000 --- a/crates/pgt_console/tests/markup/open_element_unfinished_1.rs +++ /dev/null @@ -1,5 +0,0 @@ -fn main() { - pgt_console::markup! { - < - } -} diff --git a/crates/pgt_console/tests/markup/open_element_unfinished_1.stderr b/crates/pgt_console/tests/markup/open_element_unfinished_1.stderr deleted file mode 100644 index 923a0330..00000000 --- a/crates/pgt_console/tests/markup/open_element_unfinished_1.stderr +++ /dev/null @@ -1,9 +0,0 @@ -error: unexpected end of input - --> tests/markup/open_element_unfinished_1.rs:2:5 - | -2 | / pgt_console::markup! { -3 | | < -4 | | } - | |_____^ - | - = note: this error originates in the macro `pgt_console::markup` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/crates/pgt_console/tests/markup/open_element_unfinished_2.rs b/crates/pgt_console/tests/markup/open_element_unfinished_2.rs deleted file mode 100644 index e8d3bbf2..00000000 --- a/crates/pgt_console/tests/markup/open_element_unfinished_2.rs +++ /dev/null @@ -1,5 +0,0 @@ -fn main() { - pgt_console::markup! { - tests/markup/open_element_unfinished_2.rs:2:5 - | -2 | / pgt_console::markup! { -3 | | tests/markup/open_element_unfinished_3.rs:2:5 - | -2 | / pgt_console::markup! { -3 | | tests/markup/open_element_unfinished_4.rs:2:5 - | -2 | / pgt_console::markup! { -3 | | tests/markup/open_element_unfinished_5.rs:2:5 - | -2 | / pgt_console::markup! { -3 | | tests/markup/open_element_unfinished_6.rs:2:5 - | -2 | / pgt_console::markup! { -3 | | tests/markup/open_element_unfinished_7.rs:2:5 - | -2 | / pgt_console::markup! 
{ -3 | | - } -} diff --git a/crates/pgt_console/tests/markup/unclosed_element.stderr b/crates/pgt_console/tests/markup/unclosed_element.stderr deleted file mode 100644 index 47adce5b..00000000 --- a/crates/pgt_console/tests/markup/unclosed_element.stderr +++ /dev/null @@ -1,5 +0,0 @@ -error: unclosed element - --> tests/markup/unclosed_element.rs:3:10 - | -3 | - | ^^^^^^^^ diff --git a/crates/pgt_diagnostics/Cargo.toml b/crates/pgt_diagnostics/Cargo.toml deleted file mode 100644 index 190b25f0..00000000 --- a/crates/pgt_diagnostics/Cargo.toml +++ /dev/null @@ -1,35 +0,0 @@ -[package] -authors.workspace = true -categories.workspace = true -description = "" -edition.workspace = true -homepage.workspace = true -keywords.workspace = true -license.workspace = true -name = "pgt_diagnostics" -repository.workspace = true -version = "0.0.0" - - -[dependencies] -backtrace = "0.3.74" -bpaf = { workspace = true } -enumflags2 = { workspace = true } -pgt_console = { workspace = true, features = ["serde"] } -pgt_diagnostics_categories = { workspace = true, features = ["serde"] } -pgt_diagnostics_macros = { workspace = true } -pgt_text_edit = { workspace = true, features = ["serde"] } -pgt_text_size.workspace = true -schemars = { workspace = true, optional = true } -serde = { workspace = true, features = ["derive"] } -serde_json = { workspace = true } -termcolor = { workspace = true } -unicode-width = { workspace = true } - -[features] -schema = ["dep:schemars", "pgt_text_edit/schema", "pgt_diagnostics_categories/schema", "pgt_console/schema"] - -[dev-dependencies] - -[lib] -doctest = false diff --git a/crates/pgt_diagnostics/src/adapters.rs b/crates/pgt_diagnostics/src/adapters.rs deleted file mode 100644 index ca627d3b..00000000 --- a/crates/pgt_diagnostics/src/adapters.rs +++ /dev/null @@ -1,136 +0,0 @@ -//! This modules exposes a number of "adapter diagnostics" that wrap error types -//! such as [std::error::Error] or [std::io::Error] in newtypes implementing the -//! [Diagnostic] trait - -use std::io; - -use pgt_console::{ - fmt::{self}, - markup, -}; - -use crate::{Category, Diagnostic, DiagnosticTags, category}; - -/// Implements [Diagnostic] over types implementing [std::error::Error]. -#[derive(Debug)] -pub struct StdError { - error: Box, -} - -impl From for StdError { - fn from(error: E) -> Self { - Self { - error: Box::new(error), - } - } -} - -impl Diagnostic for StdError { - fn description(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(fmt, "{}", self.error) - } - - fn message(&self, fmt: &mut fmt::Formatter<'_>) -> io::Result<()> { - fmt.write_markup(markup!({ AsConsoleDisplay(&self.error) })) - } -} - -struct AsConsoleDisplay<'a, T>(&'a T); - -impl fmt::Display for AsConsoleDisplay<'_, T> { - fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> io::Result<()> { - fmt.write_fmt(format_args!("{}", self.0)) - } -} - -/// Implements [Diagnostic] over for [io::Error]. 
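A hypothetical usage sketch for the adapter defined below, assuming `IoError` is re-exported from this crate's `adapters` module:

    use pgt_diagnostics::adapters::IoError;

    // Wrap a std::io::Error so it can flow through APIs expecting a Diagnostic.
    fn read_config(path: &str) -> Result<String, IoError> {
        std::fs::read_to_string(path).map_err(IoError::from)
    }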
-#[derive(Debug)] -pub struct IoError { - error: io::Error, -} - -impl From for IoError { - fn from(error: io::Error) -> Self { - Self { error } - } -} - -impl Diagnostic for IoError { - fn category(&self) -> Option<&'static Category> { - Some(category!("internalError/io")) - } - - fn description(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(fmt, "{}", self.error) - } - - fn tags(&self) -> DiagnosticTags { - DiagnosticTags::INTERNAL - } - - fn message(&self, fmt: &mut fmt::Formatter<'_>) -> io::Result<()> { - fmt.write_markup(markup!({ AsConsoleDisplay(&self.error) })) - } -} - -/// Implements [Diagnostic] over for [bpaf::ParseFailure]. -#[derive(Debug)] -pub struct BpafError { - error: bpaf::ParseFailure, -} - -impl From for BpafError { - fn from(error: bpaf::ParseFailure) -> Self { - Self { error } - } -} - -impl Diagnostic for BpafError { - fn category(&self) -> Option<&'static Category> { - Some(category!("flags/invalid")) - } - - fn tags(&self) -> DiagnosticTags { - DiagnosticTags::FIXABLE - } - - fn description(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - if let bpaf::ParseFailure::Stderr(reason) = &self.error { - write!(fmt, "{reason}")?; - } - Ok(()) - } - - fn message(&self, fmt: &mut fmt::Formatter<'_>) -> io::Result<()> { - if let bpaf::ParseFailure::Stderr(reason) = &self.error { - let error = reason.to_string(); - fmt.write_str(&error)?; - } - Ok(()) - } -} - -#[derive(Debug)] -pub struct SerdeJsonError { - error: serde_json::Error, -} - -impl From for SerdeJsonError { - fn from(error: serde_json::Error) -> Self { - Self { error } - } -} - -impl Diagnostic for SerdeJsonError { - fn category(&self) -> Option<&'static Category> { - Some(category!("internalError/io")) - } - - fn description(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(fmt, "{}", self.error) - } - - fn message(&self, fmt: &mut fmt::Formatter<'_>) -> io::Result<()> { - fmt.write_markup(markup!({ AsConsoleDisplay(&self.error) })) - } -} diff --git a/crates/pgt_diagnostics/src/advice.rs b/crates/pgt_diagnostics/src/advice.rs deleted file mode 100644 index 3720ff67..00000000 --- a/crates/pgt_diagnostics/src/advice.rs +++ /dev/null @@ -1,226 +0,0 @@ -use crate::Applicability; -use crate::{ - Location, - display::Backtrace, - location::{AsResource, AsSourceCode, AsSpan}, -}; -use pgt_console::fmt::{self, Display}; -use pgt_console::{MarkupBuf, markup}; -use pgt_text_edit::TextEdit; -use serde::{Deserialize, Serialize}; -use std::io; - -/// Trait implemented by types that support emitting advices into a diagnostic -pub trait Advices { - fn record(&self, visitor: &mut dyn Visit) -> io::Result<()>; -} - -/// The `Visit` trait is used to collect advices from a diagnostic: a visitor -/// instance is provided to the [Diagnostic::advices](super::Diagnostic::advices) -/// and [Diagnostic::verbose_advices](super::Diagnostic::verbose_advices) methods, -/// and the diagnostic implementation is expected to call into the various `record_*` -/// methods to communicate advices to the user. -pub trait Visit { - /// Prints a single log entry with the provided category and markup. - fn record_log(&mut self, category: LogCategory, text: &dyn fmt::Display) -> io::Result<()> { - let _ = (category, text); - Ok(()) - } - - /// Prints an unordered list of items. - fn record_list(&mut self, list: &[&dyn fmt::Display]) -> io::Result<()> { - let _ = list; - Ok(()) - } - - /// Prints a code frame outlining the provided source location. 
- fn record_frame(&mut self, location: Location<'_>) -> io::Result<()> { - let _ = location; - Ok(()) - } - - /// Prints the diff between the `prev` and `next` strings. - fn record_diff(&mut self, diff: &TextEdit) -> io::Result<()> { - let _ = diff; - Ok(()) - } - - /// Prints a Rust backtrace. - fn record_backtrace( - &mut self, - title: &dyn fmt::Display, - backtrace: &Backtrace, - ) -> io::Result<()> { - let _ = (title, backtrace); - Ok(()) - } - - /// Prints a command to the user. - fn record_command(&mut self, command: &str) -> io::Result<()> { - let _ = command; - Ok(()) - } - - /// Prints a group of advices under a common title. - fn record_group(&mut self, title: &dyn fmt::Display, advice: &dyn Advices) -> io::Result<()> { - let _ = (title, advice); - Ok(()) - } - - /// ## Warning - /// - /// The implementation of the table, for now, is tailored for two columns, and it assumes that - /// the longest cell is on top. - fn record_table( - &mut self, - padding: usize, - headers: &[MarkupBuf], - columns: &[&[MarkupBuf]], - ) -> io::Result<()> { - let _ = (headers, columns, padding); - Ok(()) - } -} - -/// The category for a log advice, defines how the message should be presented -/// to the user. -#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] -#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] -#[serde(rename_all = "camelCase")] -pub enum LogCategory { - /// The advice doesn't have any specific category, the message will be - /// printed as plain markup. - None, - /// Print the advices with the information style. - Info, - /// Print the advices with the warning style. - Warn, - /// Print the advices with the error style. - Error, -} - -/// Utility type implementing [Advices] that emits a single log advice with -/// the provided category and text. -#[derive(Debug)] -pub struct LogAdvice { - pub category: LogCategory, - pub text: T, -} - -impl Advices for LogAdvice { - fn record(&self, visitor: &mut dyn Visit) -> io::Result<()> { - visitor.record_log(self.category, &self.text) - } -} - -/// Utility advice that prints a list of items. -#[derive(Debug)] -pub struct ListAdvice { - pub list: Vec, -} - -impl Advices for ListAdvice { - fn record(&self, visitor: &mut dyn Visit) -> io::Result<()> { - if self.list.is_empty() { - visitor.record_log(LogCategory::Warn, &"The list is empty.") - } else { - let pattern_list: Vec<_> = self - .list - .iter() - .map(|pattern| pattern as &dyn Display) - .collect(); - - visitor.record_list(&pattern_list) - } - } -} - -/// Utility type implementing [Advices] that emits a single code frame -/// advice with the provided path, span and source code. -#[derive(Debug)] -pub struct CodeFrameAdvice { - pub path: Path, - pub span: Span, - pub source_code: SourceCode, -} - -impl Advices for CodeFrameAdvice -where - Path: AsResource, - Span: AsSpan, - SourceCode: AsSourceCode, -{ - fn record(&self, visitor: &mut dyn Visit) -> io::Result<()> { - let location = Location::builder() - .resource(&self.path) - .span(&self.span) - .source_code(&self.source_code) - .build(); - - visitor.record_frame(location)?; - - Ok(()) - } -} - -/// Utility type implementing [Advices] that emits a diff advice with the -/// provided prev and next text. 
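For comparison with the utility advices in this file, a minimal hand-written advice that mirrors the `LogAdvice` pattern above; the command string is hypothetical:

    use std::io;
    use pgt_console::fmt;
    use pgt_diagnostics::{Advices, LogCategory, Visit};

    // Emits one warning log entry followed by a suggested command.
    struct RetryAdvice<T> {
        message: T,
    }

    impl<T: fmt::Display> Advices for RetryAdvice<T> {
        fn record(&self, visitor: &mut dyn Visit) -> io::Result<()> {
            visitor.record_log(LogCategory::Warn, &self.message)?;
            visitor.record_command("pgt check --verbose") // hypothetical CLI invocation
        }
    }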
-#[derive(Debug)] -pub struct DiffAdvice { - pub diff: D, -} - -impl Advices for DiffAdvice -where - D: AsRef, -{ - fn record(&self, visitor: &mut dyn Visit) -> io::Result<()> { - visitor.record_diff(self.diff.as_ref()) - } -} - -/// Utility type implementing [Advices] that emits a command advice with -/// the provided text. -#[derive(Debug)] -pub struct CommandAdvice { - pub command: T, -} - -impl Advices for CommandAdvice -where - T: AsRef, -{ - fn record(&self, visitor: &mut dyn Visit) -> io::Result<()> { - visitor.record_command(self.command.as_ref()) - } -} - -#[derive(Debug, PartialEq)] -/// Utility type implementing [Advices] that emits a -/// code suggestion with the provided text -pub struct CodeSuggestionAdvice { - pub applicability: Applicability, - pub msg: M, - pub suggestion: TextEdit, -} - -impl Advices for CodeSuggestionAdvice -where - M: Display, -{ - fn record(&self, visitor: &mut dyn Visit) -> io::Result<()> { - let applicability = match self.applicability { - Applicability::Always => "Safe fix", - Applicability::MaybeIncorrect => "Unsafe fix", - }; - - visitor.record_log( - LogCategory::Info, - &markup! { - {applicability}": "{self.msg} - }, - )?; - - visitor.record_diff(&self.suggestion) - } -} diff --git a/crates/pgt_diagnostics/src/context.rs b/crates/pgt_diagnostics/src/context.rs deleted file mode 100644 index 88a64661..00000000 --- a/crates/pgt_diagnostics/src/context.rs +++ /dev/null @@ -1,757 +0,0 @@ -use pgt_console::fmt; - -use crate::context::internal::{SeverityDiagnostic, TagsDiagnostic}; -use crate::{ - Category, DiagnosticTags, Error, Resource, Severity, SourceCode, - diagnostic::internal::AsDiagnostic, - location::{AsResource, AsSourceCode, AsSpan}, -}; - -/// This trait is implemented for all types implementing [Diagnostic](super::Diagnostic) -/// and the [Error] struct, and exposes various combinator methods to enrich -/// existing diagnostics with additional information. -pub trait DiagnosticExt: internal::Sealed + Sized { - /// Returns a new diagnostic with the provided `message` as a message and - /// description, and `self` as a source diagnostic. This is useful to - /// create chains of diagnostics, where high level errors wrap lower level - /// causes. - fn context(self, message: M) -> Error - where - Self: 'static, - M: fmt::Display + 'static, - Error: From>; - - /// Returns a new diagnostic using the provided `category` if `self` - /// doesn't already have one. - fn with_category(self, category: &'static Category) -> Error - where - Error: From>; - - /// Returns a new diagnostic using the provided `path` if `self` - /// doesn't already have one. - fn with_file_path(self, path: impl AsResource) -> Error - where - Error: From>; - - /// Returns a new diagnostic using the provided `span` instead of the one in `self`. - fn with_file_span(self, span: impl AsSpan) -> Error - where - Error: From>; - - /// Returns a new diagnostic using the provided `source_code` if `self` - /// doesn't already have one. 
- fn with_file_source_code(self, source_code: impl AsSourceCode) -> Error - where - Error: From>; - - /// Returns a new diagnostic with additional `tags` - fn with_tags(self, tags: DiagnosticTags) -> Error - where - Error: From>; - - /// Returns a new diagnostic with additional `severity` - fn with_severity(self, severity: Severity) -> Error - where - Error: From>; -} - -impl internal::Sealed for E {} - -impl DiagnosticExt for E { - fn context(self, message: M) -> Error - where - E: 'static, - M: fmt::Display + 'static, - Error: From>, - { - Error::from(internal::ContextDiagnostic { - message, - source: self, - }) - } - - fn with_category(self, category: &'static Category) -> Error - where - Error: From>, - { - Error::from(internal::CategoryDiagnostic { - category, - source: self, - }) - } - - fn with_file_path(self, path: impl AsResource) -> Error - where - Error: From>, - { - Error::from(internal::FilePathDiagnostic { - path: path.as_resource().map(Resource::to_owned), - source: self, - }) - } - - fn with_file_span(self, span: impl AsSpan) -> Error - where - Error: From>, - { - Error::from(internal::FileSpanDiagnostic { - span: span.as_span(), - source: self, - }) - } - - fn with_file_source_code(self, source_code: impl AsSourceCode) -> Error - where - Error: From>, - { - Error::from(internal::FileSourceCodeDiagnostic { - source_code: source_code.as_source_code().map(SourceCode::to_owned), - source: self, - }) - } - - fn with_tags(self, tags: DiagnosticTags) -> Error - where - Error: From>, - { - Error::from(internal::TagsDiagnostic { tags, source: self }) - } - - fn with_severity(self, severity: Severity) -> Error - where - Error: From>, - { - Error::from(internal::SeverityDiagnostic { - severity, - source: self, - }) - } -} - -pub trait Context: internal::Sealed { - /// If `self` is an error, returns a new diagnostic with the provided - /// `message` as a message and description, and `self` as a source - /// diagnostic. This is useful to create chains of diagnostics, where high - /// level errors wrap lower level causes. - fn context(self, message: M) -> Result - where - E: 'static, - M: fmt::Display + 'static, - Error: From>; - - /// If `self` is an error, returns a new diagnostic using the provided - /// `category` if `self` doesn't already have one. - fn with_category(self, category: &'static Category) -> Result - where - Error: From>; - - /// If `self` is an error, returns a new diagnostic using the provided - /// `path` if `self` doesn't already have one. - fn with_file_path(self, path: impl AsResource) -> Result - where - Error: From>; - - /// If `self` is an error, returns a new diagnostic using the provided - /// `severity` if `self` doesn't already have one. - fn with_severity(self, severity: Severity) -> Result - where - Error: From>; - - /// If `self` is an error, returns a new diagnostic using the provided - /// `tags` if `self` doesn't already have one. - fn with_tags(self, tags: DiagnosticTags) -> Result - where - Error: From>; - - /// If `self` is an error, returns a new diagnostic using the provided - /// `span` instead of the one returned by `self`. - /// - /// This is useful in multi-language documents, where a given diagnostic - /// may be originally emitted with a span relative to a specific substring - /// of a larger document, and later needs to have its position remapped to - /// be relative to the entire file instead. 
- fn with_file_span(self, span: impl AsSpan) -> Result - where - Error: From>; -} - -impl internal::Sealed for Result {} - -impl Context for Result { - fn context(self, message: M) -> Result - where - E: 'static, - M: fmt::Display + 'static, - Error: From>, - { - match self { - Ok(value) => Ok(value), - Err(source) => Err(source.context(message)), - } - } - - fn with_category(self, category: &'static Category) -> Result - where - Error: From>, - { - match self { - Ok(value) => Ok(value), - Err(source) => Err(source.with_category(category)), - } - } - - fn with_file_path(self, path: impl AsResource) -> Result - where - Error: From>, - { - match self { - Ok(value) => Ok(value), - Err(source) => Err(source.with_file_path(path)), - } - } - - fn with_severity(self, severity: Severity) -> Result - where - Error: From>, - { - match self { - Ok(value) => Ok(value), - Err(source) => Err(source.with_severity(severity)), - } - } - - fn with_tags(self, tags: DiagnosticTags) -> Result - where - Error: From>, - { - match self { - Ok(value) => Ok(value), - Err(source) => Err(source.with_tags(tags)), - } - } - - fn with_file_span(self, span: impl AsSpan) -> Result - where - Error: From>, - { - match self { - Ok(value) => Ok(value), - Err(source) => Err(source.with_file_span(span)), - } - } -} - -mod internal { - //! These types need to be declared as public as they're referred to in the - //! `where` clause of other public items, but as they're not part of the - //! public API they are declared in a private module so they're not - //! accessible outside of the crate - - use std::{fmt::Debug, io}; - - use pgt_console::{fmt, markup}; - use pgt_text_edit::TextEdit; - use pgt_text_size::TextRange; - - use crate::{ - Advices, Backtrace, Category, Diagnostic, DiagnosticTags, LineIndex, LineIndexBuf, - Location, LogCategory, Resource, Severity, SourceCode, Visit, - diagnostic::internal::AsDiagnostic, - }; - - /// This trait is inherited by `DiagnosticExt` and `Context`, since it's - /// not visible outside of `pgt_diagnostics` this prevents these extension - /// traits from being implemented on other types outside of this module - /// - /// Making these traits "sealed" is mainly intended as a stability - /// guarantee, if these traits were simply public any change to their - /// signature or generic implementations would be a breaking change for - /// downstream implementations, so preventing these traits from ever being - /// implemented in downstream crates ensures this doesn't happen. - pub trait Sealed {} - - /// Diagnostic type returned by [super::DiagnosticExt::context], uses - /// `message` as its message and description, and `source` as its source - /// diagnostic. 
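Hypothetical end-to-end usage of the combinators above, assuming `Error`, the `Context` trait and the `IoError` adapter are all re-exported by this crate, and that `fmt::Display` is implemented for plain strings as it is elsewhere in these crates:

    use pgt_diagnostics::{adapters::IoError, Context, Error};

    // Build a diagnostic chain from a plain io::Error (sketch).
    fn load_schema(path: &str) -> Result<String, Error> {
        std::fs::read_to_string(path)
            .map_err(IoError::from) // io::Error -> IoError adapter
            .with_file_path(path) // record which resource failed
            .context("could not load the schema file") // high-level wrapper
    }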
- pub struct ContextDiagnostic { - pub(super) message: M, - pub(super) source: E, - } - - impl Diagnostic for ContextDiagnostic { - fn category(&self) -> Option<&'static Category> { - self.source.as_diagnostic().category() - } - - fn severity(&self) -> Severity { - self.source.as_diagnostic().severity() - } - - fn description(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - let mut writer = DisplayMarkup(fmt); - let mut fmt = fmt::Formatter::new(&mut writer); - fmt.write_markup(markup!({ self.message })) - .map_err(|_| std::fmt::Error) - } - - fn message(&self, fmt: &mut fmt::Formatter<'_>) -> io::Result<()> { - fmt::Display::fmt(&self.message, fmt) - } - - fn advices(&self, visitor: &mut dyn Visit) -> io::Result<()> { - self.source.as_diagnostic().advices(visitor) - } - - fn verbose_advices(&self, visitor: &mut dyn Visit) -> io::Result<()> { - self.source.as_diagnostic().verbose_advices(visitor) - } - - fn location(&self) -> Location<'_> { - self.source.as_diagnostic().location() - } - - fn tags(&self) -> DiagnosticTags { - self.source.as_diagnostic().tags() - } - - fn source(&self) -> Option<&dyn Diagnostic> { - Some(self.source.as_dyn()) - } - } - - impl Debug for ContextDiagnostic { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - f.debug_struct("Diagnostic") - .field("message", &DebugMarkup(&self.message)) - .field("source", &self.source) - .finish() - } - } - - /// Helper wrapper implementing [Debug] for types implementing [fmt::Display], - /// prints a debug representation of the markup generated by printing `T`. - struct DebugMarkup(T); - - impl Debug for DebugMarkup { - fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - let buffer = markup!({ self.0 }).to_owned(); - Debug::fmt(&buffer, fmt) - } - } - - /// Helper wrapper implementing [fmt::Write] for [std::fmt::Formatter]. - struct DisplayMarkup<'a, 'b>(&'a mut std::fmt::Formatter<'b>); - - impl fmt::Write for DisplayMarkup<'_, '_> { - fn write_str(&mut self, _: &fmt::MarkupElements<'_>, content: &str) -> io::Result<()> { - self.0.write_str(content).map_err(io::Error::other) - } - - fn write_fmt( - &mut self, - _: &fmt::MarkupElements<'_>, - content: std::fmt::Arguments<'_>, - ) -> io::Result<()> { - self.0.write_fmt(content).map_err(io::Error::other) - } - } - - /// Diagnostic type returned by [super::DiagnosticExt::with_category], - /// uses `category` as its category if `source` doesn't return one. 
- pub struct CategoryDiagnostic { - pub(super) category: &'static Category, - pub(super) source: E, - } - - impl Debug for CategoryDiagnostic { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - f.debug_struct("Diagnostic") - .field("category", &self.category) - .field("source", &self.source) - .finish() - } - } - - impl Diagnostic for CategoryDiagnostic { - fn category(&self) -> Option<&'static Category> { - Some( - self.source - .as_diagnostic() - .category() - .unwrap_or(self.category), - ) - } - - fn severity(&self) -> Severity { - self.source.as_diagnostic().severity() - } - - fn description(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - self.source.as_diagnostic().description(fmt) - } - - fn message(&self, fmt: &mut fmt::Formatter<'_>) -> io::Result<()> { - self.source.as_diagnostic().message(fmt) - } - - fn advices(&self, visitor: &mut dyn Visit) -> io::Result<()> { - self.source.as_diagnostic().advices(visitor) - } - - fn verbose_advices(&self, visitor: &mut dyn Visit) -> io::Result<()> { - self.source.as_diagnostic().verbose_advices(visitor) - } - - fn location(&self) -> Location<'_> { - self.source.as_diagnostic().location() - } - - fn tags(&self) -> DiagnosticTags { - self.source.as_diagnostic().tags() - } - } - - /// Diagnostic type returned by [super::DiagnosticExt::with_file_path], - /// uses `path` as its location path if `source` doesn't return one. - pub struct FilePathDiagnostic { - pub(super) path: Option>, - pub(super) source: E, - } - - impl Debug for FilePathDiagnostic { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - f.debug_struct("Diagnostic") - .field("path", &self.path) - .field("source", &self.source) - .finish() - } - } - - impl Diagnostic for FilePathDiagnostic { - fn category(&self) -> Option<&'static Category> { - self.source.as_diagnostic().category() - } - - fn severity(&self) -> Severity { - self.source.as_diagnostic().severity() - } - - fn description(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - self.source.as_diagnostic().description(fmt) - } - - fn message(&self, fmt: &mut fmt::Formatter<'_>) -> io::Result<()> { - self.source.as_diagnostic().message(fmt) - } - - fn advices(&self, visitor: &mut dyn Visit) -> io::Result<()> { - self.source.as_diagnostic().advices(visitor) - } - - fn verbose_advices(&self, visitor: &mut dyn Visit) -> io::Result<()> { - self.source.as_diagnostic().verbose_advices(visitor) - } - - fn location(&self) -> Location<'_> { - let loc = self.source.as_diagnostic().location(); - Location { - resource: match loc.resource { - Some(Resource::Argv) => Some(Resource::Argv), - Some(Resource::Memory) => Some(Resource::Memory), - Some(Resource::File(file)) => { - if let Some(Resource::File(path)) = &self.path { - Some(Resource::File(path.as_ref())) - } else { - Some(Resource::File(file)) - } - } - None => self.path.as_ref().map(Resource::as_deref), - }, - span: loc.span, - source_code: loc.source_code, - } - } - - fn tags(&self) -> DiagnosticTags { - self.source.as_diagnostic().tags() - } - } - - /// Diagnostic type returned by [super::DiagnosticExt::with_file_span], - /// uses `span` as its location span instead of the one returned by `source`. 
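A sketch of the span-remapping use case described above, for a diagnostic produced against an embedded snippet that needs file-relative coordinates. The offsets are hypothetical, and `TextRange` is assumed to implement `AsSpan`:

    use pgt_diagnostics::{Context, Error};
    use pgt_text_size::{TextRange, TextSize};

    // Shift a snippet-relative diagnostic so its span is file-relative (sketch).
    fn remap(result: Result<(), Error>, offset: u32, len: u32) -> Result<(), Error> {
        let file_relative = TextRange::new(TextSize::from(offset), TextSize::from(offset + len));
        result.with_file_span(file_relative)
    }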
- pub struct FileSpanDiagnostic { - pub(super) span: Option, - pub(super) source: E, - } - - impl Debug for FileSpanDiagnostic { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - f.debug_struct("Diagnostic") - .field("span", &self.span) - .field("source", &self.source) - .finish() - } - } - - impl Diagnostic for FileSpanDiagnostic { - fn category(&self) -> Option<&'static Category> { - self.source.as_diagnostic().category() - } - - fn severity(&self) -> Severity { - self.source.as_diagnostic().severity() - } - - fn description(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - self.source.as_diagnostic().description(fmt) - } - - fn message(&self, fmt: &mut fmt::Formatter<'_>) -> io::Result<()> { - self.source.as_diagnostic().message(fmt) - } - - fn advices(&self, visitor: &mut dyn Visit) -> io::Result<()> { - self.source.as_diagnostic().advices(visitor) - } - - fn verbose_advices(&self, visitor: &mut dyn Visit) -> io::Result<()> { - self.source.as_diagnostic().verbose_advices(visitor) - } - - fn location(&self) -> Location<'_> { - let loc = self.source.as_diagnostic().location(); - Location { - resource: loc.resource, - span: self.span.or(loc.span), - source_code: loc.source_code, - } - } - - fn tags(&self) -> DiagnosticTags { - self.source.as_diagnostic().tags() - } - } - - /// Diagnostic type returned by [super::DiagnosticExt::with_file_source_code], - /// uses `source_code` as its location source code if `source` doesn't - /// return one. - pub struct FileSourceCodeDiagnostic { - pub(super) source_code: Option>, - pub(super) source: E, - } - - impl Debug for FileSourceCodeDiagnostic { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - f.debug_struct("Diagnostic") - .field("source_code", &self.source_code) - .field("source", &self.source) - .finish() - } - } - - impl Diagnostic for FileSourceCodeDiagnostic { - fn category(&self) -> Option<&'static Category> { - self.source.as_diagnostic().category() - } - - fn severity(&self) -> Severity { - self.source.as_diagnostic().severity() - } - - fn description(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - self.source.as_diagnostic().description(fmt) - } - - fn message(&self, fmt: &mut fmt::Formatter<'_>) -> io::Result<()> { - self.source.as_diagnostic().message(fmt) - } - - fn advices(&self, visitor: &mut dyn Visit) -> io::Result<()> { - if let Some(source_code) = &self.source_code { - let mut visitor = FileSourceCodeVisitor { - visitor, - source_code: source_code.as_deref(), - }; - - self.source.as_diagnostic().advices(&mut visitor) - } else { - self.source.as_diagnostic().advices(visitor) - } - } - - fn verbose_advices(&self, visitor: &mut dyn Visit) -> io::Result<()> { - if let Some(source_code) = &self.source_code { - let mut visitor = FileSourceCodeVisitor { - visitor, - source_code: source_code.as_deref(), - }; - - self.source.as_diagnostic().verbose_advices(&mut visitor) - } else { - self.source.as_diagnostic().verbose_advices(visitor) - } - } - - fn location(&self) -> Location<'_> { - let location = self.source.as_diagnostic().location(); - Location { - source_code: location - .source_code - .or_else(|| Some(self.source_code.as_ref()?.as_deref())), - ..location - } - } - - fn tags(&self) -> DiagnosticTags { - self.source.as_diagnostic().tags() - } - } - - /// Helper wrapper for a [Visitor], automatically inject `source_code` into - /// the location of code frame advices if they don't have one already. 
- struct FileSourceCodeVisitor<'a> { - visitor: &'a mut dyn Visit, - source_code: SourceCode<&'a str, &'a LineIndex>, - } - - impl Visit for FileSourceCodeVisitor<'_> { - fn record_log(&mut self, category: LogCategory, text: &dyn fmt::Display) -> io::Result<()> { - self.visitor.record_log(category, text) - } - - fn record_list(&mut self, list: &[&dyn fmt::Display]) -> io::Result<()> { - self.visitor.record_list(list) - } - - fn record_frame(&mut self, location: Location<'_>) -> io::Result<()> { - self.visitor.record_frame(Location { - source_code: Some(location.source_code.unwrap_or(self.source_code)), - ..location - }) - } - - fn record_diff(&mut self, diff: &TextEdit) -> io::Result<()> { - self.visitor.record_diff(diff) - } - - fn record_backtrace( - &mut self, - title: &dyn fmt::Display, - backtrace: &Backtrace, - ) -> io::Result<()> { - self.visitor.record_backtrace(title, backtrace) - } - - fn record_command(&mut self, command: &str) -> io::Result<()> { - self.visitor.record_command(command) - } - - fn record_group( - &mut self, - title: &dyn fmt::Display, - advice: &dyn Advices, - ) -> io::Result<()> { - self.visitor.record_group(title, advice) - } - } - - /// Diagnostic type returned by [super::DiagnosticExt::with_tags], - /// merges `tags` with the tags of its source - pub struct TagsDiagnostic { - pub(super) tags: DiagnosticTags, - pub(super) source: E, - } - - impl Debug for TagsDiagnostic { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - f.debug_struct("Diagnostic") - .field("tags", &self.tags) - .field("source", &self.source) - .finish() - } - } - - impl Diagnostic for TagsDiagnostic { - fn category(&self) -> Option<&'static Category> { - self.source.as_diagnostic().category() - } - - fn severity(&self) -> Severity { - self.source.as_diagnostic().severity() - } - - fn description(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - self.source.as_diagnostic().description(fmt) - } - - fn message(&self, fmt: &mut fmt::Formatter<'_>) -> io::Result<()> { - self.source.as_diagnostic().message(fmt) - } - - fn advices(&self, visitor: &mut dyn Visit) -> io::Result<()> { - self.source.as_diagnostic().advices(visitor) - } - - fn verbose_advices(&self, visitor: &mut dyn Visit) -> io::Result<()> { - self.source.as_diagnostic().verbose_advices(visitor) - } - - fn location(&self) -> Location<'_> { - self.source.as_diagnostic().location() - } - - fn tags(&self) -> DiagnosticTags { - self.source.as_diagnostic().tags() | self.tags - } - } - - /// Diagnostic type returned by [super::DiagnosticExt::with_severity], - /// replaces `severity` with the severity of its source - pub struct SeverityDiagnostic { - pub(super) severity: Severity, - pub(super) source: E, - } - - impl Debug for SeverityDiagnostic { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - f.debug_struct("Diagnostic") - .field("severity", &self.severity) - .field("source", &self.source) - .finish() - } - } - - impl Diagnostic for SeverityDiagnostic { - fn category(&self) -> Option<&'static Category> { - self.source.as_diagnostic().category() - } - - fn severity(&self) -> Severity { - self.severity - } - - fn description(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - self.source.as_diagnostic().description(fmt) - } - - fn message(&self, fmt: &mut fmt::Formatter<'_>) -> io::Result<()> { - self.source.as_diagnostic().message(fmt) - } - - fn advices(&self, visitor: &mut dyn Visit) -> io::Result<()> { - self.source.as_diagnostic().advices(visitor) - } - - fn 
verbose_advices(&self, visitor: &mut dyn Visit) -> io::Result<()> { - self.source.as_diagnostic().verbose_advices(visitor) - } - - fn location(&self) -> Location<'_> { - self.source.as_diagnostic().location() - } - - fn tags(&self) -> DiagnosticTags { - self.source.as_diagnostic().tags() - } - } -} diff --git a/crates/pgt_diagnostics/src/diagnostic.rs b/crates/pgt_diagnostics/src/diagnostic.rs deleted file mode 100644 index 9a62ede1..00000000 --- a/crates/pgt_diagnostics/src/diagnostic.rs +++ /dev/null @@ -1,271 +0,0 @@ -use std::{ - convert::Infallible, - fmt::{Debug, Display}, - io, - ops::{BitOr, BitOrAssign}, - str::FromStr, -}; - -use enumflags2::{BitFlags, bitflags, make_bitflags}; -use serde::{Deserialize, Serialize}; - -use pgt_console::fmt; - -use crate::{Category, Location, Visit}; - -/// The `Diagnostic` trait defines the metadata that can be exposed by error -/// types in order to print details diagnostics in the console of the editor -/// -/// ## Implementation -/// -/// Most types should not have to implement this trait manually, and should -/// instead rely on the `Diagnostic` derive macro also provided by this crate: -/// -/// ``` -/// # use pgt_diagnostics::Diagnostic; -/// #[derive(Debug, Diagnostic)] -/// #[diagnostic(category = "lint/style/noShoutyConstants", tags(FIXABLE))] -/// struct ExampleDiagnostic { -/// #[message] -/// #[description] -/// message: String, -/// } -/// ``` -pub trait Diagnostic: Debug { - /// The category of a diagnostic uniquely identifying this - /// diagnostic type, such as `lint/correctness/noArguments`, `args/invalid` - /// or `format/disabled`. - fn category(&self) -> Option<&'static Category> { - None - } - - /// The severity defines whether this diagnostic reports an error, a - /// warning, an information or a hint to the user. - fn severity(&self) -> Severity { - Severity::Error - } - - /// The description is a text-only explanation of the issue this diagnostic - /// is reporting, intended for display contexts that do not support rich - /// markup such as in-editor popovers - /// - /// The description should generally be as exhaustive as possible, since - /// the clients that do not support rendering markup will not render the - /// advices for the diagnostic either. - fn description(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - let _ = fmt; - Ok(()) - } - - /// An explanation of the issue this diagnostic is reporting - /// - /// In general it's better to keep this message as short as possible, and - /// instead rely on advices to better convey contextual explanations to the - /// user. - fn message(&self, fmt: &mut fmt::Formatter<'_>) -> io::Result<()> { - let _ = fmt; - Ok(()) - } - - /// Advices are the main building blocks used compose rich errors. They are - /// implemented using a visitor pattern, where consumers of a diagnostic - /// can visit the object and collect the advices that make it up for the - /// purpose of display or introspection. - fn advices(&self, visitor: &mut dyn Visit) -> io::Result<()> { - let _ = visitor; - Ok(()) - } - - /// Diagnostics can defines additional advices to be printed if the user - /// requires more detail about the diagnostic. - fn verbose_advices(&self, visitor: &mut dyn Visit) -> io::Result<()> { - let _ = visitor; - Ok(()) - } - - /// A diagnostic can be tied to a specific "location": this can be a file, - /// memory buffer, command line argument, etc. It may also be tied to a - /// specific text range within the content of that location. 
Finally, it - /// may also provide the source string for that location (this is required - /// in order to display a code frame advice for the diagnostic). - fn location(&self) -> Location<'_> { - Location::builder().build() - } - - /// Tags convey additional boolean metadata about the nature of a diagnostic: - /// - If the diagnostic can be automatically fixed - /// - If the diagnostic resulted from and internal error - /// - If the diagnostic is being emitted as part of a crash / fatal error - /// - If the diagnostic is a warning about a piece of unused or unnecessary code - /// - If the diagnostic is a warning about a piece of deprecated or obsolete code. - /// - If the diagnostic is meant to provide more information - fn tags(&self) -> DiagnosticTags { - DiagnosticTags::empty() - } - - /// Similarly to the `source` method of the [std::error::Error] trait, this - /// returns another diagnostic that's the logical "cause" for this issue. - /// For instance, a "request failed" diagnostic may have been cause by a - /// "deserialization error". This allows low-level error to be wrapped in - /// higher level concepts, while retaining enough information to display - /// and fix the underlying issue. - fn source(&self) -> Option<&dyn Diagnostic> { - None - } -} - -#[derive( - Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize, Default, -)] -#[serde(rename_all = "camelCase")] -#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] -/// The severity to associate to a diagnostic. -pub enum Severity { - /// Reports a hint. - Hint, - /// Reports an information. - #[default] - Information, - /// Reports a warning. - Warning, - /// Reports an error. - Error, - /// Reports a crash. - Fatal, -} - -impl FromStr for Severity { - type Err = String; - - fn from_str(s: &str) -> Result { - match s { - "hint" => Ok(Self::Information), - "info" => Ok(Self::Information), - "warn" => Ok(Self::Warning), - "error" => Ok(Self::Error), - v => Err(format!( - "Found unexpected value ({v}), valid values are: info, warn, error." - )), - } - } -} - -impl Display for Severity { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - Self::Hint => write!(f, "info"), - Self::Information => write!(f, "info"), - Self::Warning => write!(f, "warn"), - Self::Error => write!(f, "error"), - Self::Fatal => write!(f, "fatal"), - } - } -} - -/// Internal enum used to automatically generate bit offsets for [DiagnosticTags] -/// and help with the implementation of `serde` and `schemars` for tags. -#[derive(Debug, Copy, Clone, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] -#[bitflags] -#[repr(u8)] -pub(super) enum DiagnosticTag { - Fixable = 1 << 0, - Internal = 1 << 1, - UnnecessaryCode = 1 << 2, - DeprecatedCode = 1 << 3, - Verbose = 1 << 4, -} - -#[derive(Debug, Copy, Clone, PartialEq, Eq)] -pub struct DiagnosticTags(BitFlags); -impl DiagnosticTags { - /// This diagnostic has a fix suggestion. - pub const FIXABLE: Self = Self(make_bitflags!(DiagnosticTag::{Fixable})); - /// This diagnostic results from an internal error. - pub const INTERNAL: Self = Self(make_bitflags!(DiagnosticTag::{Internal})); - /// This diagnostic tags unused or unnecessary code, this may change - /// how the diagnostic is render in editors. 
-/// Internal enum used to automatically generate bit offsets for [DiagnosticTags]
-/// and help with the implementation of `serde` and `schemars` for tags.
-#[derive(Debug, Copy, Clone, Serialize, Deserialize)]
-#[serde(rename_all = "camelCase")]
-#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
-#[bitflags]
-#[repr(u8)]
-pub(super) enum DiagnosticTag {
-    Fixable = 1 << 0,
-    Internal = 1 << 1,
-    UnnecessaryCode = 1 << 2,
-    DeprecatedCode = 1 << 3,
-    Verbose = 1 << 4,
-}
-
-#[derive(Debug, Copy, Clone, PartialEq, Eq)]
-pub struct DiagnosticTags(BitFlags<DiagnosticTag>);
-
-impl DiagnosticTags {
-    /// This diagnostic has a fix suggestion.
-    pub const FIXABLE: Self = Self(make_bitflags!(DiagnosticTag::{Fixable}));
-    /// This diagnostic results from an internal error.
-    pub const INTERNAL: Self = Self(make_bitflags!(DiagnosticTag::{Internal}));
-    /// This diagnostic tags unused or unnecessary code; this may change
-    /// how the diagnostic is rendered in editors.
-    pub const UNNECESSARY_CODE: Self = Self(make_bitflags!(DiagnosticTag::{UnnecessaryCode}));
-    /// This diagnostic tags deprecated or obsolete code; this may change
-    /// how the diagnostic is rendered in editors.
-    pub const DEPRECATED_CODE: Self = Self(make_bitflags!(DiagnosticTag::{DeprecatedCode}));
-    /// This diagnostic is verbose and should be printed only if the `--verbose` option is provided.
-    pub const VERBOSE: Self = Self(make_bitflags!(DiagnosticTag::{Verbose}));
-
-    pub const fn all() -> Self {
-        Self(BitFlags::ALL)
-    }
-    pub const fn empty() -> Self {
-        Self(BitFlags::EMPTY)
-    }
-    pub fn insert(&mut self, other: DiagnosticTags) {
-        self.0 |= other.0;
-    }
-    pub fn contains(self, other: impl Into<DiagnosticTags>) -> bool {
-        self.0.contains(other.into().0)
-    }
-    pub const fn union(self, other: Self) -> Self {
-        Self(self.0.union_c(other.0))
-    }
-    pub fn is_empty(self) -> bool {
-        self.0.is_empty()
-    }
-    pub fn is_verbose(&self) -> bool {
-        self.contains(DiagnosticTag::Verbose)
-    }
-}
-
-impl BitOr for DiagnosticTags {
-    type Output = Self;
-
-    fn bitor(self, rhs: Self) -> Self::Output {
-        DiagnosticTags(self.0 | rhs.0)
-    }
-}
-
-impl BitOrAssign for DiagnosticTags {
-    fn bitor_assign(&mut self, rhs: Self) {
-        self.0 |= rhs.0;
-    }
-}
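How the constants above compose through the bitflag API, as a short sketch (not in the original source):

```rust
fn tags_for_fixable_deprecation() -> DiagnosticTags {
    // Union two tag sets with `|`, then add a third in place with `|=`.
    let mut tags = DiagnosticTags::FIXABLE | DiagnosticTags::VERBOSE;
    tags |= DiagnosticTags::DEPRECATED_CODE;

    assert!(tags.contains(DiagnosticTags::FIXABLE));
    assert!(tags.is_verbose());
    assert!(!tags.contains(DiagnosticTags::INTERNAL));
    tags
}
```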
-// Implement the `Diagnostic` trait on the `Infallible` error type from the standard
-// library as a utility for implementing signatures that require a diagnostic
-// type when the operation can never fail.
-impl Diagnostic for Infallible {}
-
-pub(crate) mod internal {
-    //! The `AsDiagnostic` trait needs to be declared as public as it's referred
-    //! to in the `where` clause of other public items, but as it's not part of
-    //! the public API it's declared in a private module so it's not accessible
-    //! outside of the crate
-
-    use std::fmt::Debug;
-
-    use crate::Diagnostic;
-
-    /// Since [Error](crate::Error) must implement `From<T: Diagnostic>` to
-    /// be used with the `?` operator, it cannot implement the [Diagnostic]
-    /// trait (as that would conflict with the implementation of `From<T> for T`
-    /// in the standard library). The [AsDiagnostic] trait exists as an internal
-    /// implementation detail to bridge this gap and allow various types and
-    /// functions in `pgt_diagnostics` to be generic over all diagnostics +
-    /// `Error`.
-    pub trait AsDiagnostic: Debug {
-        type Diagnostic: Diagnostic + ?Sized;
-        fn as_diagnostic(&self) -> &Self::Diagnostic;
-        fn as_dyn(&self) -> &dyn Diagnostic;
-    }
-
-    impl<D: Diagnostic> AsDiagnostic for D {
-        type Diagnostic = D;
-
-        fn as_diagnostic(&self) -> &Self::Diagnostic {
-            self
-        }
-
-        fn as_dyn(&self) -> &dyn Diagnostic {
-            self
-        }
-    }
-}
diff --git a/crates/pgt_diagnostics/src/display.rs b/crates/pgt_diagnostics/src/display.rs
deleted file mode 100644
index d270d08f..00000000
--- a/crates/pgt_diagnostics/src/display.rs
+++ /dev/null
@@ -1,1081 +0,0 @@
-use pgt_console::fmt::MarkupElements;
-use pgt_console::{
-    HorizontalLine, Markup, MarkupBuf, MarkupElement, MarkupNode, Padding, fmt, markup,
-};
-use pgt_text_edit::TextEdit;
-use std::path::Path;
-use std::{env, io, iter};
-use unicode_width::UnicodeWidthStr;
-
-mod backtrace;
-mod diff;
-pub(super) mod frame;
-mod message;
-
-pub use crate::display::frame::{SourceFile, SourceLocation};
-use crate::{
-    Advices, Diagnostic, DiagnosticTags, Location, LogCategory, Resource, Severity, Visit,
-    diagnostic::internal::AsDiagnostic,
-};
-
-pub use self::backtrace::{Backtrace, set_bottom_frame};
-pub use self::message::MessageAndDescription;
-
-/// Helper struct for printing the description of a diagnostic into any
-/// formatter implementing [std::fmt::Write].
-pub struct PrintDescription<'fmt, D: ?Sized>(pub &'fmt D);
-
-impl<D: AsDiagnostic + ?Sized> std::fmt::Display for PrintDescription<'_, D> {
-    fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        self.0
-            .as_diagnostic()
-            .description(fmt)
-            .map_err(|_| std::fmt::Error)
-    }
-}
-
-/// Helper struct for printing a diagnostic as markup into any formatter
-/// implementing [pgt_console::fmt::Write].
-pub struct PrintDiagnostic<'fmt, D: ?Sized> {
-    diag: &'fmt D,
-    verbose: bool,
-    search: bool,
-}
-
-impl<'fmt, D: AsDiagnostic + ?Sized> PrintDiagnostic<'fmt, D> {
-    pub fn simple(diag: &'fmt D) -> Self {
-        Self {
-            diag,
-            verbose: false,
-            search: false,
-        }
-    }
-
-    pub fn verbose(diag: &'fmt D) -> Self {
-        Self {
-            diag,
-            verbose: true,
-            search: false,
-        }
-    }
-
-    pub fn search(diag: &'fmt D) -> Self {
-        Self {
-            diag,
-            verbose: false,
-            search: true,
-        }
-    }
-}
-
-impl<D: AsDiagnostic + ?Sized> fmt::Display for PrintDiagnostic<'_, D> {
-    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> io::Result<()> {
-        let diagnostic = self.diag.as_diagnostic();
-
-        // Print the header for the diagnostic
-        fmt.write_markup(markup! {
-            {PrintHeader(diagnostic)}"\n\n"
-        })?;
-        // Wrap the formatter with an indentation level and print the advices
-        let mut slot = None;
-        let mut fmt = IndentWriter::wrap(fmt, &mut slot, true, "  ");
-
-        if self.search {
-            let mut visitor = PrintSearch(&mut fmt);
-            print_advices(&mut visitor, diagnostic, self.verbose)
-        } else {
-            let mut visitor = PrintAdvices(&mut fmt);
-            print_advices(&mut visitor, diagnostic, self.verbose)
-        }
-    }
-}
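A hypothetical rendering helper, mirroring how the unit tests at the bottom of this file drive these printers: a diagnostic is interpolated into `markup!` through `PrintDiagnostic` and collected into an in-memory `MarkupBuf`.

```rust
/// Renders a diagnostic, including its verbose advices, into a markup buffer.
fn render_verbose<D: AsDiagnostic>(diag: &D) -> MarkupBuf {
    markup!({ PrintDiagnostic::verbose(diag) }).to_owned()
}
```

-/// Display struct implementing the formatting of a diagnostic header.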
-pub(crate) struct PrintHeader<'fmt, D: ?Sized>(pub(crate) &'fmt D); - -impl fmt::Display for PrintHeader<'_, D> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> io::Result<()> { - let Self(diagnostic) = *self; - - // Wrap the formatter with a counter to measure the width of the printed text - let mut slot = None; - let mut fmt = CountWidth::wrap(f, &mut slot); - - // Print the diagnostic location if it has a file path - let location = diagnostic.location(); - let file_name = match &location.resource { - Some(Resource::File(file)) => Some(file), - _ => None, - }; - - let is_vscode = env::var("TERM_PROGRAM").unwrap_or_default() == "vscode"; - - if let Some(name) = file_name { - if is_vscode { - fmt.write_str(name)?; - } else { - let path_name = Path::new(name); - if path_name.is_absolute() { - let link = format!("file://{name}"); - fmt.write_markup(markup! { - {name} - })?; - } else { - fmt.write_str(name)?; - } - } - - // Print the line and column position if the location has a span and source code - // (the source code is necessary to convert a byte offset into a line + column) - if let (Some(span), Some(source_code)) = (location.span, location.source_code) { - let file = SourceFile::new(source_code); - if let Ok(location) = file.location(span.start()) { - fmt.write_markup(markup! { - ":"{location.line_number.get()}":"{location.column_number.get()} - })?; - } - } - - fmt.write_str(" ")?; - } - - // Print the category of the diagnostic, with a hyperlink if - // the category has an associated link - if let Some(category) = diagnostic.category() { - if let Some(link) = category.link() { - fmt.write_markup(markup! { - {category.name()}" " - })?; - } else { - fmt.write_markup(markup! { - {category.name()}" " - })?; - } - } - - // Print the internal, fixable and fatal tags - let tags = diagnostic.tags(); - - if tags.contains(DiagnosticTags::INTERNAL) { - fmt.write_markup(markup! { - " INTERNAL "" " - })?; - } - - if tags.contains(DiagnosticTags::FIXABLE) { - fmt.write_markup(markup! { - " FIXABLE "" " - })?; - } - - if tags.contains(DiagnosticTags::DEPRECATED_CODE) { - fmt.write_markup(markup! { - " DEPRECATED "" " - })?; - } - - if tags.contains(DiagnosticTags::VERBOSE) { - fmt.write_markup(markup! { - " VERBOSE "" " - })?; - } - if diagnostic.severity() == Severity::Fatal { - fmt.write_markup(markup! { - " FATAL "" " - })?; - } - - // Load the printed width for the header, and fill the rest of the line - // with the '━' line character up to 100 columns with at least 10 characters - const HEADER_WIDTH: usize = 100; - const MIN_WIDTH: usize = 10; - - let text_width = slot.map_or(0, |writer| writer.width); - let line_width = HEADER_WIDTH.saturating_sub(text_width).max(MIN_WIDTH); - HorizontalLine::new(line_width).fmt(f) - } -} - -/// Wrapper for a type implementing [fmt::Write] that counts the total width of -/// all printed characters. -struct CountWidth<'a, W: ?Sized> { - writer: &'a mut W, - width: usize, -} - -impl<'write> CountWidth<'write, dyn fmt::Write + 'write> { - /// Wrap the writer in an existing [fmt::Formatter] with an instance of [CountWidth]. 
- fn wrap<'slot, 'fmt: 'write + 'slot>( - fmt: &'fmt mut fmt::Formatter<'_>, - slot: &'slot mut Option, - ) -> fmt::Formatter<'slot> { - fmt.wrap_writer(|writer| slot.get_or_insert(Self { writer, width: 0 })) - } -} - -impl fmt::Write for CountWidth<'_, W> { - fn write_str(&mut self, elements: &fmt::MarkupElements<'_>, content: &str) -> io::Result<()> { - self.writer.write_str(elements, content)?; - self.width += UnicodeWidthStr::width(content); - Ok(()) - } - - fn write_fmt( - &mut self, - elements: &fmt::MarkupElements<'_>, - content: std::fmt::Arguments<'_>, - ) -> io::Result<()> { - if let Some(content) = content.as_str() { - self.write_str(elements, content) - } else { - let content = content.to_string(); - self.write_str(elements, &content) - } - } -} - -/// Write the advices for `diagnostic` into `visitor`. -fn print_advices(visitor: &mut V, diagnostic: &D, verbose: bool) -> io::Result<()> -where - V: Visit, - D: Diagnostic + ?Sized, -{ - // Visit the advices of the diagnostic with a lightweight visitor that - // detects if the diagnostic has any frame or backtrace advice - let mut frame_visitor = FrameVisitor { - location: diagnostic.location(), - skip_frame: false, - }; - - diagnostic.advices(&mut frame_visitor)?; - - let skip_frame = frame_visitor.skip_frame; - - // Print the message for the diagnostic as a log advice - print_message_advice(visitor, diagnostic, skip_frame)?; - - // Print the other advices for the diagnostic - diagnostic.advices(visitor)?; - - // Print the tags of the diagnostic as advices - print_tags_advices(visitor, diagnostic)?; - - // If verbose printing is enabled, print the verbose advices in a nested group - if verbose { - // Count the number of verbose advices in the diagnostic - let mut counter = CountAdvices(0); - diagnostic.verbose_advices(&mut counter)?; - - // If the diagnostic has any verbose advice, print the group - if !counter.is_empty() { - let verbose_advices = PrintVerboseAdvices(diagnostic); - visitor.record_group(&"Verbose advice", &verbose_advices)?; - } - } - - Ok(()) -} - -/// Advice visitor used to detect if the diagnostic contains any frame or backtrace diagnostic. -#[derive(Debug)] -struct FrameVisitor<'diag> { - location: Location<'diag>, - skip_frame: bool, -} - -impl Visit for FrameVisitor<'_> { - fn record_frame(&mut self, location: Location<'_>) -> io::Result<()> { - if location == self.location { - self.skip_frame = true; - } - Ok(()) - } - - fn record_backtrace(&mut self, _: &dyn fmt::Display, _: &Backtrace) -> io::Result<()> { - self.skip_frame = true; - Ok(()) - } -} - -/// Print the message and code frame for the diagnostic as advices. -fn print_message_advice(visitor: &mut V, diagnostic: &D, skip_frame: bool) -> io::Result<()> -where - V: Visit, - D: Diagnostic + ?Sized, -{ - // Print the entire message / cause chain for the diagnostic to a MarkupBuf - let message = { - let mut message = MarkupBuf::default(); - let mut fmt = fmt::Formatter::new(&mut message); - fmt.write_markup(markup!({ PrintCauseChain(diagnostic) }))?; - message - }; - - // Print a log advice for the message, with a special fallback if the buffer is empty - if message.is_empty() { - visitor.record_log( - LogCategory::None, - &markup! 
{ - "no diagnostic message provided" - }, - )?; - } else { - let category = match diagnostic.severity() { - Severity::Fatal | Severity::Error => LogCategory::Error, - Severity::Warning => LogCategory::Warn, - Severity::Information | Severity::Hint => LogCategory::Info, - }; - - visitor.record_log(category, &message)?; - } - - // If the diagnostic has no explicit code frame or backtrace advice, print - // a code frame advice with the location of the diagnostic - if !skip_frame { - let location = diagnostic.location(); - if location.span.is_some() { - visitor.record_frame(location)?; - } - } - - Ok(()) -} - -/// Display wrapper for printing the "cause chain" of a diagnostic, with the -/// message of this diagnostic and all of its sources. -struct PrintCauseChain<'fmt, D: ?Sized>(&'fmt D); - -impl fmt::Display for PrintCauseChain<'_, D> { - fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> io::Result<()> { - let Self(diagnostic) = *self; - - diagnostic.message(fmt)?; - - let chain = iter::successors(diagnostic.source(), |prev| prev.source()); - for diagnostic in chain { - fmt.write_str("\n\nCaused by:\n")?; - - let mut slot = None; - let mut fmt = IndentWriter::wrap(fmt, &mut slot, true, " "); - diagnostic.message(&mut fmt)?; - } - - Ok(()) - } -} - -struct PrintSearch<'a, 'b>(&'a mut fmt::Formatter<'b>); - -impl Visit for PrintSearch<'_, '_> { - fn record_frame(&mut self, location: Location<'_>) -> io::Result<()> { - frame::print_highlighted_frame(self.0, location) - } -} - -/// Implementation of [Visitor] that prints the advices for a diagnostic. -struct PrintAdvices<'a, 'b>(&'a mut fmt::Formatter<'b>); - -impl PrintAdvices<'_, '_> { - fn print_log( - &mut self, - kind: MarkupElement<'_>, - prefix: char, - text: &dyn fmt::Display, - ) -> io::Result<()> { - self.0.write_markup(Markup(&[MarkupNode { - elements: &[MarkupElement::Emphasis, kind.clone()], - content: &prefix as &dyn fmt::Display, - }]))?; - - self.0.write_str(" ")?; - - let mut slot = None; - let mut fmt = IndentWriter::wrap(self.0, &mut slot, false, " "); - fmt.write_markup(Markup(&[MarkupNode { - elements: &[kind], - content: text, - }]))?; - - self.0.write_str("\n\n") - } -} - -impl Visit for PrintAdvices<'_, '_> { - fn record_log(&mut self, category: LogCategory, text: &dyn fmt::Display) -> io::Result<()> { - match category { - LogCategory::None => self.0.write_markup(markup! { {text}"\n\n" }), - LogCategory::Info => self.print_log(MarkupElement::Info, '\u{2139}', text), - LogCategory::Warn => self.print_log(MarkupElement::Warn, '\u{26a0}', text), - LogCategory::Error => self.print_log(MarkupElement::Error, '\u{2716}', text), - } - } - - fn record_list(&mut self, list: &[&dyn fmt::Display]) -> io::Result<()> { - for item in list { - let mut slot = None; - let mut fmt = IndentWriter::wrap(self.0, &mut slot, false, " "); - fmt.write_markup(markup! 
{ - "- "{*item}"\n" - })?; - } - - if list.is_empty() { - Ok(()) - } else { - self.0.write_str("\n") - } - } - - fn record_frame(&mut self, location: Location<'_>) -> io::Result<()> { - frame::print_frame(self.0, location) - } - - fn record_diff(&mut self, diff: &TextEdit) -> io::Result<()> { - diff::print_diff(self.0, diff) - } - - fn record_backtrace( - &mut self, - title: &dyn fmt::Display, - backtrace: &Backtrace, - ) -> io::Result<()> { - let mut backtrace = backtrace.clone(); - backtrace.resolve(); - - if backtrace.is_empty() { - return Ok(()); - } - - self.record_log(LogCategory::Info, title)?; - - backtrace::print_backtrace(self.0, &backtrace) - } - - fn record_command(&mut self, command: &str) -> io::Result<()> { - self.0.write_markup(markup! { - "$"" "{command}"\n\n" - }) - } - - fn record_group(&mut self, title: &dyn fmt::Display, advice: &dyn Advices) -> io::Result<()> { - self.0.write_markup(markup! { - {title}"\n\n" - })?; - - let mut slot = None; - let mut fmt = IndentWriter::wrap(self.0, &mut slot, true, " "); - let mut visitor = PrintAdvices(&mut fmt); - advice.record(&mut visitor) - } - - fn record_table( - &mut self, - padding: usize, - headers: &[MarkupBuf], - columns: &[&[MarkupBuf]], - ) -> io::Result<()> { - debug_assert_eq!( - headers.len(), - columns.len(), - "headers and columns must have the same number length" - ); - - if columns.is_empty() { - return Ok(()); - } - - let mut headers_iter = headers.iter().enumerate(); - let rows_number = columns[0].len(); - let columns_number = columns.len(); - - let mut longest_cell = 0; - for current_row_index in 0..rows_number { - for current_column_index in 0..columns_number { - let cell = columns - .get(current_column_index) - .and_then(|c| c.get(current_row_index)); - if let Some(cell) = cell { - if current_column_index == 0 && current_row_index == 0 { - longest_cell = cell.text_len(); - for (index, header_cell) in headers_iter.by_ref() { - self.0.write_markup(markup!({ header_cell }))?; - if index < headers.len() - 1 { - self.0.write_markup( - markup! {{Padding::new(padding + longest_cell - header_cell.text_len())}}, - )?; - } - } - - self.0.write_markup(markup! {"\n\n"})?; - } - let extra_padding = longest_cell.saturating_sub(cell.text_len()); - - self.0.write_markup(markup!({ cell }))?; - if columns_number != current_column_index + 1 { - self.0 - .write_markup(markup! {{Padding::new(padding + extra_padding)}})?; - } - } - } - self.0.write_markup(markup!("\n"))?; - } - - Ok(()) - } -} - -/// Print the fatal and internal tags for the diagnostic as log advices. -fn print_tags_advices(visitor: &mut V, diagnostic: &D) -> io::Result<()> -where - V: Visit, - D: Diagnostic + ?Sized, -{ - if diagnostic.severity() == Severity::Fatal { - visitor.record_log(LogCategory::Warn, &"Exited as this error could not be handled and resulted in a fatal error. Please report it if necessary.")?; - } - - if diagnostic.tags().contains(DiagnosticTags::INTERNAL) { - visitor.record_log(LogCategory::Warn, &"This diagnostic was derived from an internal error. Potential bug, please report it if necessary.")?; - } - - Ok(()) -} - -/// Advice visitor that counts how many advices are visited. 
-struct CountAdvices(usize); - -impl CountAdvices { - fn is_empty(&self) -> bool { - self.0 == 0 - } -} - -impl Visit for CountAdvices { - fn record_log(&mut self, _: LogCategory, _: &dyn fmt::Display) -> io::Result<()> { - self.0 += 1; - Ok(()) - } - - fn record_list(&mut self, _: &[&dyn fmt::Display]) -> io::Result<()> { - self.0 += 1; - Ok(()) - } - - fn record_frame(&mut self, _: Location<'_>) -> io::Result<()> { - self.0 += 1; - Ok(()) - } - - fn record_diff(&mut self, _: &TextEdit) -> io::Result<()> { - self.0 += 1; - Ok(()) - } - - fn record_backtrace(&mut self, _: &dyn fmt::Display, _: &Backtrace) -> io::Result<()> { - self.0 += 1; - Ok(()) - } - - fn record_command(&mut self, _: &str) -> io::Result<()> { - self.0 += 1; - Ok(()) - } - - fn record_group(&mut self, _: &dyn fmt::Display, _: &dyn Advices) -> io::Result<()> { - self.0 += 1; - Ok(()) - } -} - -/// Implements [Advices] for verbose advices of a diagnostic. -struct PrintVerboseAdvices<'a, D: ?Sized>(&'a D); - -impl Advices for PrintVerboseAdvices<'_, D> { - fn record(&self, visitor: &mut dyn Visit) -> io::Result<()> { - self.0.verbose_advices(visitor) - } -} - -/// Wrapper type over [fmt::Write] that injects `ident_text` at the start of -/// every line. -struct IndentWriter<'a, W: ?Sized> { - writer: &'a mut W, - pending_indent: bool, - ident_text: &'static str, -} - -impl<'write> IndentWriter<'write, dyn fmt::Write + 'write> { - fn wrap<'slot, 'fmt: 'write + 'slot>( - fmt: &'fmt mut fmt::Formatter<'_>, - slot: &'slot mut Option, - pending_indent: bool, - ident_text: &'static str, - ) -> fmt::Formatter<'slot> { - fmt.wrap_writer(|writer| { - slot.get_or_insert(Self { - writer, - pending_indent, - ident_text, - }) - }) - } -} - -impl fmt::Write for IndentWriter<'_, W> { - fn write_str( - &mut self, - elements: &fmt::MarkupElements<'_>, - mut content: &str, - ) -> io::Result<()> { - while !content.is_empty() { - if self.pending_indent { - self.writer - .write_str(&MarkupElements::Root, self.ident_text)?; - self.pending_indent = false; - } - - if let Some(index) = content.find('\n') { - let (start, end) = content.split_at(index + 1); - self.writer.write_str(elements, start)?; - self.pending_indent = true; - content = end; - } else { - return self.writer.write_str(elements, content); - } - } - - Ok(()) - } - - fn write_fmt( - &mut self, - elements: &fmt::MarkupElements<'_>, - content: std::fmt::Arguments<'_>, - ) -> io::Result<()> { - if let Some(content) = content.as_str() { - self.write_str(elements, content) - } else { - let content = content.to_string(); - self.write_str(elements, &content) - } - } -} - -#[cfg(test)] -mod tests { - use std::io; - - use pgt_console::{fmt, markup}; - use pgt_diagnostics::{DiagnosticTags, Severity}; - use pgt_diagnostics_categories::{Category, category}; - use pgt_text_edit::TextEdit; - use pgt_text_size::{TextRange, TextSize}; - use serde_json::{from_value, json}; - - use crate::{self as pgt_diagnostics}; - use crate::{ - Advices, Diagnostic, Location, LogCategory, PrintDiagnostic, Resource, SourceCode, Visit, - }; - - #[derive(Debug)] - struct TestDiagnostic { - path: Option, - span: Option, - source_code: Option, - advice: Option, - verbose_advice: Option, - source: Option>, - } - - impl TestDiagnostic { - fn empty() -> Self { - Self { - path: None, - span: None, - source_code: None, - advice: None, - verbose_advice: None, - source: None, - } - } - - fn with_location() -> Self { - Self { - path: Some(String::from("path")), - span: Some(TextRange::at(TextSize::from(0), TextSize::from(6))), - 
source_code: Some(String::from("source code")), - advice: None, - verbose_advice: None, - source: None, - } - } - } - - impl Diagnostic for TestDiagnostic { - fn category(&self) -> Option<&'static Category> { - Some(category!("internalError/io")) - } - - fn severity(&self) -> Severity { - Severity::Error - } - - fn description(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(fmt, "diagnostic message") - } - - fn message(&self, fmt: &mut fmt::Formatter<'_>) -> io::Result<()> { - write!(fmt, "diagnostic message") - } - - fn advices(&self, visitor: &mut dyn Visit) -> io::Result<()> { - if let Some(advice) = &self.advice { - advice.record(visitor)?; - } - - Ok(()) - } - - fn verbose_advices(&self, visitor: &mut dyn Visit) -> io::Result<()> { - if let Some(advice) = &self.verbose_advice { - advice.record(visitor)?; - } - - Ok(()) - } - - fn location(&self) -> Location<'_> { - Location::builder() - .resource(&self.path) - .span(&self.span) - .source_code(&self.source_code) - .build() - } - - fn tags(&self) -> DiagnosticTags { - DiagnosticTags::FIXABLE - } - - fn source(&self) -> Option<&dyn Diagnostic> { - self.source.as_deref() - } - } - - #[derive(Debug)] - struct LogAdvices; - - impl Advices for LogAdvices { - fn record(&self, visitor: &mut dyn Visit) -> io::Result<()> { - visitor.record_log(LogCategory::Error, &"error")?; - visitor.record_log(LogCategory::Warn, &"warn")?; - visitor.record_log(LogCategory::Info, &"info")?; - visitor.record_log(LogCategory::None, &"none") - } - } - - #[derive(Debug)] - struct ListAdvice; - - impl Advices for ListAdvice { - fn record(&self, visitor: &mut dyn Visit) -> io::Result<()> { - visitor.record_list(&[&"item 1", &"item 2"]) - } - } - - #[derive(Debug)] - struct FrameAdvice; - - impl Advices for FrameAdvice { - fn record(&self, visitor: &mut dyn Visit) -> io::Result<()> { - visitor.record_frame(Location { - resource: Some(Resource::File("other_path")), - span: Some(TextRange::new(TextSize::from(8), TextSize::from(16))), - source_code: Some(SourceCode { - text: "context location context", - line_starts: None, - }), - }) - } - } - - #[derive(Debug)] - struct DiffAdvice; - - impl Advices for DiffAdvice { - fn record(&self, visitor: &mut dyn Visit) -> io::Result<()> { - let diff = - TextEdit::from_unicode_words("context before context", "context after context"); - visitor.record_diff(&diff) - } - } - - #[derive(Debug)] - struct BacktraceAdvice; - - impl Advices for BacktraceAdvice { - fn record(&self, visitor: &mut dyn Visit) -> io::Result<()> { - let backtrace = from_value(json!([ - { - "ip": 0x0f0f_0f0f, - "symbols": [ - { - "name": "crate::module::function", - "filename": "crate/src/module.rs", - "lineno": 8, - "colno": 16 - } - ] - } - ])); - - visitor.record_backtrace(&"Backtrace Title", &backtrace.unwrap()) - } - } - - #[derive(Debug)] - struct CommandAdvice; - - impl Advices for CommandAdvice { - fn record(&self, visitor: &mut dyn Visit) -> io::Result<()> { - visitor.record_command("pg command --argument") - } - } - - #[derive(Debug)] - struct GroupAdvice; - - impl Advices for GroupAdvice { - fn record(&self, visitor: &mut dyn Visit) -> io::Result<()> { - visitor.record_group(&"Group Title", &LogAdvices) - } - } - - #[test] - fn test_header() { - let diag = TestDiagnostic::::with_location(); - - let diag = markup!({ PrintDiagnostic::verbose(&diag) }).to_owned(); - - let expected = markup!{ - "path:1:1 internalError/io "" FIXABLE "" ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\n" - "\n" - " " - "✖"" 
""diagnostic message""\n" - " \n" - " " - ">"" ""1 │ ""source code\n" - " "" │ ""^^^^^^""\n" - " \n" - }.to_owned(); - - assert_eq!( - diag, expected, - "\nactual:\n{diag:#?}\nexpected:\n{expected:#?}" - ); - } - #[test] - fn test_log_advices() { - let diag = TestDiagnostic { - advice: Some(LogAdvices), - ..TestDiagnostic::empty() - }; - - let diag = markup!({ PrintDiagnostic::verbose(&diag) }).to_owned(); - - let expected = markup!{ - "internalError/io "" FIXABLE "" ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\n" - "\n" - " " - "✖"" ""diagnostic message""\n" - " \n" - " " - "✖"" ""error""\n" - " \n" - " " - "⚠"" ""warn""\n" - " \n" - " " - "ℹ"" ""info""\n" - " \n" - " none\n" - " \n" - }.to_owned(); - - assert_eq!( - diag, expected, - "\nactual:\n{diag:#?}\nexpected:\n{expected:#?}" - ); - } - - #[test] - fn test_list_advice() { - let diag = TestDiagnostic { - advice: Some(ListAdvice), - ..TestDiagnostic::empty() - }; - - let diag = markup!({ PrintDiagnostic::verbose(&diag) }).to_owned(); - - let expected = markup!{ - "internalError/io "" FIXABLE "" ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\n" - "\n" - " " - "✖"" ""diagnostic message""\n" - " \n" - " - item 1\n" - " - item 2\n" - " \n" - }.to_owned(); - - assert_eq!( - diag, expected, - "\nactual:\n{diag:#?}\nexpected:\n{expected:#?}" - ); - } - - #[test] - fn test_frame_advice() { - let diag = TestDiagnostic { - advice: Some(FrameAdvice), - ..TestDiagnostic::empty() - }; - - let diag = markup!({ PrintDiagnostic::verbose(&diag) }).to_owned(); - - let expected = markup!{ - "internalError/io "" FIXABLE "" ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\n" - "\n" - " " - "✖"" ""diagnostic message""\n" - " \n" - " " - ">"" ""1 │ ""context location context\n" - " "" │ "" ""^^^^^^^^""\n" - " \n" - }.to_owned(); - - assert_eq!( - diag, expected, - "\nactual:\n{diag:#?}\nexpected:\n{expected:#?}" - ); - } - - #[test] - fn test_diff_advice() { - let diag = TestDiagnostic { - advice: Some(DiffAdvice), - ..TestDiagnostic::empty() - }; - - let diag = markup!({ PrintDiagnostic::verbose(&diag) }).to_owned(); - - let expected = markup!{ - "internalError/io "" FIXABLE "" ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\n" - "\n" - " " - "✖"" ""diagnostic message""\n" - " \n" - " " - "-"" ""context""·""before""·""context""\n" - " " - "+"" ""context""·""after""·""context""\n" - " \n" - }.to_owned(); - - assert_eq!( - diag, expected, - "\nactual:\n{diag:#?}\nexpected:\n{expected:#?}" - ); - } - - #[test] - fn test_backtrace_advice() { - let diag = TestDiagnostic { - advice: Some(BacktraceAdvice), - ..TestDiagnostic::empty() - }; - - let diag = markup!({ PrintDiagnostic::verbose(&diag) }).to_owned(); - - let expected = markup!{ - "internalError/io "" FIXABLE "" ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\n" - "\n" - " " - "✖"" ""diagnostic message""\n" - " \n" - " " - "ℹ"" ""Backtrace Title""\n" - " \n" - " 0: crate::module::function\n" - " at crate/src/module.rs:8:16\n" - }.to_owned(); - - assert_eq!( - diag, expected, - "\nactual:\n{diag:#?}\nexpected:\n{expected:#?}" - ); - } - - #[test] - fn test_command_advice() { - let diag = TestDiagnostic { - advice: Some(CommandAdvice), - ..TestDiagnostic::empty() - }; - - let diag = markup!({ PrintDiagnostic::verbose(&diag) }).to_owned(); - - let expected = markup!{ - "internalError/io "" FIXABLE "" ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\n" 
- "\n" - " " - "✖"" ""diagnostic message""\n" - " \n" - " " - "$"" pg command --argument\n" - " \n" - }.to_owned(); - - assert_eq!( - diag, expected, - "\nactual:\n{diag:#?}\nexpected:\n{expected:#?}" - ); - } - - #[test] - fn test_group_advice() { - let diag = TestDiagnostic { - advice: Some(GroupAdvice), - ..TestDiagnostic::empty() - }; - - let diag = markup!({ PrintDiagnostic::verbose(&diag) }).to_owned(); - - let expected = markup!{ - "internalError/io "" FIXABLE "" ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\n" - "\n" - " " - "✖"" ""diagnostic message""\n" - " \n" - " " - "Group Title""\n" - " \n" - " " - "✖"" ""error""\n" - " \n" - " " - "⚠"" ""warn""\n" - " \n" - " " - "ℹ"" ""info""\n" - " \n" - " none\n" - " \n" - }.to_owned(); - - assert_eq!( - diag, expected, - "\nactual:\n{diag:#?}\nexpected:\n{expected:#?}" - ); - } -} diff --git a/crates/pgt_diagnostics/src/display/backtrace.rs b/crates/pgt_diagnostics/src/display/backtrace.rs deleted file mode 100644 index ad9addbf..00000000 --- a/crates/pgt_diagnostics/src/display/backtrace.rs +++ /dev/null @@ -1,473 +0,0 @@ -use std::{borrow::Cow, path::PathBuf}; -use std::{cell::Cell, fmt::Write as _, io, os::raw::c_void, path::Path, slice}; - -use pgt_console::{fmt, markup}; -use serde::{Deserialize, Serialize}; - -use super::IndentWriter; - -/// The [Backtrace] type can be used to capture a native Rust stack trace, to -/// be displayed a diagnostic advice for native errors. -#[derive(Clone, Debug)] -#[cfg_attr(test, derive(Eq, PartialEq))] -pub struct Backtrace { - inner: BacktraceKind, -} - -impl Default for Backtrace { - // Do not inline this function to ensure it creates a stack frame, so that - // internal functions above it in the backtrace can be hidden when the - // backtrace is printed - #[inline(never)] - fn default() -> Self { - Self::capture(Backtrace::default as usize) - } -} - -impl Backtrace { - /// Take a snapshot of the current state of the stack and return it as a [Backtrace]. - pub fn capture(top_frame: usize) -> Self { - Self { - inner: BacktraceKind::Native(NativeBacktrace::new(top_frame)), - } - } - - /// Since the `capture` function only takes a lightweight snapshot of the - /// stack, it's necessary to perform an additional resolution step to map - /// the list of instruction pointers on the stack to actual symbol - /// information (like function name and file location) before printing the - /// backtrace. 
- pub(super) fn resolve(&mut self) { - if let BacktraceKind::Native(inner) = &mut self.inner { - inner.resolve(); - } - } - - fn frames(&self) -> BacktraceFrames<'_> { - match &self.inner { - BacktraceKind::Native(inner) => BacktraceFrames::Native(inner.frames()), - BacktraceKind::Serialized(inner) => BacktraceFrames::Serialized(inner), - } - } - - pub(crate) fn is_empty(&self) -> bool { - self.frames().is_empty() - } -} - -impl serde::Serialize for Backtrace { - fn serialize(&self, serializer: S) -> Result - where - S: serde::ser::Serializer, - { - let frames = match &self.inner { - BacktraceKind::Native(backtrace) => { - let mut backtrace = backtrace.clone(); - backtrace.resolve(); - - let frames: Vec<_> = backtrace - .frames() - .iter() - .map(SerializedFrame::from) - .collect(); - - Cow::Owned(frames) - } - BacktraceKind::Serialized(frames) => Cow::Borrowed(frames), - }; - - frames.serialize(serializer) - } -} - -impl<'de> serde::Deserialize<'de> for Backtrace { - fn deserialize(deserializer: D) -> Result - where - D: serde::de::Deserializer<'de>, - { - Ok(Self { - inner: BacktraceKind::Serialized(>::deserialize(deserializer)?), - }) - } -} - -#[cfg(feature = "schema")] -impl schemars::JsonSchema for Backtrace { - fn schema_name() -> String { - String::from("Backtrace") - } - - fn json_schema(r#gen: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema { - >::json_schema(r#gen) - } -} - -/// Internal representation of a [Backtrace], can be either a native backtrace -/// instance or a vector of serialized frames. -#[derive(Clone, Debug)] -enum BacktraceKind { - Native(NativeBacktrace), - Serialized(Vec), -} - -#[cfg(test)] -impl PartialEq for BacktraceKind { - fn eq(&self, _other: &Self) -> bool { - if let (BacktraceKind::Serialized(this), BacktraceKind::Serialized(other)) = (self, _other) - { - return this == other; - } - - false - } -} - -#[cfg(test)] -impl Eq for BacktraceKind {} - -/// Wrapper type for a native backtrace instance. -#[derive(Clone, Debug)] -struct NativeBacktrace { - backtrace: ::backtrace::Backtrace, - /// Pointer to the top frame, this frame and every entry above it on the - /// stack will not be displayed in the printed stack trace. - top_frame: usize, - /// Pointer to the bottom frame, this frame and every entry below it on the - /// stack will not be displayed in the printed stack trace. - bottom_frame: usize, -} - -impl NativeBacktrace { - fn new(top_frame: usize) -> Self { - Self { - backtrace: ::backtrace::Backtrace::new_unresolved(), - top_frame, - bottom_frame: bottom_frame(), - } - } - - fn resolve(&mut self) { - self.backtrace.resolve(); - } - - /// Returns the list of frames for this backtrace, truncated to the - /// `top_frame` and `bottom_frame`. - fn frames(&self) -> &'_ [::backtrace::BacktraceFrame] { - let mut frames = self.backtrace.frames(); - - let top_frame = frames.iter().position(|frame| { - frame.symbols().iter().any(|symbol| { - symbol - .addr() - .is_some_and(|addr| addr as usize == self.top_frame) - }) - }); - - if let Some(top_frame) = top_frame { - if let Some(bottom_frames) = frames.get(top_frame + 1..) { - frames = bottom_frames; - } - } - - let bottom_frame = frames.iter().position(|frame| { - frame.symbols().iter().any(|symbol| { - symbol - .addr() - .is_some_and(|addr| addr as usize == self.bottom_frame) - }) - }); - - if let Some(bottom_frame) = bottom_frame { - if let Some(top_frames) = frames.get(..bottom_frame + 1) { - frames = top_frames; - } - } - - frames - } -} - -thread_local! 
{ - /// This cell holds the address of the function that conceptually sits at the - /// "bottom" of the backtraces created on the current thread (all the frames - /// below this will be hidden when the backtrace is printed) - /// - /// This value is thread-local since different threads will generally have - /// different values for the bottom frame address: for the main thread this - /// will be the address of the `main` function, while on worker threads - /// this will be the start function for the thread (see the documentation - /// of [set_bottom_frame] for examples of where to set the bottom frame). - static BOTTOM_FRAME: Cell> = const { Cell::new(None) }; -} - -/// Registers a function pointer as the "bottom frame" for this thread: all -/// instances of [Backtrace] created on this thread will omit this function and -/// all entries below it on the stack -/// -/// ## Examples -/// -/// On the main thread: -/// ``` -/// # use pgt_diagnostics::set_bottom_frame; -/// # #[allow(clippy::needless_doctest_main)] -/// pub fn main() { -/// set_bottom_frame(main as usize); -/// -/// // ... -/// } -/// ``` -/// -/// On worker threads: -/// ``` -/// # use pgt_diagnostics::set_bottom_frame; -/// fn worker_thread() { -/// set_bottom_frame(worker_thread as usize); -/// -/// // ... -/// } -/// -/// std::thread::spawn(worker_thread); -/// ``` -pub fn set_bottom_frame(ptr: usize) { - BOTTOM_FRAME.with(|cell| { - cell.set(Some(ptr)); - }); -} - -fn bottom_frame() -> usize { - BOTTOM_FRAME.with(|cell| cell.get().unwrap_or(0)) -} - -pub(super) fn print_backtrace( - fmt: &mut fmt::Formatter<'_>, - backtrace: &Backtrace, -) -> io::Result<()> { - for (frame_index, frame) in backtrace.frames().iter().enumerate() { - if frame.ip().is_null() { - continue; - } - - fmt.write_fmt(format_args!("{frame_index:4}: "))?; - - let mut slot = None; - let mut fmt = IndentWriter::wrap(fmt, &mut slot, false, " "); - - for symbol in frame.symbols().iter() { - if let Some(name) = symbol.name() { - fmt.write_fmt(format_args!("{name:#}"))?; - } - - fmt.write_str("\n")?; - - if let Some(filename) = symbol.filename() { - let mut slot = None; - let mut fmt = IndentWriter::wrap(&mut fmt, &mut slot, true, " "); - - // Print a hyperlink if the file exists on disk - let href = if filename.exists() { - Some(format!("file:///{}", filename.display())) - } else { - None - }; - - // Build up the text of the link from the file path, the line number and column number - let mut text = filename.display().to_string(); - - if let Some(lineno) = symbol.lineno() { - // SAFETY: Writing a `u32` to a string should not fail - write!(text, ":{lineno}").unwrap(); - - if let Some(colno) = symbol.colno() { - // SAFETY: Writing a `u32` to a string should not fail - write!(text, ":{colno}").unwrap(); - } - } - - if let Some(href) = href { - fmt.write_markup(markup! { - "at " - {text} - "\n" - })?; - } else { - fmt.write_markup(markup! { - "at "{text}"\n" - })?; - } - } - } - } - - Ok(()) -} - -/// Serializable representation of a backtrace frame. 
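Since `Backtrace` also implements `Deserialize` in terms of the serialized frames defined next, a trace can be rebuilt from JSON, just as the `BacktraceAdvice` test in display.rs does. A sketch, assuming `serde_json` is available as in that test:

```rust
fn backtrace_from_json() -> Backtrace {
    // One frame with a single resolved symbol; the field names follow the
    // `SerializedFrame` and `SerializedSymbol` definitions below.
    serde_json::from_value(serde_json::json!([
        {
            "ip": 0x0f0f_0f0f,
            "symbols": [{
                "name": "crate::module::function",
                "filename": "crate/src/module.rs",
                "lineno": 8,
                "colno": 16
            }]
        }
    ]))
    .expect("well-formed frames")
}
```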
-#[derive(Clone, Debug, Serialize, Deserialize)] -#[cfg_attr( - feature = "schema", - derive(schemars::JsonSchema), - schemars(rename = "BacktraceFrame") -)] -#[cfg_attr(test, derive(Eq, PartialEq))] -struct SerializedFrame { - ip: u64, - symbols: Vec, -} - -impl From<&'_ backtrace::BacktraceFrame> for SerializedFrame { - fn from(frame: &'_ backtrace::BacktraceFrame) -> Self { - Self { - ip: frame.ip() as u64, - symbols: frame.symbols().iter().map(SerializedSymbol::from).collect(), - } - } -} - -/// Serializable representation of a backtrace frame symbol. -#[derive(Clone, Debug, Serialize, Deserialize)] -#[cfg_attr( - feature = "schema", - derive(schemars::JsonSchema), - schemars(rename = "BacktraceSymbol") -)] -#[cfg_attr(test, derive(Eq, PartialEq))] -struct SerializedSymbol { - name: Option, - filename: Option, - lineno: Option, - colno: Option, -} - -impl From<&'_ backtrace::BacktraceSymbol> for SerializedSymbol { - fn from(symbol: &'_ backtrace::BacktraceSymbol) -> Self { - Self { - name: symbol.name().map(|name| format!("{name:#}")), - filename: symbol.filename().map(ToOwned::to_owned), - lineno: symbol.lineno(), - colno: symbol.colno(), - } - } -} - -enum BacktraceFrames<'a> { - Native(&'a [::backtrace::BacktraceFrame]), - Serialized(&'a [SerializedFrame]), -} - -impl BacktraceFrames<'_> { - fn iter(&self) -> BacktraceFramesIter<'_> { - match self { - Self::Native(inner) => BacktraceFramesIter::Native(inner.iter()), - Self::Serialized(inner) => BacktraceFramesIter::Serialized(inner.iter()), - } - } - - fn is_empty(&self) -> bool { - match self { - Self::Native(inner) => inner.is_empty(), - Self::Serialized(inner) => inner.is_empty(), - } - } -} - -enum BacktraceFramesIter<'a> { - Native(slice::Iter<'a, ::backtrace::BacktraceFrame>), - Serialized(slice::Iter<'a, SerializedFrame>), -} - -impl<'a> Iterator for BacktraceFramesIter<'a> { - type Item = BacktraceFrame<'a>; - - fn next(&mut self) -> Option { - match self { - Self::Native(inner) => inner.next().map(BacktraceFrame::Native), - Self::Serialized(inner) => inner.next().map(BacktraceFrame::Serialized), - } - } -} - -enum BacktraceFrame<'a> { - Native(&'a ::backtrace::BacktraceFrame), - Serialized(&'a SerializedFrame), -} - -impl BacktraceFrame<'_> { - fn ip(&self) -> *mut c_void { - match self { - Self::Native(inner) => inner.ip(), - Self::Serialized(inner) => inner.ip as *mut c_void, - } - } - - fn symbols(&self) -> BacktraceSymbols<'_> { - match self { - Self::Native(inner) => BacktraceSymbols::Native(inner.symbols()), - Self::Serialized(inner) => BacktraceSymbols::Serialized(&inner.symbols), - } - } -} - -enum BacktraceSymbols<'a> { - Native(&'a [::backtrace::BacktraceSymbol]), - Serialized(&'a [SerializedSymbol]), -} - -impl BacktraceSymbols<'_> { - fn iter(&self) -> BacktraceSymbolsIter<'_> { - match self { - Self::Native(inner) => BacktraceSymbolsIter::Native(inner.iter()), - Self::Serialized(inner) => BacktraceSymbolsIter::Serialized(inner.iter()), - } - } -} - -enum BacktraceSymbolsIter<'a> { - Native(slice::Iter<'a, ::backtrace::BacktraceSymbol>), - Serialized(slice::Iter<'a, SerializedSymbol>), -} - -impl<'a> Iterator for BacktraceSymbolsIter<'a> { - type Item = BacktraceSymbol<'a>; - - fn next(&mut self) -> Option { - match self { - Self::Native(inner) => inner.next().map(BacktraceSymbol::Native), - Self::Serialized(inner) => inner.next().map(BacktraceSymbol::Serialized), - } - } -} - -enum BacktraceSymbol<'a> { - Native(&'a ::backtrace::BacktraceSymbol), - Serialized(&'a SerializedSymbol), -} - -impl 
BacktraceSymbol<'_> { - fn name(&self) -> Option { - match self { - Self::Native(inner) => inner.name().map(|name| format!("{name:#}")), - Self::Serialized(inner) => inner.name.clone(), - } - } - - fn filename(&self) -> Option<&Path> { - match self { - Self::Native(inner) => inner.filename(), - Self::Serialized(inner) => inner.filename.as_deref(), - } - } - - fn lineno(&self) -> Option { - match self { - Self::Native(inner) => inner.lineno(), - Self::Serialized(inner) => inner.lineno, - } - } - - fn colno(&self) -> Option { - match self { - Self::Native(inner) => inner.colno(), - Self::Serialized(inner) => inner.colno, - } - } -} diff --git a/crates/pgt_diagnostics/src/display/diff.rs b/crates/pgt_diagnostics/src/display/diff.rs deleted file mode 100644 index 1e01044a..00000000 --- a/crates/pgt_diagnostics/src/display/diff.rs +++ /dev/null @@ -1,982 +0,0 @@ -use std::{ - collections::{BTreeMap, BTreeSet}, - io, slice, -}; - -use pgt_console::{MarkupElement, fmt, markup}; -use pgt_text_edit::{ChangeTag, CompressedOp, TextEdit}; - -use super::frame::{ - CODE_FRAME_CONTEXT_LINES, IntoIter, OneIndexed, PrintInvisiblesOptions, calculate_print_width, - print_invisibles, text_width, -}; - -const MAX_PATCH_LINES: usize = 150; - -pub(super) fn print_diff(fmt: &mut fmt::Formatter<'_>, diff: &TextEdit) -> io::Result<()> { - // Before printing, we need to preprocess the list of DiffOps it's made of to classify them by line - let mut modified_lines = BTreeSet::new(); - let mut inserted_lines = BTreeMap::new(); - let mut before_line_to_after = BTreeMap::new(); - - let mut before_line = OneIndexed::MIN; - let mut after_line = OneIndexed::MIN; - - process_diff_ops( - diff, - PushToLineState { - modified_lines: &mut modified_lines, - inserted_lines: &mut inserted_lines, - before_line_to_after: &mut before_line_to_after, - }, - &mut after_line, - &mut before_line, - ); - - let before_line_count = before_line; - let after_line_count = after_line; - - // If only a single line was modified, print a "short diff" - let modified_line = if before_line_count == after_line_count { - let mut iter = modified_lines.iter().filter_map(|key| { - let line = inserted_lines.get(key)?; - - // A line has been modified if its diff list is empty (the line was - // either fully inserted or fully removed) or if its diff list has - // any delete or insert operation - let has_edits = line.diffs.is_empty() - || line.diffs.iter().any(|(tag, text)| { - matches!(tag, ChangeTag::Delete | ChangeTag::Insert) && !text.is_empty() - }); - - if has_edits { Some((key, line)) } else { None } - }); - - iter.next().and_then(|(key, line)| { - if iter.next().is_some() { - return None; - } - - // Disallow fully empty lines from being displayed in short mode - if !line.diffs.is_empty() { - Some((key, line)) - } else { - None - } - }) - } else { - None - }; - - if let Some((key, entry)) = modified_line { - return print_short_diff(fmt, key, entry); - } - - // Otherwise if multiple lines were modified we need to perform more preprocessing, - // to merge identical line numbers and calculate how many context lines need to be rendered - let mut diffs_by_line = Vec::new(); - let mut shown_line_indexes = BTreeSet::new(); - - process_diff_lines( - &mut inserted_lines, - &mut before_line_to_after, - &mut diffs_by_line, - &mut shown_line_indexes, - before_line_count, - after_line_count, - ); - - // Finally when have a flat list of lines we can now print - print_full_diff( - fmt, - &diffs_by_line, - &shown_line_indexes, - before_line_count, - after_line_count, - 
) -} - -/// This function scans the list of DiffOps that make up the `diff` and derives -/// the following data structures: -/// - `modified_lines` is the set of [LineKey] that contain at least one insert -/// or delete operation -/// - `inserted_lines` maps a [LineKey] to the list of diff operations that -/// happen on the corresponding line -/// - `before_line_to_after` maps line numbers in the old revision of the text -/// to line numbers in the new revision -/// - `after_line` counts the number of lines in the new revision of the document -/// - `before_line` counts the number of lines in the old revision of the document -fn process_diff_ops<'diff>( - diff: &'diff TextEdit, - mut state: PushToLineState<'_, 'diff>, - after_line: &mut OneIndexed, - before_line: &mut OneIndexed, -) { - for (op_index, op) in diff.iter().enumerate() { - let op = match op { - CompressedOp::DiffOp(op) => op, - CompressedOp::EqualLines { line_count } => { - let is_first_op = op_index == 0; - for line_index in 0..=line_count.get() { - // Don't increment the first line if we are the first tuple marking the beginning of the file - if !(is_first_op && line_index == 0) { - *after_line = after_line.saturating_add(1); - *before_line = before_line.saturating_add(1); - } - - state.before_line_to_after.insert(*before_line, *after_line); - - push_to_line(&mut state, *before_line, *after_line, ChangeTag::Equal, ""); - } - - continue; - } - }; - - let tag = op.tag(); - let text = op.text(diff); - - // Get all the lines - let mut parts = text.split('\n'); - - // Deconstruct each text chunk - let current_line = parts.next(); - - // The first chunk belongs to the current line - if let Some(current_line) = current_line { - push_to_line(&mut state, *before_line, *after_line, tag, current_line); - } - - // Create unique lines for each other chunk - for new_line in parts { - match tag { - ChangeTag::Equal => { - *after_line = after_line.saturating_add(1); - *before_line = before_line.saturating_add(1); - } - - ChangeTag::Delete => { - *before_line = before_line.saturating_add(1); - } - ChangeTag::Insert => { - *after_line = after_line.saturating_add(1); - } - } - - state.before_line_to_after.insert(*before_line, *after_line); - - push_to_line(&mut state, *before_line, *after_line, tag, new_line); - } - } -} - -#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)] -struct LineKey { - before_line: Option, - after_line: Option, -} - -impl LineKey { - const fn before(before_line: OneIndexed) -> Self { - Self { - before_line: Some(before_line), - after_line: None, - } - } - - const fn after(after_line: OneIndexed) -> Self { - Self { - before_line: None, - after_line: Some(after_line), - } - } -} - -#[derive(Debug, Clone)] -struct GroupDiffsLine<'a> { - before_line: Option, - after_line: Option, - diffs: Vec<(ChangeTag, &'a str)>, -} - -impl<'a> GroupDiffsLine<'a> { - fn insert( - inserted_lines: &mut BTreeMap, - key: LineKey, - tag: ChangeTag, - text: &'a str, - ) { - inserted_lines - .entry(key) - .and_modify(|line| { - if !text.is_empty() { - line.diffs.push((tag, text)); - } - }) - .or_insert_with_key(|key| GroupDiffsLine { - before_line: key.before_line, - after_line: key.after_line, - diffs: if text.is_empty() { - Vec::new() - } else { - vec![(tag, text)] - }, - }); - } -} - -struct PushToLineState<'a, 'b> { - modified_lines: &'a mut BTreeSet, - inserted_lines: &'a mut BTreeMap>, - before_line_to_after: &'a mut BTreeMap, -} - -fn push_to_line<'b>( - state: &mut PushToLineState<'_, 'b>, - before_line: OneIndexed, - 
after_line: OneIndexed, - tag: ChangeTag, - text: &'b str, -) { - let PushToLineState { - modified_lines, - inserted_lines, - before_line_to_after, - } = state; - - match tag { - ChangeTag::Insert => { - GroupDiffsLine::insert(inserted_lines, LineKey::after(after_line), tag, text); - modified_lines.insert(LineKey::after(after_line)); - } - ChangeTag::Delete => { - GroupDiffsLine::insert(inserted_lines, LineKey::before(before_line), tag, text); - modified_lines.insert(LineKey::before(before_line)); - } - ChangeTag::Equal => { - if before_line == OneIndexed::MIN && after_line == OneIndexed::MIN { - before_line_to_after.insert(before_line, after_line); - } - - GroupDiffsLine::insert(inserted_lines, LineKey::after(after_line), tag, text); - GroupDiffsLine::insert(inserted_lines, LineKey::before(before_line), tag, text); - } - } -} - -fn process_diff_lines<'lines, 'diff>( - inserted_lines: &'lines mut BTreeMap>, - before_line_to_after: &mut BTreeMap, - diffs_by_line: &mut Vec<&'lines GroupDiffsLine<'diff>>, - shown_line_indexes: &mut BTreeSet, - before_line_count: OneIndexed, - after_line_count: OneIndexed, -) { - // Merge identical lines - for before_line in IntoIter::new(OneIndexed::MIN..=before_line_count) { - let after_line = match before_line_to_after.get(&before_line) { - Some(after_line) => *after_line, - None => continue, - }; - - let inserted_before_line = inserted_lines.get(&LineKey::before(before_line)); - let inserted_after_line = inserted_lines.get(&LineKey::after(after_line)); - - if let (Some(inserted_before_line), Some(inserted_after_line)) = - (inserted_before_line, inserted_after_line) - { - if inserted_before_line.diffs == inserted_after_line.diffs { - let line = inserted_lines - .remove(&LineKey::before(before_line)) - .unwrap(); - - inserted_lines.remove(&LineKey::after(after_line)).unwrap(); - - inserted_lines.insert( - LineKey { - before_line: Some(before_line), - after_line: Some(after_line), - }, - GroupDiffsLine { - before_line: Some(before_line), - after_line: Some(after_line), - diffs: line.diffs, - }, - ); - } - } - } - - let mut diffs_by_line_with_before_and_shared = Vec::new(); - - // Print before lines, including those that are shared - for before_line in IntoIter::new(OneIndexed::MIN..=before_line_count) { - let line = inserted_lines.get(&LineKey::before(before_line)); - - if let Some(line) = line { - diffs_by_line_with_before_and_shared.push(line); - } - - // If we have a shared line then add it - if let Some(after_line) = before_line_to_after.get(&before_line) { - let line = inserted_lines.get(&LineKey { - before_line: Some(before_line), - after_line: Some(*after_line), - }); - - if let Some(line) = line { - diffs_by_line_with_before_and_shared.push(line); - } - } - } - - // Calculate the parts of the diff we should show - let mut last_printed_after = 0; - - for line in diffs_by_line_with_before_and_shared { - if let Some(after_line) = line.after_line { - catch_up_after( - inserted_lines, - diffs_by_line, - shown_line_indexes, - last_printed_after, - after_line, - ); - - last_printed_after = after_line.get(); - } - - push_displayed_line(diffs_by_line, shown_line_indexes, line); - } - - catch_up_after( - inserted_lines, - diffs_by_line, - shown_line_indexes, - last_printed_after, - after_line_count, - ); -} - -fn push_displayed_line<'input, 'group>( - diffs_by_line: &mut Vec<&'group GroupDiffsLine<'input>>, - shown_line_indexes: &mut BTreeSet, - line: &'group GroupDiffsLine<'input>, -) { - let i = diffs_by_line.len(); - diffs_by_line.push(line); - - if 
line.before_line.is_none() || line.after_line.is_none() { - let first = i.saturating_sub(CODE_FRAME_CONTEXT_LINES.get()); - let last = i + CODE_FRAME_CONTEXT_LINES.get(); - shown_line_indexes.extend(first..=last); - } -} - -fn catch_up_after<'input, 'lines>( - inserted_lines: &'lines BTreeMap>, - diffs_by_line: &mut Vec<&'lines GroupDiffsLine<'input>>, - shown_line_indexes: &mut BTreeSet, - last_printed_after: usize, - after_line: OneIndexed, -) { - let iter = IntoIter::new(OneIndexed::from_zero_indexed(last_printed_after)..=after_line); - - for i in iter { - let key = LineKey::after(i); - if let Some(line) = inserted_lines.get(&key) { - push_displayed_line(diffs_by_line, shown_line_indexes, line); - } - } -} - -fn print_short_diff( - fmt: &mut fmt::Formatter<'_>, - key: &LineKey, - entry: &GroupDiffsLine<'_>, -) -> io::Result<()> { - let index = match (key.before_line, key.after_line) { - (None, Some(index)) | (Some(index), None) => index, - (None, None) | (Some(_), Some(_)) => unreachable!( - "the key of a modified line should have exactly one index in one of the two revisions" - ), - }; - - fmt.write_markup(markup! { - - {format_args!(" {} \u{2502} ", index.get())} - - })?; - - let mut at_line_start = true; - let last_index = entry.diffs.len().saturating_sub(1); - - for (i, (tag, text)) in entry.diffs.iter().enumerate() { - let is_changed = *tag != ChangeTag::Equal; - let options = PrintInvisiblesOptions { - ignore_leading_tabs: false, - ignore_lone_spaces: false, - ignore_trailing_carriage_return: is_changed, - at_line_start, - at_line_end: i == last_index, - }; - - let element = match tag { - ChangeTag::Equal => None, - ChangeTag::Delete => Some(MarkupElement::Error), - ChangeTag::Insert => Some(MarkupElement::Success), - }; - - let has_non_whitespace = if let Some(element) = element { - let mut slot = None; - let mut fmt = ElementWrapper::wrap(fmt, &mut slot, element); - print_invisibles(&mut fmt, text, options)? - } else { - print_invisibles(fmt, text, options)? - }; - - if has_non_whitespace { - at_line_start = false; - } - } - - fmt.write_str("\n")?; - - let no_length = calculate_print_width(index); - fmt.write_markup(markup! { - - {format_args!(" {: >1$} \u{2502} ", "", no_length.get())} - - })?; - - for (tag, text) in &entry.diffs { - let marker = match tag { - ChangeTag::Equal => markup! { " " }, - ChangeTag::Delete => markup! { "-" }, - ChangeTag::Insert => markup! 
{ "+" }, - }; - - for _ in 0..text_width(text) { - fmt.write_markup(marker)?; - } - } - - fmt.write_str("\n") -} - -fn print_full_diff( - fmt: &mut fmt::Formatter<'_>, - diffs_by_line: &[&'_ GroupDiffsLine<'_>], - shown_line_indexes: &BTreeSet, - before_line_count: OneIndexed, - after_line_count: OneIndexed, -) -> io::Result<()> { - // Calculate width of line no column - let before_no_length = calculate_print_width(before_line_count); - let after_no_length = calculate_print_width(after_line_count); - let line_no_length = before_no_length.get() + 1 + after_no_length.get(); - - // Skip displaying the gutter if the file only has a single line - let single_line = before_line_count == OneIndexed::MIN && after_line_count == OneIndexed::MIN; - - let mut displayed_lines = 0; - let mut truncated = false; - let mut last_displayed_line = None; - - // Print the actual frame - for (i, line) in diffs_by_line.iter().enumerate() { - if !shown_line_indexes.contains(&i) { - continue; - } - - displayed_lines += 1; - - if displayed_lines > MAX_PATCH_LINES { - truncated = true; - continue; - } - - let mut line_type = ChangeTag::Equal; - let mut marker = markup! { " " }; - - if line.before_line.is_none() { - marker = markup! { "+" }; - line_type = ChangeTag::Insert; - } - - if line.after_line.is_none() { - marker = markup! { "-" }; - line_type = ChangeTag::Delete; - } - - if let Some(last_displayed_line) = last_displayed_line { - if last_displayed_line + 1 != i { - fmt.write_markup(markup! { - " "{"\u{b7}".repeat(line_no_length)}" \u{2502} \n" - })?; - } - } - - last_displayed_line = Some(i); - - if single_line { - let line = FormatDiffLine { - is_equal: line_type == ChangeTag::Equal, - ops: &line.diffs, - }; - - match line_type { - ChangeTag::Equal => fmt.write_markup(markup! { - " "{line}"\n" - })?, - ChangeTag::Delete => fmt.write_markup(markup! { - {marker}" "{line}"\n" - })?, - ChangeTag::Insert => fmt.write_markup(markup! { - {marker}" "{line}"\n" - })?, - } - } else { - fmt.write_str(" ")?; - - if let Some(before_line) = line.before_line { - fmt.write_markup(markup! { - - {format_args!("{: >1$}", before_line.get(), before_no_length.get())} - - })?; - } else { - for _ in 0..before_no_length.get() { - fmt.write_str(" ")?; - } - } - - fmt.write_str(" ")?; - - if let Some(after_line) = line.after_line { - fmt.write_markup(markup! { - - {format_args!("{: >1$}", after_line.get(), after_no_length.get())} - - })?; - } else { - for _ in 0..after_no_length.get() { - fmt.write_str(" ")?; - } - } - - fmt.write_markup(markup! { - " \u{2502} "{marker}' ' - })?; - - let line = FormatDiffLine { - is_equal: line_type == ChangeTag::Equal, - ops: &line.diffs, - }; - - match line_type { - ChangeTag::Equal => fmt.write_markup(markup! { - {line}"\n" - })?, - ChangeTag::Delete => fmt.write_markup(markup! { - {line}"\n" - })?, - ChangeTag::Insert => fmt.write_markup(markup! { - {line}"\n" - })?, - } - } - } - - if truncated { - fmt.write_markup(markup! 
{ - {displayed_lines.saturating_sub(MAX_PATCH_LINES)}" more lines truncated\n" - })?; - } - - fmt.write_str("\n") -} - -struct FormatDiffLine<'a> { - is_equal: bool, - ops: &'a [(ChangeTag, &'a str)], -} - -impl fmt::Display for FormatDiffLine<'_> { - fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> io::Result<()> { - let mut at_line_start = true; - let last_index = self.ops.len().saturating_sub(1); - - for (i, (tag, text)) in self.ops.iter().enumerate() { - let is_changed = *tag != ChangeTag::Equal; - let options = PrintInvisiblesOptions { - ignore_leading_tabs: self.is_equal, - ignore_lone_spaces: self.is_equal, - ignore_trailing_carriage_return: is_changed, - at_line_start, - at_line_end: i == last_index, - }; - - let has_non_whitespace = if is_changed { - let mut slot = None; - let mut fmt = ElementWrapper::wrap(fmt, &mut slot, MarkupElement::Emphasis); - print_invisibles(&mut fmt, text, options)? - } else { - print_invisibles(fmt, text, options)? - }; - - if has_non_whitespace { - at_line_start = false; - } - } - - Ok(()) - } -} - -struct ElementWrapper<'a, W: ?Sized>(&'a mut W, MarkupElement<'static>); - -impl<'write> ElementWrapper<'write, dyn fmt::Write + 'write> { - fn wrap<'slot, 'fmt: 'write + 'slot>( - fmt: &'fmt mut fmt::Formatter<'_>, - slot: &'slot mut Option, - element: MarkupElement<'static>, - ) -> fmt::Formatter<'slot> { - fmt.wrap_writer(|writer| slot.get_or_insert(Self(writer, element))) - } -} - -impl fmt::Write for ElementWrapper<'_, W> { - fn write_str(&mut self, elements: &fmt::MarkupElements<'_>, content: &str) -> io::Result<()> { - let elements = fmt::MarkupElements::Node(elements, slice::from_ref(&self.1)); - self.0.write_str(&elements, content) - } - - fn write_fmt( - &mut self, - elements: &fmt::MarkupElements<'_>, - content: std::fmt::Arguments<'_>, - ) -> io::Result<()> { - let elements = fmt::MarkupElements::Node(elements, slice::from_ref(&self.1)); - self.0.write_fmt(&elements, content) - } -} - -#[cfg(test)] -mod tests { - use super::print_diff; - use pgt_console::{MarkupBuf, fmt, markup}; - use pgt_text_edit::TextEdit; - use termcolor::Buffer; - - fn assert_eq_markup(actual: &MarkupBuf, expected: &MarkupBuf) { - if actual != expected { - let mut buffer = Buffer::ansi(); - let mut writer = fmt::Termcolor(&mut buffer); - let mut output = fmt::Formatter::new(&mut writer); - - output - .write_markup(markup! { - "assertion failed: (actual == expected)\n" - "actual:\n" - {actual}"\n" - {format_args!("{actual:#?}")}"\n" - "expected:\n" - {expected}"\n" - {format_args!("{expected:#?}")}"\n" - }) - .unwrap(); - - let buffer = buffer.into_inner(); - let buffer = String::from_utf8(buffer).unwrap(); - panic!("{buffer}"); - } - } - - #[test] - fn test_inline() { - let diff = TextEdit::from_unicode_words("before", "after"); - - let mut output = MarkupBuf::default(); - print_diff(&mut fmt::Formatter::new(&mut output), &diff).unwrap(); - - let expected = markup! { - "-"" ""before""\n" - "+"" ""after""\n" - "\n" - } - .to_owned(); - - assert_eq_markup(&output, &expected); - } - - #[test] - fn test_single_line() { - let diff = TextEdit::from_unicode_words("start before end\n", "start after end \n"); - - let mut output = MarkupBuf::default(); - print_diff(&mut fmt::Formatter::new(&mut output), &diff).unwrap(); - - let expected = markup! 
{ - " ""1"" "" │ ""-"" ""start""·""before""·""end""\n" - " ""1 │ ""+"" ""start""·""after""·""end""·""\n" - " ""2"" ""2 │ "" \n" - "\n" - } - .to_owned(); - - assert_eq_markup(&output, &expected); - } - - #[test] - fn test_ellipsis() { - const SOURCE_LEFT: &str = "Lorem -ipsum -dolor -sit -amet, -function -name( - args -) {} -consectetur -adipiscing -elit, -sed -do -eiusmod - -incididunt -function -name( - args -) {}"; - - const SOURCE_RIGHT: &str = "Lorem -ipsum -dolor -sit -amet, -function name(args) { -} -consectetur -adipiscing -elit, -sed -do -eiusmod - -incididunt -function name(args) { -}"; - - let diff = TextEdit::from_unicode_words(SOURCE_LEFT, SOURCE_RIGHT); - - let mut output = MarkupBuf::default(); - print_diff(&mut fmt::Formatter::new(&mut output), &diff).unwrap(); - - let expected = markup! { - " "" 4"" "" 4 │ "" sit\n" - " "" 5"" "" 5 │ "" amet,\n" - " "" 6"" "" │ ""-"" ""function""\n" - " "" 7"" "" │ ""-"" ""name(""\n" - " "" 8"" "" │ ""-"" ""····""args""\n" - " "" 9"" "" │ ""-"" "")""·""{}""\n" - " "" 6 │ ""+"" ""function""·""name(args)""·""{""\n" - " "" 7 │ ""+"" ""}""\n" - " ""10"" "" 8 │ "" consectetur\n" - " ""11"" "" 9 │ "" adipiscing\n" - " ····· │ \n" - " ""16"" ""14 │ "" \n" - " ""17"" ""15 │ "" incididunt\n" - " ""18"" "" │ ""-"" ""function""\n" - " ""19"" "" │ ""-"" ""name(""\n" - " ""20"" "" │ ""-"" ""····""args""\n" - " ""21"" "" │ ""-"" "")""·""{}""\n" - " ""16 │ ""+"" ""function""·""name(args)""·""{""\n" - " ""17 │ ""+"" ""}""\n" - "\n" - }.to_owned(); - - assert_eq_markup(&output, &expected); - } - - #[test] - fn remove_single_line() { - const SOURCE_LEFT: &str = "declare module \"test\" { - interface A { - - prop: string; - } -} -"; - - const SOURCE_RIGHT: &str = "declare module \"test\" { - interface A { - prop: string; - } -} -"; - - let diff = TextEdit::from_unicode_words(SOURCE_LEFT, SOURCE_RIGHT); - - let mut output = MarkupBuf::default(); - print_diff(&mut fmt::Formatter::new(&mut output), &diff).unwrap(); - - let expected = markup! { - " ""1"" ""1 │ "" declare module \"test\" {\n" - " ""2"" ""2 │ "" \tinterface A {\n" - " ""3"" "" │ ""-"" \n" - " ""4"" ""3 │ "" \t\tprop: string;\n" - " ""5"" ""4 │ "" \t}\n" - "\n" - } - .to_owned(); - - assert_eq_markup(&output, &expected); - } - - #[test] - fn remove_many_lines() { - const SOURCE_LEFT: &str = "declare module \"test\" { - interface A { - - - - prop: string; - } -} -"; - - const SOURCE_RIGHT: &str = "declare module \"test\" { - interface A { - prop: string; - } -} -"; - - let diff = TextEdit::from_unicode_words(SOURCE_LEFT, SOURCE_RIGHT); - - let mut output = MarkupBuf::default(); - print_diff(&mut fmt::Formatter::new(&mut output), &diff).unwrap(); - - let expected = markup! { - " ""1"" ""1 │ "" declare module \"test\" {\n" - " ""2"" ""2 │ "" \tinterface A {\n" - " ""3"" "" │ ""-"" \n" - " ""4"" "" │ ""-"" \n" - " ""5"" "" │ ""-"" \n" - " ""6"" ""3 │ "" \t\tprop: string;\n" - " ""7"" ""4 │ "" \t}\n" - "\n" - } - .to_owned(); - - assert_eq_markup(&output, &expected); - } - - #[test] - fn insert_single_line() { - const SOURCE_LEFT: &str = "declare module \"test\" { - interface A { - prop: string; - } -} -"; - - const SOURCE_RIGHT: &str = "declare module \"test\" { - interface A { - - prop: string; - } -} -"; - - let diff = TextEdit::from_unicode_words(SOURCE_LEFT, SOURCE_RIGHT); - - let mut output = MarkupBuf::default(); - print_diff(&mut fmt::Formatter::new(&mut output), &diff).unwrap(); - - let expected = markup! 
{ - " ""1"" ""1 │ "" declare module \"test\" {\n" - " ""2"" ""2 │ "" \tinterface A {\n" - " ""3 │ ""+"" \n" - " ""3"" ""4 │ "" \t\tprop: string;\n" - " ""4"" ""5 │ "" \t}\n" - "\n" - } - .to_owned(); - - assert_eq_markup(&output, &expected); - } - - #[test] - fn insert_many_lines() { - const SOURCE_LEFT: &str = "declare module \"test\" { - interface A { - prop: string; - } -} -"; - - const SOURCE_RIGHT: &str = "declare module \"test\" { - interface A { - - - - prop: string; - } -} -"; - - let diff = TextEdit::from_unicode_words(SOURCE_LEFT, SOURCE_RIGHT); - - let mut output = MarkupBuf::default(); - print_diff(&mut fmt::Formatter::new(&mut output), &diff).unwrap(); - - let expected = markup! { - " ""1"" ""1 │ "" declare module \"test\" {\n" - " ""2"" ""2 │ "" \tinterface A {\n" - " ""3 │ ""+"" \n" - " ""4 │ ""+"" \n" - " ""5 │ ""+"" \n" - " ""3"" ""6 │ "" \t\tprop: string;\n" - " ""4"" ""7 │ "" \t}\n" - "\n" - } - .to_owned(); - - assert_eq_markup(&output, &expected); - } - - #[test] - fn remove_empty_line() { - const SOURCE_LEFT: &str = "for (; ;) { -} - -console.log(\"test\"); -"; - - const SOURCE_RIGHT: &str = "for (;;) {} - -console.log(\"test\"); -"; - - let diff = TextEdit::from_unicode_words(SOURCE_LEFT, SOURCE_RIGHT); - - let mut output = MarkupBuf::default(); - print_diff(&mut fmt::Formatter::new(&mut output), &diff).unwrap(); - - let expected = markup! { - " ""1"" "" │ ""-"" ""for""·""(;""·"";)""·""{""\n" - " ""2"" "" │ ""-"" ""}""\n" - " ""1 │ ""+"" ""for""·""(;;)""·""{}""\n" - " ""3"" ""2 │ "" \n" - " ""4"" ""3 │ "" console.log(\"test\");\n" - "\n" - } - .to_owned(); - - assert_eq_markup(&output, &expected); - } -} diff --git a/crates/pgt_diagnostics/src/display/frame.rs b/crates/pgt_diagnostics/src/display/frame.rs deleted file mode 100644 index 56577e01..00000000 --- a/crates/pgt_diagnostics/src/display/frame.rs +++ /dev/null @@ -1,755 +0,0 @@ -use std::{ - borrow::Cow, - io, - iter::FusedIterator, - num::NonZeroUsize, - ops::{Bound, RangeBounds}, -}; - -use pgt_console::{fmt, markup}; -use pgt_text_size::{TextLen, TextRange, TextSize}; -use unicode_width::UnicodeWidthChar; - -use crate::{ - LineIndexBuf, Location, - location::{BorrowedSourceCode, LineIndex}, -}; - -/// A const Option::unwrap without nightly features: -/// https://github.com/rust-lang/rust/issues/67441 -const fn unwrap(option: Option) -> T { - match option { - Some(value) => value, - None => panic!("unwrapping None"), - } -} - -const ONE: NonZeroUsize = unwrap(NonZeroUsize::new(1)); -pub(super) const CODE_FRAME_CONTEXT_LINES: NonZeroUsize = unwrap(NonZeroUsize::new(2)); - -const MAX_CODE_FRAME_LINES: usize = 8; -const HALF_MAX_CODE_FRAME_LINES: usize = MAX_CODE_FRAME_LINES / 2; - -/// Prints a code frame advice -pub(super) fn print_frame(fmt: &mut fmt::Formatter<'_>, location: Location<'_>) -> io::Result<()> { - let source_span = location - .source_code - .and_then(|source_code| Some((source_code, location.span?))); - - let (source_code, span) = match source_span { - Some(source_span) => source_span, - None => return Ok(()), - }; - - let source_file = SourceFile::new(source_code); - - let start_index = span.start(); - let start_location = match source_file.location(start_index) { - Ok(location) => location, - Err(_) => return Ok(()), - }; - - let end_index = span.end(); - let end_location = match source_file.location(end_index) { - Ok(location) => location, - Err(_) => return Ok(()), - }; - - // Increase the amount of lines we should show for "context" - let context_start = start_location - .line_number - 
.saturating_sub(CODE_FRAME_CONTEXT_LINES.get()); - - let mut context_end = end_location - .line_number - .saturating_add(CODE_FRAME_CONTEXT_LINES.get()) - .min(OneIndexed::new(source_file.line_starts.len()).unwrap_or(OneIndexed::MIN)); - - // Remove trailing blank lines - for line_index in IntoIter::new(context_start..=context_end).rev() { - if line_index == end_location.line_number { - break; - } - - let line_start = match source_file.line_start(line_index.to_zero_indexed()) { - Ok(index) => index, - Err(_) => continue, - }; - let line_end = match source_file.line_start(line_index.to_zero_indexed() + 1) { - Ok(index) => index, - Err(_) => continue, - }; - - let line_range = TextRange::new(line_start, line_end); - let line_text = source_file.source[line_range].trim(); - if !line_text.is_empty() { - break; - } - - context_end = line_index; - } - - // If we have too many lines in our selection, then collapse them to an ellipsis - let range_len = (context_end.get() + 1).saturating_sub(context_start.get()); - let ellipsis_range = if range_len > MAX_CODE_FRAME_LINES + 2 { - let ellipsis_start = context_start.saturating_add(HALF_MAX_CODE_FRAME_LINES); - let ellipsis_end = context_end.saturating_sub(HALF_MAX_CODE_FRAME_LINES); - Some(ellipsis_start..=ellipsis_end) - } else { - None - }; - - // Calculate the maximum width of the line number - let max_gutter_len = calculate_print_width(context_end); - let mut printed_lines = false; - - for line_index in IntoIter::new(context_start..=context_end) { - if let Some(ellipsis_range) = &ellipsis_range { - if ellipsis_range.contains(&line_index) { - if *ellipsis_range.start() == line_index { - for _ in 0..max_gutter_len.get() { - fmt.write_str(" ")?; - } - - fmt.write_markup(markup! { " ...\n" })?; - printed_lines = true; - } - continue; - } - } - - let line_start = match source_file.line_start(line_index.to_zero_indexed()) { - Ok(index) => index, - Err(_) => continue, - }; - let line_end = match source_file.line_start(line_index.to_zero_indexed() + 1) { - Ok(index) => index, - Err(_) => continue, - }; - - let line_range = TextRange::new(line_start, line_end); - let line_text = source_file.source[line_range].trim_end_matches(['\r', '\n']); - - // Ensure that the frame doesn't start with whitespace - if !printed_lines && line_index != start_location.line_number && line_text.trim().is_empty() - { - continue; - } - - printed_lines = true; - - // If this is within the highlighted line range - let should_highlight = - line_index >= start_location.line_number && line_index <= end_location.line_number; - - let padding_width = max_gutter_len - .get() - .saturating_sub(calculate_print_width(line_index).get()); - - for _ in 0..padding_width { - fmt.write_str(" ")?; - } - - if should_highlight { - fmt.write_markup(markup! { - '>'' ' - })?; - } else { - fmt.write_str(" ")?; - } - - fmt.write_markup(markup! 
{ - {format_args!("{line_index} \u{2502} ")} - })?; - - // Show invisible characters - print_invisibles( - fmt, - line_text, - PrintInvisiblesOptions { - ignore_trailing_carriage_return: true, - ignore_leading_tabs: true, - ignore_lone_spaces: true, - at_line_start: true, - at_line_end: true, - }, - )?; - - fmt.write_str("\n")?; - - if should_highlight { - let is_first_line = line_index == start_location.line_number; - let is_last_line = line_index == end_location.line_number; - - let start_index_relative_to_line = - start_index.max(line_range.start()) - line_range.start(); - let end_index_relative_to_line = end_index.min(line_range.end()) - line_range.start(); - - let marker = if is_first_line && is_last_line { - // Only line in the selection - Some(TextRange::new( - start_index_relative_to_line, - end_index_relative_to_line, - )) - } else if is_first_line { - // First line in selection - Some(TextRange::new( - start_index_relative_to_line, - line_text.text_len(), - )) - } else if is_last_line { - // Last line in selection - let start_index = line_text - .text_len() - .checked_sub(line_text.trim_start().text_len()) - // SAFETY: The length of `line_text.trim_start()` should - // never be larger than `line_text` itself - .expect("integer overflow"); - Some(TextRange::new(start_index, end_index_relative_to_line)) - } else { - None - }; - - if let Some(marker) = marker { - for _ in 0..max_gutter_len.get() { - fmt.write_str(" ")?; - } - - fmt.write_markup(markup! { - " \u{2502} " - })?; - - // Align the start of the marker with the line above by a - // number of space characters equal to the unicode print width - // of the leading part of the line (before the start of the - // marker), with a special exception for tab characters that - // still get printed as tabs to respect the user-defined tab - // display width - let leading_range = TextRange::new(TextSize::from(0), marker.start()); - for c in line_text[leading_range].chars() { - match c { - '\t' => fmt.write_str("\t")?, - _ => { - for _ in 0..char_width(c) { - fmt.write_str(" ")?; - } - } - } - } - - let marker_width = text_width(&line_text[marker]); - for _ in 0..marker_width { - fmt.write_markup(markup! 
{ - '^' - })?; - } - - fmt.write_str("\n")?; - } - } - } - - fmt.write_str("\n") -} - -pub(super) fn print_highlighted_frame( - fmt: &mut fmt::Formatter<'_>, - location: Location<'_>, -) -> io::Result<()> { - let Some(span) = location.span else { - return Ok(()); - }; - let Some(source_code) = location.source_code else { - return Ok(()); - }; - - // TODO: instead of calculating lines for every match, - // check if the Grit engine is able to pull them out - let source = SourceFile::new(source_code); - - let start = source.location(span.start())?; - let end = source.location(span.end())?; - - let match_line_start = start.line_number; - let match_line_end = end.line_number.saturating_add(1); - - for line_index in IntoIter::new(match_line_start..match_line_end) { - let current_range = source.line_range(line_index.to_zero_indexed()); - let current_range = match current_range { - Ok(v) => v, - Err(_) => continue, - }; - - let current_text = source_code.text[current_range].trim_end_matches(['\r', '\n']); - - let is_first_line = line_index == start.line_number; - let is_last_line = line_index == end.line_number; - - let start_index_relative_to_line = - span.start().max(current_range.start()) - current_range.start(); - let end_index_relative_to_line = - span.end().min(current_range.end()) - current_range.start(); - - let marker = if is_first_line && is_last_line { - TextRange::new(start_index_relative_to_line, end_index_relative_to_line) - } else if is_last_line { - let start_index: u32 = current_text.text_len().into(); - - let safe_start_index = - start_index.saturating_sub(current_text.trim_start().text_len().into()); - - TextRange::new(TextSize::from(safe_start_index), end_index_relative_to_line) - } else { - TextRange::new(start_index_relative_to_line, current_text.text_len()) - }; - - fmt.write_markup(markup! { - {format_args!("{line_index} \u{2502} ")} - })?; - - let start_range = &current_text[0..marker.start().into()]; - let highlighted_range = &current_text[marker.start().into()..marker.end().into()]; - let end_range = &current_text[marker.end().into()..current_text.text_len().into()]; - - write!(fmt, "{start_range}")?; - fmt.write_markup(markup!
{ {highlighted_range} })?; - write!(fmt, "{end_range}")?; - - writeln!(fmt)?; - } - - Ok(()) -} - -/// Calculate the length of the string representation of `value` -pub(super) fn calculate_print_width(mut value: OneIndexed) -> NonZeroUsize { - // SAFETY: Constant is being initialized with a non-zero value - const TEN: OneIndexed = unwrap(OneIndexed::new(10)); - - let mut width = ONE; - - while value >= TEN { - value = OneIndexed::new(value.get() / 10).unwrap_or(OneIndexed::MIN); - width = width.checked_add(1).unwrap(); - } - - width -} - -/// Compute the unicode display width of a string, with the width of tab -/// characters set to [TAB_WIDTH] and the width of control characters set to 0 -pub(super) fn text_width(text: &str) -> usize { - text.chars().map(char_width).sum() -} - -/// We need to set a value here since we have no way of knowing what the user's -/// preferred tab display width is, so this is set to `2` to match how tab -/// characters are printed by [print_invisibles] -const TAB_WIDTH: usize = 2; - -/// Some esoteric space characters don't return a width using `char.width()`, so -/// we need to assume a fixed length for them -const ESOTERIC_SPACE_WIDTH: usize = 1; - -/// Return the width of characters, treating whitespace characters in the way -/// we need to properly display it -pub(super) fn char_width(char: char) -> usize { - match char { - '\t' => TAB_WIDTH, - '\u{c}' => ESOTERIC_SPACE_WIDTH, - '\u{b}' => ESOTERIC_SPACE_WIDTH, - '\u{85}' => ESOTERIC_SPACE_WIDTH, - '\u{feff}' => ESOTERIC_SPACE_WIDTH, - '\u{180e}' => ESOTERIC_SPACE_WIDTH, - '\u{200b}' => ESOTERIC_SPACE_WIDTH, - '\u{3000}' => ESOTERIC_SPACE_WIDTH, - _ => char.width().unwrap_or(0), - } -} - -pub(super) struct PrintInvisiblesOptions { - /// Do not print tab characters at the start of the string - pub(super) ignore_leading_tabs: bool, - /// If this is set to true, space characters will only be substituted when - /// at least two of them are found in a row - pub(super) ignore_lone_spaces: bool, - /// Do not print `'\r'` characters if they're followed by `'\n'` - pub(super) ignore_trailing_carriage_return: bool, - // Set to `true` to show invisible characters at the start of the string - pub(super) at_line_start: bool, - // Set to `true` to show invisible characters at the end of the string - pub(super) at_line_end: bool, -} - -/// Print `input` to `fmt` with invisible characters replaced with an -/// appropriate visual representation. 
Return `true` if any non-whitespace -/// character was printed -pub(super) fn print_invisibles( - fmt: &mut fmt::Formatter<'_>, - input: &str, - options: PrintInvisiblesOptions, -) -> io::Result<bool> { - let mut had_non_whitespace = false; - - // Get the first trailing whitespace character in the string - let trailing_whitespace_index = input - .bytes() - .enumerate() - .rev() - .find(|(_, byte)| !byte.is_ascii_whitespace()) - .map_or(input.len(), |(index, _)| index); - - let mut iter = input.char_indices().peekable(); - let mut prev_char_was_whitespace = false; - - while let Some((i, char)) = iter.next() { - let mut show_invisible = true; - - // Only highlight spaces when surrounded by other spaces - if char == ' ' && options.ignore_lone_spaces { - show_invisible = false; - - let next_char_is_whitespace = iter - .peek() - .is_some_and(|(_, char)| char.is_ascii_whitespace()); - - if prev_char_was_whitespace || next_char_is_whitespace { - show_invisible = true; - } - } - - prev_char_was_whitespace = char.is_ascii_whitespace(); - - // Don't show leading tabs - if options.at_line_start - && !had_non_whitespace - && char == '\t' - && options.ignore_leading_tabs - { - show_invisible = false; - } - - // Always show if at the end of line - if options.at_line_end && i >= trailing_whitespace_index { - show_invisible = true; - } - - // If we are a carriage return next to a \n then don't show the character as visible - if options.ignore_trailing_carriage_return && char == '\r' { - let next_char_is_line_feed = iter.peek().is_some_and(|(_, char)| *char == '\n'); - if next_char_is_line_feed { - continue; - } - } - - if !show_invisible { - if !char.is_ascii_whitespace() { - had_non_whitespace = true; - } - - write!(fmt, "{char}")?; - continue; - } - - if let Some(visible) = show_invisible_char(char) { - fmt.write_markup(markup! { {visible} })?; - continue; - } - - if (char.is_whitespace() && !char.is_ascii_whitespace()) || char.is_control() { - let code = u32::from(char); - fmt.write_markup(markup! { "U+"{format_args!("{code:x}")} })?; - continue; - } - - write!(fmt, "{char}")?; - } - - Ok(had_non_whitespace) -} - -fn show_invisible_char(char: char) -> Option<&'static str> { - match char { - ' ' => Some("\u{b7}"), // Middle Dot - '\r' => Some("\u{240d}"), // Carriage Return Symbol - '\n' => Some("\u{23ce}"), // Return Symbol - '\t' => Some("\u{2192} "), // Rightwards Arrow - '\0' => Some("\u{2400}"), // Null Symbol - '\x0b' => Some("\u{240b}"), // Vertical Tabulation Symbol - '\x08' => Some("\u{232b}"), // Backspace Symbol - '\x0c' => Some("\u{21a1}"), // Downwards Two Headed Arrow - '\u{85}' => Some("\u{2420}"), // Space Symbol - '\u{a0}' => Some("\u{2420}"), // Space Symbol - '\u{1680}' => Some("\u{2420}"), // Space Symbol - '\u{2000}' => Some("\u{2420}"), // Space Symbol - '\u{2001}' => Some("\u{2420}"), // Space Symbol - '\u{2002}' => Some("\u{2420}"), // Space Symbol - '\u{2003}' => Some("\u{2420}"), // Space Symbol - '\u{2004}' => Some("\u{2420}"), // Space Symbol - '\u{2005}' => Some("\u{2420}"), // Space Symbol - '\u{2006}' => Some("\u{2420}"), // Space Symbol - '\u{2007}' => Some("\u{2420}"), // Space Symbol - '\u{2008}' => Some("\u{2420}"), // Space Symbol - '\u{2009}' => Some("\u{2420}"), // Space Symbol - '\u{200a}' => Some("\u{2420}"), // Space Symbol - '\u{202f}' => Some("\u{2420}"), // Space Symbol - '\u{205f}' => Some("\u{2420}"), // Space Symbol - '\u{3000}' => Some("\u{2420}"), // Space Symbol - _ => None, - } -} -
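The substitution and width rules above are deterministic, so a small test sketch can pin them down; this mirrors the crate's own test style and only uses the private helpers defined in this file:

    #[cfg(test)]
    mod invisibles_sketch {
        use super::{char_width, show_invisible_char, text_width};

        #[test]
        fn substitution_and_width() {
            // A tab renders as a rightwards arrow plus a pad space and counts
            // as TAB_WIDTH (2) columns.
            assert_eq!(show_invisible_char('\t'), Some("\u{2192} "));
            assert_eq!(char_width('\t'), 2);

            // A regular space becomes a middle dot when shown.
            assert_eq!(show_invisible_char(' '), Some("\u{b7}"));

            // "a\tb" therefore occupies 1 + 2 + 1 = 4 columns in a frame.
            assert_eq!(text_width("a\tb"), 4);
        }
    }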
-/// A user-facing location in a source file. -#[derive(Debug, Copy, Clone, PartialEq, Eq)] -pub struct SourceLocation { - /// The user-facing line number. - pub line_number: OneIndexed, - /// The user-facing column number. - pub column_number: OneIndexed, -} - -/// Representation of a single source file holding additional information for -/// efficiently rendering code frames -#[derive(Clone)] -pub struct SourceFile<'diagnostic> { - /// The source code of the file. - source: &'diagnostic str, - /// The starting byte indices in the source code. - line_starts: Cow<'diagnostic, LineIndex>, -} - -impl<'diagnostic> SourceFile<'diagnostic> { - /// Create a new [SourceFile] from a slice of text - pub fn new(source_code: BorrowedSourceCode<'diagnostic>) -> Self { - // Either re-use the existing line index provided by the diagnostic or create one - Self { - source: source_code.text, - line_starts: source_code.line_starts.map_or_else( - || Cow::Owned(LineIndexBuf::from_source_text(source_code.text)), - Cow::Borrowed, - ), - } - } - - /// Return the starting byte index of the line with the specified line index. - /// Convenience method that already generates errors if necessary. - fn line_start(&self, line_index: usize) -> io::Result<TextSize> { - use std::cmp::Ordering; - - match line_index.cmp(&self.line_starts.len()) { - Ordering::Less => Ok(self - .line_starts - .get(line_index) - .copied() - .expect("failed despite previous check")), - Ordering::Equal => Ok(self.source.text_len()), - Ordering::Greater => Err(io::Error::new( - io::ErrorKind::InvalidInput, - "overflow error", - )), - } - } - - fn line_index(&self, byte_index: TextSize) -> usize { - self.line_starts - .binary_search(&byte_index) - .unwrap_or_else(|next_line| next_line - 1) - } - - fn line_range(&self, line_index: usize) -> io::Result<TextRange> { - let line_start = self.line_start(line_index)?; - let next_line_start = self.line_start(line_index + 1)?; - - Ok(TextRange::new(line_start, next_line_start)) - } - - fn line_number(&self, line_index: usize) -> OneIndexed { - // SAFETY: Adding `1` to the value of `line_index` ensures it's non-zero - OneIndexed::from_zero_indexed(line_index) - } - - fn column_number(&self, line_index: usize, byte_index: TextSize) -> io::Result<OneIndexed> { - let source = self.source; - let line_range = self.line_range(line_index)?; - let column_index = column_index(source, line_range, byte_index); - - // SAFETY: Adding `1` to the value of `column_index` ensures it's non-zero - Ok(OneIndexed::from_zero_indexed(column_index)) - } - - /// Get a source location from a byte index into the text of this file - pub fn location(&self, byte_index: TextSize) -> io::Result<SourceLocation> { - let line_index = self.line_index(byte_index); - - Ok(SourceLocation { - line_number: self.line_number(line_index), - column_number: self.column_number(line_index, byte_index)?, - }) - } -} -
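A sketch of the coordinate mapping: the test below assumes the `SourceCode { text, line_starts }` field shape from the `location` module (only its type aliases are visible here), so treat it as illustrative rather than exact:

    #[cfg(test)]
    mod source_file_sketch {
        use super::SourceFile;
        use crate::location::{BorrowedSourceCode, SourceCode};
        use pgt_text_size::TextSize;

        #[test]
        fn byte_offset_to_line_and_column() {
            // No precomputed index is supplied, so SourceFile::new builds one.
            let source_code: BorrowedSourceCode<'_> = SourceCode {
                text: "SELECT 1;\nSELECT 2;\n",
                line_starts: None,
            };
            let source = SourceFile::new(source_code);

            // Offset 10 is the `S` of the second statement: line 2, column 1.
            let location = source.location(TextSize::from(10)).unwrap();
            assert_eq!(location.line_number.get(), 2);
            assert_eq!(location.column_number.get(), 1);
        }
    }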
-/// The column index at the given byte index in the source file. -/// This is the number of characters to the given byte index. -/// -/// If the byte index is smaller than the start of the line, then `0` is returned. -/// If the byte index is past the end of the line, the column index of the last -/// character `+ 1` is returned. -fn column_index(source: &str, line_range: TextRange, byte_index: TextSize) -> usize { - let end_index = std::cmp::min( - byte_index, - std::cmp::min(line_range.end(), source.text_len()), - ); - - (usize::from(line_range.start())..usize::from(end_index)) - .filter(|byte_index| source.is_char_boundary(byte_index + 1)) - .count() -} - -/// Type-safe wrapper for a value whose logical range starts at `1`, for -/// instance the line or column numbers in a file -/// -/// Internally this is represented as a [NonZeroUsize]; this enables some -/// memory optimizations -#[repr(transparent)] -#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] -pub struct OneIndexed(NonZeroUsize); - -impl OneIndexed { - // SAFETY: These constants are being initialized with non-zero values - /// The smallest value that can be represented by this integer type. - pub const MIN: Self = unwrap(Self::new(1)); - /// The largest value that can be represented by this integer type - pub const MAX: Self = unwrap(Self::new(usize::MAX)); - - /// Creates a non-zero if the given value is not zero. - pub const fn new(value: usize) -> Option<Self> { - match NonZeroUsize::new(value) { - Some(value) => Some(Self(value)), - None => None, - } - } - - /// Construct a new [OneIndexed] from a zero-indexed value - pub const fn from_zero_indexed(value: usize) -> Self { - Self(ONE.saturating_add(value)) - } - - /// Returns the value as a primitive type. - pub const fn get(self) -> usize { - self.0.get() - } - - /// Return the zero-indexed primitive value for this [OneIndexed] - pub const fn to_zero_indexed(self) -> usize { - self.0.get() - 1 - } - - /// Saturating integer addition. Computes `self + rhs`, saturating at - /// the numeric bounds instead of overflowing. - pub const fn saturating_add(self, rhs: usize) -> Self { - match NonZeroUsize::new(self.0.get().saturating_add(rhs)) { - Some(value) => Self(value), - None => Self::MAX, - } - } - - /// Saturating integer subtraction. Computes `self - rhs`, saturating - /// at the numeric bounds instead of overflowing.
- pub const fn saturating_sub(self, rhs: usize) -> Self { - match NonZeroUsize::new(self.0.get().saturating_sub(rhs)) { - Some(value) => Self(value), - None => Self::MIN, - } - } -} - -impl fmt::Display for OneIndexed { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> io::Result<()> { - self.0.get().fmt(f) - } -} - -impl std::fmt::Display for OneIndexed { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - self.0.get().fmt(f) - } -} - -/// Adapter type implementing [Iterator] for ranges of [OneIndexed], -/// since [std::iter::Step] is unstable -pub struct IntoIter(std::ops::Range<usize>); - -impl IntoIter { - /// Construct a new iterator over a range of [OneIndexed] of any kind - /// (`..`, `a..`, `..b`, `..=c`, `d..e`, or `f..=g`) - pub fn new<R: RangeBounds<OneIndexed>>(range: R) -> Self { - let start = match range.start_bound() { - Bound::Included(value) => value.get(), - Bound::Excluded(value) => value.get() + 1, - Bound::Unbounded => 1, - }; - - let end = match range.end_bound() { - Bound::Included(value) => value.get() + 1, - Bound::Excluded(value) => value.get(), - Bound::Unbounded => usize::MAX, - }; - - Self(start..end) - } -} - -impl Iterator for IntoIter { - type Item = OneIndexed; - - fn next(&mut self) -> Option<Self::Item> { - self.0.next().map(|index| OneIndexed::new(index).unwrap()) - } - - fn size_hint(&self) -> (usize, Option<usize>) { - self.0.size_hint() - } -} - -impl DoubleEndedIterator for IntoIter { - fn next_back(&mut self) -> Option<Self::Item> { - self.0 - .next_back() - .map(|index| OneIndexed::new(index).unwrap()) - } -} - -impl FusedIterator for IntoIter {} - -#[cfg(test)] -mod tests { - use std::num::NonZeroUsize; - - use super::{OneIndexed, calculate_print_width}; - - #[test] - fn print_width() { - let one = NonZeroUsize::new(1).unwrap(); - let two = NonZeroUsize::new(2).unwrap(); - let three = NonZeroUsize::new(3).unwrap(); - let four = NonZeroUsize::new(4).unwrap(); - - assert_eq!(calculate_print_width(OneIndexed::new(1).unwrap()), one); - assert_eq!(calculate_print_width(OneIndexed::new(9).unwrap()), one); - - assert_eq!(calculate_print_width(OneIndexed::new(10).unwrap()), two); - assert_eq!(calculate_print_width(OneIndexed::new(11).unwrap()), two); - assert_eq!(calculate_print_width(OneIndexed::new(19).unwrap()), two); - assert_eq!(calculate_print_width(OneIndexed::new(20).unwrap()), two); - assert_eq!(calculate_print_width(OneIndexed::new(21).unwrap()), two); - assert_eq!(calculate_print_width(OneIndexed::new(99).unwrap()), two); - - assert_eq!(calculate_print_width(OneIndexed::new(100).unwrap()), three); - assert_eq!(calculate_print_width(OneIndexed::new(101).unwrap()), three); - assert_eq!(calculate_print_width(OneIndexed::new(110).unwrap()), three); - assert_eq!(calculate_print_width(OneIndexed::new(199).unwrap()), three); - assert_eq!(calculate_print_width(OneIndexed::new(999).unwrap()), three); - - assert_eq!(calculate_print_width(OneIndexed::new(1000).unwrap()), four); - } -}
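A short sketch of the `OneIndexed` arithmetic and iteration the frame printer relies on (the values are illustrative):

    #[cfg(test)]
    mod one_indexed_sketch {
        use super::{IntoIter, OneIndexed};

        #[test]
        fn saturating_and_iteration() {
            // Subtraction saturates at 1 rather than producing an invalid 0.
            assert_eq!(OneIndexed::MIN.saturating_sub(5), OneIndexed::MIN);

            // IntoIter walks an inclusive range of one-indexed line numbers.
            let lines: Vec<usize> =
                IntoIter::new(OneIndexed::MIN..=OneIndexed::new(3).unwrap())
                    .map(OneIndexed::get)
                    .collect();
            assert_eq!(lines, vec![1, 2, 3]);
        }
    }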
diff --git a/crates/pgt_diagnostics/src/display/message.rs b/crates/pgt_diagnostics/src/display/message.rs deleted file mode 100644 index 3cf9be3f..00000000 --- a/crates/pgt_diagnostics/src/display/message.rs +++ /dev/null @@ -1,97 +0,0 @@ -use pgt_console::fmt::{Formatter, Termcolor}; -use pgt_console::{MarkupBuf, markup}; -use serde::{Deserialize, Serialize}; -use termcolor::NoColor; - -/// Convenient type that can be used when the message and the description match, and they need to be -/// displayed using different formatters -/// -/// ## Examples -/// -/// ``` -/// use pgt_diagnostics::{Diagnostic, MessageAndDescription}; -/// -/// #[derive(Debug, Diagnostic)] -/// struct TestDiagnostic { -/// #[message] -/// #[description] -/// message: MessageAndDescription -/// } -/// ``` -#[derive(Clone, Deserialize, Serialize, PartialEq)] -pub struct MessageAndDescription { - /// Shown when the medium supports custom markup - message: MarkupBuf, - /// Shown when the medium doesn't support markup - description: String, -} - -impl MessageAndDescription { - /// It sets a custom message. It updates only the message. - pub fn set_message(&mut self, new_message: MarkupBuf) { - self.message = new_message; - } - - /// It sets a custom description. It updates only the description - pub fn set_description(&mut self, new_description: String) { - self.description = new_description; - } -} - -impl From<String> for MessageAndDescription { - fn from(description: String) -> Self { - Self { - message: markup! { {description} }.to_owned(), - description, - } - } -} - -impl From<MarkupBuf> for MessageAndDescription { - fn from(message: MarkupBuf) -> Self { - let description = markup_to_string(&message); - Self { - message, - description, - } - } -} - -impl std::fmt::Display for MessageAndDescription { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - f.write_str(&self.description) - } -} - -impl std::fmt::Debug for MessageAndDescription { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - std::fmt::Display::fmt(self, f) - } -} - -impl pgt_console::fmt::Display for MessageAndDescription { - fn fmt(&self, fmt: &mut Formatter<'_>) -> std::io::Result<()> { - fmt.write_markup(markup! {{self.message}}) - } -} - -/// Utility function to transform a [MarkupBuf] into a [String] -pub fn markup_to_string(markup: &MarkupBuf) -> String { - let mut buffer = Vec::new(); - let mut write = Termcolor(NoColor::new(&mut buffer)); - let mut fmt = Formatter::new(&mut write); - fmt.write_markup(markup! { {markup} }) - .expect("to have written in the buffer"); - - String::from_utf8(buffer).expect("to have converted a buffer into a String") -} - -#[cfg(test)] -mod test { - use crate::MessageAndDescription; - - #[test] - fn message_size() { - assert_eq!(std::mem::size_of::<MessageAndDescription>(), 48); - } -}
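A minimal sketch of the two `From` conversions above; the markup element is illustrative, and both paths should yield the same plain-text description:

    #[cfg(test)]
    mod conversion_sketch {
        use crate::MessageAndDescription;
        use pgt_console::markup;

        #[test]
        fn from_string_and_markup() {
            // A plain String fills the markup message and description alike.
            let plain = MessageAndDescription::from("column is unused".to_string());
            assert_eq!(plain.to_string(), "column is unused");

            // A MarkupBuf keeps its styling; the description is derived by
            // rendering the markup without colors.
            let styled = MessageAndDescription::from(
                markup! { <Emphasis>"column is unused"</Emphasis> }.to_owned(),
            );
            assert_eq!(styled.to_string(), "column is unused");
        }
    }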
diff --git a/crates/pgt_diagnostics/src/display_github.rs b/crates/pgt_diagnostics/src/display_github.rs deleted file mode 100644 index b7d5f058..00000000 --- a/crates/pgt_diagnostics/src/display_github.rs +++ /dev/null @@ -1,129 +0,0 @@ -use crate::display::frame::SourceFile; -use crate::{Diagnostic, Resource, Severity, diagnostic::internal::AsDiagnostic}; -use pgt_console::{MarkupBuf, fmt, markup}; -use pgt_text_size::{TextRange, TextSize}; -use std::io; - -/// Helper struct for printing a diagnostic as markup into any formatter -/// implementing [pgt_console::fmt::Write]. -pub struct PrintGitHubDiagnostic<'fmt, D: ?Sized>(pub &'fmt D); - -impl<D: AsDiagnostic + ?Sized> fmt::Display for PrintGitHubDiagnostic<'_, D> { - fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> io::Result<()> { - let diagnostic = self.0.as_diagnostic(); - let location = diagnostic.location(); - - // Docs: - // https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions - let span = location - .span - // We fall back to 1:1. This usually covers diagnostics that belong to the formatter or organize imports - .unwrap_or(TextRange::new(TextSize::from(1), TextSize::from(1))); - - let Some(source_code) = location.source_code else { - return Ok(()); - }; - - let file_name_unescaped = match &location.resource { - Some(Resource::File(file)) => file, - _ => return Ok(()), - }; - - let source = SourceFile::new(source_code); - let start = source.location(span.start())?; - let end = source.location(span.end())?; - - let command = match diagnostic.severity() { - Severity::Error | Severity::Fatal => "error", - Severity::Warning => "warning", - Severity::Hint | Severity::Information => "notice", - }; - - let message = { - let mut message = MarkupBuf::default(); - let mut fmt = fmt::Formatter::new(&mut message); - fmt.write_markup(markup!({ PrintDiagnosticMessage(diagnostic) }))?; - markup_to_string(&message) - }; - - let title = { - diagnostic - .category() - .map(|category| category.name()) - .unwrap_or_default() - }; - - fmt.write_str( - format! { - "::{} title={},file={},line={},endLine={},col={},endColumn={}::{}", - command, // constant, doesn't need escaping - title, // the diagnostic category - escape_property(file_name_unescaped), - start.line_number, // integer, doesn't need escaping - end.line_number, // integer, doesn't need escaping - start.column_number, // integer, doesn't need escaping - end.column_number, // integer, doesn't need escaping - message.map_or_else(String::new, escape_data), - } - .as_str(), - )?; - - Ok(()) - } -} - -struct PrintDiagnosticMessage<'fmt, D: ?Sized>(&'fmt D); - -impl<D: Diagnostic + ?Sized> fmt::Display for PrintDiagnosticMessage<'_, D> { - fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> io::Result<()> { - let Self(diagnostic) = *self; - diagnostic.message(fmt)?; - Ok(()) - } -} - -fn escape_data<S: AsRef<str>>(value: S) -> String { - let value = value.as_ref(); - - // Refs: - // - https://github.com/actions/runner/blob/a4c57f27477077e57545af79851551ff7f5632bd/src/Runner.Common/ActionCommand.cs#L18-L22 - // - https://github.com/actions/toolkit/blob/fe3e7ce9a7f995d29d1fcfd226a32bca407f9dc8/packages/core/src/command.ts#L80-L94 - let mut result = String::with_capacity(value.len()); - for c in value.chars() { - match c { - '\r' => result.push_str("%0D"), - '\n' => result.push_str("%0A"), - '%' => result.push_str("%25"), - _ => result.push(c), - } - } - result -} - -fn escape_property<S: AsRef<str>>(value: S) -> String { - let value = value.as_ref(); - - // Refs: - // - https://github.com/actions/runner/blob/a4c57f27477077e57545af79851551ff7f5632bd/src/Runner.Common/ActionCommand.cs#L25-L32 - // - https://github.com/actions/toolkit/blob/fe3e7ce9a7f995d29d1fcfd226a32bca407f9dc8/packages/core/src/command.ts#L80-L94 - let mut result = String::with_capacity(value.len()); - for c in value.chars() { - match c { - '\r' => result.push_str("%0D"), - '\n' => result.push_str("%0A"), - ':' => result.push_str("%3A"), - ',' => result.push_str("%2C"), - '%' => result.push_str("%25"), - _ => result.push(c), - } - } - result -} - -fn markup_to_string(markup: &MarkupBuf) -> Option<String> { - let mut buffer = Vec::new(); - let mut write = fmt::Termcolor(termcolor::NoColor::new(&mut buffer)); - let mut fmt = fmt::Formatter::new(&mut write); - fmt.write_markup(markup! { {markup} }).ok()?; - String::from_utf8(buffer).ok() -}
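For reference, the format string above emits one workflow command per diagnostic; with an invented category, path, and message it renders roughly as:

    ::error title=lint/safety/banDropColumn,file=migrations/0001_init.sql,line=3,endLine=3,col=1,endColumn=12::Dropping a column may break existing clients.

Line and column numbers are one-based, and the two escape helpers percent-encode `%` and newlines (plus `:` and `,` for property values) so the annotation survives GitHub's single-line command format.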
diff --git a/crates/pgt_diagnostics/src/error.rs b/crates/pgt_diagnostics/src/error.rs deleted file mode 100644 index 1c0cbe30..00000000 --- a/crates/pgt_diagnostics/src/error.rs +++ /dev/null @@ -1,173 +0,0 @@ -//! The `error` module contains the implementation of [Error], a dynamic -//! container struct for any type implementing [Diagnostic]. -//! -//! We reduce the size of `Error` by using `Box<Box<dyn Diagnostic>>` (a thin -//! pointer to a fat pointer) rather than `Box<dyn Diagnostic>` (a fat -//! pointer), in order to make returning a `Result` more efficient. -//! -//! When [`ThinBox`](https://doc.rust-lang.org/std/boxed/struct.ThinBox.html) -//! becomes available in stable Rust, we can switch to that. - -use std::ops::Deref; -use std::{ - fmt::{Debug, Formatter}, - io, -}; - -use pgt_console::fmt; - -use crate::{ - Category, Diagnostic, DiagnosticTags, Location, Severity, Visit, - diagnostic::internal::AsDiagnostic, -}; - -/// The `Error` struct wraps any type implementing [Diagnostic] into a single -/// dynamic type. -pub struct Error { - inner: Box<Box<dyn Diagnostic + Send + Sync + 'static>>, -} - -/// Implement the [Diagnostic] trait as inherent methods on the [Error] type. -impl Error { - /// Calls [Diagnostic::category] on the [Diagnostic] wrapped by this [Error]. - pub fn category(&self) -> Option<&'static Category> { - self.as_diagnostic().category() - } - - /// Calls [Diagnostic::severity] on the [Diagnostic] wrapped by this [Error]. - pub fn severity(&self) -> Severity { - self.as_diagnostic().severity() - } - - /// Calls [Diagnostic::description] on the [Diagnostic] wrapped by this [Error]. - pub fn description(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - self.as_diagnostic().description(fmt) - } - - /// Calls [Diagnostic::message] on the [Diagnostic] wrapped by this [Error]. - pub fn message(&self, fmt: &mut fmt::Formatter<'_>) -> io::Result<()> { - self.as_diagnostic().message(fmt) - } - - /// Calls [Diagnostic::advices] on the [Diagnostic] wrapped by this [Error]. - pub fn advices(&self, visitor: &mut dyn Visit) -> io::Result<()> { - self.as_diagnostic().advices(visitor) - } - - /// Calls [Diagnostic::verbose_advices] on the [Diagnostic] wrapped by this [Error]. - pub fn verbose_advices(&self, visitor: &mut dyn Visit) -> io::Result<()> { - self.as_diagnostic().verbose_advices(visitor) - } - - /// Calls [Diagnostic::location] on the [Diagnostic] wrapped by this [Error]. - pub fn location(&self) -> Location<'_> { - self.as_diagnostic().location() - } - - /// Calls [Diagnostic::tags] on the [Diagnostic] wrapped by this [Error]. - pub fn tags(&self) -> DiagnosticTags { - self.as_diagnostic().tags() - } - - /// Calls [Diagnostic::source] on the [Diagnostic] wrapped by this [Error]. - pub fn source(&self) -> Option<&dyn Diagnostic> { - self.as_diagnostic().source() - } -} -
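A sketch of the ergonomics this wrapper buys; the generic function is invented for illustration and relies only on the `From` impl that follows:

    // Any concrete diagnostic converts into the type-erased Error, after
    // which the inherent methods above forward to the wrapped value.
    fn report<D>(diag: D) -> Severity
    where
        D: Diagnostic + Send + Sync + 'static,
    {
        let error = Error::from(diag);
        error.severity()
    }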
-/// Implement [From] for all types implementing [Diagnostic], [Send], [Sync] -/// and outliving the `'static` lifetime. -impl<T> From<T> for Error -where - T: Diagnostic + Send + Sync + 'static, -{ - fn from(diag: T) -> Self { - Self { - inner: Box::new(Box::new(diag)), - } - } -} - -impl AsDiagnostic for Error { - type Diagnostic = dyn Diagnostic; - - fn as_diagnostic(&self) -> &Self::Diagnostic { - &**self.inner - } - - fn as_dyn(&self) -> &dyn Diagnostic { - self.as_diagnostic() - } -} - -impl AsRef<dyn Diagnostic + 'static> for Error { - fn as_ref(&self) -> &(dyn Diagnostic + 'static) { - self.as_diagnostic() - } -} - -impl Deref for Error { - type Target = dyn Diagnostic + 'static; - - fn deref(&self) -> &Self::Target { - self.as_diagnostic() - } -} - -// Defer the implementation of `Debug` and `Drop` to the wrapped type -impl Debug for Error { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - Debug::fmt(self.as_diagnostic(), f) - } -} - -/// Alias of [std::result::Result] with the `Err` type defaulting to [Error]. -pub type Result<V, E = Error> = std::result::Result<V, E>; - -#[cfg(test)] -mod tests { - use std::{ - mem::size_of, - sync::{ - Arc, - atomic::{AtomicBool, Ordering}, - }, - }; - - use crate::{Diagnostic, Error, Result}; - - #[derive(Debug)] - struct TestDiagnostic(Arc<AtomicBool>); - - impl Diagnostic for TestDiagnostic {} - - impl Drop for TestDiagnostic { - fn drop(&mut self) { - let was_dropped = self.0.swap(true, Ordering::Relaxed); - assert!(!was_dropped); - } - } - - #[test] - fn test_drop() { - let drop_flag = AtomicBool::new(false); - let drop_flag = Arc::new(drop_flag); - - let diag = TestDiagnostic(drop_flag.clone()); - - let error = Error::from(diag); - drop(error); - - assert!(drop_flag.load(Ordering::Relaxed)); - } - - #[test] - fn test_error_size() { - assert_eq!(size_of::<Error>(), size_of::<usize>()); - } - - #[test] - fn test_result_size() { - assert_eq!(size_of::<Result<()>>(), size_of::<Error>()); - } -} diff --git a/crates/pgt_diagnostics/src/lib.rs b/crates/pgt_diagnostics/src/lib.rs deleted file mode 100644 index b6aad22a..00000000 --- a/crates/pgt_diagnostics/src/lib.rs +++ /dev/null @@ -1,91 +0,0 @@ -#![deny(rust_2018_idioms)] - -use ::serde::{Deserialize, Serialize}; - -pub mod adapters; -pub mod advice; -pub mod context; -pub mod diagnostic; -pub mod display; -pub mod display_github; -pub mod error; -pub mod location; -pub mod panic; -pub mod serde; - -mod suggestion; - -pub use self::suggestion::{Applicability, CodeSuggestion}; -pub use termcolor; - -#[doc(hidden)] -// Convenience re-export for procedural macro -pub use pgt_console as console; - -// Re-export macros from utility crates -pub use pgt_diagnostics_categories::{Category, category, category_concat}; -pub use pgt_diagnostics_macros::Diagnostic; - -pub use crate::advice::{ - Advices, CodeFrameAdvice, CommandAdvice, DiffAdvice, LogAdvice, LogCategory, Visit, -}; -pub use crate::context::{Context, DiagnosticExt}; -pub use crate::diagnostic::{Diagnostic, DiagnosticTags, Severity}; -pub use crate::display::{ - Backtrace, MessageAndDescription, PrintDescription, PrintDiagnostic, set_bottom_frame, -}; -pub use crate::display_github::PrintGitHubDiagnostic; -pub use crate::error::{Error, Result}; -pub use crate::location::{LineIndex, LineIndexBuf, Location, Resource, SourceCode}; -use pgt_console::fmt::{Formatter, Termcolor}; -use pgt_console::markup; -use std::fmt::Write; -
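Downstream code normally consumes these re-exports directly; a hedged sketch of the intended import style, reusing the diagnostic shape from the `message.rs` doc example above together with the prelude defined just below:

    use pgt_diagnostics::prelude::*;
    use pgt_diagnostics::{Diagnostic, MessageAndDescription};

    #[derive(Debug, Diagnostic)]
    struct ExampleDiagnostic {
        #[message]
        #[description]
        message: MessageAndDescription,
    }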
-pub mod prelude { - //! Anonymously re-exports all the traits declared by this module; it is - //! intended to be imported as `use pgt_diagnostics::prelude::*;` to - //! automatically bring all these traits into the ambient context - - pub use crate::advice::{Advices as _, Visit as _}; - pub use crate::context::{Context as _, DiagnosticExt as _}; - pub use crate::diagnostic::Diagnostic as _; -} - -#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash, Serialize, Deserialize)] -pub enum DiagnosticTag { - Unnecessary, - Deprecated, - Both, -} - -impl DiagnosticTag { - pub fn is_unnecessary(&self) -> bool { - matches!(self, DiagnosticTag::Unnecessary | DiagnosticTag::Both) - } - - pub fn is_deprecated(&self) -> bool { - matches!(self, DiagnosticTag::Deprecated | DiagnosticTag::Both) - } -} - -/// Utility function for testing purposes. The function will print an [Error] -/// to a string, which is then returned by the function. -pub fn print_diagnostic_to_string(diagnostic: &Error) -> String { - let mut buffer = termcolor::Buffer::no_color(); - - Formatter::new(&mut Termcolor(&mut buffer)) - .write_markup(markup! { - {PrintDiagnostic::verbose(diagnostic)} - }) - .expect("failed to emit diagnostic"); - - let mut content = String::new(); - writeln!( - content, - "{}", - std::str::from_utf8(buffer.as_slice()).expect("non utf8 in error buffer") - ) - .unwrap(); - - content -} diff --git a/crates/pgt_diagnostics/src/location.rs b/crates/pgt_diagnostics/src/location.rs deleted file mode 100644 index cbd8e646..00000000 --- a/crates/pgt_diagnostics/src/location.rs +++ /dev/null @@ -1,395 +0,0 @@ -use pgt_text_size::{TextRange, TextSize}; -use serde::{Deserialize, Serialize}; -use std::fmt::Debug; -use std::ops::Range; -use std::{borrow::Borrow, ops::Deref}; - -/// Represents the location of a diagnostic in a resource. -#[derive(Debug, Default, Clone, Copy)] -pub struct Location<'a> { - /// The resource this diagnostic is associated with. - pub resource: Option<Resource<'a>>, - /// An optional range of text within the resource associated with the - /// diagnostic. - pub span: Option<TextRange>, - /// The optional source code of the resource. - pub source_code: Option<BorrowedSourceCode<'a>>, -} - -impl<'a> Location<'a> { - /// Creates a new instance of [LocationBuilder]. - pub fn builder() -> LocationBuilder<'a> { - LocationBuilder { - resource: None, - span: None, - source_code: None, - } - } -} - -/// The implementation of [PartialEq] for [Location] only compares the `path` -/// and `span` fields -impl PartialEq for Location<'_> { - fn eq(&self, other: &Self) -> bool { - self.resource == other.resource && self.span == other.span - } -} - -impl Eq for Location<'_> {} - -/// Represents the resource a diagnostic is associated with. -#[derive(Debug, Clone, Copy, Eq, PartialEq, Serialize, Deserialize)] -#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] -#[serde(rename_all = "camelCase")] -pub enum Resource