diff --git a/.claude/settings.local.json b/.claude/settings.local.json new file mode 100644 index 000000000..591b91192 --- /dev/null +++ b/.claude/settings.local.json @@ -0,0 +1,14 @@ +{ + "permissions": { + "allow": [ + "Bash(grep:*)", + "Bash(rg:*)", + "Bash(cargo test:*)", + "Bash(cargo run:*)", + "Bash(cargo check:*)", + "Bash(cargo fmt:*)", + "Bash(cargo doc:*)" + ], + "deny": [] + } +} \ No newline at end of file diff --git a/.github/workflows/deploy_docs.yml b/.github/workflows/deploy_docs.yml index aabd75cd0..c32bc2afd 100644 --- a/.github/workflows/deploy_docs.yml +++ b/.github/workflows/deploy_docs.yml @@ -1,9 +1,14 @@ name: Build and Deploy Documentation on: + push: + branches: + - main + # paths: + # - "docs/**" + # - "mkdocs.yml" release: - types: [released] - workflow_dispatch: + types: [published] permissions: contents: write @@ -13,17 +18,15 @@ permissions: jobs: build: runs-on: ubuntu-latest - steps: - - name: Checkout code - uses: actions/checkout@v4 + - uses: actions/checkout@v4 with: fetch-depth: 0 - name: Install uv uses: astral-sh/setup-uv@v5 with: - enable-cache: true + enable-cache: true - name: Set up Python run: uv python install @@ -31,7 +34,19 @@ jobs: - name: Install the project run: uv sync --all-extras --dev - - run: uv run mkdocs gh-deploy --force + - name: Configure Git + run: | + git config user.name 'github-actions[bot]' + git config user.email 'github-actions[bot]@users.noreply.github.com' + + - name: Build Docs Website + run: | + if [ "${{ github.event_name }}" = "release" ]; then + uv run mike deploy --push --update-aliases ${{ github.event.release.tag_name }} latest + uv run mike set-default --push latest + else + uv run mike deploy --push main + fi deploy: needs: build @@ -44,12 +59,16 @@ jobs: uses: actions/checkout@v4 with: ref: gh-pages + - name: Setup Pages uses: actions/configure-pages@v5 + - name: Upload artifact uses: actions/upload-pages-artifact@v3 with: path: '.' 
+ - name: Deploy to GitHub Pages id: deployment uses: actions/deploy-pages@v4 + diff --git a/.github/workflows/publish.reusable.yml b/.github/workflows/publish.reusable.yml index 31e625d7e..5be2e12f5 100644 --- a/.github/workflows/publish.reusable.yml +++ b/.github/workflows/publish.reusable.yml @@ -34,19 +34,6 @@ jobs: RELEASE_TAG: ${{ inputs.release-tag }} PRERELEASE: ${{ inputs.is-prerelease }} - - name: Verify NPM TOKEN exists - run: | - if [ -z "${{ secrets.NPM_TOKEN }}" ]; then - echo "Secret is not defined" - exit 1 - else - echo "Secret is defined" - fi - - - name: Print package.json - run: | - cat packages/@postgrestools/postgrestools/package.json - - name: Publish npm packages as nightly if: inputs.is-prerelease == 'true' run: | diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml index f79392b76..202183786 100644 --- a/.github/workflows/pull_request.yml +++ b/.github/workflows/pull_request.yml @@ -79,15 +79,6 @@ jobs: lint: name: Lint Project runs-on: ubuntu-latest - services: - postgres: - image: postgres:latest - env: - POSTGRES_USER: postgres - POSTGRES_PASSWORD: postgres - POSTGRES_DB: postgres - ports: - - 5432:5432 steps: - name: Checkout PR Branch uses: actions/checkout@v4 @@ -103,6 +94,24 @@ jobs: env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + # we need to use the same database as we do locally for sqlx prepare to output the same hashes + - name: Build and start PostgreSQL with plpgsql_check + run: | + docker build -t postgres-plpgsql-check:latest . 
+ docker run -d --name postgres \ + -e POSTGRES_USER=postgres \ + -e POSTGRES_PASSWORD=postgres \ + -e POSTGRES_DB=postgres \ + -p 5432:5432 \ + postgres-plpgsql-check:latest + # Wait for postgres to be ready + for _ in {1..30}; do + if docker exec postgres pg_isready -U postgres; then + break + fi + sleep 1 + done + - name: Setup sqlx-cli run: cargo install sqlx-cli @@ -154,10 +163,37 @@ jobs: env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - # running containers via `services` only works on linux - # https://github.com/actions/runner/issues/1866 - - name: Setup postgres + # For Linux, use custom Docker image with plpgsql_check + - name: Build and start PostgreSQL with plpgsql_check + if: runner.os == 'Linux' + run: | + docker build -t postgres-plpgsql-check:latest . + docker run -d --name postgres \ + -e POSTGRES_USER=postgres \ + -e POSTGRES_PASSWORD=postgres \ + -e POSTGRES_DB=postgres \ + -p 5432:5432 \ + postgres-plpgsql-check:latest + # Wait for postgres to be ready + for _ in {1..30}; do + if docker exec postgres pg_isready -U postgres; then + break + fi + sleep 1 + done + # For Windows, use the action since PostgreSQL Docker image doesn't support Windows containers + - name: Setup postgres (Windows) + if: runner.os == 'Windows' + id: postgres uses: ikalnytskyi/action-setup-postgres@v7 + - name: Print Roles + run: | + if [[ "$RUNNER_OS" == "Linux" ]]; then + docker exec postgres psql -U postgres -c "select rolname from pg_roles;" + else + psql ${{ steps.postgres.outputs.connection-uri }} -c "select rolname from pg_roles;" + fi + shell: bash - name: Run tests run: cargo test --workspace diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 934edba93..07dd6ecd4 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -122,7 +122,7 @@ jobs: fetch-depth: 0 - name: 📝 Create Changelog - uses: orhun/git-cliff-action@v3 + uses: orhun/git-cliff-action@v4 id: create_changelog with: config: cliff.toml @@ -152,7 +152,7 
@@ jobs: tag_name: ${{ steps.create_changelog.outputs.version }} files: | postgrestools_* - docs/schemas/latest/schema.json + docs/schema.json fail_on_unmatched_files: true draft: true diff --git a/.gitmodules b/.gitmodules index 4b56d7486..9b62ce889 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,7 +1,3 @@ -[submodule "libpg_query"] - path = libpg_query - url = https://github.com/pganalyze/libpg_query.git - branch = 17-latest [submodule "crates/tree_sitter_sql/tree-sitter-sql"] path = lib/tree_sitter_sql/tree-sitter-sql url = https://github.com/DerekStride/tree-sitter-sql diff --git a/.sqlx/query-277e47bf46f8331549f55c8a0ebae6f3075c4f754cd379b0555c205fff95a95c.json b/.sqlx/query-277e47bf46f8331549f55c8a0ebae6f3075c4f754cd379b0555c205fff95a95c.json new file mode 100644 index 000000000..db3f4a735 --- /dev/null +++ b/.sqlx/query-277e47bf46f8331549f55c8a0ebae6f3075c4f754cd379b0555c205fff95a95c.json @@ -0,0 +1,50 @@ +{ + "db_name": "PostgreSQL", + "query": "-- we need to join tables from the pg_catalog since \"TRUNCATE\" triggers are\n-- not available in the information_schema.trigger table.\nselect\n t.tgname as \"name!\",\n c.relname as \"table_name!\",\n p.proname as \"proc_name!\",\n proc_ns.nspname as \"proc_schema!\",\n table_ns.nspname as \"table_schema!\",\n t.tgtype as \"details_bitmask!\"\nfrom\n pg_catalog.pg_trigger t\nleft join pg_catalog.pg_proc p on t.tgfoid = p.oid\nleft join pg_catalog.pg_class c on t.tgrelid = c.oid\nleft join pg_catalog.pg_namespace table_ns on c.relnamespace = table_ns.oid\nleft join pg_catalog.pg_namespace proc_ns on p.pronamespace = proc_ns.oid\nwhere\n t.tgisinternal = false and\n t.tgconstraint = 0;\n", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "name!", + "type_info": "Name" + }, + { + "ordinal": 1, + "name": "table_name!", + "type_info": "Name" + }, + { + "ordinal": 2, + "name": "proc_name!", + "type_info": "Name" + }, + { + "ordinal": 3, + "name": "proc_schema!", + "type_info": "Name" + }, + { + "ordinal": 
4, + "name": "table_schema!", + "type_info": "Name" + }, + { + "ordinal": 5, + "name": "details_bitmask!", + "type_info": "Int2" + } + ], + "parameters": { + "Left": [] + }, + "nullable": [ + false, + true, + true, + true, + true, + false + ] + }, + "hash": "277e47bf46f8331549f55c8a0ebae6f3075c4f754cd379b0555c205fff95a95c" +} diff --git a/.sqlx/query-3ebf3d74eb9d0448d675882c7f8a23f1440c250590de976c5c46c5edf6746faf.json b/.sqlx/query-3ebf3d74eb9d0448d675882c7f8a23f1440c250590de976c5c46c5edf6746faf.json new file mode 100644 index 000000000..1b9220621 --- /dev/null +++ b/.sqlx/query-3ebf3d74eb9d0448d675882c7f8a23f1440c250590de976c5c46c5edf6746faf.json @@ -0,0 +1,44 @@ +{ + "db_name": "PostgreSQL", + "query": "SELECT\n e.name as \"name!\",\n n.nspname AS schema,\n e.default_version as \"default_version!\",\n x.extversion AS installed_version,\n e.comment\nFROM\n pg_available_extensions() e(name, default_version, comment)\n LEFT JOIN pg_extension x ON e.name = x.extname\n LEFT JOIN pg_namespace n ON x.extnamespace = n.oid\n", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "name!", + "type_info": "Name" + }, + { + "ordinal": 1, + "name": "schema", + "type_info": "Name" + }, + { + "ordinal": 2, + "name": "default_version!", + "type_info": "Text" + }, + { + "ordinal": 3, + "name": "installed_version", + "type_info": "Text" + }, + { + "ordinal": 4, + "name": "comment", + "type_info": "Text" + } + ], + "parameters": { + "Left": [] + }, + "nullable": [ + null, + true, + null, + true, + null + ] + }, + "hash": "3ebf3d74eb9d0448d675882c7f8a23f1440c250590de976c5c46c5edf6746faf" +} diff --git a/.sqlx/query-4ea19fee016f1daeafdc466647d117910b19f540f19393b76aa6434e9d1d8502.json b/.sqlx/query-4ea19fee016f1daeafdc466647d117910b19f540f19393b76aa6434e9d1d8502.json new file mode 100644 index 000000000..400f031d9 --- /dev/null +++ b/.sqlx/query-4ea19fee016f1daeafdc466647d117910b19f540f19393b76aa6434e9d1d8502.json @@ -0,0 +1,110 @@ +{ + "db_name": "PostgreSQL", + "query": 
"with functions as (\n select\n oid,\n proname,\n prosrc,\n prorettype,\n proretset,\n provolatile,\n prosecdef,\n prolang,\n pronamespace,\n proconfig,\n prokind,\n -- proargmodes is null when all arg modes are IN\n coalesce(\n p.proargmodes,\n array_fill(\n 'i' :: text,\n array [cardinality(coalesce(p.proallargtypes, p.proargtypes))]\n )\n ) as arg_modes,\n -- proargnames is null when all args are unnamed\n coalesce(\n p.proargnames,\n array_fill(\n '' :: text,\n array [cardinality(coalesce(p.proallargtypes, p.proargtypes))]\n )\n ) as arg_names,\n -- proallargtypes is null when all arg modes are IN\n coalesce(p.proallargtypes, string_to_array(proargtypes::text, ' ')::int[]) as arg_types,\n array_cat(\n array_fill(false, array [pronargs - pronargdefaults]),\n array_fill(true, array [pronargdefaults])\n ) as arg_has_defaults\n from\n pg_proc as p\n)\nselect\n f.oid :: int8 as \"id!\",\n n.nspname as \"schema!\",\n f.proname as \"name!\",\n l.lanname as \"language!\",\n f.prokind as \"kind!\",\n case\n when l.lanname = 'internal' then null\n else f.prosrc\n end as body,\n case\n when l.lanname = 'internal' then null\n else pg_get_functiondef(f.oid)\n end as definition,\n coalesce(f_args.args, '[]') as args,\n nullif(pg_get_function_arguments(f.oid), '') as argument_types,\n nullif(pg_get_function_identity_arguments(f.oid), '') as identity_argument_types,\n f.prorettype :: int8 as return_type_id,\n pg_get_function_result(f.oid) as return_type,\n nullif(rt.typrelid :: int8, 0) as return_type_relation_id,\n f.proretset as \"is_set_returning_function!\",\n case\n when f.provolatile = 'i' then 'IMMUTABLE'\n when f.provolatile = 's' then 'STABLE'\n when f.provolatile = 'v' then 'VOLATILE'\n end as behavior,\n f.prosecdef as \"security_definer!\"\nfrom\n functions f\n left join pg_namespace n on f.pronamespace = n.oid\n left join pg_language l on f.prolang = l.oid\n left join pg_type rt on rt.oid = f.prorettype\n left join (\n select\n oid,\n jsonb_object_agg(param, 
value) filter (\n where\n param is not null\n ) as config_params\n from\n (\n select\n oid,\n (string_to_array(unnest(proconfig), '=')) [1] as param,\n (string_to_array(unnest(proconfig), '=')) [2] as value\n from\n functions\n ) as t\n group by\n oid\n ) f_config on f_config.oid = f.oid\n left join (\n select\n oid,\n jsonb_agg(\n jsonb_build_object(\n 'mode',\n t2.mode,\n 'name',\n name,\n 'type_id',\n type_id,\n 'has_default',\n has_default\n )\n ) as args\n from\n (\n select\n oid,\n arg_modes[i] as mode,\n arg_names[i] as name,\n arg_types[i] :: int8 as type_id,\n arg_has_defaults[i] as has_default\n from\n functions,\n pg_catalog.generate_subscripts(arg_names, 1) as i\n ) as t1,\n lateral (\n select\n case\n when t1.mode = 'i' then 'in'\n when t1.mode = 'o' then 'out'\n when t1.mode = 'b' then 'inout'\n when t1.mode = 'v' then 'variadic'\n else 'table'\n end as mode\n ) as t2\n group by\n t1.oid\n ) f_args on f_args.oid = f.oid;", + "describe": { + "columns": [ + { + "ordinal": 0, + "name": "id!", + "type_info": "Int8" + }, + { + "ordinal": 1, + "name": "schema!", + "type_info": "Name" + }, + { + "ordinal": 2, + "name": "name!", + "type_info": "Name" + }, + { + "ordinal": 3, + "name": "language!", + "type_info": "Name" + }, + { + "ordinal": 4, + "name": "kind!", + "type_info": "Char" + }, + { + "ordinal": 5, + "name": "body", + "type_info": "Text" + }, + { + "ordinal": 6, + "name": "definition", + "type_info": "Text" + }, + { + "ordinal": 7, + "name": "args", + "type_info": "Jsonb" + }, + { + "ordinal": 8, + "name": "argument_types", + "type_info": "Text" + }, + { + "ordinal": 9, + "name": "identity_argument_types", + "type_info": "Text" + }, + { + "ordinal": 10, + "name": "return_type_id", + "type_info": "Int8" + }, + { + "ordinal": 11, + "name": "return_type", + "type_info": "Text" + }, + { + "ordinal": 12, + "name": "return_type_relation_id", + "type_info": "Int8" + }, + { + "ordinal": 13, + "name": "is_set_returning_function!", + "type_info": "Bool" + }, 
+ { + "ordinal": 14, + "name": "behavior", + "type_info": "Text" + }, + { + "ordinal": 15, + "name": "security_definer!", + "type_info": "Bool" + } + ], + "parameters": { + "Left": [] + }, + "nullable": [ + null, + true, + false, + true, + false, + null, + null, + null, + null, + null, + null, + null, + null, + false, + null, + false + ] + }, + "hash": "4ea19fee016f1daeafdc466647d117910b19f540f19393b76aa6434e9d1d8502" +} diff --git a/.sqlx/query-64d9718b07516f3d2720cb7aa79e496f5337cadbad7a3fb03ccd3e3c21b71389.json b/.sqlx/query-64d9718b07516f3d2720cb7aa79e496f5337cadbad7a3fb03ccd3e3c21b71389.json deleted file mode 100644 index 43d634597..000000000 --- a/.sqlx/query-64d9718b07516f3d2720cb7aa79e496f5337cadbad7a3fb03ccd3e3c21b71389.json +++ /dev/null @@ -1,104 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "with functions as (\n select\n oid,\n proname,\n prosrc,\n prorettype,\n proretset,\n provolatile,\n prosecdef,\n prolang,\n pronamespace,\n proconfig,\n -- proargmodes is null when all arg modes are IN\n coalesce(\n p.proargmodes,\n array_fill(\n 'i' :: text,\n array [cardinality(coalesce(p.proallargtypes, p.proargtypes))]\n )\n ) as arg_modes,\n -- proargnames is null when all args are unnamed\n coalesce(\n p.proargnames,\n array_fill(\n '' :: text,\n array [cardinality(coalesce(p.proallargtypes, p.proargtypes))]\n )\n ) as arg_names,\n -- proallargtypes is null when all arg modes are IN\n coalesce(p.proallargtypes, p.proargtypes) as arg_types,\n array_cat(\n array_fill(false, array [pronargs - pronargdefaults]),\n array_fill(true, array [pronargdefaults])\n ) as arg_has_defaults\n from\n pg_proc as p\n where\n p.prokind = 'f'\n)\nselect\n f.oid :: int8 as \"id!\",\n n.nspname as \"schema!\",\n f.proname as \"name!\",\n l.lanname as \"language!\",\n case\n when l.lanname = 'internal' then null\n else f.prosrc\n end as body,\n case\n when l.lanname = 'internal' then null\n else pg_get_functiondef(f.oid)\n end as definition,\n coalesce(f_args.args, '[]') as 
args,\n nullif(pg_get_function_arguments(f.oid), '') as argument_types,\n nullif(pg_get_function_identity_arguments(f.oid), '') as identity_argument_types,\n f.prorettype :: int8 as \"return_type_id!\",\n pg_get_function_result(f.oid) as \"return_type!\",\n nullif(rt.typrelid :: int8, 0) as return_type_relation_id,\n f.proretset as is_set_returning_function,\n case\n when f.provolatile = 'i' then 'IMMUTABLE'\n when f.provolatile = 's' then 'STABLE'\n when f.provolatile = 'v' then 'VOLATILE'\n end as behavior,\n f.prosecdef as security_definer\nfrom\n functions f\n left join pg_namespace n on f.pronamespace = n.oid\n left join pg_language l on f.prolang = l.oid\n left join pg_type rt on rt.oid = f.prorettype\n left join (\n select\n oid,\n jsonb_object_agg(param, value) filter (\n where\n param is not null\n ) as config_params\n from\n (\n select\n oid,\n (string_to_array(unnest(proconfig), '=')) [1] as param,\n (string_to_array(unnest(proconfig), '=')) [2] as value\n from\n functions\n ) as t\n group by\n oid\n ) f_config on f_config.oid = f.oid\n left join (\n select\n oid,\n jsonb_agg(\n jsonb_build_object(\n 'mode',\n t2.mode,\n 'name',\n name,\n 'type_id',\n type_id,\n 'has_default',\n has_default\n )\n ) as args\n from\n (\n select\n oid,\n unnest(arg_modes) as mode,\n unnest(arg_names) as name,\n unnest(arg_types) :: int8 as type_id,\n unnest(arg_has_defaults) as has_default\n from\n functions\n ) as t1,\n lateral (\n select\n case\n when t1.mode = 'i' then 'in'\n when t1.mode = 'o' then 'out'\n when t1.mode = 'b' then 'inout'\n when t1.mode = 'v' then 'variadic'\n else 'table'\n end as mode\n ) as t2\n group by\n t1.oid\n ) f_args on f_args.oid = f.oid;", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "id!", - "type_info": "Int8" - }, - { - "ordinal": 1, - "name": "schema!", - "type_info": "Name" - }, - { - "ordinal": 2, - "name": "name!", - "type_info": "Name" - }, - { - "ordinal": 3, - "name": "language!", - "type_info": "Name" - }, - { - 
"ordinal": 4, - "name": "body", - "type_info": "Text" - }, - { - "ordinal": 5, - "name": "definition", - "type_info": "Text" - }, - { - "ordinal": 6, - "name": "args", - "type_info": "Jsonb" - }, - { - "ordinal": 7, - "name": "argument_types", - "type_info": "Text" - }, - { - "ordinal": 8, - "name": "identity_argument_types", - "type_info": "Text" - }, - { - "ordinal": 9, - "name": "return_type_id!", - "type_info": "Int8" - }, - { - "ordinal": 10, - "name": "return_type!", - "type_info": "Text" - }, - { - "ordinal": 11, - "name": "return_type_relation_id", - "type_info": "Int8" - }, - { - "ordinal": 12, - "name": "is_set_returning_function", - "type_info": "Bool" - }, - { - "ordinal": 13, - "name": "behavior", - "type_info": "Text" - }, - { - "ordinal": 14, - "name": "security_definer", - "type_info": "Bool" - } - ], - "parameters": { - "Left": [] - }, - "nullable": [ - null, - true, - false, - true, - null, - null, - null, - null, - null, - null, - null, - null, - false, - null, - false - ] - }, - "hash": "64d9718b07516f3d2720cb7aa79e496f5337cadbad7a3fb03ccd3e3c21b71389" -} diff --git a/.sqlx/query-2a964a12383b977bbbbd6fe7298dfce00358ecbe878952e8d4915c06cc5c9e0f.json b/.sqlx/query-66d92238c94b5f1c99fbf068a0b5cf4c296b594fe9e6cebbdc382acde73f4fb9.json similarity index 50% rename from .sqlx/query-2a964a12383b977bbbbd6fe7298dfce00358ecbe878952e8d4915c06cc5c9e0f.json rename to .sqlx/query-66d92238c94b5f1c99fbf068a0b5cf4c296b594fe9e6cebbdc382acde73f4fb9.json index 964394223..447ba93b4 100644 --- a/.sqlx/query-2a964a12383b977bbbbd6fe7298dfce00358ecbe878952e8d4915c06cc5c9e0f.json +++ b/.sqlx/query-66d92238c94b5f1c99fbf068a0b5cf4c296b594fe9e6cebbdc382acde73f4fb9.json @@ -1,6 +1,6 @@ { "db_name": "PostgreSQL", - "query": "select\n c.oid :: int8 as \"id!\",\n nc.nspname as schema,\n c.relname as name,\n c.relrowsecurity as rls_enabled,\n c.relforcerowsecurity as rls_forced,\n case\n when c.relreplident = 'd' then 'DEFAULT'\n when c.relreplident = 'i' then 'INDEX'\n when 
c.relreplident = 'f' then 'FULL'\n else 'NOTHING'\n end as \"replica_identity!\",\n pg_total_relation_size(format('%I.%I', nc.nspname, c.relname)) :: int8 as \"bytes!\",\n pg_size_pretty(\n pg_total_relation_size(format('%I.%I', nc.nspname, c.relname))\n ) as \"size!\",\n pg_stat_get_live_tuples(c.oid) as \"live_rows_estimate!\",\n pg_stat_get_dead_tuples(c.oid) as \"dead_rows_estimate!\",\n obj_description(c.oid) as comment\nfrom\n pg_namespace nc\n join pg_class c on nc.oid = c.relnamespace\nwhere\n c.relkind in ('r', 'p')\n and not pg_is_other_temp_schema(nc.oid)\n and (\n pg_has_role(c.relowner, 'USAGE')\n or has_table_privilege(\n c.oid,\n 'SELECT, INSERT, UPDATE, DELETE, TRUNCATE, REFERENCES, TRIGGER'\n )\n or has_any_column_privilege(c.oid, 'SELECT, INSERT, UPDATE, REFERENCES')\n )\ngroup by\n c.oid,\n c.relname,\n c.relrowsecurity,\n c.relforcerowsecurity,\n c.relreplident,\n nc.nspname;", + "query": "select\n c.oid :: int8 as \"id!\",\n nc.nspname as schema,\n c.relname as name,\n c.relkind as table_kind,\n c.relrowsecurity as rls_enabled,\n c.relforcerowsecurity as rls_forced,\n case\n when c.relreplident = 'd' then 'DEFAULT'\n when c.relreplident = 'i' then 'INDEX'\n when c.relreplident = 'f' then 'FULL'\n else 'NOTHING'\n end as \"replica_identity!\",\n pg_total_relation_size(format('%I.%I', nc.nspname, c.relname)) :: int8 as \"bytes!\",\n pg_size_pretty(\n pg_total_relation_size(format('%I.%I', nc.nspname, c.relname))\n ) as \"size!\",\n pg_stat_get_live_tuples(c.oid) as \"live_rows_estimate!\",\n pg_stat_get_dead_tuples(c.oid) as \"dead_rows_estimate!\",\n obj_description(c.oid) as comment\nfrom\n pg_namespace nc\n join pg_class c on nc.oid = c.relnamespace\nwhere\n c.relkind in ('r', 'p', 'v', 'm')\n and not pg_is_other_temp_schema(nc.oid)\n and (\n pg_has_role(c.relowner, 'USAGE')\n or has_table_privilege(\n c.oid,\n 'SELECT, INSERT, UPDATE, DELETE, TRUNCATE, REFERENCES, TRIGGER'\n )\n or has_any_column_privilege(c.oid, 'SELECT, INSERT, UPDATE, 
REFERENCES')\n )\ngroup by\n c.oid,\n c.relname,\n c.relrowsecurity,\n c.relforcerowsecurity,\n c.relreplident,\n nc.nspname;", "describe": { "columns": [ { @@ -20,41 +20,46 @@ }, { "ordinal": 3, + "name": "table_kind", + "type_info": "Char" + }, + { + "ordinal": 4, "name": "rls_enabled", "type_info": "Bool" }, { - "ordinal": 4, + "ordinal": 5, "name": "rls_forced", "type_info": "Bool" }, { - "ordinal": 5, + "ordinal": 6, "name": "replica_identity!", "type_info": "Text" }, { - "ordinal": 6, + "ordinal": 7, "name": "bytes!", "type_info": "Int8" }, { - "ordinal": 7, + "ordinal": 8, "name": "size!", "type_info": "Text" }, { - "ordinal": 8, + "ordinal": 9, "name": "live_rows_estimate!", "type_info": "Int8" }, { - "ordinal": 9, + "ordinal": 10, "name": "dead_rows_estimate!", "type_info": "Int8" }, { - "ordinal": 10, + "ordinal": 11, "name": "comment", "type_info": "Text" } @@ -68,6 +73,7 @@ false, false, false, + false, null, null, null, @@ -76,5 +82,5 @@ null ] }, - "hash": "2a964a12383b977bbbbd6fe7298dfce00358ecbe878952e8d4915c06cc5c9e0f" + "hash": "66d92238c94b5f1c99fbf068a0b5cf4c296b594fe9e6cebbdc382acde73f4fb9" } diff --git a/.sqlx/query-df57cc22f7d63847abce1d0d15675ba8951faa1be2ea6b2bf6714b1aa9127a6f.json b/.sqlx/query-df57cc22f7d63847abce1d0d15675ba8951faa1be2ea6b2bf6714b1aa9127a6f.json deleted file mode 100644 index b6fd2fc80..000000000 --- a/.sqlx/query-df57cc22f7d63847abce1d0d15675ba8951faa1be2ea6b2bf6714b1aa9127a6f.json +++ /dev/null @@ -1,44 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "-- we need to join tables from the pg_catalog since \"TRUNCATE\" triggers are \n-- not available in the information_schema.trigger table.\nselect \n t.tgname as \"name!\",\n c.relname as \"table_name!\",\n p.proname as \"proc_name!\",\n n.nspname as \"schema_name!\",\n t.tgtype as \"details_bitmask!\"\nfrom \n pg_catalog.pg_trigger t \n left join pg_catalog.pg_proc p on t.tgfoid = p.oid\n left join pg_catalog.pg_class c on t.tgrelid = c.oid\n left join 
pg_catalog.pg_namespace n on c.relnamespace = n.oid\nwhere \n -- triggers enforcing constraints (e.g. unique fields) should not be included.\n t.tgisinternal = false and \n t.tgconstraint = 0;\n", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "name!", - "type_info": "Name" - }, - { - "ordinal": 1, - "name": "table_name!", - "type_info": "Name" - }, - { - "ordinal": 2, - "name": "proc_name!", - "type_info": "Name" - }, - { - "ordinal": 3, - "name": "schema_name!", - "type_info": "Name" - }, - { - "ordinal": 4, - "name": "details_bitmask!", - "type_info": "Int2" - } - ], - "parameters": { - "Left": [] - }, - "nullable": [ - false, - true, - true, - true, - false - ] - }, - "hash": "df57cc22f7d63847abce1d0d15675ba8951faa1be2ea6b2bf6714b1aa9127a6f" -} diff --git a/Cargo.lock b/Cargo.lock index 4771c8a1f..94b591f3e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -24,7 +24,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" dependencies = [ "cfg-if", - "getrandom", + "getrandom 0.2.15", "once_cell", "version_check", "zerocopy", @@ -106,6 +106,18 @@ version = "1.0.94" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c1fd03a028ef38ba2276dce7e33fcd6369c158a1bca17946c4b1b701891c1ff7" +[[package]] +name = "arrayref" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76a2e8124351fda1ef8aaaa3bbd7ebbcb486bbcd4225aca0aa0d84bb2db8fecb" + +[[package]] +name = "arrayvec" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23b62fc65de8e4e7f52534fb52b0f3ed04746ae267519eef2a83941e8085068b" + [[package]] name = "assert_cmd" version = "2.0.16" @@ -290,6 +302,17 @@ version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" +[[package]] +name = "atty" +version = 
"0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" +dependencies = [ + "hermit-abi 0.1.19", + "libc", + "winapi", +] + [[package]] name = "auto_impl" version = "1.2.0" @@ -322,6 +345,12 @@ dependencies = [ "windows-targets 0.52.6", ] +[[package]] +name = "base64" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" + [[package]] name = "base64" version = "0.22.1" @@ -334,27 +363,30 @@ version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" +[[package]] +name = "beef" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a8241f3ebb85c056b509d4327ad0358fbbba6ffb340bf388f26350aeda225b1" + [[package]] name = "bindgen" -version = "0.66.1" +version = "0.72.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2b84e06fc203107bfbad243f4aba2af864eb7db3b1cf46ea0a023b0b433d2a7" +checksum = "4f72209734318d0b619a5e0f5129918b848c416e122a3c4ce054e03cb87b726f" dependencies = [ "bitflags 2.6.0", "cexpr", "clang-sys", - "lazy_static", - "lazycell", + "itertools 0.10.5", "log", - "peeking_take_while", "prettyplease", "proc-macro2", "quote", "regex", - "rustc-hash 1.1.0", + "rustc-hash 2.1.0", "shlex", "syn 2.0.90", - "which", ] [[package]] @@ -666,6 +698,17 @@ dependencies = [ "serde", ] +[[package]] +name = "blake2b_simd" +version = "0.5.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "afa748e348ad3be8263be728124b24a24f268266f6f5d58af9d75f6a40b5c587" +dependencies = [ + "arrayref", + "arrayvec", + "constant_time_eq", +] + [[package]] name = "block-buffer" version = "0.10.4" @@ -747,9 +790,9 @@ checksum = 
"37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" [[package]] name = "cc" -version = "1.2.3" +version = "1.2.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "27f657647bcff5394bf56c7317665bbf790a137a50eaaa5c6bfbb9e27a518f2d" +checksum = "d487aa071b5f64da6f19a3e848e3578944b726ee5a4854b82172f02aa876bfdc" dependencies = [ "shlex", ] @@ -802,7 +845,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "57663b653d948a338bfb3eeba9bb2fd5fcfaecb9e199e87e1eda4d9e8b240fd9" dependencies = [ "ciborium-io", - "half", + "half 2.6.0", ] [[package]] @@ -816,6 +859,17 @@ dependencies = [ "libloading", ] +[[package]] +name = "clap" +version = "2.34.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a0610544180c38b88101fecf2dd634b174a62eef6946f84dfc6a7127512b381c" +dependencies = [ + "bitflags 1.3.2", + "textwrap", + "unicode-width", +] + [[package]] name = "clap" version = "4.5.23" @@ -856,6 +910,15 @@ version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6" +[[package]] +name = "clippy" +version = "0.0.302" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d911ee15579a3f50880d8c1d59ef6e79f9533127a3bd342462f5d584f5e8c294" +dependencies = [ + "term", +] + [[package]] name = "colorchoice" version = "1.0.3" @@ -889,6 +952,12 @@ version = "0.9.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8" +[[package]] +name = "constant_time_eq" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "245097e9a4535ee1e3e3931fcfcd55a796a44c643e8596ff6566d68f09b87bbc" + [[package]] name = "convert_case" version = "0.6.0" @@ -937,6 +1006,32 @@ dependencies = [ "cfg-if", ] +[[package]] +name = "criterion" +version = "0.3.6" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "b01d6de93b2b6c65e17c634a26653a29d107b3c98c607c765bf38d041531cd8f" +dependencies = [ + "atty", + "cast", + "clap 2.34.0", + "criterion-plot 0.4.5", + "csv", + "itertools 0.10.5", + "lazy_static", + "num-traits", + "oorandom", + "plotters", + "rayon", + "regex", + "serde", + "serde_cbor", + "serde_derive", + "serde_json", + "tinytemplate", + "walkdir", +] + [[package]] name = "criterion" version = "0.5.1" @@ -946,8 +1041,8 @@ dependencies = [ "anes", "cast", "ciborium", - "clap", - "criterion-plot", + "clap 4.5.23", + "criterion-plot 0.5.0", "is-terminal", "itertools 0.10.5", "num-traits", @@ -963,6 +1058,16 @@ dependencies = [ "walkdir", ] +[[package]] +name = "criterion-plot" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2673cc8207403546f45f5fd319a974b1e6983ad1a3ee7e6041650013be041876" +dependencies = [ + "cast", + "itertools 0.10.5", +] + [[package]] name = "criterion-plot" version = "0.5.0" @@ -1045,6 +1150,27 @@ dependencies = [ "typenum", ] +[[package]] +name = "csv" +version = "1.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "acdc4883a9c96732e4733212c01447ebd805833b7275a73ca3ee080fd77afdaf" +dependencies = [ + "csv-core", + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "csv-core" +version = "0.1.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d02f3b0da4c6504f86e9cd789d8dbafab48c2321be74e9987593de5a894d93d" +dependencies = [ + "memchr", +] + [[package]] name = "dashmap" version = "5.5.3" @@ -1119,6 +1245,17 @@ dependencies = [ "dirs-sys", ] +[[package]] +name = "dirs" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3fd78930633bd1c6e35c4b42b1df7b0cbc6bc191146e512bb3bedf243fcc3901" +dependencies = [ + "libc", + "redox_users 0.3.5", + "winapi", +] + [[package]] name = "dirs-sys" version = "0.4.1" @@ -1127,7 +1264,7 
@@ checksum = "520f05a5cbd335fae5a99ff7a6ab8627577660ee5cfd6a94a6a929b52ff0321c" dependencies = [ "libc", "option-ext", - "redox_users", + "redox_users 0.4.6", "windows-sys 0.48.0", ] @@ -1162,6 +1299,7 @@ dependencies = [ "pgt_console", "pgt_diagnostics", "pgt_flags", + "pgt_query", "pgt_query_ext", "pgt_statement_splitter", "pgt_workspace", @@ -1202,6 +1340,12 @@ version = "1.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0d6ef0072f8a535281e4876be788938b528e9a1d43900b82c2569af7da799125" +[[package]] +name = "easy-parallel" +version = "3.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2afbb9b0aef60e4f0d2b18129b6c0dff035a6f7dbbd17c2f38c1432102ee223c" + [[package]] name = "either" version = "1.13.0" @@ -1363,6 +1507,18 @@ dependencies = [ "spin", ] +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "foldhash" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" + [[package]] name = "form_urlencoded" version = "1.2.1" @@ -1544,6 +1700,17 @@ dependencies = [ "unicode-width", ] +[[package]] +name = "getrandom" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce" +dependencies = [ + "cfg-if", + "libc", + "wasi 0.9.0+wasi-snapshot-preview1", +] + [[package]] name = "getrandom" version = "0.2.15" @@ -1552,7 +1719,7 @@ checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" dependencies = [ "cfg-if", "libc", - "wasi", + "wasi 0.11.0+wasi-snapshot-preview1", ] [[package]] @@ -1603,6 +1770,12 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "half" +version = "1.8.3" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b43ede17f21864e81be2fa654110bf1e793774238d86ef8555c37e6519c0403" + [[package]] name = "half" version = "2.6.0" @@ -1634,6 +1807,11 @@ name = "hashbrown" version = "0.15.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289" +dependencies = [ + "allocator-api2", + "equivalent", + "foldhash", +] [[package]] name = "hashlink" @@ -1660,6 +1838,15 @@ version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" +[[package]] +name = "hermit-abi" +version = "0.1.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" +dependencies = [ + "libc", +] + [[package]] name = "hermit-abi" version = "0.3.9" @@ -2011,12 +2198,6 @@ dependencies = [ "spin", ] -[[package]] -name = "lazycell" -version = "1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" - [[package]] name = "libc" version = "0.2.168" @@ -2113,6 +2294,49 @@ dependencies = [ "value-bag", ] +[[package]] +name = "logos" +version = "0.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab6f536c1af4c7cc81edf73da1f8029896e7e1e16a219ef09b184e76a296f3db" +dependencies = [ + "logos-derive", +] + +[[package]] +name = "logos-codegen" +version = "0.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "189bbfd0b61330abea797e5e9276408f2edbe4f822d7ad08685d67419aafb34e" +dependencies = [ + "beef", + "fnv", + "lazy_static", + "proc-macro2", + "quote", + "regex-syntax 0.8.5", + "rustc_version", + "syn 2.0.90", +] + +[[package]] +name = "logos-derive" +version = "0.15.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebfe8e1a19049ddbfccbd14ac834b215e11b85b90bab0c2dba7c7b92fb5d5cba" +dependencies = [ + "logos-codegen", +] + +[[package]] +name = "lru" +version = "0.12.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "234cf4f4a04dc1f57e24b96cc0cd600cf2af460d4161ac5ecdd0af8e1f3b2a38" +dependencies = [ + "hashbrown 0.15.2", +] + [[package]] name = "lsp-types" version = "0.94.1" @@ -2160,6 +2384,28 @@ dependencies = [ "autocfg", ] +[[package]] +name = "miette" +version = "7.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f98efec8807c63c752b5bd61f862c165c115b0a35685bdcfd9238c7aeb592b7" +dependencies = [ + "cfg-if", + "miette-derive", + "unicode-width", +] + +[[package]] +name = "miette-derive" +version = "7.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db5b29714e950dbb20d5e6f74f9dcec4edbcc1067bb7f8ed198c097b8c1a818b" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.90", +] + [[package]] name = "mimalloc" version = "0.1.43" @@ -2191,7 +2437,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2886843bf800fba2e3377cff24abf6379b4c4d5c6681eaf9ea5b0d15090450bd" dependencies = [ "libc", - "wasi", + "wasi 0.11.0+wasi-snapshot-preview1", "windows-sys 0.52.0", ] @@ -2423,7 +2669,7 @@ checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" dependencies = [ "cfg-if", "libc", - "redox_syscall", + "redox_syscall 0.5.7", "smallvec", "windows-targets 0.52.6", ] @@ -2452,12 +2698,6 @@ dependencies = [ "once_cell", ] -[[package]] -name = "peeking_take_while" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19b17cddbe7ec3f8bc800887bab5e717348c95ea2ca0b1bf0837fb964dc67099" - [[package]] name = "pem-rfc7468" version = "0.7.0" @@ -2483,24 +2723,6 @@ dependencies = [ "indexmap 2.7.0", ] -[[package]] -name = "pg_query" -version 
= "6.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f71c7c56dfe299ec6f98aa210aa23458be3b0610c485be60a5873c2f3627c40e" -dependencies = [ - "bindgen", - "cc", - "fs_extra", - "glob", - "itertools 0.10.5", - "prost", - "prost-build", - "serde", - "serde_json", - "thiserror 1.0.69", -] - [[package]] name = "pgt_analyse" version = "0.0.0" @@ -2510,7 +2732,8 @@ dependencies = [ "enumflags2", "pgt_console", "pgt_diagnostics", - "pgt_query_ext", + "pgt_query", + "pgt_schema_cache", "pgt_text_size", "rustc-hash 2.1.0", "schemars", @@ -2525,8 +2748,10 @@ dependencies = [ "pgt_analyse", "pgt_console", "pgt_diagnostics", - "pgt_query_ext", + "pgt_query", + "pgt_schema_cache", "pgt_test_macros", + "pgt_text_size", "serde", "termcolor", ] @@ -2575,12 +2800,12 @@ name = "pgt_completions" version = "0.0.0" dependencies = [ "async-std", - "criterion", + "criterion 0.5.1", "fuzzy-matcher", "pgt_schema_cache", "pgt_test_utils", "pgt_text_size", - "pgt_treesitter_queries", + "pgt_treesitter", "schemars", "serde", "serde_json", @@ -2599,6 +2824,7 @@ dependencies = [ "biome_deserialize_macros 0.6.0", "bpaf", "indexmap 2.7.0", + "oxc_resolver", "pgt_analyse", "pgt_analyser", "pgt_console", @@ -2631,6 +2857,7 @@ dependencies = [ "backtrace", "bpaf", "enumflags2", + "oxc_resolver", "pgt_console", "pgt_diagnostics_categories", "pgt_diagnostics_macros", @@ -2676,6 +2903,7 @@ dependencies = [ "crossbeam", "directories", "enumflags2", + "oxc_resolver", "parking_lot", "pgt_diagnostics", "rayon", @@ -2691,20 +2919,23 @@ name = "pgt_lexer" version = "0.0.0" dependencies = [ "insta", - "pg_query", "pgt_diagnostics", "pgt_lexer_codegen", "pgt_text_size", - "regex", + "pgt_tokenizer", ] [[package]] name = "pgt_lexer_codegen" version = "0.0.0" dependencies = [ - "pgt_query_proto_parser", + "anyhow", + "convert_case", "proc-macro2", + "prost-reflect", + "protox", "quote", + "ureq", ] [[package]] @@ -2746,33 +2977,59 @@ dependencies = [ ] [[package]] -name = 
"pgt_query_ext" +name = "pgt_plpgsql_check" version = "0.0.0" dependencies = [ - "petgraph", - "pg_query", + "pgt_console", "pgt_diagnostics", - "pgt_lexer", - "pgt_query_ext_codegen", + "pgt_query", + "pgt_query_ext", + "pgt_schema_cache", + "pgt_test_utils", "pgt_text_size", + "regex", + "serde", + "serde_json", + "sqlx", + "tree-sitter", ] [[package]] -name = "pgt_query_ext_codegen" +name = "pgt_query" version = "0.0.0" dependencies = [ - "pgt_query_proto_parser", - "proc-macro2", - "quote", + "bindgen", + "cc", + "clippy", + "easy-parallel", + "fs_extra", + "glob", + "pgt_query_macros", + "prost", + "prost-build", + "thiserror 1.0.69", + "which", ] [[package]] -name = "pgt_query_proto_parser" +name = "pgt_query_ext" +version = "0.0.0" +dependencies = [ + "pgt_diagnostics", + "pgt_query", + "pgt_text_size", +] + +[[package]] +name = "pgt_query_macros" version = "0.0.0" dependencies = [ "convert_case", - "protobuf", - "protobuf-parse", + "proc-macro2", + "prost-reflect", + "protox", + "quote", + "ureq", ] [[package]] @@ -2796,14 +3053,25 @@ dependencies = [ name = "pgt_statement_splitter" version = "0.0.0" dependencies = [ + "criterion 0.3.6", "ntest", "pgt_diagnostics", "pgt_lexer", - "pgt_query_ext", + "pgt_query", "pgt_text_size", "regex", ] +[[package]] +name = "pgt_suppressions" +version = "0.0.0" +dependencies = [ + "pgt_analyse", + "pgt_diagnostics", + "pgt_text_size", + "tracing", +] + [[package]] name = "pgt_test_macros" version = "0.0.0" @@ -2820,7 +3088,7 @@ name = "pgt_test_utils" version = "0.0.0" dependencies = [ "anyhow", - "clap", + "clap 4.5.23", "dotenv", "sqlx", "tree-sitter", @@ -2849,10 +3117,20 @@ dependencies = [ ] [[package]] -name = "pgt_treesitter_queries" +name = "pgt_tokenizer" version = "0.0.0" dependencies = [ - "clap", + "insta", +] + +[[package]] +name = "pgt_treesitter" +version = "0.0.0" +dependencies = [ + "clap 4.5.23", + "pgt_schema_cache", + "pgt_test_utils", + "pgt_text_size", "tree-sitter", "tree_sitter_sql", ] @@ -2861,7 
+3139,7 @@ dependencies = [ name = "pgt_type_resolver" version = "0.0.0" dependencies = [ - "pgt_query_ext", + "pgt_query", "pgt_schema_cache", ] @@ -2872,11 +3150,11 @@ dependencies = [ "insta", "pgt_console", "pgt_diagnostics", - "pgt_query_ext", + "pgt_query", "pgt_schema_cache", "pgt_test_utils", "pgt_text_size", - "pgt_treesitter_queries", + "pgt_treesitter", "sqlx", "tokio", "tree-sitter", @@ -2891,10 +3169,10 @@ dependencies = [ "biome_js_factory", "biome_js_syntax", "biome_rowan", - "dashmap 5.5.3", "futures", "globset", "ignore", + "lru", "pgt_analyse", "pgt_analyser", "pgt_completions", @@ -2903,15 +3181,22 @@ dependencies = [ "pgt_diagnostics", "pgt_fs", "pgt_lexer", + "pgt_plpgsql_check", + "pgt_query", "pgt_query_ext", "pgt_schema_cache", "pgt_statement_splitter", + "pgt_suppressions", + "pgt_test_utils", "pgt_text_size", + "pgt_tokenizer", "pgt_typecheck", + "pgt_workspace_macros", "rustc-hash 2.1.0", "schemars", "serde", "serde_json", + "slotmap", "sqlx", "strum", "tempfile", @@ -2921,6 +3206,15 @@ dependencies = [ "tree_sitter_sql", ] +[[package]] +name = "pgt_workspace_macros" +version = "0.0.0" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + [[package]] name = "pin-project" version = "1.1.7" @@ -3140,9 +3434,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.93" +version = "1.0.95" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60946a68e5f9d28b0dc1c21bb8a97ee7d018a8b322fa57838ba31cc878e22d99" +checksum = "02b3e5e68a3a1a02aad3ec490a98007cbc13c37cbe84a3cd7b8e406d76e7f778" dependencies = [ "unicode-ident", ] @@ -3191,48 +3485,51 @@ dependencies = [ ] [[package]] -name = "prost-types" -version = "0.13.5" +name = "prost-reflect" +version = "0.15.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "52c2c1bf36ddb1a1c396b3601a3cec27c2462e45f07c386894ec3ccf5332bd16" +checksum = "37587d5a8a1b3dc9863403d084fc2254b91ab75a702207098837950767e2260b" dependencies = 
[ + "logos", + "miette", "prost", + "prost-types", ] [[package]] -name = "protobuf" -version = "3.7.1" +name = "prost-types" +version = "0.13.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a3a7c64d9bf75b1b8d981124c14c179074e8caa7dfe7b6a12e6222ddcd0c8f72" +checksum = "52c2c1bf36ddb1a1c396b3601a3cec27c2462e45f07c386894ec3ccf5332bd16" dependencies = [ - "once_cell", - "protobuf-support", - "thiserror 1.0.69", + "prost", ] [[package]] -name = "protobuf-parse" -version = "3.7.1" +name = "protox" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "322330e133eab455718444b4e033ebfac7c6528972c784fcde28d2cc783c6257" +checksum = "424c2bd294b69c49b949f3619362bc3c5d28298cd1163b6d1a62df37c16461aa" dependencies = [ - "anyhow", - "indexmap 2.7.0", - "log", - "protobuf", - "protobuf-support", - "tempfile", - "thiserror 1.0.69", - "which", + "bytes", + "miette", + "prost", + "prost-reflect", + "prost-types", + "protox-parse", + "thiserror 2.0.6", ] [[package]] -name = "protobuf-support" -version = "3.7.1" +name = "protox-parse" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b088fd20b938a875ea00843b6faf48579462630015c3788d397ad6a786663252" +checksum = "57927f9dbeeffcce7192404deee6157a640cbb3fe8ac11eabbe571565949ab75" dependencies = [ - "thiserror 1.0.69", + "logos", + "miette", + "prost-types", + "thiserror 2.0.6", ] [[package]] @@ -3314,7 +3611,7 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ - "getrandom", + "getrandom 0.2.15", ] [[package]] @@ -3337,6 +3634,12 @@ dependencies = [ "crossbeam-utils", ] +[[package]] +name = "redox_syscall" +version = "0.1.57" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41cc0f7e4d5d4544e8861606a285bb08d3e70712ccc7d2b84d7c0ccfaf4b05ce" + [[package]] name = 
"redox_syscall" version = "0.5.7" @@ -3346,13 +3649,24 @@ dependencies = [ "bitflags 2.6.0", ] +[[package]] +name = "redox_users" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "de0737333e7a9502c789a36d7c7fa6092a49895d4faa31ca5df163857ded2e9d" +dependencies = [ + "getrandom 0.1.16", + "redox_syscall 0.1.57", + "rust-argon2", +] + [[package]] name = "redox_users" version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ba009ff324d1fc1b900bd1fdb31564febe58a8ccc8a6fdbb93b543d33b13ca43" dependencies = [ - "getrandom", + "getrandom 0.2.15", "libredox", "thiserror 1.0.69", ] @@ -3401,6 +3715,20 @@ version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" +[[package]] +name = "ring" +version = "0.17.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4689e6c2294d81e88dc6261c768b63bc4fcdb852be6d1352498b114f61383b7" +dependencies = [ + "cc", + "cfg-if", + "getrandom 0.2.15", + "libc", + "untrusted", + "windows-sys 0.52.0", +] + [[package]] name = "rsa" version = "0.9.7" @@ -3430,12 +3758,25 @@ dependencies = [ "pgt_analyser", "pgt_console", "pgt_diagnostics", + "pgt_query", "pgt_query_ext", "pgt_statement_splitter", "pgt_workspace", "pulldown-cmark", ] +[[package]] +name = "rust-argon2" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4b18820d944b33caa75a71378964ac46f58517c92b6ae5f762636247c09e78fb" +dependencies = [ + "base64 0.13.1", + "blake2b_simd", + "constant_time_eq", + "crossbeam-utils", +] + [[package]] name = "rustc-demangle" version = "0.1.24" @@ -3454,6 +3795,15 @@ version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c7fb8039b3032c191086b10f11f319a6e99e1e82889c5cc6046f515c9db1d497" +[[package]] +name = "rustc_version" +version = "0.4.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92" +dependencies = [ + "semver", +] + [[package]] name = "rustix" version = "0.37.28" @@ -3481,6 +3831,41 @@ dependencies = [ "windows-sys 0.59.0", ] +[[package]] +name = "rustls" +version = "0.23.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7160e3e10bf4535308537f3c4e1641468cd0e485175d6163087c0393c7d46643" +dependencies = [ + "log", + "once_cell", + "ring", + "rustls-pki-types", + "rustls-webpki", + "subtle", + "zeroize", +] + +[[package]] +name = "rustls-pki-types" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "229a4a4c221013e7e1f1a043678c5cc39fe5171437c88fb47151a21e6f5b5c79" +dependencies = [ + "zeroize", +] + +[[package]] +name = "rustls-webpki" +version = "0.103.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e4a72fe2bcf7a6ac6fd7d0b9e5cb68aeb7d4c0a0271730218b3e92d43b4eb435" +dependencies = [ + "ring", + "rustls-pki-types", + "untrusted", +] + [[package]] name = "rustversion" version = "1.0.20" @@ -3535,6 +3920,12 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" +[[package]] +name = "semver" +version = "1.0.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56e6fa9c48d24d85fb3de5ad847117517440f6beceb7798af16b4a87d616b8d0" + [[package]] name = "serde" version = "1.0.215" @@ -3544,6 +3935,16 @@ dependencies = [ "serde_derive", ] +[[package]] +name = "serde_cbor" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2bef2ebfde456fb76bbcf9f59315333decc4fda0b2b44b420243c11e0f5ec1f5" +dependencies = [ + "half 1.8.3", + "serde", +] + [[package]] name = "serde_derive" version = "1.0.215" @@ -3701,6 +4102,16 @@ dependencies = [ 
"autocfg", ] +[[package]] +name = "slotmap" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dbff4acf519f630b3a3ddcfaea6c06b42174d9a44bc70c620e9ed1649d58b82a" +dependencies = [ + "serde", + "version_check", +] + [[package]] name = "smallvec" version = "1.13.2" @@ -3860,7 +4271,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "64bb4714269afa44aef2755150a0fc19d756fb580a67db8885608cf02f47d06a" dependencies = [ "atoi", - "base64", + "base64 0.22.1", "bitflags 2.6.0", "byteorder", "bytes", @@ -3902,7 +4313,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6fa91a732d854c5d7726349bb4bb879bb9478993ceb764247660aee25f67c2f8" dependencies = [ "atoi", - "base64", + "base64 0.22.1", "bitflags 2.6.0", "byteorder", "crc", @@ -4078,12 +4489,23 @@ checksum = "9a8a559c81686f576e8cd0290cd2a24a2a9ad80c98b3478856500fcbd7acd704" dependencies = [ "cfg-if", "fastrand 2.3.0", - "getrandom", + "getrandom 0.2.15", "once_cell", "rustix 0.38.42", "windows-sys 0.59.0", ] +[[package]] +name = "term" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "edd106a334b7657c10b7c540a0106114feadeb4dc314513e97df481d5d966f42" +dependencies = [ + "byteorder", + "dirs", + "winapi", +] + [[package]] name = "termcolor" version = "1.4.1" @@ -4121,6 +4543,15 @@ dependencies = [ "syn 2.0.90", ] +[[package]] +name = "textwrap" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060" +dependencies = [ + "unicode-width", +] + [[package]] name = "thiserror" version = "1.0.69" @@ -4633,6 +5064,28 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "39ec24b3121d976906ece63c9daad25b85969647682eee313cb5779fdd69e14e" +[[package]] +name = "untrusted" +version = "0.9.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" + +[[package]] +name = "ureq" +version = "2.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02d1a66277ed75f640d608235660df48c8e3c19f3b4edb6a263315626cc3c01d" +dependencies = [ + "base64 0.22.1", + "flate2", + "log", + "once_cell", + "rustls", + "rustls-pki-types", + "url", + "webpki-roots 0.26.11", +] + [[package]] name = "url" version = "2.5.4" @@ -4669,7 +5122,7 @@ version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f8c5f0a0af699448548ad1a2fbf920fb4bee257eae39953ba95cb84891a0446a" dependencies = [ - "getrandom", + "getrandom 0.2.15", ] [[package]] @@ -4741,6 +5194,12 @@ dependencies = [ "winapi-util", ] +[[package]] +name = "wasi" +version = "0.9.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" + [[package]] name = "wasi" version = "0.11.0+wasi-snapshot-preview1" @@ -4830,16 +5289,34 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "webpki-roots" +version = "0.26.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "521bc38abb08001b01866da9f51eb7c5d647a19260e00054a8c7fd5f9e57f7a9" +dependencies = [ + "webpki-roots 1.0.1", +] + +[[package]] +name = "webpki-roots" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8782dd5a41a24eed3a4f40b606249b3e236ca61adf1f25ea4d45c73de122b502" +dependencies = [ + "rustls-pki-types", +] + [[package]] name = "which" -version = "4.4.2" +version = "6.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87ba24419a2078cd2b0f2ede2691b6c66d8e47836da3b6db8265ebad47afbfc7" +checksum = "b4ee928febd44d98f2f459a4a79bd4d928591333a494a10a868418ac1b39cf1f" dependencies = [ "either", "home", - 
"once_cell", "rustix 0.38.42", + "winsafe", ] [[package]] @@ -4848,7 +5325,7 @@ version = "1.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "372d5b87f58ec45c384ba03563b03544dc5fadc3983e434b286913f5b4a9bb6d" dependencies = [ - "redox_syscall", + "redox_syscall 0.5.7", "wasite", ] @@ -5040,6 +5517,12 @@ dependencies = [ "memchr", ] +[[package]] +name = "winsafe" +version = "0.0.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d135d17ab770252ad95e9a872d365cf3090e3be864a34ab46f48555993efc904" + [[package]] name = "write-json" version = "0.1.4" @@ -5114,6 +5597,7 @@ dependencies = [ "bpaf", "pgt_analyse", "pgt_analyser", + "pgt_diagnostics", "pgt_workspace", "proc-macro2", "pulldown-cmark", diff --git a/Cargo.toml b/Cargo.toml index aaaa90357..d68aafe04 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -28,6 +28,7 @@ enumflags2 = "0.7.11" ignore = "0.4.23" indexmap = { version = "2.6.0", features = ["serde"] } insta = "1.31.0" +oxc_resolver = "1.12.0" pg_query = "6.1.0" proc-macro2 = "1.0.66" quote = "1.0.33" @@ -38,13 +39,19 @@ schemars = { version = "0.8.22", features = ["indexmap2", "small serde = "1.0.195" serde_json = "1.0.114" similar = "2.6.0" +slotmap = "1.0.7" smallvec = { version = "1.13.2", features = ["union", "const_new", "serde"] } strum = { version = "0.27.1", features = ["derive"] } # this will use tokio if available, otherwise async-std +convert_case = "0.6.0" +prost = "0.13.5" +prost-reflect = "0.15.3" +protox = "0.8.0" sqlx = { version = "0.8.2", features = ["runtime-tokio", "runtime-async-std", "postgres", "json"] } -syn = "1.0.109" +syn = { version = "1.0.109", features = ["full"] } termcolor = "1.4.1" test-log = "0.2.17" +thiserror = "1.0.31" tokio = { version = "1.40.0", features = ["full"] } tracing = { version = "0.1.40", default-features = false, features = ["std"] } tracing-bunyan-formatter = { version = "0.3.10 " } @@ -69,16 +76,20 @@ pgt_lexer = { path = "./crates/pgt_lexer", 
version = "0.0.0" } pgt_lexer_codegen = { path = "./crates/pgt_lexer_codegen", version = "0.0.0" } pgt_lsp = { path = "./crates/pgt_lsp", version = "0.0.0" } pgt_markup = { path = "./crates/pgt_markup", version = "0.0.0" } +pgt_plpgsql_check = { path = "./crates/pgt_plpgsql_check", version = "0.0.0" } +pgt_query = { path = "./crates/pgt_query", version = "0.0.0" } pgt_query_ext = { path = "./crates/pgt_query_ext", version = "0.0.0" } -pgt_query_ext_codegen = { path = "./crates/pgt_query_ext_codegen", version = "0.0.0" } -pgt_query_proto_parser = { path = "./crates/pgt_query_proto_parser", version = "0.0.0" } +pgt_query_macros = { path = "./crates/pgt_query_macros", version = "0.0.0" } pgt_schema_cache = { path = "./crates/pgt_schema_cache", version = "0.0.0" } pgt_statement_splitter = { path = "./crates/pgt_statement_splitter", version = "0.0.0" } +pgt_suppressions = { path = "./crates/pgt_suppressions", version = "0.0.0" } pgt_text_edit = { path = "./crates/pgt_text_edit", version = "0.0.0" } pgt_text_size = { path = "./crates/pgt_text_size", version = "0.0.0" } -pgt_treesitter_queries = { path = "./crates/pgt_treesitter_queries", version = "0.0.0" } +pgt_tokenizer = { path = "./crates/pgt_tokenizer", version = "0.0.0" } +pgt_treesitter = { path = "./crates/pgt_treesitter", version = "0.0.0" } pgt_typecheck = { path = "./crates/pgt_typecheck", version = "0.0.0" } pgt_workspace = { path = "./crates/pgt_workspace", version = "0.0.0" } +pgt_workspace_macros = { path = "./crates/pgt_workspace_macros", version = "0.0.0" } pgt_test_macros = { path = "./crates/pgt_test_macros" } pgt_test_utils = { path = "./crates/pgt_test_utils" } diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 000000000..10353bb27 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,16 @@ +FROM postgres:15 + +# Install build dependencies +RUN apt-get update && \ + apt-get install -y postgresql-server-dev-15 gcc make git && \ + cd /tmp && \ + git clone https://github.com/okbob/plpgsql_check.git && 
\ + cd plpgsql_check && \ + make && \ + make install && \ + apt-get remove -y postgresql-server-dev-15 gcc make git && \ + apt-get autoremove -y && \ + rm -rf /tmp/plpgsql_check /var/lib/apt/lists/* + +# Add initialization script directly +RUN echo "CREATE EXTENSION IF NOT EXISTS plpgsql_check;" > /docker-entrypoint-initdb.d/01-create-extension.sql \ No newline at end of file diff --git a/README.md b/README.md index 162bb9c0e..fa18d0fec 100644 --- a/README.md +++ b/README.md @@ -33,6 +33,21 @@ Our current focus is on refining and enhancing these core features while buildin - [psteinroe](https://github.com/psteinroe) - [juleswritescode](https://github.com/juleswritescode) +## Development + +### Using Nix + +```bash +nix develop +docker-compose up -d +``` + +### Using Docker + +```bash +docker-compose up -d +``` + ## Acknowledgements A big thanks to the following projects, without which this project wouldn't have been possible: diff --git a/crates/pgt_analyse/Cargo.toml b/crates/pgt_analyse/Cargo.toml index 75eb0211c..3da600343 100644 --- a/crates/pgt_analyse/Cargo.toml +++ b/crates/pgt_analyse/Cargo.toml @@ -13,10 +13,11 @@ version = "0.0.0" [dependencies] -pgt_console.workspace = true -pgt_diagnostics.workspace = true -pgt_query_ext.workspace = true -rustc-hash = { workspace = true } +pgt_console.workspace = true +pgt_diagnostics.workspace = true +pgt_query.workspace = true +pgt_schema_cache.workspace = true +rustc-hash = { workspace = true } biome_deserialize = { workspace = true, optional = true } biome_deserialize_macros = { workspace = true, optional = true } diff --git a/crates/pgt_analyse/src/analysed_file_context.rs b/crates/pgt_analyse/src/analysed_file_context.rs new file mode 100644 index 000000000..82dc40711 --- /dev/null +++ b/crates/pgt_analyse/src/analysed_file_context.rs @@ -0,0 +1,7 @@ +#[derive(Default)] +pub struct AnalysedFileContext {} + +impl AnalysedFileContext { + #[allow(unused)] + pub fn update_from(&mut self, stmt_root: 
&pgt_query::NodeEnum) {} +} diff --git a/crates/pgt_analyse/src/categories.rs b/crates/pgt_analyse/src/categories.rs index e5dd51c2b..02819a4ea 100644 --- a/crates/pgt_analyse/src/categories.rs +++ b/crates/pgt_analyse/src/categories.rs @@ -16,6 +16,27 @@ pub enum RuleCategory { Transformation, } +impl TryFrom for RuleCategory { + type Error = String; + + fn try_from(value: String) -> Result { + value.as_str().try_into() + } +} + +impl TryFrom<&str> for RuleCategory { + type Error = String; + + fn try_from(value: &str) -> Result { + match value { + "lint" => Ok(Self::Lint), + "action" => Ok(Self::Action), + "transformation" => Ok(Self::Transformation), + _ => Err(format!("Invalid Rule Category: {}", value)), + } + } +} + /// Actions that suppress rules should start with this string pub const SUPPRESSION_ACTION_CATEGORY: &str = "quickfix.suppressRule"; diff --git a/crates/pgt_analyse/src/context.rs b/crates/pgt_analyse/src/context.rs index cd0696573..ddd5d28d5 100644 --- a/crates/pgt_analyse/src/context.rs +++ b/crates/pgt_analyse/src/context.rs @@ -1,11 +1,16 @@ +use pgt_schema_cache::SchemaCache; + use crate::{ + AnalysedFileContext, categories::RuleCategory, rule::{GroupCategory, Rule, RuleGroup, RuleMetadata}, }; pub struct RuleContext<'a, R: Rule> { - stmt: &'a pgt_query_ext::NodeEnum, + stmt: &'a pgt_query::NodeEnum, options: &'a R::Options, + schema_cache: Option<&'a SchemaCache>, + file_context: &'a AnalysedFileContext, } impl<'a, R> RuleContext<'a, R> @@ -13,8 +18,18 @@ where R: Rule + Sized + 'static, { #[allow(clippy::too_many_arguments)] - pub fn new(stmt: &'a pgt_query_ext::NodeEnum, options: &'a R::Options) -> Self { - Self { stmt, options } + pub fn new( + stmt: &'a pgt_query::NodeEnum, + options: &'a R::Options, + schema_cache: Option<&'a SchemaCache>, + file_context: &'a AnalysedFileContext, + ) -> Self { + Self { + stmt, + options, + schema_cache, + file_context, + } } /// Returns the group that belongs to the current rule @@ -28,10 +43,18 @@ where 
} /// Returns the AST root - pub fn stmt(&self) -> &pgt_query_ext::NodeEnum { + pub fn stmt(&self) -> &pgt_query::NodeEnum { self.stmt } + pub fn file_context(&self) -> &AnalysedFileContext { + self.file_context + } + + pub fn schema_cache(&self) -> Option<&SchemaCache> { + self.schema_cache + } + /// Returns the metadata of the rule /// /// The metadata contains information about the rule, such as the name, version, language, and whether it is recommended. diff --git a/crates/pgt_analyse/src/lib.rs b/crates/pgt_analyse/src/lib.rs index f312de45a..1d4ec6aee 100644 --- a/crates/pgt_analyse/src/lib.rs +++ b/crates/pgt_analyse/src/lib.rs @@ -1,3 +1,4 @@ +mod analysed_file_context; mod categories; pub mod context; mod filter; @@ -9,6 +10,7 @@ mod rule; // Re-exported for use in the `declare_group` macro pub use pgt_diagnostics::category_concat; +pub use crate::analysed_file_context::AnalysedFileContext; pub use crate::categories::{ ActionCategory, RefactorKind, RuleCategories, RuleCategoriesBuilder, RuleCategory, SUPPRESSION_ACTION_CATEGORY, SourceActionKind, diff --git a/crates/pgt_analyse/src/macros.rs b/crates/pgt_analyse/src/macros.rs index d9f70ed3f..aa7b25c59 100644 --- a/crates/pgt_analyse/src/macros.rs +++ b/crates/pgt_analyse/src/macros.rs @@ -24,6 +24,7 @@ macro_rules! declare_lint_rule { ( $( #[doc = $doc:literal] )+ $vis:vis $id:ident { version: $version:literal, name: $name:tt, + severity: $severity:expr_2021, $( $key:ident: $value:expr_2021, )* } ) => { @@ -32,6 +33,7 @@ macro_rules! declare_lint_rule { $vis $id { version: $version, name: $name, + severity: $severity, $( $key: $value, )* } ); @@ -53,6 +55,7 @@ macro_rules! declare_rule { ( $( #[doc = $doc:literal] )+ $vis:vis $id:ident { version: $version:literal, name: $name:tt, + severity: $severity:expr_2021, $( $key:ident: $value:expr_2021, )* } ) => { $( #[doc = $doc] )* @@ -61,7 +64,7 @@ macro_rules! 
declare_rule { impl $crate::RuleMeta for $id { type Group = super::Group; const METADATA: $crate::RuleMetadata = - $crate::RuleMetadata::new($version, $name, concat!( $( $doc, "\n", )* )) $( .$key($value) )*; + $crate::RuleMetadata::new($version, $name, concat!( $( $doc, "\n", )* ), $severity) $( .$key($value) )*; } } } diff --git a/crates/pgt_analyse/src/registry.rs b/crates/pgt_analyse/src/registry.rs index 48b73b154..45d2c2026 100644 --- a/crates/pgt_analyse/src/registry.rs +++ b/crates/pgt_analyse/src/registry.rs @@ -2,6 +2,7 @@ use std::{borrow, collections::BTreeSet}; use crate::{ AnalyserOptions, + analysed_file_context::AnalysedFileContext, context::RuleContext, filter::{AnalysisFilter, GroupKey, RuleKey}, rule::{GroupCategory, Rule, RuleDiagnostic, RuleGroup}, @@ -156,8 +157,10 @@ impl RuleRegistry { } pub struct RegistryRuleParams<'a> { - pub root: &'a pgt_query_ext::NodeEnum, + pub root: &'a pgt_query::NodeEnum, pub options: &'a AnalyserOptions, + pub analysed_file_context: &'a AnalysedFileContext, + pub schema_cache: Option<&'a pgt_schema_cache::SchemaCache>, } /// Executor for rule as a generic function pointer @@ -174,7 +177,14 @@ impl RegistryRule { R: Rule + 'static, { let options = params.options.rule_options::().unwrap_or_default(); - let ctx = RuleContext::new(params.root, &options); + + let ctx = RuleContext::new( + params.root, + &options, + params.schema_cache, + params.analysed_file_context, + ); + R::run(&ctx) } diff --git a/crates/pgt_analyse/src/rule.rs b/crates/pgt_analyse/src/rule.rs index f135705e4..1760ce971 100644 --- a/crates/pgt_analyse/src/rule.rs +++ b/crates/pgt_analyse/src/rule.rs @@ -3,7 +3,7 @@ use pgt_console::{MarkupBuf, markup}; use pgt_diagnostics::advice::CodeSuggestionAdvice; use pgt_diagnostics::{ Advices, Category, Diagnostic, DiagnosticTags, Location, LogCategory, MessageAndDescription, - Visit, + Severity, Visit, }; use pgt_text_size::TextRange; use std::cmp::Ordering; @@ -31,10 +31,17 @@ pub struct RuleMetadata { 
pub recommended: bool, /// The source URL of the rule pub sources: &'static [RuleSource], + /// The default severity of the rule + pub severity: Severity, } impl RuleMetadata { - pub const fn new(version: &'static str, name: &'static str, docs: &'static str) -> Self { + pub const fn new( + version: &'static str, + name: &'static str, + docs: &'static str, + severity: Severity, + ) -> Self { Self { deprecated: None, version, @@ -42,6 +49,7 @@ impl RuleMetadata { docs, sources: &[], recommended: false, + severity, } } @@ -94,7 +102,8 @@ pub trait GroupCategory { pub trait Rule: RuleMeta + Sized { type Options: Default + Clone + Debug; - fn run(ctx: &RuleContext) -> Vec; + /// `schema_cache` will only be available if the user has a working database connection. + fn run(rule_context: &RuleContext) -> Vec; } /// Diagnostic object returned by a single analysis rule @@ -200,6 +209,12 @@ impl RuleDiagnostic { self } + /// Sets the span of this diagnostic. + pub fn span(mut self, span: TextRange) -> Self { + self.span = Some(span); + self + } + /// Marks this diagnostic as unnecessary code, which will /// be displayed in the language server. /// diff --git a/crates/pgt_analyser/CONTRIBUTING.md b/crates/pgt_analyser/CONTRIBUTING.md index 50327d5ec..b0929eda9 100644 --- a/crates/pgt_analyser/CONTRIBUTING.md +++ b/crates/pgt_analyser/CONTRIBUTING.md @@ -54,9 +54,11 @@ Let's say we want to create a new **lint** rule called `useMyRuleName`, follow t 1. Run the command ```shell - just new-lintrule safety useMyRuleName + just new-lintrule safety useMyRuleName () ``` + Where severity is optional but can be "info", "warn", or "error" (default). + The script will generate a bunch of files inside the `pgt_analyser` crate. Among the other files, you'll find a file called `use_my_new_rule_name.rs` inside the `pgt_analyser/lib/src/lint/safety` folder. You'll implement your rule in this file. @@ -187,6 +189,7 @@ declare_lint_rule! 
{ pub(crate) ExampleRule { version: "next", name: "myRuleName", + severity: Severity::Error, recommended: false, } } @@ -206,6 +209,7 @@ declare_lint_rule! { pub(crate) ExampleRule { version: "next", name: "myRuleName", + severity: Severity::Error, recommended: false, sources: &[RuleSource::Squawk("ban-drop-column")], } @@ -228,6 +232,7 @@ declare_lint_rule! { pub(crate) ExampleRule { version: "next", name: "myRuleName", + severity: Severity::Error, recommended: false, } } @@ -280,6 +285,7 @@ declare_lint_rule! { version: "next", name: "banDropColumn", recommended: true, + severity: Severity::Error, sources: &[RuleSource::Squawk("ban-drop-column")], } } @@ -351,6 +357,7 @@ declare_lint_rule! { version: "next", name: "banDropColumn", recommended: true, + severity: Severity::Error, deprecated: true, sources: &[RuleSource::Squawk("ban-drop-column")], } diff --git a/crates/pgt_analyser/Cargo.toml b/crates/pgt_analyser/Cargo.toml index bd51c36a0..0cf7a3342 100644 --- a/crates/pgt_analyser/Cargo.toml +++ b/crates/pgt_analyser/Cargo.toml @@ -12,10 +12,13 @@ repository.workspace = true version = "0.0.0" [dependencies] -pgt_analyse = { workspace = true } -pgt_console = { workspace = true } -pgt_query_ext = { workspace = true } -serde = { workspace = true } +pgt_analyse = { workspace = true } +pgt_console = { workspace = true } +pgt_diagnostics = { workspace = true } +pgt_query = { workspace = true } +pgt_schema_cache = { workspace = true } +pgt_text_size = { workspace = true } +serde = { workspace = true } [dev-dependencies] insta = { version = "1.42.1" } diff --git a/crates/pgt_analyser/src/lib.rs b/crates/pgt_analyser/src/lib.rs index 248fe22be..ccdc04208 100644 --- a/crates/pgt_analyser/src/lib.rs +++ b/crates/pgt_analyser/src/lib.rs @@ -1,8 +1,8 @@ use std::{ops::Deref, sync::LazyLock}; use pgt_analyse::{ - AnalyserOptions, AnalysisFilter, MetadataRegistry, RegistryRuleParams, RuleDiagnostic, - RuleRegistry, + AnalysedFileContext, AnalyserOptions, AnalysisFilter, 
MetadataRegistry, RegistryRuleParams, + RuleDiagnostic, RuleRegistry, }; pub use registry::visit_registry; @@ -30,8 +30,15 @@ pub struct Analyser<'a> { registry: RuleRegistry, } -pub struct AnalyserContext<'a> { - pub root: &'a pgt_query_ext::NodeEnum, +#[derive(Debug)] +pub struct AnalysableStatement { + pub root: pgt_query::NodeEnum, + pub range: pgt_text_size::TextRange, +} + +pub struct AnalyserParams<'a> { + pub stmts: Vec, + pub schema_cache: Option<&'a pgt_schema_cache::SchemaCache>, } pub struct AnalyserConfig<'a> { @@ -52,17 +59,31 @@ impl<'a> Analyser<'a> { } } - pub fn run(&self, ctx: AnalyserContext) -> Vec { - let params = RegistryRuleParams { - root: ctx.root, - options: self.options, - }; + pub fn run(&self, params: AnalyserParams) -> Vec { + let mut diagnostics = vec![]; + + let mut file_context = AnalysedFileContext::default(); + + for stmt in params.stmts { + let rule_params = RegistryRuleParams { + root: &stmt.root, + options: self.options, + analysed_file_context: &file_context, + schema_cache: params.schema_cache, + }; - self.registry - .rules - .iter() - .flat_map(|rule| (rule.run)(¶ms)) - .collect::>() + diagnostics.extend( + self.registry + .rules + .iter() + .flat_map(|rule| (rule.run)(&rule_params)) + .map(|r| r.span(stmt.range)), + ); + + file_context.update_from(&stmt.root); + } + + diagnostics } } @@ -77,9 +98,10 @@ mod tests { markup, }; use pgt_diagnostics::PrintDiagnostic; + use pgt_text_size::TextRange; use termcolor::NoColor; - use crate::Analyser; + use crate::{AnalysableStatement, Analyser}; #[ignore] #[test] @@ -101,7 +123,8 @@ mod tests { ..Default::default() }; - let ast = pgt_query_ext::parse(SQL).expect("failed to parse SQL"); + let ast = pgt_query::parse(SQL).expect("failed to parse SQL"); + let range = TextRange::new(0.into(), u32::try_from(SQL.len()).unwrap().into()); let options = AnalyserOptions::default(); @@ -110,7 +133,13 @@ mod tests { filter, }); - let results = analyser.run(crate::AnalyserContext { root: &ast }); 
+ let results = analyser.run(crate::AnalyserParams { + stmts: vec![AnalysableStatement { + root: ast.into_root().unwrap(), + range, + }], + schema_cache: None, + }); println!("*******************"); for result in &results { diff --git a/crates/pgt_analyser/src/lint/safety.rs b/crates/pgt_analyser/src/lint/safety.rs index 920326c23..a2b72fceb 100644 --- a/crates/pgt_analyser/src/lint/safety.rs +++ b/crates/pgt_analyser/src/lint/safety.rs @@ -3,6 +3,8 @@ use pgt_analyse::declare_lint_group; pub mod adding_required_field; pub mod ban_drop_column; +pub mod ban_drop_database; pub mod ban_drop_not_null; pub mod ban_drop_table; -declare_lint_group! { pub Safety { name : "safety" , rules : [self :: adding_required_field :: AddingRequiredField , self :: ban_drop_column :: BanDropColumn , self :: ban_drop_not_null :: BanDropNotNull , self :: ban_drop_table :: BanDropTable ,] } } +pub mod ban_truncate_cascade; +declare_lint_group! { pub Safety { name : "safety" , rules : [self :: adding_required_field :: AddingRequiredField , self :: ban_drop_column :: BanDropColumn , self :: ban_drop_database :: BanDropDatabase , self :: ban_drop_not_null :: BanDropNotNull , self :: ban_drop_table :: BanDropTable , self :: ban_truncate_cascade :: BanTruncateCascade ,] } } diff --git a/crates/pgt_analyser/src/lint/safety/adding_required_field.rs b/crates/pgt_analyser/src/lint/safety/adding_required_field.rs index d4f72a7ff..d853d30a2 100644 --- a/crates/pgt_analyser/src/lint/safety/adding_required_field.rs +++ b/crates/pgt_analyser/src/lint/safety/adding_required_field.rs @@ -1,5 +1,6 @@ use pgt_analyse::{Rule, RuleDiagnostic, RuleSource, context::RuleContext, declare_lint_rule}; use pgt_console::markup; +use pgt_diagnostics::Severity; declare_lint_rule! { /// Adding a new column that is NOT NULL and has no default value to an existing table effectively makes it required. @@ -17,6 +18,7 @@ declare_lint_rule! 
{ pub AddingRequiredField { version: "next", name: "addingRequiredField", + severity: Severity::Error, recommended: false, sources: &[RuleSource::Squawk("adding-required-field")], } @@ -28,7 +30,7 @@ impl Rule for AddingRequiredField { fn run(ctx: &RuleContext) -> Vec { let mut diagnostics = vec![]; - if let pgt_query_ext::NodeEnum::AlterTableStmt(stmt) = ctx.stmt() { + if let pgt_query::NodeEnum::AlterTableStmt(stmt) = ctx.stmt() { // We are currently lacking a way to check if a `AtAddColumn` subtype sets a // not null constraint – so we'll need to check the plain SQL. let plain_sql = ctx.stmt().to_ref().deparse().unwrap().to_ascii_lowercase(); @@ -39,9 +41,8 @@ impl Rule for AddingRequiredField { } for cmd in &stmt.cmds { - if let Some(pgt_query_ext::NodeEnum::AlterTableCmd(alter_table_cmd)) = &cmd.node { - if alter_table_cmd.subtype() - == pgt_query_ext::protobuf::AlterTableType::AtAddColumn + if let Some(pgt_query::NodeEnum::AlterTableCmd(alter_table_cmd)) = &cmd.node { + if alter_table_cmd.subtype() == pgt_query::protobuf::AlterTableType::AtAddColumn { diagnostics.push( RuleDiagnostic::new( diff --git a/crates/pgt_analyser/src/lint/safety/ban_drop_column.rs b/crates/pgt_analyser/src/lint/safety/ban_drop_column.rs index aab5d5158..d73b39d24 100644 --- a/crates/pgt_analyser/src/lint/safety/ban_drop_column.rs +++ b/crates/pgt_analyser/src/lint/safety/ban_drop_column.rs @@ -1,5 +1,6 @@ use pgt_analyse::{Rule, RuleDiagnostic, RuleSource, context::RuleContext, declare_lint_rule}; use pgt_console::markup; +use pgt_diagnostics::Severity; declare_lint_rule! { /// Dropping a column may break existing clients. @@ -19,6 +20,7 @@ declare_lint_rule! 
{ pub BanDropColumn { version: "next", name: "banDropColumn", + severity: Severity::Warning, recommended: true, sources: &[RuleSource::Squawk("ban-drop-column")], } @@ -30,10 +32,10 @@ impl Rule for BanDropColumn { fn run(ctx: &RuleContext) -> Vec { let mut diagnostics = Vec::new(); - if let pgt_query_ext::NodeEnum::AlterTableStmt(stmt) = &ctx.stmt() { + if let pgt_query::NodeEnum::AlterTableStmt(stmt) = &ctx.stmt() { for cmd in &stmt.cmds { - if let Some(pgt_query_ext::NodeEnum::AlterTableCmd(cmd)) = &cmd.node { - if cmd.subtype() == pgt_query_ext::protobuf::AlterTableType::AtDropColumn { + if let Some(pgt_query::NodeEnum::AlterTableCmd(cmd)) = &cmd.node { + if cmd.subtype() == pgt_query::protobuf::AlterTableType::AtDropColumn { diagnostics.push(RuleDiagnostic::new( rule_category!(), None, diff --git a/crates/pgt_analyser/src/lint/safety/ban_drop_database.rs b/crates/pgt_analyser/src/lint/safety/ban_drop_database.rs new file mode 100644 index 000000000..3011cf88d --- /dev/null +++ b/crates/pgt_analyser/src/lint/safety/ban_drop_database.rs @@ -0,0 +1,39 @@ +use pgt_analyse::{Rule, RuleDiagnostic, RuleSource, context::RuleContext, declare_lint_rule}; +use pgt_console::markup; +use pgt_diagnostics::Severity; + +declare_lint_rule! { + /// Dropping a database may break existing clients (and everything else, really). + /// + /// Make sure that you really want to drop it. + pub BanDropDatabase { + version: "next", + name: "banDropDatabase", + severity: Severity::Warning, + recommended: false, + sources: &[RuleSource::Squawk("ban-drop-database")], + } +} + +impl Rule for BanDropDatabase { + type Options = (); + + fn run(ctx: &RuleContext) -> Vec { + let mut diagnostics = vec![]; + + if let pgt_query::NodeEnum::DropdbStmt(_) = &ctx.stmt() { + diagnostics.push( + RuleDiagnostic::new( + rule_category!(), + None, + markup! { + "Dropping a database may break existing clients." 
+ }, + ) + .detail(None, "You probably don't want to drop your database."), + ); + } + + diagnostics + } +} diff --git a/crates/pgt_analyser/src/lint/safety/ban_drop_not_null.rs b/crates/pgt_analyser/src/lint/safety/ban_drop_not_null.rs index eb17f6943..c1e694618 100644 --- a/crates/pgt_analyser/src/lint/safety/ban_drop_not_null.rs +++ b/crates/pgt_analyser/src/lint/safety/ban_drop_not_null.rs @@ -1,5 +1,6 @@ use pgt_analyse::{Rule, RuleDiagnostic, RuleSource, context::RuleContext, declare_lint_rule}; use pgt_console::markup; +use pgt_diagnostics::Severity; declare_lint_rule! { /// Dropping a NOT NULL constraint may break existing clients. @@ -18,6 +19,7 @@ declare_lint_rule! { pub BanDropNotNull { version: "next", name: "banDropNotNull", + severity: Severity::Warning, recommended: true, sources: &[RuleSource::Squawk("ban-drop-not-null")], @@ -30,10 +32,10 @@ impl Rule for BanDropNotNull { fn run(ctx: &RuleContext) -> Vec { let mut diagnostics = Vec::new(); - if let pgt_query_ext::NodeEnum::AlterTableStmt(stmt) = &ctx.stmt() { + if let pgt_query::NodeEnum::AlterTableStmt(stmt) = &ctx.stmt() { for cmd in &stmt.cmds { - if let Some(pgt_query_ext::NodeEnum::AlterTableCmd(cmd)) = &cmd.node { - if cmd.subtype() == pgt_query_ext::protobuf::AlterTableType::AtDropNotNull { + if let Some(pgt_query::NodeEnum::AlterTableCmd(cmd)) = &cmd.node { + if cmd.subtype() == pgt_query::protobuf::AlterTableType::AtDropNotNull { diagnostics.push(RuleDiagnostic::new( rule_category!(), None, diff --git a/crates/pgt_analyser/src/lint/safety/ban_drop_table.rs b/crates/pgt_analyser/src/lint/safety/ban_drop_table.rs index 4ce00a60a..bcf784533 100644 --- a/crates/pgt_analyser/src/lint/safety/ban_drop_table.rs +++ b/crates/pgt_analyser/src/lint/safety/ban_drop_table.rs @@ -1,5 +1,6 @@ use pgt_analyse::{Rule, RuleDiagnostic, RuleSource, context::RuleContext, declare_lint_rule}; use pgt_console::markup; +use pgt_diagnostics::Severity; declare_lint_rule! 
{ /// Dropping a table may break existing clients. @@ -18,6 +19,7 @@ declare_lint_rule! { pub BanDropTable { version: "next", name: "banDropTable", + severity: Severity::Warning, recommended: true, sources: &[RuleSource::Squawk("ban-drop-table")], } @@ -29,8 +31,8 @@ impl Rule for BanDropTable { fn run(ctx: &RuleContext) -> Vec { let mut diagnostics = vec![]; - if let pgt_query_ext::NodeEnum::DropStmt(stmt) = &ctx.stmt() { - if stmt.remove_type() == pgt_query_ext::protobuf::ObjectType::ObjectTable { + if let pgt_query::NodeEnum::DropStmt(stmt) = &ctx.stmt() { + if stmt.remove_type() == pgt_query::protobuf::ObjectType::ObjectTable { diagnostics.push( RuleDiagnostic::new( rule_category!(), diff --git a/crates/pgt_analyser/src/lint/safety/ban_truncate_cascade.rs b/crates/pgt_analyser/src/lint/safety/ban_truncate_cascade.rs new file mode 100644 index 000000000..1bc42d49b --- /dev/null +++ b/crates/pgt_analyser/src/lint/safety/ban_truncate_cascade.rs @@ -0,0 +1,51 @@ +use pgt_analyse::{Rule, RuleDiagnostic, RuleSource, context::RuleContext, declare_lint_rule}; +use pgt_console::markup; +use pgt_diagnostics::Severity; +use pgt_query::protobuf::DropBehavior; + +declare_lint_rule! { + /// Using `TRUNCATE`'s `CASCADE` option will truncate any tables that are also foreign-keyed to the specified tables. + /// + /// So if you had tables with foreign-keys like: + /// + /// `a <- b <- c` + /// + /// and ran: + /// + /// `truncate a cascade;` + /// + /// You'd end up with a, b, & c all being truncated! + /// + /// Instead, you can manually specify the tables you want. 
+ /// + /// `truncate a, b;` + pub BanTruncateCascade { + version: "next", + name: "banTruncateCascade", + severity: Severity::Error, + recommended: false, + sources: &[RuleSource::Squawk("ban-truncate-cascade")], + } +} + +impl Rule for BanTruncateCascade { + type Options = (); + + fn run(ctx: &RuleContext) -> Vec { + let mut diagnostics = Vec::new(); + + if let pgt_query::NodeEnum::TruncateStmt(stmt) = &ctx.stmt() { + if stmt.behavior() == DropBehavior::DropCascade { + diagnostics.push(RuleDiagnostic::new( + rule_category!(), + None, + markup! { + "The `CASCADE` option will also truncate any tables that are foreign-keyed to the specified tables." + }, + ).detail(None, "Do not use the `CASCADE` option. Instead, specify manually what you want: `TRUNCATE a, b;`.")); + } + } + + diagnostics + } +} diff --git a/crates/pgt_analyser/src/options.rs b/crates/pgt_analyser/src/options.rs index d78020f81..d893b84f4 100644 --- a/crates/pgt_analyser/src/options.rs +++ b/crates/pgt_analyser/src/options.rs @@ -5,6 +5,10 @@ pub type AddingRequiredField = ::Options; pub type BanDropColumn = ::Options; +pub type BanDropDatabase = + ::Options; pub type BanDropNotNull = ::Options; pub type BanDropTable = ::Options; +pub type BanTruncateCascade = + ::Options; diff --git a/crates/pgt_analyser/tests/rules_tests.rs b/crates/pgt_analyser/tests/rules_tests.rs index 247c02b0d..d8e5b0ef1 100644 --- a/crates/pgt_analyser/tests/rules_tests.rs +++ b/crates/pgt_analyser/tests/rules_tests.rs @@ -2,7 +2,7 @@ use core::slice; use std::{fmt::Write, fs::read_to_string, path::Path}; use pgt_analyse::{AnalyserOptions, AnalysisFilter, RuleDiagnostic, RuleFilter}; -use pgt_analyser::{Analyser, AnalyserConfig, AnalyserContext}; +use pgt_analyser::{AnalysableStatement, Analyser, AnalyserConfig, AnalyserParams}; use pgt_console::StdDisplay; use pgt_diagnostics::PrintDiagnostic; @@ -25,14 +25,22 @@ fn rule_test(full_path: &'static str, _: &str, _: &str) { let query = 
read_to_string(full_path).unwrap_or_else(|_| panic!("Failed to read file: {} ", full_path)); - let ast = pgt_query_ext::parse(&query).expect("failed to parse SQL"); + let ast = pgt_query::parse(&query).expect("failed to parse SQL"); let options = AnalyserOptions::default(); let analyser = Analyser::new(AnalyserConfig { options: &options, filter, }); - let results = analyser.run(AnalyserContext { root: &ast }); + let stmt = AnalysableStatement { + root: ast.into_root().expect("Failed to convert AST to root node"), + range: pgt_text_size::TextRange::new(0.into(), u32::try_from(query.len()).unwrap().into()), + }; + + let results = analyser.run(AnalyserParams { + stmts: vec![stmt], + schema_cache: None, + }); let mut snapshot = String::new(); write_snapshot(&mut snapshot, query.as_str(), results.as_slice()); diff --git a/crates/pgt_analyser/tests/specs/safety/banDropDatabase/basic.sql b/crates/pgt_analyser/tests/specs/safety/banDropDatabase/basic.sql new file mode 100644 index 000000000..0dc016524 --- /dev/null +++ b/crates/pgt_analyser/tests/specs/safety/banDropDatabase/basic.sql @@ -0,0 +1,2 @@ +-- expect_only_lint/safety/banDropDatabase +drop database all_users; \ No newline at end of file diff --git a/crates/pgt_analyser/tests/specs/safety/banDropDatabase/basic.sql.snap b/crates/pgt_analyser/tests/specs/safety/banDropDatabase/basic.sql.snap new file mode 100644 index 000000000..90e35820c --- /dev/null +++ b/crates/pgt_analyser/tests/specs/safety/banDropDatabase/basic.sql.snap @@ -0,0 +1,16 @@ +--- +source: crates/pgt_analyser/tests/rules_tests.rs +expression: snapshot +--- +# Input +``` +-- expect_only_lint/safety/banDropDatabase +drop database all_users; +``` + +# Diagnostics +lint/safety/banDropDatabase ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ + + × Dropping a database may break existing clients. + + i You probably don't want to drop your database. 
diff --git a/crates/pgt_analyser/tests/specs/safety/banTruncateCascade/basic.sql b/crates/pgt_analyser/tests/specs/safety/banTruncateCascade/basic.sql new file mode 100644 index 000000000..d17fed13b --- /dev/null +++ b/crates/pgt_analyser/tests/specs/safety/banTruncateCascade/basic.sql @@ -0,0 +1,2 @@ +-- expect_only_lint/safety/banTruncateCascade +truncate a cascade; \ No newline at end of file diff --git a/crates/pgt_analyser/tests/specs/safety/banTruncateCascade/basic.sql.snap b/crates/pgt_analyser/tests/specs/safety/banTruncateCascade/basic.sql.snap new file mode 100644 index 000000000..d214b978a --- /dev/null +++ b/crates/pgt_analyser/tests/specs/safety/banTruncateCascade/basic.sql.snap @@ -0,0 +1,16 @@ +--- +source: crates/pgt_analyser/tests/rules_tests.rs +expression: snapshot +--- +# Input +``` +-- expect_only_lint/safety/banTruncateCascade +truncate a cascade; +``` + +# Diagnostics +lint/safety/banTruncateCascade ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ + + × The `CASCADE` option will also truncate any tables that are foreign-keyed to the specified tables. + + i Do not use the `CASCADE` option. Instead, specify manually what you want: `TRUNCATE a, b;`. 
diff --git a/crates/pgt_cli/src/commands/mod.rs b/crates/pgt_cli/src/commands/mod.rs index ebd16e3d3..19dc56ade 100644 --- a/crates/pgt_cli/src/commands/mod.rs +++ b/crates/pgt_cli/src/commands/mod.rs @@ -9,9 +9,9 @@ use bpaf::Bpaf; use pgt_configuration::{PartialConfiguration, partial_configuration}; use pgt_console::Console; use pgt_fs::FileSystem; +use pgt_workspace::PartialConfigurationExt; use pgt_workspace::configuration::{LoadedConfiguration, load_configuration}; -use pgt_workspace::settings::PartialConfigurationExt; -use pgt_workspace::workspace::UpdateSettingsParams; +use pgt_workspace::workspace::{RegisterProjectFolderParams, UpdateSettingsParams}; use pgt_workspace::{DynRef, Workspace, WorkspaceError}; use std::ffi::OsString; use std::path::PathBuf; @@ -301,6 +301,10 @@ pub(crate) trait CommandRunner: Sized { let (vcs_base_path, gitignore_matches) = configuration.retrieve_gitignore_matches(fs, vcs_base_path.as_deref())?; let paths = self.get_files_to_process(fs, &configuration)?; + workspace.register_project_folder(RegisterProjectFolderParams { + path: fs.working_directory(), + set_as_current_workspace: true, + })?; workspace.update_settings(UpdateSettingsParams { workspace_directory: fs.working_directory(), diff --git a/crates/pgt_cli/src/diagnostics.rs b/crates/pgt_cli/src/diagnostics.rs index d24d02e94..20d32113c 100644 --- a/crates/pgt_cli/src/diagnostics.rs +++ b/crates/pgt_cli/src/diagnostics.rs @@ -455,7 +455,7 @@ mod test { fn termination_diagnostic_size() { assert_eq!( std::mem::size_of::(), - 80, + 96, "you successfully decreased the size of the diagnostic!" 
) } diff --git a/crates/pgt_cli/src/execute/process_file/workspace_file.rs b/crates/pgt_cli/src/execute/process_file/workspace_file.rs index 790176b90..9f78c7cf1 100644 --- a/crates/pgt_cli/src/execute/process_file/workspace_file.rs +++ b/crates/pgt_cli/src/execute/process_file/workspace_file.rs @@ -2,13 +2,14 @@ use crate::execute::diagnostics::{ResultExt, ResultIoExt}; use crate::execute::process_file::SharedTraversalOptions; use pgt_diagnostics::{Error, category}; use pgt_fs::{File, OpenOptions, PgTPath}; -use pgt_workspace::workspace::{ChangeParams, FileGuard, OpenFileParams}; +use pgt_workspace::workspace::{FileGuard, OpenFileParams}; use pgt_workspace::{Workspace, WorkspaceError}; use std::path::{Path, PathBuf}; /// Small wrapper that holds information and operations around the current processed file pub(crate) struct WorkspaceFile<'ctx, 'app> { guard: FileGuard<'app, dyn Workspace + 'ctx>, + #[allow(dead_code)] file: Box, pub(crate) path: PathBuf, } @@ -57,19 +58,4 @@ impl<'ctx, 'app> WorkspaceFile<'ctx, 'app> { pub(crate) fn input(&self) -> Result { self.guard().get_file_content() } - - /// It updates the workspace file with `new_content` - #[allow(dead_code)] - pub(crate) fn update_file(&mut self, new_content: impl Into) -> Result<(), Error> { - let new_content = new_content.into(); - - self.file - .set_content(new_content.as_bytes()) - .with_file_path(self.path.display().to_string())?; - self.guard.change_file( - self.file.file_version(), - vec![ChangeParams::overwrite(new_content)], - )?; - Ok(()) - } } diff --git a/crates/pgt_completions/Cargo.toml b/crates/pgt_completions/Cargo.toml index 916a00209..0ebb8e56e 100644 --- a/crates/pgt_completions/Cargo.toml +++ b/crates/pgt_completions/Cargo.toml @@ -14,18 +14,17 @@ version = "0.0.0" [dependencies] async-std = "1.12.0" -pgt_text_size.workspace = true - - -fuzzy-matcher = "0.3.7" -pgt_schema_cache.workspace = true -pgt_treesitter_queries.workspace = true -schemars = { workspace = true, optional = true } 
-serde = { workspace = true, features = ["derive"] } -serde_json = { workspace = true } -tracing = { workspace = true } -tree-sitter.workspace = true -tree_sitter_sql.workspace = true +pgt_schema_cache.workspace = true +pgt_text_size.workspace = true +pgt_treesitter.workspace = true + +fuzzy-matcher = "0.3.7" +schemars = { workspace = true, optional = true } +serde = { workspace = true, features = ["derive"] } +serde_json = { workspace = true } +tracing = { workspace = true } +tree-sitter.workspace = true +tree_sitter_sql.workspace = true sqlx.workspace = true diff --git a/crates/pgt_completions/src/builder.rs b/crates/pgt_completions/src/builder.rs index 96576053f..bf8eb66a6 100644 --- a/crates/pgt_completions/src/builder.rs +++ b/crates/pgt_completions/src/builder.rs @@ -1,10 +1,11 @@ use crate::{ CompletionItemKind, CompletionText, - context::CompletionContext, item::CompletionItem, relevance::{filtering::CompletionFilter, scoring::CompletionScore}, }; +use pgt_treesitter::TreesitterContext; + pub(crate) struct PossibleCompletionItem<'a> { pub label: String, pub description: String, @@ -17,11 +18,11 @@ pub(crate) struct PossibleCompletionItem<'a> { pub(crate) struct CompletionBuilder<'a> { items: Vec>, - ctx: &'a CompletionContext<'a>, + ctx: &'a TreesitterContext<'a>, } impl<'a> CompletionBuilder<'a> { - pub fn new(ctx: &'a CompletionContext) -> Self { + pub fn new(ctx: &'a TreesitterContext) -> Self { CompletionBuilder { items: vec![], ctx } } diff --git a/crates/pgt_completions/src/complete.rs b/crates/pgt_completions/src/complete.rs index 5bc5d41ce..e18589af0 100644 --- a/crates/pgt_completions/src/complete.rs +++ b/crates/pgt_completions/src/complete.rs @@ -1,11 +1,13 @@ use pgt_text_size::TextSize; +use pgt_treesitter::{TreeSitterContextParams, context::TreesitterContext}; + use crate::{ builder::CompletionBuilder, - context::CompletionContext, item::CompletionItem, providers::{ - complete_columns, complete_functions, complete_policies, complete_schemas, 
complete_tables, + complete_columns, complete_functions, complete_policies, complete_roles, complete_schemas, + complete_tables, }, sanitization::SanitizedCompletionParams, }; @@ -27,15 +29,20 @@ pub struct CompletionParams<'a> { pub fn complete(params: CompletionParams) -> Vec { let sanitized_params = SanitizedCompletionParams::from(params); - let ctx = CompletionContext::new(&sanitized_params); + let ctx = TreesitterContext::new(TreeSitterContextParams { + position: sanitized_params.position, + text: &sanitized_params.text, + tree: &sanitized_params.tree, + }); let mut builder = CompletionBuilder::new(&ctx); - complete_tables(&ctx, &mut builder); - complete_functions(&ctx, &mut builder); - complete_columns(&ctx, &mut builder); - complete_schemas(&ctx, &mut builder); - complete_policies(&ctx, &mut builder); + complete_tables(&ctx, sanitized_params.schema, &mut builder); + complete_functions(&ctx, sanitized_params.schema, &mut builder); + complete_columns(&ctx, sanitized_params.schema, &mut builder); + complete_schemas(&ctx, sanitized_params.schema, &mut builder); + complete_policies(&ctx, sanitized_params.schema, &mut builder); + complete_roles(&ctx, sanitized_params.schema, &mut builder); builder.finish() } diff --git a/crates/pgt_completions/src/item.rs b/crates/pgt_completions/src/item.rs index 73e08cc02..766e436c5 100644 --- a/crates/pgt_completions/src/item.rs +++ b/crates/pgt_completions/src/item.rs @@ -12,6 +12,7 @@ pub enum CompletionItemKind { Column, Schema, Policy, + Role, } impl Display for CompletionItemKind { @@ -22,6 +23,7 @@ impl Display for CompletionItemKind { CompletionItemKind::Column => "Column", CompletionItemKind::Schema => "Schema", CompletionItemKind::Policy => "Policy", + CompletionItemKind::Role => "Role", }; write!(f, "{txt}") diff --git a/crates/pgt_completions/src/lib.rs b/crates/pgt_completions/src/lib.rs index f8ca1a550..c4e592eef 100644 --- a/crates/pgt_completions/src/lib.rs +++ b/crates/pgt_completions/src/lib.rs @@ -1,6 +1,5 @@ 
mod builder; mod complete; -mod context; mod item; mod providers; mod relevance; diff --git a/crates/pgt_completions/src/providers/columns.rs b/crates/pgt_completions/src/providers/columns.rs index d4767f14f..ba3b24813 100644 --- a/crates/pgt_completions/src/providers/columns.rs +++ b/crates/pgt_completions/src/providers/columns.rs @@ -1,14 +1,20 @@ +use pgt_schema_cache::SchemaCache; +use pgt_treesitter::{TreesitterContext, WrappingClause}; + use crate::{ CompletionItemKind, builder::{CompletionBuilder, PossibleCompletionItem}, - context::{CompletionContext, WrappingClause}, relevance::{CompletionRelevanceData, filtering::CompletionFilter, scoring::CompletionScore}, }; use super::helper::{find_matching_alias_for_table, get_completion_text_with_schema_or_alias}; -pub fn complete_columns<'a>(ctx: &CompletionContext<'a>, builder: &mut CompletionBuilder<'a>) { - let available_columns = &ctx.schema_cache.columns; +pub fn complete_columns<'a>( + ctx: &TreesitterContext<'a>, + schema_cache: &'a SchemaCache, + builder: &mut CompletionBuilder<'a>, +) { + let available_columns = &schema_cache.columns; for col in available_columns { let relevance = CompletionRelevanceData::Column(col); @@ -17,7 +23,7 @@ pub fn complete_columns<'a>(ctx: &CompletionContext<'a>, builder: &mut Completio label: col.name.clone(), score: CompletionScore::from(relevance.clone()), filter: CompletionFilter::from(relevance), - description: format!("Table: {}.{}", col.schema_name, col.table_name), + description: format!("{}.{}", col.schema_name, col.table_name), kind: CompletionItemKind::Column, completion_text: None, detail: col.type_name.as_ref().map(|t| t.to_string()), @@ -44,14 +50,18 @@ pub fn complete_columns<'a>(ctx: &CompletionContext<'a>, builder: &mut Completio mod tests { use std::vec; + use sqlx::{Executor, PgPool}; + use crate::{ CompletionItem, CompletionItemKind, complete, test_helper::{ - CURSOR_POS, CompletionAssertion, InputQuery, assert_complete_results, - assert_no_complete_results, 
get_test_deps, get_test_params, + CompletionAssertion, assert_complete_results, assert_no_complete_results, + get_test_deps, get_test_params, }, }; + use pgt_test_utils::QueryWithCursorPosition; + struct TestCase { query: String, message: &'static str, @@ -60,14 +70,14 @@ mod tests { } impl TestCase { - fn get_input_query(&self) -> InputQuery { + fn get_input_query(&self) -> QueryWithCursorPosition { let strs: Vec<&str> = self.query.split_whitespace().collect(); strs.join(" ").as_str().into() } } - #[tokio::test] - async fn completes_columns() { + #[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] + async fn completes_columns(pool: PgPool) { let setup = r#" create schema private; @@ -87,12 +97,17 @@ mod tests { ); "#; + pool.execute(setup).await.unwrap(); + let queries: Vec = vec![ TestCase { message: "correctly prefers the columns of present tables", - query: format!(r#"select na{} from public.audio_books;"#, CURSOR_POS), + query: format!( + r#"select na{} from public.audio_books;"#, + QueryWithCursorPosition::cursor_marker() + ), label: "narrator", - description: "Table: public.audio_books", + description: "public.audio_books", }, TestCase { message: "correctly handles nested queries", @@ -107,21 +122,24 @@ mod tests { join public.users u on u.id = subquery.id; "#, - CURSOR_POS + QueryWithCursorPosition::cursor_marker() ), label: "narrator_id", - description: "Table: private.audio_books", + description: "private.audio_books", }, TestCase { message: "works without a schema", - query: format!(r#"select na{} from users;"#, CURSOR_POS), + query: format!( + r#"select na{} from users;"#, + QueryWithCursorPosition::cursor_marker() + ), label: "name", - description: "Table: public.users", + description: "public.users", }, ]; for q in queries { - let (tree, cache) = get_test_deps(setup, q.get_input_query()).await; + let (tree, cache) = get_test_deps(None, q.get_input_query(), &pool).await; let params = get_test_params(&tree, &cache, q.get_input_query()); let results = 
complete(params); @@ -137,8 +155,8 @@ mod tests { } } - #[tokio::test] - async fn shows_multiple_columns_if_no_relation_specified() { + #[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] + async fn shows_multiple_columns_if_no_relation_specified(pool: PgPool) { let setup = r#" create schema private; @@ -158,18 +176,20 @@ mod tests { ); "#; + pool.execute(setup).await.unwrap(); + let case = TestCase { - query: format!(r#"select n{};"#, CURSOR_POS), + query: format!(r#"select n{};"#, QueryWithCursorPosition::cursor_marker()), description: "", label: "", message: "", }; - let (tree, cache) = get_test_deps(setup, case.get_input_query()).await; + let (tree, cache) = get_test_deps(None, case.get_input_query(), &pool).await; let params = get_test_params(&tree, &cache, case.get_input_query()); let mut items = complete(params); - let _ = items.split_off(6); + let _ = items.split_off(4); #[derive(Eq, PartialEq, Debug)] struct LabelAndDesc { @@ -186,12 +206,10 @@ mod tests { .collect(); let expected = vec![ - ("name", "Table: public.users"), - ("narrator", "Table: public.audio_books"), - ("narrator_id", "Table: private.audio_books"), - ("id", "Table: public.audio_books"), - ("name", "Schema: pg_catalog"), - ("nameconcatoid", "Schema: pg_catalog"), + ("name", "public.users"), + ("narrator", "public.audio_books"), + ("narrator_id", "private.audio_books"), + ("id", "public.audio_books"), ] .into_iter() .map(|(label, schema)| LabelAndDesc { @@ -203,8 +221,8 @@ mod tests { assert_eq!(labels, expected); } - #[tokio::test] - async fn suggests_relevant_columns_without_letters() { + #[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] + async fn suggests_relevant_columns_without_letters(pool: PgPool) { let setup = r#" create table users ( id serial primary key, @@ -216,12 +234,15 @@ mod tests { let test_case = TestCase { message: "suggests user created tables first", - query: format!(r#"select {} from users"#, CURSOR_POS), + query: format!( + r#"select {} from users"#, + 
QueryWithCursorPosition::cursor_marker() + ), label: "", description: "", }; - let (tree, cache) = get_test_deps(setup, test_case.get_input_query()).await; + let (tree, cache) = get_test_deps(Some(setup), test_case.get_input_query(), &pool).await; let params = get_test_params(&tree, &cache, test_case.get_input_query()); let results = complete(params); @@ -251,8 +272,8 @@ mod tests { ); } - #[tokio::test] - async fn ignores_cols_in_from_clause() { + #[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] + async fn ignores_cols_in_from_clause(pool: PgPool) { let setup = r#" create schema private; @@ -266,12 +287,15 @@ mod tests { let test_case = TestCase { message: "suggests user created tables first", - query: format!(r#"select * from private.{}"#, CURSOR_POS), + query: format!( + r#"select * from private.{}"#, + QueryWithCursorPosition::cursor_marker() + ), label: "", description: "", }; - let (tree, cache) = get_test_deps(setup, test_case.get_input_query()).await; + let (tree, cache) = get_test_deps(Some(setup), test_case.get_input_query(), &pool).await; let params = get_test_params(&tree, &cache, test_case.get_input_query()); let results = complete(params); @@ -282,8 +306,8 @@ mod tests { ); } - #[tokio::test] - async fn prefers_columns_of_mentioned_tables() { + #[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] + async fn prefers_columns_of_mentioned_tables(pool: PgPool) { let setup = r#" create schema private; @@ -304,41 +328,58 @@ mod tests { ); "#; + pool.execute(setup).await.unwrap(); + assert_complete_results( - format!(r#"select {} from users"#, CURSOR_POS).as_str(), + format!( + r#"select {} from users"#, + QueryWithCursorPosition::cursor_marker() + ) + .as_str(), vec![ CompletionAssertion::Label("address2".into()), CompletionAssertion::Label("email2".into()), CompletionAssertion::Label("id2".into()), CompletionAssertion::Label("name2".into()), ], - setup, + None, + &pool, ) .await; assert_complete_results( - format!(r#"select {} from private.users"#, 
CURSOR_POS).as_str(), + format!( + r#"select {} from private.users"#, + QueryWithCursorPosition::cursor_marker() + ) + .as_str(), vec![ CompletionAssertion::Label("address1".into()), CompletionAssertion::Label("email1".into()), CompletionAssertion::Label("id1".into()), CompletionAssertion::Label("name1".into()), ], - setup, + None, + &pool, ) .await; // asserts fuzzy finding for "settings" assert_complete_results( - format!(r#"select sett{} from private.users"#, CURSOR_POS).as_str(), + format!( + r#"select sett{} from private.users"#, + QueryWithCursorPosition::cursor_marker() + ) + .as_str(), vec![CompletionAssertion::Label("user_settings".into())], - setup, + None, + &pool, ) .await; } - #[tokio::test] - async fn filters_out_by_aliases() { + #[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] + async fn filters_out_by_aliases(pool: PgPool) { let setup = r#" create schema auth; @@ -357,11 +398,13 @@ mod tests { ); "#; + pool.execute(setup).await.unwrap(); + // test in SELECT clause assert_complete_results( format!( "select u.id, p.{} from auth.users u join auth.posts p on u.id = p.user_id;", - CURSOR_POS + QueryWithCursorPosition::cursor_marker() ) .as_str(), vec![ @@ -374,7 +417,8 @@ mod tests { CompletionAssertion::Label("title".to_string()), CompletionAssertion::Label("user_id".to_string()), ], - setup, + None, + &pool, ) .await; @@ -382,7 +426,7 @@ mod tests { assert_complete_results( format!( "select u.id, p.content from auth.users u join auth.posts p on u.id = p.{};", - CURSOR_POS + QueryWithCursorPosition::cursor_marker() ) .as_str(), vec![ @@ -396,13 +440,14 @@ mod tests { CompletionAssertion::Label("title".to_string()), CompletionAssertion::Label("user_id".to_string()), ], - setup, + None, + &pool, ) .await; } - #[tokio::test] - async fn does_not_complete_cols_in_join_clauses() { + #[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] + async fn does_not_complete_cols_in_join_clauses(pool: PgPool) { let setup = r#" create schema auth; @@ -427,7 +472,7 
@@ mod tests { assert_complete_results( format!( "select u.id, p.content from auth.users u join auth.{}", - CURSOR_POS + QueryWithCursorPosition::cursor_marker() ) .as_str(), vec![ @@ -435,13 +480,14 @@ mod tests { CompletionAssertion::LabelAndKind("posts".to_string(), CompletionItemKind::Table), CompletionAssertion::LabelAndKind("users".to_string(), CompletionItemKind::Table), ], - setup, + Some(setup), + &pool, ) .await; } - #[tokio::test] - async fn completes_in_join_on_clause() { + #[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] + async fn completes_in_join_on_clause(pool: PgPool) { let setup = r#" create schema auth; @@ -460,10 +506,12 @@ mod tests { ); "#; + pool.execute(setup).await.unwrap(); + assert_complete_results( format!( "select u.id, auth.posts.content from auth.users u join auth.posts on u.{}", - CURSOR_POS + QueryWithCursorPosition::cursor_marker() ) .as_str(), vec![ @@ -472,14 +520,15 @@ mod tests { CompletionAssertion::LabelAndKind("email".to_string(), CompletionItemKind::Column), CompletionAssertion::LabelAndKind("name".to_string(), CompletionItemKind::Column), ], - setup, + None, + &pool, ) .await; assert_complete_results( format!( "select u.id, p.content from auth.users u join auth.posts p on p.user_id = u.{}", - CURSOR_POS + QueryWithCursorPosition::cursor_marker() ) .as_str(), vec![ @@ -488,13 +537,14 @@ mod tests { CompletionAssertion::LabelAndKind("email".to_string(), CompletionItemKind::Column), CompletionAssertion::LabelAndKind("name".to_string(), CompletionItemKind::Column), ], - setup, + None, + &pool, ) .await; } - #[tokio::test] - async fn prefers_not_mentioned_columns() { + #[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] + async fn prefers_not_mentioned_columns(pool: PgPool) { let setup = r#" create schema auth; @@ -513,10 +563,12 @@ mod tests { ); "#; + pool.execute(setup).await.unwrap(); + assert_complete_results( format!( "select {} from public.one o join public.two on o.id = t.id;", - CURSOR_POS + 
QueryWithCursorPosition::cursor_marker() ) .as_str(), vec![ @@ -526,7 +578,8 @@ mod tests { CompletionAssertion::Label("d".to_string()), CompletionAssertion::Label("e".to_string()), ], - setup, + None, + &pool, ) .await; @@ -534,7 +587,7 @@ mod tests { assert_complete_results( format!( "select a, {} from public.one o join public.two on o.id = t.id;", - CURSOR_POS + QueryWithCursorPosition::cursor_marker() ) .as_str(), vec![ @@ -546,7 +599,8 @@ mod tests { CompletionAssertion::Label("z".to_string()), CompletionAssertion::Label("a".to_string()), ], - setup, + None, + &pool, ) .await; @@ -555,17 +609,15 @@ mod tests { assert_complete_results( format!( "select o.id, a, b, c, d, e, {} from public.one o join public.two on o.id = t.id;", - CURSOR_POS + QueryWithCursorPosition::cursor_marker() ) .as_str(), vec![ - CompletionAssertion::LabelAndDesc( - "id".to_string(), - "Table: public.two".to_string(), - ), + CompletionAssertion::LabelAndDesc("id".to_string(), "public.two".to_string()), CompletionAssertion::Label("z".to_string()), ], - setup, + None, + &pool, ) .await; @@ -573,17 +625,18 @@ mod tests { assert_complete_results( format!( "select id, a, b, c, d, e, {} from public.one o join public.two on o.id = t.id;", - CURSOR_POS + QueryWithCursorPosition::cursor_marker() ) .as_str(), vec![CompletionAssertion::Label("z".to_string())], - setup, + None, + &pool, ) .await; } - #[tokio::test] - async fn suggests_columns_in_insert_clause() { + #[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] + async fn suggests_columns_in_insert_clause(pool: PgPool) { let setup = r#" create table instruments ( id bigint primary key generated always as identity, @@ -598,34 +651,51 @@ mod tests { ); "#; + pool.execute(setup).await.unwrap(); + // We should prefer the instrument columns, even though they // are lower in the alphabet assert_complete_results( - format!("insert into instruments ({})", CURSOR_POS).as_str(), + format!( + "insert into instruments ({})", + 
QueryWithCursorPosition::cursor_marker() + ) + .as_str(), vec![ CompletionAssertion::Label("id".to_string()), CompletionAssertion::Label("name".to_string()), CompletionAssertion::Label("z".to_string()), ], - setup, + None, + &pool, ) .await; assert_complete_results( - format!("insert into instruments (id, {})", CURSOR_POS).as_str(), + format!( + "insert into instruments (id, {})", + QueryWithCursorPosition::cursor_marker() + ) + .as_str(), vec![ CompletionAssertion::Label("name".to_string()), CompletionAssertion::Label("z".to_string()), ], - setup, + None, + &pool, ) .await; assert_complete_results( - format!("insert into instruments (id, {}, name)", CURSOR_POS).as_str(), + format!( + "insert into instruments (id, {}, name)", + QueryWithCursorPosition::cursor_marker() + ) + .as_str(), vec![CompletionAssertion::Label("z".to_string())], - setup, + None, + &pool, ) .await; @@ -633,27 +703,33 @@ mod tests { assert_complete_results( format!( "insert into instruments (name, {}) values ('my_bass');", - CURSOR_POS + QueryWithCursorPosition::cursor_marker() ) .as_str(), vec![ CompletionAssertion::Label("id".to_string()), CompletionAssertion::Label("z".to_string()), ], - setup, + None, + &pool, ) .await; // no completions in the values list! 
assert_no_complete_results( - format!("insert into instruments (id, name) values ({})", CURSOR_POS).as_str(), - setup, + format!( + "insert into instruments (id, name) values ({})", + QueryWithCursorPosition::cursor_marker() + ) + .as_str(), + None, + &pool, ) .await; } - #[tokio::test] - async fn suggests_columns_in_where_clause() { + #[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] + async fn suggests_columns_in_where_clause(pool: PgPool) { let setup = r#" create table instruments ( id bigint primary key generated always as identity, @@ -669,22 +745,29 @@ mod tests { ); "#; + pool.execute(setup).await.unwrap(); + assert_complete_results( - format!("select name from instruments where {} ", CURSOR_POS).as_str(), + format!( + "select name from instruments where {} ", + QueryWithCursorPosition::cursor_marker() + ) + .as_str(), vec![ CompletionAssertion::Label("created_at".into()), CompletionAssertion::Label("id".into()), CompletionAssertion::Label("name".into()), CompletionAssertion::Label("z".into()), ], - setup, + None, + &pool, ) .await; assert_complete_results( format!( "select name from instruments where z = 'something' and created_at > {}", - CURSOR_POS + QueryWithCursorPosition::cursor_marker() ) .as_str(), // simply do not complete columns + schemas; functions etc. 
are ok @@ -692,7 +775,8 @@ mod tests { CompletionAssertion::KindNotExists(CompletionItemKind::Column), CompletionAssertion::KindNotExists(CompletionItemKind::Schema), ], - setup, + None, + &pool, ) .await; @@ -700,7 +784,7 @@ mod tests { assert_complete_results( format!( "select name from instruments where id = 'something' and {}", - CURSOR_POS + QueryWithCursorPosition::cursor_marker() ) .as_str(), vec![ @@ -708,7 +792,8 @@ mod tests { CompletionAssertion::Label("name".into()), CompletionAssertion::Label("z".into()), ], - setup, + None, + &pool, ) .await; @@ -716,7 +801,7 @@ mod tests { assert_complete_results( format!( "select name from instruments i join others o on i.z = o.a where i.{}", - CURSOR_POS + QueryWithCursorPosition::cursor_marker() ) .as_str(), vec![ @@ -724,8 +809,127 @@ mod tests { CompletionAssertion::Label("id".into()), CompletionAssertion::Label("name".into()), ], - setup, + None, + &pool, ) .await; } + + #[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] + async fn suggests_columns_in_alter_table_and_drop_table(pool: PgPool) { + let setup = r#" + create table instruments ( + id bigint primary key generated always as identity, + name text not null, + z text, + created_at timestamp with time zone default now() + ); + + create table others ( + a text, + b text, + c text + ); + "#; + + pool.execute(setup).await.unwrap(); + + let queries = vec![ + format!( + "alter table instruments drop column {}", + QueryWithCursorPosition::cursor_marker() + ), + format!( + "alter table instruments drop column if exists {}", + QueryWithCursorPosition::cursor_marker() + ), + format!( + "alter table instruments alter column {} set default", + QueryWithCursorPosition::cursor_marker() + ), + format!( + "alter table instruments alter {} set default", + QueryWithCursorPosition::cursor_marker() + ), + format!( + "alter table public.instruments alter column {}", + QueryWithCursorPosition::cursor_marker() + ), + format!( + "alter table instruments alter {}", + 
QueryWithCursorPosition::cursor_marker() + ), + format!( + "alter table instruments rename {} to new_col", + QueryWithCursorPosition::cursor_marker() + ), + format!( + "alter table public.instruments rename column {} to new_col", + QueryWithCursorPosition::cursor_marker() + ), + ]; + + for query in queries { + assert_complete_results( + query.as_str(), + vec![ + CompletionAssertion::Label("created_at".into()), + CompletionAssertion::Label("id".into()), + CompletionAssertion::Label("name".into()), + CompletionAssertion::Label("z".into()), + ], + None, + &pool, + ) + .await; + } + } + + #[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] + async fn suggests_columns_policy_using_clause(pool: PgPool) { + let setup = r#" + create table instruments ( + id bigint primary key generated always as identity, + name text not null, + z text, + created_at timestamp with time zone default now() + ); + "#; + + pool.execute(setup).await.unwrap(); + + let col_queries = vec![ + format!( + r#"create policy "my_pol" on public.instruments for select using ({})"#, + QueryWithCursorPosition::cursor_marker() + ), + format!( + r#"create policy "my_pol" on public.instruments for insert with check ({})"#, + QueryWithCursorPosition::cursor_marker() + ), + format!( + r#"create policy "my_pol" on public.instruments for update using (id = 1 and {})"#, + QueryWithCursorPosition::cursor_marker() + ), + format!( + r#"create policy "my_pol" on public.instruments for insert with check (id = 1 and {})"#, + QueryWithCursorPosition::cursor_marker() + ), + ]; + + for query in col_queries { + assert_complete_results( + query.as_str(), + vec![ + CompletionAssertion::Label("created_at".into()), + CompletionAssertion::Label("id".into()), + CompletionAssertion::Label("name".into()), + CompletionAssertion::Label("z".into()), + ], + None, + &pool, + ) + .await; + } + } } diff --git a/crates/pgt_completions/src/providers/functions.rs b/crates/pgt_completions/src/providers/functions.rs index 
f1b57e8c2..b2ac2fae8 100644 --- a/crates/pgt_completions/src/providers/functions.rs +++ b/crates/pgt_completions/src/providers/functions.rs @@ -1,17 +1,21 @@ -use pgt_schema_cache::Function; +use pgt_schema_cache::{Function, SchemaCache}; +use pgt_treesitter::TreesitterContext; use crate::{ CompletionItemKind, CompletionText, builder::{CompletionBuilder, PossibleCompletionItem}, - context::CompletionContext, providers::helper::get_range_to_replace, relevance::{CompletionRelevanceData, filtering::CompletionFilter, scoring::CompletionScore}, }; use super::helper::get_completion_text_with_schema_or_alias; -pub fn complete_functions<'a>(ctx: &'a CompletionContext, builder: &mut CompletionBuilder<'a>) { - let available_functions = &ctx.schema_cache.functions; +pub fn complete_functions<'a>( + ctx: &'a TreesitterContext, + schema_cache: &'a SchemaCache, + builder: &mut CompletionBuilder<'a>, +) { + let available_functions = &schema_cache.functions; for func in available_functions { let relevance = CompletionRelevanceData::Function(func); @@ -30,7 +34,7 @@ pub fn complete_functions<'a>(ctx: &'a CompletionContext, builder: &mut Completi } } -fn get_completion_text(ctx: &CompletionContext, func: &Function) -> CompletionText { +fn get_completion_text(ctx: &TreesitterContext, func: &Function) -> CompletionText { let range = get_range_to_replace(ctx); let mut text = get_completion_text_with_schema_or_alias(ctx, &func.name, &func.schema) .map(|ct| ct.text) @@ -65,13 +69,19 @@ fn get_completion_text(ctx: &CompletionContext, func: &Function) -> CompletionTe #[cfg(test)] mod tests { + use sqlx::{Executor, PgPool}; + use crate::{ CompletionItem, CompletionItemKind, complete, - test_helper::{CURSOR_POS, get_test_deps, get_test_params}, + test_helper::{ + CompletionAssertion, assert_complete_results, get_test_deps, get_test_params, + }, }; - #[tokio::test] - async fn completes_fn() { + use pgt_test_utils::QueryWithCursorPosition; + + #[sqlx::test(migrator = 
"pgt_test_utils::MIGRATIONS")] + async fn completes_fn(pool: PgPool) { let setup = r#" create or replace function cool() returns trigger @@ -84,9 +94,9 @@ mod tests { $$; "#; - let query = format!("select coo{}", CURSOR_POS); + let query = format!("select coo{}", QueryWithCursorPosition::cursor_marker()); - let (tree, cache) = get_test_deps(setup, query.as_str().into()).await; + let (tree, cache) = get_test_deps(Some(setup), query.as_str().into(), &pool).await; let params = get_test_params(&tree, &cache, query.as_str().into()); let results = complete(params); @@ -98,8 +108,8 @@ mod tests { assert_eq!(label, "cool"); } - #[tokio::test] - async fn prefers_fn_if_invocation() { + #[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] + async fn prefers_fn_if_invocation(pool: PgPool) { let setup = r#" create table coos ( id serial primary key, @@ -117,9 +127,12 @@ mod tests { $$; "#; - let query = format!(r#"select * from coo{}()"#, CURSOR_POS); + let query = format!( + r#"select * from coo{}()"#, + QueryWithCursorPosition::cursor_marker() + ); - let (tree, cache) = get_test_deps(setup, query.as_str().into()).await; + let (tree, cache) = get_test_deps(Some(setup), query.as_str().into(), &pool).await; let params = get_test_params(&tree, &cache, query.as_str().into()); let results = complete(params); @@ -132,8 +145,8 @@ mod tests { assert_eq!(kind, CompletionItemKind::Function); } - #[tokio::test] - async fn prefers_fn_in_select_clause() { + #[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] + async fn prefers_fn_in_select_clause(pool: PgPool) { let setup = r#" create table coos ( id serial primary key, @@ -151,9 +164,9 @@ mod tests { $$; "#; - let query = format!(r#"select coo{}"#, CURSOR_POS); + let query = format!(r#"select coo{}"#, QueryWithCursorPosition::cursor_marker()); - let (tree, cache) = get_test_deps(setup, query.as_str().into()).await; + let (tree, cache) = get_test_deps(Some(setup), query.as_str().into(), &pool).await; let params = 
get_test_params(&tree, &cache, query.as_str().into()); let results = complete(params); @@ -166,8 +179,8 @@ mod tests { assert_eq!(kind, CompletionItemKind::Function); } - #[tokio::test] - async fn prefers_function_in_from_clause_if_invocation() { + #[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] + async fn prefers_function_in_from_clause_if_invocation(pool: PgPool) { let setup = r#" create table coos ( id serial primary key, @@ -185,9 +198,12 @@ mod tests { $$; "#; - let query = format!(r#"select * from coo{}()"#, CURSOR_POS); + let query = format!( + r#"select * from coo{}()"#, + QueryWithCursorPosition::cursor_marker() + ); - let (tree, cache) = get_test_deps(setup, query.as_str().into()).await; + let (tree, cache) = get_test_deps(Some(setup), query.as_str().into(), &pool).await; let params = get_test_params(&tree, &cache, query.as_str().into()); let results = complete(params); @@ -199,4 +215,84 @@ mod tests { assert_eq!(label, "cool"); assert_eq!(kind, CompletionItemKind::Function); } + + #[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] + async fn only_allows_functions_and_procedures_in_policy_checks(pool: PgPool) { + let setup = r#" + create table coos ( + id serial primary key, + name text + ); + + create or replace function my_cool_foo() + returns trigger + language plpgsql + security invoker + as $$ + begin + raise exception 'dont matter'; + end; + $$; + + create or replace procedure my_cool_proc() + language plpgsql + security invoker + as $$ + begin + raise exception 'dont matter'; + end; + $$; + + create or replace function string_concat_state( + state text, + value text, + separator text) + returns text + language plpgsql + as $$ + begin + if state is null then + return value; + else + return state || separator || value; + end if; + end; + $$; + + create aggregate string_concat(text, text) ( + sfunc = string_concat_state, + stype = text, + initcond = '' + ); + "#; + + pool.execute(setup).await.unwrap(); + + let query = format!( + r#"create 
policy "my_pol" on public.instruments for insert with check (id = {})"#, + QueryWithCursorPosition::cursor_marker() + ); + + assert_complete_results( + query.as_str(), + vec![ + CompletionAssertion::LabelNotExists("string_concat".into()), + CompletionAssertion::LabelAndKind( + "my_cool_foo".into(), + CompletionItemKind::Function, + ), + CompletionAssertion::LabelAndKind( + "my_cool_proc".into(), + CompletionItemKind::Function, + ), + CompletionAssertion::LabelAndKind( + "string_concat_state".into(), + CompletionItemKind::Function, + ), + ], + None, + &pool, + ) + .await; + } } diff --git a/crates/pgt_completions/src/providers/helper.rs b/crates/pgt_completions/src/providers/helper.rs index 811125bd1..cd1046f12 100644 --- a/crates/pgt_completions/src/providers/helper.rs +++ b/crates/pgt_completions/src/providers/helper.rs @@ -1,9 +1,10 @@ use pgt_text_size::{TextRange, TextSize}; +use pgt_treesitter::TreesitterContext; -use crate::{CompletionText, context::CompletionContext, remove_sanitized_token}; +use crate::{CompletionText, remove_sanitized_token}; pub(crate) fn find_matching_alias_for_table( - ctx: &CompletionContext, + ctx: &TreesitterContext, table_name: &str, ) -> Option { for (alias, table) in ctx.mentioned_table_aliases.iter() { @@ -14,7 +15,7 @@ pub(crate) fn find_matching_alias_for_table( None } -pub(crate) fn get_range_to_replace(ctx: &CompletionContext) -> TextRange { +pub(crate) fn get_range_to_replace(ctx: &TreesitterContext) -> TextRange { match ctx.node_under_cursor.as_ref() { Some(node) => { let content = ctx.get_node_under_cursor_content().unwrap_or("".into()); @@ -30,7 +31,7 @@ pub(crate) fn get_range_to_replace(ctx: &CompletionContext) -> TextRange { } pub(crate) fn get_completion_text_with_schema_or_alias( - ctx: &CompletionContext, + ctx: &TreesitterContext, item_name: &str, schema_or_alias_name: &str, ) -> Option { diff --git a/crates/pgt_completions/src/providers/mod.rs b/crates/pgt_completions/src/providers/mod.rs index 
7b07cee8d..ddbdf252c 100644 --- a/crates/pgt_completions/src/providers/mod.rs +++ b/crates/pgt_completions/src/providers/mod.rs @@ -2,11 +2,13 @@ mod columns; mod functions; mod helper; mod policies; +mod roles; mod schemas; mod tables; pub use columns::*; pub use functions::*; pub use policies::*; +pub use roles::*; pub use schemas::*; pub use tables::*; diff --git a/crates/pgt_completions/src/providers/policies.rs b/crates/pgt_completions/src/providers/policies.rs index a4d3a9bb6..a5ffdb43e 100644 --- a/crates/pgt_completions/src/providers/policies.rs +++ b/crates/pgt_completions/src/providers/policies.rs @@ -1,16 +1,21 @@ +use pgt_schema_cache::SchemaCache; use pgt_text_size::{TextRange, TextSize}; +use pgt_treesitter::TreesitterContext; use crate::{ CompletionItemKind, CompletionText, builder::{CompletionBuilder, PossibleCompletionItem}, - context::CompletionContext, relevance::{CompletionRelevanceData, filtering::CompletionFilter, scoring::CompletionScore}, }; use super::helper::get_range_to_replace; -pub fn complete_policies<'a>(ctx: &CompletionContext<'a>, builder: &mut CompletionBuilder<'a>) { - let available_policies = &ctx.schema_cache.policies; +pub fn complete_policies<'a>( + ctx: &TreesitterContext<'a>, + schema_cache: &'a SchemaCache, + builder: &mut CompletionBuilder<'a>, +) { + let available_policies = &schema_cache.policies; let surrounded_by_quotes = ctx .get_node_under_cursor_content() @@ -59,10 +64,13 @@ pub fn complete_policies<'a>(ctx: &CompletionContext<'a>, builder: &mut Completi #[cfg(test)] mod tests { - use crate::test_helper::{CURSOR_POS, CompletionAssertion, assert_complete_results}; + use sqlx::{Executor, PgPool}; - #[tokio::test] - async fn completes_within_quotation_marks() { + use crate::test_helper::{CompletionAssertion, assert_complete_results}; + use pgt_test_utils::QueryWithCursorPosition; + + #[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] + async fn completes_within_quotation_marks(pool: PgPool) { let setup = r#" create 
schema private; @@ -84,22 +92,34 @@ mod tests { with check (true); "#; + pool.execute(setup).await.unwrap(); + assert_complete_results( - format!("alter policy \"{}\" on private.users;", CURSOR_POS).as_str(), + format!( + "alter policy \"{}\" on private.users;", + QueryWithCursorPosition::cursor_marker() + ) + .as_str(), vec![ CompletionAssertion::Label("read for public users disallowed".into()), CompletionAssertion::Label("write for public users allowed".into()), ], - setup, + None, + &pool, ) .await; assert_complete_results( - format!("alter policy \"w{}\" on private.users;", CURSOR_POS).as_str(), + format!( + "alter policy \"w{}\" on private.users;", + QueryWithCursorPosition::cursor_marker() + ) + .as_str(), vec![CompletionAssertion::Label( "write for public users allowed".into(), )], - setup, + None, + &pool, ) .await; } diff --git a/crates/pgt_completions/src/providers/roles.rs b/crates/pgt_completions/src/providers/roles.rs new file mode 100644 index 000000000..b7664349c --- /dev/null +++ b/crates/pgt_completions/src/providers/roles.rs @@ -0,0 +1,324 @@ +use crate::{ + CompletionItemKind, + builder::{CompletionBuilder, PossibleCompletionItem}, + relevance::{CompletionRelevanceData, filtering::CompletionFilter, scoring::CompletionScore}, +}; +use pgt_schema_cache::SchemaCache; +use pgt_treesitter::TreesitterContext; + +pub fn complete_roles<'a>( + _ctx: &TreesitterContext<'a>, + schema_cache: &'a SchemaCache, + builder: &mut CompletionBuilder<'a>, +) { + let available_roles = &schema_cache.roles; + + for role in available_roles { + let relevance = CompletionRelevanceData::Role(role); + + let item = PossibleCompletionItem { + label: role.name.chars().take(35).collect::(), + score: CompletionScore::from(relevance.clone()), + filter: CompletionFilter::from(relevance), + description: role.name.clone(), + kind: CompletionItemKind::Role, + completion_text: None, + detail: None, + }; + + builder.add_item(item); + } +} + +#[cfg(test)] +mod tests { + use 
sqlx::{Executor, PgPool}; + + use crate::test_helper::{CompletionAssertion, assert_complete_results}; + + use pgt_test_utils::QueryWithCursorPosition; + + const SETUP: &str = r#" + create table users ( + id serial primary key, + email varchar, + address text + ); + "#; + + #[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] + async fn works_in_drop_role(pool: PgPool) { + assert_complete_results( + format!("drop role {}", QueryWithCursorPosition::cursor_marker()).as_str(), + vec![ + CompletionAssertion::LabelAndKind("owner".into(), crate::CompletionItemKind::Role), + CompletionAssertion::LabelAndKind( + "test_login".into(), + crate::CompletionItemKind::Role, + ), + CompletionAssertion::LabelAndKind( + "test_nologin".into(), + crate::CompletionItemKind::Role, + ), + ], + Some(SETUP), + &pool, + ) + .await; + } + + #[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] + async fn works_in_alter_role(pool: PgPool) { + assert_complete_results( + format!("alter role {}", QueryWithCursorPosition::cursor_marker()).as_str(), + vec![ + CompletionAssertion::LabelAndKind("owner".into(), crate::CompletionItemKind::Role), + CompletionAssertion::LabelAndKind( + "test_login".into(), + crate::CompletionItemKind::Role, + ), + CompletionAssertion::LabelAndKind( + "test_nologin".into(), + crate::CompletionItemKind::Role, + ), + ], + Some(SETUP), + &pool, + ) + .await; + } + + #[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] + async fn works_in_set_statement(pool: PgPool) { + pool.execute(SETUP).await.unwrap(); + + assert_complete_results( + format!("set role {}", QueryWithCursorPosition::cursor_marker()).as_str(), + vec![ + CompletionAssertion::LabelAndKind("owner".into(), crate::CompletionItemKind::Role), + CompletionAssertion::LabelAndKind( + "test_login".into(), + crate::CompletionItemKind::Role, + ), + CompletionAssertion::LabelAndKind( + "test_nologin".into(), + crate::CompletionItemKind::Role, + ), + ], + None, + &pool, + ) + .await; + + assert_complete_results( + 
format!( + "set session authorization {}", + QueryWithCursorPosition::cursor_marker() + ) + .as_str(), + vec![ + CompletionAssertion::LabelAndKind("owner".into(), crate::CompletionItemKind::Role), + CompletionAssertion::LabelAndKind( + "test_login".into(), + crate::CompletionItemKind::Role, + ), + CompletionAssertion::LabelAndKind( + "test_nologin".into(), + crate::CompletionItemKind::Role, + ), + ], + None, + &pool, + ) + .await; + } + + #[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] + async fn works_in_policies(pool: PgPool) { + pool.execute(SETUP).await.unwrap(); + + assert_complete_results( + format!( + r#"create policy "my cool policy" on public.users + as restrictive + for all + to {} + using (true);"#, + QueryWithCursorPosition::cursor_marker() + ) + .as_str(), + vec![ + CompletionAssertion::LabelAndKind("owner".into(), crate::CompletionItemKind::Role), + CompletionAssertion::LabelAndKind( + "test_login".into(), + crate::CompletionItemKind::Role, + ), + CompletionAssertion::LabelAndKind( + "test_nologin".into(), + crate::CompletionItemKind::Role, + ), + ], + None, + &pool, + ) + .await; + + assert_complete_results( + format!( + r#"create policy "my cool policy" on public.users + for select + to {}"#, + QueryWithCursorPosition::cursor_marker() + ) + .as_str(), + vec![ + CompletionAssertion::LabelAndKind("owner".into(), crate::CompletionItemKind::Role), + CompletionAssertion::LabelAndKind( + "test_login".into(), + crate::CompletionItemKind::Role, + ), + CompletionAssertion::LabelAndKind( + "test_nologin".into(), + crate::CompletionItemKind::Role, + ), + ], + None, + &pool, + ) + .await; + } + + #[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] + async fn works_in_grant_statements(pool: PgPool) { + pool.execute(SETUP).await.unwrap(); + + assert_complete_results( + format!( + r#"grant select + on table public.users + to {}"#, + QueryWithCursorPosition::cursor_marker() + ) + .as_str(), + vec![ + // recognizing already mentioned roles is not supported 
for now + CompletionAssertion::LabelAndKind("owner".into(), crate::CompletionItemKind::Role), + CompletionAssertion::LabelAndKind( + "test_login".into(), + crate::CompletionItemKind::Role, + ), + CompletionAssertion::LabelAndKind( + "test_nologin".into(), + crate::CompletionItemKind::Role, + ), + ], + None, + &pool, + ) + .await; + + assert_complete_results( + format!( + r#"grant select + on table public.users + to owner, {}"#, + QueryWithCursorPosition::cursor_marker() + ) + .as_str(), + vec![ + // recognizing already mentioned roles is not supported for now + CompletionAssertion::LabelAndKind("owner".into(), crate::CompletionItemKind::Role), + CompletionAssertion::LabelAndKind( + "test_login".into(), + crate::CompletionItemKind::Role, + ), + CompletionAssertion::LabelAndKind( + "test_nologin".into(), + crate::CompletionItemKind::Role, + ), + ], + None, + &pool, + ) + .await; + + assert_complete_results( + format!( + r#"grant {} to owner"#, + QueryWithCursorPosition::cursor_marker() + ) + .as_str(), + vec![ + // recognizing already mentioned roles is not supported for now + CompletionAssertion::LabelAndKind("owner".into(), crate::CompletionItemKind::Role), + CompletionAssertion::LabelAndKind( + "test_login".into(), + crate::CompletionItemKind::Role, + ), + CompletionAssertion::LabelAndKind( + "test_nologin".into(), + crate::CompletionItemKind::Role, + ), + ], + None, + &pool, + ) + .await; + } + + #[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] + async fn works_in_revoke_statements(pool: PgPool) { + pool.execute(SETUP).await.unwrap(); + + let queries = vec![ + format!( + "revoke {} from owner", + QueryWithCursorPosition::cursor_marker() + ), + format!( + "revoke admin option for {} from owner", + QueryWithCursorPosition::cursor_marker() + ), + format!( + "revoke owner from {}", + QueryWithCursorPosition::cursor_marker() + ), + format!( + "revoke all on schema public from {} granted by", + QueryWithCursorPosition::cursor_marker() + ), + format!( + "revoke 
all on schema public from owner, {}", + QueryWithCursorPosition::cursor_marker() + ), + format!( + "revoke all on table userse from owner, {}", + QueryWithCursorPosition::cursor_marker() + ), + ]; + + for query in queries { + assert_complete_results( + query.as_str(), + vec![ + // recognizing already mentioned roles is not supported for now + CompletionAssertion::LabelAndKind( + "owner".into(), + crate::CompletionItemKind::Role, + ), + CompletionAssertion::LabelAndKind( + "test_login".into(), + crate::CompletionItemKind::Role, + ), + CompletionAssertion::LabelAndKind( + "test_nologin".into(), + crate::CompletionItemKind::Role, + ), + ], + None, + &pool, + ) + .await; + } + } +} diff --git a/crates/pgt_completions/src/providers/schemas.rs b/crates/pgt_completions/src/providers/schemas.rs index 02d2fd0c0..43c523875 100644 --- a/crates/pgt_completions/src/providers/schemas.rs +++ b/crates/pgt_completions/src/providers/schemas.rs @@ -1,11 +1,16 @@ use crate::{ builder::{CompletionBuilder, PossibleCompletionItem}, - context::CompletionContext, relevance::{CompletionRelevanceData, filtering::CompletionFilter, scoring::CompletionScore}, }; +use pgt_schema_cache::SchemaCache; +use pgt_treesitter::TreesitterContext; -pub fn complete_schemas<'a>(ctx: &'a CompletionContext, builder: &mut CompletionBuilder<'a>) { - let available_schemas = &ctx.schema_cache.schemas; +pub fn complete_schemas<'a>( + _ctx: &'a TreesitterContext, + schema_cache: &'a SchemaCache, + builder: &mut CompletionBuilder<'a>, +) { + let available_schemas = &schema_cache.schemas; for schema in available_schemas { let relevance = CompletionRelevanceData::Schema(schema); @@ -27,13 +32,17 @@ pub fn complete_schemas<'a>(ctx: &'a CompletionContext, builder: &mut Completion #[cfg(test)] mod tests { + use sqlx::PgPool; + use crate::{ CompletionItemKind, - test_helper::{CURSOR_POS, CompletionAssertion, assert_complete_results}, + test_helper::{CompletionAssertion, assert_complete_results}, }; - #[tokio::test] - 
async fn autocompletes_schemas() { + use pgt_test_utils::QueryWithCursorPosition; + + #[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] + async fn autocompletes_schemas(pool: PgPool) { let setup = r#" create schema private; create schema auth; @@ -48,7 +57,7 @@ mod tests { "#; assert_complete_results( - format!("select * from {}", CURSOR_POS).as_str(), + format!("select * from {}", QueryWithCursorPosition::cursor_marker()).as_str(), vec![ CompletionAssertion::LabelAndKind("public".to_string(), CompletionItemKind::Schema), CompletionAssertion::LabelAndKind("auth".to_string(), CompletionItemKind::Schema), @@ -75,13 +84,14 @@ mod tests { CompletionItemKind::Schema, ), ], - setup, + Some(setup), + &pool, ) .await; } - #[tokio::test] - async fn suggests_tables_and_schemas_with_matching_keys() { + #[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] + async fn suggests_tables_and_schemas_with_matching_keys(pool: PgPool) { let setup = r#" create schema ultimate; @@ -94,12 +104,17 @@ mod tests { "#; assert_complete_results( - format!("select * from u{}", CURSOR_POS).as_str(), + format!( + "select * from u{}", + QueryWithCursorPosition::cursor_marker() + ) + .as_str(), vec![ CompletionAssertion::LabelAndKind("users".into(), CompletionItemKind::Table), CompletionAssertion::LabelAndKind("ultimate".into(), CompletionItemKind::Schema), ], - setup, + Some(setup), + &pool, ) .await; } diff --git a/crates/pgt_completions/src/providers/tables.rs b/crates/pgt_completions/src/providers/tables.rs index 6ed3760eb..f78b697c9 100644 --- a/crates/pgt_completions/src/providers/tables.rs +++ b/crates/pgt_completions/src/providers/tables.rs @@ -1,25 +1,39 @@ +use pgt_schema_cache::SchemaCache; +use pgt_treesitter::TreesitterContext; + use crate::{ builder::{CompletionBuilder, PossibleCompletionItem}, - context::CompletionContext, item::CompletionItemKind, relevance::{CompletionRelevanceData, filtering::CompletionFilter, scoring::CompletionScore}, }; use 
super::helper::get_completion_text_with_schema_or_alias; -pub fn complete_tables<'a>(ctx: &'a CompletionContext, builder: &mut CompletionBuilder<'a>) { - let available_tables = &ctx.schema_cache.tables; +pub fn complete_tables<'a>( + ctx: &'a TreesitterContext, + schema_cache: &'a SchemaCache, + builder: &mut CompletionBuilder<'a>, +) { + let available_tables = &schema_cache.tables; for table in available_tables { let relevance = CompletionRelevanceData::Table(table); + let detail: Option = match table.table_kind { + pgt_schema_cache::TableKind::Ordinary | pgt_schema_cache::TableKind::Partitioned => { + None + } + pgt_schema_cache::TableKind::View => Some("View".into()), + pgt_schema_cache::TableKind::MaterializedView => Some("MView".into()), + }; + let item = PossibleCompletionItem { label: table.name.clone(), score: CompletionScore::from(relevance.clone()), filter: CompletionFilter::from(relevance), - description: format!("Schema: {}", table.schema), + description: table.schema.to_string(), kind: CompletionItemKind::Table, - detail: None, + detail, completion_text: get_completion_text_with_schema_or_alias( ctx, &table.name, @@ -34,16 +48,20 @@ pub fn complete_tables<'a>(ctx: &'a CompletionContext, builder: &mut CompletionB #[cfg(test)] mod tests { + use sqlx::{Executor, PgPool}; + use crate::{ CompletionItem, CompletionItemKind, complete, test_helper::{ - CURSOR_POS, CompletionAssertion, assert_complete_results, assert_no_complete_results, + CompletionAssertion, assert_complete_results, assert_no_complete_results, get_test_deps, get_test_params, }, }; - #[tokio::test] - async fn autocompletes_simple_table() { + use pgt_test_utils::QueryWithCursorPosition; + + #[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] + async fn autocompletes_simple_table(pool: PgPool) { let setup = r#" create table users ( id serial primary key, @@ -52,9 +70,12 @@ mod tests { ); "#; - let query = format!("select * from u{}", CURSOR_POS); + let query = format!( + "select * from u{}", 
+ QueryWithCursorPosition::cursor_marker() + ); - let (tree, cache) = get_test_deps(setup, query.as_str().into()).await; + let (tree, cache) = get_test_deps(Some(setup), query.as_str().into(), &pool).await; let params = get_test_params(&tree, &cache, query.as_str().into()); let items = complete(params); @@ -69,8 +90,8 @@ mod tests { ) } - #[tokio::test] - async fn autocompletes_table_alphanumerically() { + #[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] + async fn autocompletes_table_alphanumerically(pool: PgPool) { let setup = r#" create table addresses ( id serial primary key @@ -85,14 +106,34 @@ mod tests { ); "#; + pool.execute(setup).await.unwrap(); + let test_cases = vec![ - (format!("select * from u{}", CURSOR_POS), "users"), - (format!("select * from e{}", CURSOR_POS), "emails"), - (format!("select * from a{}", CURSOR_POS), "addresses"), + ( + format!( + "select * from u{}", + QueryWithCursorPosition::cursor_marker() + ), + "users", + ), + ( + format!( + "select * from e{}", + QueryWithCursorPosition::cursor_marker() + ), + "emails", + ), + ( + format!( + "select * from a{}", + QueryWithCursorPosition::cursor_marker() + ), + "addresses", + ), ]; for (query, expected_label) in test_cases { - let (tree, cache) = get_test_deps(setup, query.as_str().into()).await; + let (tree, cache) = get_test_deps(None, query.as_str().into(), &pool).await; let params = get_test_params(&tree, &cache, query.as_str().into()); let items = complete(params); @@ -108,8 +149,8 @@ mod tests { } } - #[tokio::test] - async fn autocompletes_table_with_schema() { + #[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] + async fn autocompletes_table_with_schema(pool: PgPool) { let setup = r#" create schema customer_support; create schema private; @@ -127,17 +168,34 @@ mod tests { ); "#; + pool.execute(setup).await.unwrap(); + let test_cases = vec![ - (format!("select * from u{}", CURSOR_POS), "user_y"), // user_y is preferred alphanumerically - (format!("select * from private.u{}", 
CURSOR_POS), "user_z"), ( - format!("select * from customer_support.u{}", CURSOR_POS), + format!( + "select * from u{}", + QueryWithCursorPosition::cursor_marker() + ), + "user_y", + ), // user_y is preferred alphanumerically + ( + format!( + "select * from private.u{}", + QueryWithCursorPosition::cursor_marker() + ), + "user_z", + ), + ( + format!( + "select * from customer_support.u{}", + QueryWithCursorPosition::cursor_marker() + ), "user_y", ), ]; for (query, expected_label) in test_cases { - let (tree, cache) = get_test_deps(setup, query.as_str().into()).await; + let (tree, cache) = get_test_deps(None, query.as_str().into(), &pool).await; let params = get_test_params(&tree, &cache, query.as_str().into()); let items = complete(params); @@ -153,8 +211,8 @@ mod tests { } } - #[tokio::test] - async fn prefers_table_in_from_clause() { + #[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] + async fn prefers_table_in_from_clause(pool: PgPool) { let setup = r#" create table coos ( id serial primary key, @@ -172,9 +230,12 @@ mod tests { $$; "#; - let query = format!(r#"select * from coo{}"#, CURSOR_POS); + let query = format!( + r#"select * from coo{}"#, + QueryWithCursorPosition::cursor_marker() + ); - let (tree, cache) = get_test_deps(setup, query.as_str().into()).await; + let (tree, cache) = get_test_deps(Some(setup), query.as_str().into(), &pool).await; let params = get_test_params(&tree, &cache, query.as_str().into()); let items = complete(params); @@ -187,8 +248,8 @@ mod tests { assert_eq!(kind, CompletionItemKind::Table); } - #[tokio::test] - async fn suggests_tables_in_update() { + #[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] + async fn suggests_tables_in_update(pool: PgPool) { let setup = r#" create table coos ( id serial primary key, @@ -196,52 +257,74 @@ mod tests { ); "#; + pool.execute(setup).await.unwrap(); + assert_complete_results( - format!("update {}", CURSOR_POS).as_str(), + format!("update {}", 
QueryWithCursorPosition::cursor_marker()).as_str(), vec![CompletionAssertion::LabelAndKind( "public".into(), CompletionItemKind::Schema, )], - setup, + None, + &pool, ) .await; assert_complete_results( - format!("update public.{}", CURSOR_POS).as_str(), + format!("update public.{}", QueryWithCursorPosition::cursor_marker()).as_str(), vec![CompletionAssertion::LabelAndKind( "coos".into(), CompletionItemKind::Table, )], - setup, + None, + &pool, ) .await; - assert_no_complete_results(format!("update public.coos {}", CURSOR_POS).as_str(), setup) - .await; + assert_no_complete_results( + format!( + "update public.coos {}", + QueryWithCursorPosition::cursor_marker() + ) + .as_str(), + None, + &pool, + ) + .await; assert_complete_results( - format!("update coos set {}", CURSOR_POS).as_str(), + format!( + "update coos set {}", + QueryWithCursorPosition::cursor_marker() + ) + .as_str(), vec![ CompletionAssertion::Label("id".into()), CompletionAssertion::Label("name".into()), ], - setup, + None, + &pool, ) .await; assert_complete_results( - format!("update coos set name = 'cool' where {}", CURSOR_POS).as_str(), + format!( + "update coos set name = 'cool' where {}", + QueryWithCursorPosition::cursor_marker() + ) + .as_str(), vec![ CompletionAssertion::Label("id".into()), CompletionAssertion::Label("name".into()), ], - setup, + None, + &pool, ) .await; } - #[tokio::test] - async fn suggests_tables_in_delete() { + #[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] + async fn suggests_tables_in_delete(pool: PgPool) { let setup = r#" create table coos ( id serial primary key, @@ -249,38 +332,56 @@ mod tests { ); "#; - assert_no_complete_results(format!("delete {}", CURSOR_POS).as_str(), setup).await; + pool.execute(setup).await.unwrap(); + + assert_no_complete_results( + format!("delete {}", QueryWithCursorPosition::cursor_marker()).as_str(), + None, + &pool, + ) + .await; assert_complete_results( - format!("delete from {}", CURSOR_POS).as_str(), + format!("delete from {}", 
QueryWithCursorPosition::cursor_marker()).as_str(), vec![ CompletionAssertion::LabelAndKind("public".into(), CompletionItemKind::Schema), CompletionAssertion::LabelAndKind("coos".into(), CompletionItemKind::Table), ], - setup, + None, + &pool, ) .await; assert_complete_results( - format!("delete from public.{}", CURSOR_POS).as_str(), + format!( + "delete from public.{}", + QueryWithCursorPosition::cursor_marker() + ) + .as_str(), vec![CompletionAssertion::Label("coos".into())], - setup, + None, + &pool, ) .await; assert_complete_results( - format!("delete from public.coos where {}", CURSOR_POS).as_str(), + format!( + "delete from public.coos where {}", + QueryWithCursorPosition::cursor_marker() + ) + .as_str(), vec![ CompletionAssertion::Label("id".into()), CompletionAssertion::Label("name".into()), ], - setup, + None, + &pool, ) .await; } - #[tokio::test] - async fn suggests_tables_in_join() { + #[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] + async fn suggests_tables_in_join(pool: PgPool) { let setup = r#" create schema auth; @@ -300,20 +401,25 @@ mod tests { "#; assert_complete_results( - format!("select * from auth.users u join {}", CURSOR_POS).as_str(), + format!( + "select * from auth.users u join {}", + QueryWithCursorPosition::cursor_marker() + ) + .as_str(), vec![ CompletionAssertion::LabelAndKind("public".into(), CompletionItemKind::Schema), CompletionAssertion::LabelAndKind("auth".into(), CompletionItemKind::Schema), CompletionAssertion::LabelAndKind("posts".into(), CompletionItemKind::Table), // self-join CompletionAssertion::LabelAndKind("users".into(), CompletionItemKind::Table), ], - setup, + Some(setup), + &pool, ) .await; } - #[tokio::test] - async fn suggests_tables_in_alter_and_drop_statements() { + #[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] + async fn suggests_tables_in_alter_and_drop_statements(pool: PgPool) { let setup = r#" create schema auth; @@ -332,57 +438,71 @@ mod tests { ); "#; + pool.execute(setup).await.unwrap(); + 
assert_complete_results( - format!("alter table {}", CURSOR_POS).as_str(), + format!("alter table {}", QueryWithCursorPosition::cursor_marker()).as_str(), vec![ CompletionAssertion::LabelAndKind("public".into(), CompletionItemKind::Schema), CompletionAssertion::LabelAndKind("auth".into(), CompletionItemKind::Schema), CompletionAssertion::LabelAndKind("posts".into(), CompletionItemKind::Table), CompletionAssertion::LabelAndKind("users".into(), CompletionItemKind::Table), ], - setup, + None, + &pool, ) .await; assert_complete_results( - format!("alter table if exists {}", CURSOR_POS).as_str(), + format!( + "alter table if exists {}", + QueryWithCursorPosition::cursor_marker() + ) + .as_str(), vec![ CompletionAssertion::LabelAndKind("public".into(), CompletionItemKind::Schema), CompletionAssertion::LabelAndKind("auth".into(), CompletionItemKind::Schema), CompletionAssertion::LabelAndKind("posts".into(), CompletionItemKind::Table), CompletionAssertion::LabelAndKind("users".into(), CompletionItemKind::Table), ], - setup, + None, + &pool, ) .await; assert_complete_results( - format!("drop table {}", CURSOR_POS).as_str(), + format!("drop table {}", QueryWithCursorPosition::cursor_marker()).as_str(), vec![ CompletionAssertion::LabelAndKind("public".into(), CompletionItemKind::Schema), CompletionAssertion::LabelAndKind("auth".into(), CompletionItemKind::Schema), CompletionAssertion::LabelAndKind("posts".into(), CompletionItemKind::Table), CompletionAssertion::LabelAndKind("users".into(), CompletionItemKind::Table), ], - setup, + None, + &pool, ) .await; assert_complete_results( - format!("drop table if exists {}", CURSOR_POS).as_str(), + format!( + "drop table if exists {}", + QueryWithCursorPosition::cursor_marker() + ) + .as_str(), vec![ CompletionAssertion::LabelAndKind("public".into(), CompletionItemKind::Schema), CompletionAssertion::LabelAndKind("auth".into(), CompletionItemKind::Schema), CompletionAssertion::LabelAndKind("posts".into(), CompletionItemKind::Table), // 
self-join CompletionAssertion::LabelAndKind("users".into(), CompletionItemKind::Table), ], - setup, + None, + &pool, ) .await; } - #[tokio::test] - async fn suggests_tables_in_insert_into() { + #[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] + async fn suggests_tables_in_insert_into(pool: PgPool) { let setup = r#" create schema auth; @@ -393,24 +513,32 @@ mod tests { ); "#; + pool.execute(setup).await.unwrap(); + assert_complete_results( - format!("insert into {}", CURSOR_POS).as_str(), + format!("insert into {}", QueryWithCursorPosition::cursor_marker()).as_str(), vec![ CompletionAssertion::LabelAndKind("public".into(), CompletionItemKind::Schema), CompletionAssertion::LabelAndKind("auth".into(), CompletionItemKind::Schema), CompletionAssertion::LabelAndKind("users".into(), CompletionItemKind::Table), ], - setup, + None, + &pool, ) .await; assert_complete_results( - format!("insert into auth.{}", CURSOR_POS).as_str(), + format!( + "insert into auth.{}", + QueryWithCursorPosition::cursor_marker() + ) + .as_str(), vec![CompletionAssertion::LabelAndKind( "users".into(), CompletionItemKind::Table, )], - setup, + None, + &pool, ) .await; @@ -418,7 +546,7 @@ mod tests { assert_complete_results( format!( "insert into {} (name, email) values ('jules', 'a@b.com');", - CURSOR_POS + QueryWithCursorPosition::cursor_marker() ) .as_str(), vec![ @@ -426,7 +554,8 @@ mod tests { CompletionAssertion::LabelAndKind("auth".into(), CompletionItemKind::Schema), CompletionAssertion::LabelAndKind("users".into(), CompletionItemKind::Table), ], - setup, + None, + &pool, ) .await; } diff --git a/crates/pgt_completions/src/providers/triggers.rs b/crates/pgt_completions/src/providers/triggers.rs deleted file mode 100644 index 6bc04debc..000000000 --- a/crates/pgt_completions/src/providers/triggers.rs +++ /dev/null @@ -1,169 +0,0 @@ -use crate::{ - CompletionItemKind, - builder::{CompletionBuilder, PossibleCompletionItem}, - context::CompletionContext, - 
relevance::{CompletionRelevanceData, filtering::CompletionFilter, scoring::CompletionScore}, -}; - -use super::helper::get_completion_text_with_schema_or_alias; - -pub fn complete_functions<'a>(ctx: &'a CompletionContext, builder: &mut CompletionBuilder<'a>) { - let available_functions = &ctx.schema_cache.functions; - - for func in available_functions { - let relevance = CompletionRelevanceData::Function(func); - - let item = PossibleCompletionItem { - label: func.name.clone(), - score: CompletionScore::from(relevance.clone()), - filter: CompletionFilter::from(relevance), - description: format!("Schema: {}", func.schema), - kind: CompletionItemKind::Function, - completion_text: get_completion_text_with_schema_or_alias( - ctx, - &func.name, - &func.schema, - ), - }; - - builder.add_item(item); - } -} - -#[cfg(test)] -mod tests { - use crate::{ - CompletionItem, CompletionItemKind, complete, - test_helper::{CURSOR_POS, get_test_deps, get_test_params}, - }; - - #[tokio::test] - async fn completes_fn() { - let setup = r#" - create or replace function cool() - returns trigger - language plpgsql - security invoker - as $$ - begin - raise exception 'dont matter'; - end; - $$; - "#; - - let query = format!("select coo{}", CURSOR_POS); - - let (tree, cache) = get_test_deps(setup, query.as_str().into()).await; - let params = get_test_params(&tree, &cache, query.as_str().into()); - let results = complete(params); - - let CompletionItem { label, .. 
} = results - .into_iter() - .next() - .expect("Should return at least one completion item"); - - assert_eq!(label, "cool"); - } - - #[tokio::test] - async fn prefers_fn_if_invocation() { - let setup = r#" - create table coos ( - id serial primary key, - name text - ); - - create or replace function cool() - returns trigger - language plpgsql - security invoker - as $$ - begin - raise exception 'dont matter'; - end; - $$; - "#; - - let query = format!(r#"select * from coo{}()"#, CURSOR_POS); - - let (tree, cache) = get_test_deps(setup, query.as_str().into()).await; - let params = get_test_params(&tree, &cache, query.as_str().into()); - let results = complete(params); - - let CompletionItem { label, kind, .. } = results - .into_iter() - .next() - .expect("Should return at least one completion item"); - - assert_eq!(label, "cool"); - assert_eq!(kind, CompletionItemKind::Function); - } - - #[tokio::test] - async fn prefers_fn_in_select_clause() { - let setup = r#" - create table coos ( - id serial primary key, - name text - ); - - create or replace function cool() - returns trigger - language plpgsql - security invoker - as $$ - begin - raise exception 'dont matter'; - end; - $$; - "#; - - let query = format!(r#"select coo{}"#, CURSOR_POS); - - let (tree, cache) = get_test_deps(setup, query.as_str().into()).await; - let params = get_test_params(&tree, &cache, query.as_str().into()); - let results = complete(params); - - let CompletionItem { label, kind, .. 
} = results - .into_iter() - .next() - .expect("Should return at least one completion item"); - - assert_eq!(label, "cool"); - assert_eq!(kind, CompletionItemKind::Function); - } - - #[tokio::test] - async fn prefers_function_in_from_clause_if_invocation() { - let setup = r#" - create table coos ( - id serial primary key, - name text - ); - - create or replace function cool() - returns trigger - language plpgsql - security invoker - as $$ - begin - raise exception 'dont matter'; - end; - $$; - "#; - - let query = format!(r#"select * from coo{}()"#, CURSOR_POS); - - let (tree, cache) = get_test_deps(setup, query.as_str().into()).await; - let params = get_test_params(&tree, &cache, query.as_str().into()); - let results = complete(params); - - let CompletionItem { label, kind, .. } = results - .into_iter() - .next() - .expect("Should return at least one completion item"); - - assert_eq!(label, "cool"); - assert_eq!(kind, CompletionItemKind::Function); - } -} diff --git a/crates/pgt_completions/src/relevance.rs b/crates/pgt_completions/src/relevance.rs index f51c3c52e..1d39d9bb4 100644 --- a/crates/pgt_completions/src/relevance.rs +++ b/crates/pgt_completions/src/relevance.rs @@ -8,4 +8,5 @@ pub(crate) enum CompletionRelevanceData<'a> { Column(&'a pgt_schema_cache::Column), Schema(&'a pgt_schema_cache::Schema), Policy(&'a pgt_schema_cache::Policy), + Role(&'a pgt_schema_cache::Role), } diff --git a/crates/pgt_completions/src/relevance/filtering.rs b/crates/pgt_completions/src/relevance/filtering.rs index 5323e2bce..18e3d7ce5 100644 --- a/crates/pgt_completions/src/relevance/filtering.rs +++ b/crates/pgt_completions/src/relevance/filtering.rs @@ -1,4 +1,6 @@ -use crate::context::{CompletionContext, NodeUnderCursor, WrappingClause, WrappingNode}; +use pgt_schema_cache::ProcKind; + +use pgt_treesitter::context::{NodeUnderCursor, TreesitterContext, WrappingClause, WrappingNode}; use super::CompletionRelevanceData; @@ -14,7 +16,7 @@ impl<'a> From> for CompletionFilter<'a> { 
} impl CompletionFilter<'_> { - pub fn is_relevant(&self, ctx: &CompletionContext) -> Option<()> { + pub fn is_relevant(&self, ctx: &TreesitterContext) -> Option<()> { self.completable_context(ctx)?; self.check_clause(ctx)?; self.check_invocation(ctx)?; @@ -23,7 +25,7 @@ impl CompletionFilter<'_> { Some(()) } - fn completable_context(&self, ctx: &CompletionContext) -> Option<()> { + fn completable_context(&self, ctx: &TreesitterContext) -> Option<()> { if ctx.wrapping_node_kind.is_none() && ctx.wrapping_clause_type.is_none() { return None; } @@ -68,15 +70,19 @@ impl CompletionFilter<'_> { Some(()) } - fn check_clause(&self, ctx: &CompletionContext) -> Option<()> { + fn check_clause(&self, ctx: &TreesitterContext) -> Option<()> { ctx.wrapping_clause_type .as_ref() .map(|clause| { match self.data { CompletionRelevanceData::Table(_) => match clause { - WrappingClause::Select - | WrappingClause::Where - | WrappingClause::ColumnDefinitions => false, + WrappingClause::From | WrappingClause::Update => true, + + WrappingClause::Join { on_node: None } => true, + WrappingClause::Join { on_node: Some(on) } => ctx + .node_under_cursor + .as_ref() + .is_some_and(|cn| cn.start_byte() < on.end_byte()), WrappingClause::Insert => { ctx.wrapping_node_kind @@ -94,15 +100,22 @@ impl CompletionFilter<'_> { "keyword_table", ]), - _ => true, + _ => false, }, CompletionRelevanceData::Column(_) => { match clause { - WrappingClause::From - | WrappingClause::ColumnDefinitions - | WrappingClause::AlterTable - | WrappingClause::DropTable => false, + WrappingClause::Select + | WrappingClause::Update + | WrappingClause::Delete + | WrappingClause::DropColumn => true, + + WrappingClause::RenameColumn => ctx + .before_cursor_matches_kind(&["keyword_rename", "keyword_column"]), + + WrappingClause::AlterColumn => { + ctx.before_cursor_matches_kind(&["keyword_alter", "keyword_column"]) + } // We can complete columns in JOIN cluases, but only if we are after the // ON node in the "ON u.id = 
posts.user_id" part. @@ -126,17 +139,27 @@ impl CompletionFilter<'_> { && ctx.parent_matches_one_of_kind(&["field"])) } - _ => true, + WrappingClause::PolicyCheck => { + ctx.before_cursor_matches_kind(&["keyword_and", "("]) + } + + _ => false, } } - CompletionRelevanceData::Function(_) => matches!( - clause, + CompletionRelevanceData::Function(f) => match clause { WrappingClause::From - | WrappingClause::Select - | WrappingClause::Where - | WrappingClause::Join { .. } - ), + | WrappingClause::Select + | WrappingClause::Where + | WrappingClause::Join { .. } => true, + + WrappingClause::PolicyCheck => { + ctx.before_cursor_matches_kind(&["="]) + && matches!(f.kind, ProcKind::Function | ProcKind::Procedure) + } + + _ => false, + }, CompletionRelevanceData::Schema(_) => match clause { WrappingClause::Select @@ -169,12 +192,23 @@ impl CompletionFilter<'_> { CompletionRelevanceData::Policy(_) => { matches!(clause, WrappingClause::PolicyName) } + + CompletionRelevanceData::Role(_) => match clause { + WrappingClause::DropRole + | WrappingClause::AlterRole + | WrappingClause::ToRoleAssignment => true, + + WrappingClause::SetStatement => ctx + .before_cursor_matches_kind(&["keyword_role", "keyword_authorization"]), + + _ => false, + }, } }) .and_then(|is_ok| if is_ok { Some(()) } else { None }) } - fn check_invocation(&self, ctx: &CompletionContext) -> Option<()> { + fn check_invocation(&self, ctx: &TreesitterContext) -> Option<()> { if !ctx.is_invocation { return Some(()); } @@ -187,7 +221,7 @@ impl CompletionFilter<'_> { Some(()) } - fn check_mentioned_schema_or_alias(&self, ctx: &CompletionContext) -> Option<()> { + fn check_mentioned_schema_or_alias(&self, ctx: &TreesitterContext) -> Option<()> { if ctx.schema_or_alias_name.is_none() { return Some(()); } @@ -204,8 +238,8 @@ impl CompletionFilter<'_> { // we should never allow schema suggestions if there already was one. 
CompletionRelevanceData::Schema(_) => false, - // no policy comletion if user typed a schema node first. - CompletionRelevanceData::Policy(_) => false, + // no policy or row completion if user typed a schema node first. + CompletionRelevanceData::Policy(_) | CompletionRelevanceData::Role(_) => false, }; if !matches { @@ -218,12 +252,16 @@ impl CompletionFilter<'_> { #[cfg(test)] mod tests { + use sqlx::{Executor, PgPool}; + use crate::test_helper::{ - CURSOR_POS, CompletionAssertion, assert_complete_results, assert_no_complete_results, + CompletionAssertion, assert_complete_results, assert_no_complete_results, }; - #[tokio::test] - async fn completion_after_asterisk() { + use pgt_test_utils::QueryWithCursorPosition; + + #[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] + async fn completion_after_asterisk(pool: PgPool) { let setup = r#" create table users ( id serial primary key, @@ -232,29 +270,45 @@ mod tests { ); "#; - assert_no_complete_results(format!("select * {}", CURSOR_POS).as_str(), setup).await; + pool.execute(setup).await.unwrap(); + + assert_no_complete_results( + format!("select * {}", QueryWithCursorPosition::cursor_marker()).as_str(), + None, + &pool, + ) + .await; // if there s a COMMA after the asterisk, we're good assert_complete_results( - format!("select *, {}", CURSOR_POS).as_str(), + format!("select *, {}", QueryWithCursorPosition::cursor_marker()).as_str(), vec![ CompletionAssertion::Label("address".into()), CompletionAssertion::Label("email".into()), CompletionAssertion::Label("id".into()), ], - setup, + None, + &pool, ) .await; } - #[tokio::test] - async fn completion_after_create_table() { - assert_no_complete_results(format!("create table {}", CURSOR_POS).as_str(), "").await; + #[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] + async fn completion_after_create_table(pool: PgPool) { + assert_no_complete_results( + format!("create table {}", QueryWithCursorPosition::cursor_marker()).as_str(), + None, + &pool, + ) + .await; } - 
#[tokio::test] - async fn completion_in_column_definitions() { - let query = format!(r#"create table instruments ( {} )"#, CURSOR_POS); - assert_no_complete_results(query.as_str(), "").await; + #[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] + async fn completion_in_column_definitions(pool: PgPool) { + let query = format!( + r#"create table instruments ( {} )"#, + QueryWithCursorPosition::cursor_marker() + ); + assert_no_complete_results(query.as_str(), None, &pool).await; } } diff --git a/crates/pgt_completions/src/relevance/scoring.rs b/crates/pgt_completions/src/relevance/scoring.rs index 2fe125111..4bbf325f4 100644 --- a/crates/pgt_completions/src/relevance/scoring.rs +++ b/crates/pgt_completions/src/relevance/scoring.rs @@ -1,6 +1,8 @@ use fuzzy_matcher::{FuzzyMatcher, skim::SkimMatcherV2}; -use crate::context::{CompletionContext, WrappingClause, WrappingNode}; +use pgt_treesitter::context::{TreesitterContext, WrappingClause, WrappingNode}; + +use crate::sanitization; use super::CompletionRelevanceData; @@ -24,7 +26,7 @@ impl CompletionScore<'_> { self.score } - pub fn calc_score(&mut self, ctx: &CompletionContext) { + pub fn calc_score(&mut self, ctx: &TreesitterContext) { self.check_is_user_defined(); self.check_matches_schema(ctx); self.check_matches_query_input(ctx); @@ -35,10 +37,10 @@ impl CompletionScore<'_> { self.check_columns_in_stmt(ctx); } - fn check_matches_query_input(&mut self, ctx: &CompletionContext) { + fn check_matches_query_input(&mut self, ctx: &TreesitterContext) { let content = match ctx.get_node_under_cursor_content() { - Some(c) => c.replace('"', ""), - None => return, + Some(c) if !sanitization::is_sanitized_token(c.as_str()) => c.replace('"', ""), + _ => return, }; let name = match self.data { @@ -47,6 +49,7 @@ impl CompletionScore<'_> { CompletionRelevanceData::Column(c) => c.name.as_str().to_ascii_lowercase(), CompletionRelevanceData::Schema(s) => s.name.as_str().to_ascii_lowercase(), CompletionRelevanceData::Policy(p) => 
p.name.as_str().to_ascii_lowercase(), + CompletionRelevanceData::Role(r) => r.name.as_str().to_ascii_lowercase(), }; let fz_matcher = SkimMatcherV2::default(); @@ -68,7 +71,7 @@ impl CompletionScore<'_> { } } - fn check_matching_clause_type(&mut self, ctx: &CompletionContext) { + fn check_matching_clause_type(&mut self, ctx: &TreesitterContext) { let clause_type = match ctx.wrapping_clause_type.as_ref() { None => return, Some(ct) => ct, @@ -126,17 +129,24 @@ impl CompletionScore<'_> { WrappingClause::PolicyName => 25, _ => -50, }, + + CompletionRelevanceData::Role(_) => match clause_type { + WrappingClause::DropRole | WrappingClause::AlterRole => 25, + _ => -50, + }, } } - fn check_matching_wrapping_node(&mut self, ctx: &CompletionContext) { + fn check_matching_wrapping_node(&mut self, ctx: &TreesitterContext) { let wrapping_node = match ctx.wrapping_node_kind.as_ref() { None => return, Some(wn) => wn, }; let has_mentioned_schema = ctx.schema_or_alias_name.is_some(); - let has_node_text = ctx.get_node_under_cursor_content().is_some(); + let has_node_text = ctx + .get_node_under_cursor_content() + .is_some_and(|txt| !sanitization::is_sanitized_token(txt.as_str())); self.score += match self.data { CompletionRelevanceData::Table(_) => match wrapping_node { @@ -160,10 +170,11 @@ impl CompletionScore<'_> { _ => -50, }, CompletionRelevanceData::Policy(_) => 0, + CompletionRelevanceData::Role(_) => 0, } } - fn check_is_invocation(&mut self, ctx: &CompletionContext) { + fn check_is_invocation(&mut self, ctx: &TreesitterContext) { self.score += match self.data { CompletionRelevanceData::Function(_) if ctx.is_invocation => 30, CompletionRelevanceData::Function(_) if !ctx.is_invocation => -10, @@ -172,13 +183,16 @@ impl CompletionScore<'_> { }; } - fn check_matches_schema(&mut self, ctx: &CompletionContext) { + fn check_matches_schema(&mut self, ctx: &TreesitterContext) { let schema_name = match ctx.schema_or_alias_name.as_ref() { None => return, Some(n) => n, }; - let 
data_schema = self.get_schema_name(); + let data_schema = match self.get_schema_name() { + Some(s) => s, + None => return, + }; if schema_name == data_schema { self.score += 25; @@ -187,13 +201,25 @@ impl CompletionScore<'_> { } } - fn get_schema_name(&self) -> &str { + fn get_item_name(&self) -> &str { match self.data { - CompletionRelevanceData::Function(f) => f.schema.as_str(), - CompletionRelevanceData::Table(t) => t.schema.as_str(), - CompletionRelevanceData::Column(c) => c.schema_name.as_str(), + CompletionRelevanceData::Table(t) => t.name.as_str(), + CompletionRelevanceData::Function(f) => f.name.as_str(), + CompletionRelevanceData::Column(c) => c.name.as_str(), CompletionRelevanceData::Schema(s) => s.name.as_str(), - CompletionRelevanceData::Policy(p) => p.schema_name.as_str(), + CompletionRelevanceData::Policy(p) => p.name.as_str(), + CompletionRelevanceData::Role(r) => r.name.as_str(), + } + } + + fn get_schema_name(&self) -> Option<&str> { + match self.data { + CompletionRelevanceData::Function(f) => Some(f.schema.as_str()), + CompletionRelevanceData::Table(t) => Some(t.schema.as_str()), + CompletionRelevanceData::Column(c) => Some(c.schema_name.as_str()), + CompletionRelevanceData::Schema(s) => Some(s.name.as_str()), + CompletionRelevanceData::Policy(p) => Some(p.schema_name.as_str()), + CompletionRelevanceData::Role(_) => None, } } @@ -206,13 +232,16 @@ impl CompletionScore<'_> { } } - fn check_relations_in_stmt(&mut self, ctx: &CompletionContext) { + fn check_relations_in_stmt(&mut self, ctx: &TreesitterContext) { match self.data { CompletionRelevanceData::Table(_) | CompletionRelevanceData::Function(_) => return, _ => {} } - let schema = self.get_schema_name().to_string(); + let schema = match self.get_schema_name() { + Some(s) => s.to_string(), + None => return, + }; let table_name = match self.get_table_name() { Some(t) => t, None => return, @@ -234,22 +263,60 @@ impl CompletionScore<'_> { } fn check_is_user_defined(&mut self) { - let schema = 
self.get_schema_name().to_string(); + if let CompletionRelevanceData::Role(r) = self.data { + match r.name.as_str() { + "pg_read_all_data" + | "pg_write_all_data" + | "pg_read_all_settings" + | "pg_read_all_stats" + | "pg_stat_scan_tables" + | "pg_monitor" + | "pg_database_owner" + | "pg_signal_backend" + | "pg_read_server_files" + | "pg_write_server_files" + | "pg_execute_server_program" + | "pg_checkpoint" + | "pg_maintain" + | "pg_use_reserved_connections" + | "pg_create_subscription" + | "postgres" => self.score -= 20, + _ => {} + }; + + return; + } + + let schema_name = match self.get_schema_name() { + Some(s) => s.to_string(), + None => return, + }; let system_schemas = ["pg_catalog", "information_schema", "pg_toast"]; - if system_schemas.contains(&schema.as_str()) { + if system_schemas.contains(&schema_name.as_str()) { self.score -= 20; } // "public" is the default postgres schema where users // create objects. Prefer it by a slight bit. - if schema.as_str() == "public" { + if schema_name.as_str() == "public" { self.score += 2; } + + let item_name = self.get_item_name().to_string(); + let table_name = self.get_table_name(); + + // migrations shouldn't pop up on top + if item_name.contains("migrations") + || table_name.is_some_and(|t| t.contains("migrations")) + || schema_name.contains("migrations") + { + self.score -= 15; + } } - fn check_columns_in_stmt(&mut self, ctx: &CompletionContext) { + fn check_columns_in_stmt(&mut self, ctx: &TreesitterContext) { if let CompletionRelevanceData::Column(column) = self.data { /* * Columns can be mentioned in one of two ways: diff --git a/crates/pgt_completions/src/sanitization.rs b/crates/pgt_completions/src/sanitization.rs index 40dea7e6e..155256c8a 100644 --- a/crates/pgt_completions/src/sanitization.rs +++ b/crates/pgt_completions/src/sanitization.rs @@ -6,6 +6,7 @@ use crate::CompletionParams; static SANITIZED_TOKEN: &str = "REPLACED_TOKEN"; +#[derive(Debug)] pub(crate) struct SanitizedCompletionParams<'a> { pub 
position: TextSize, pub text: String, @@ -22,6 +23,10 @@ pub(crate) fn remove_sanitized_token(it: &str) -> String { it.replace(SANITIZED_TOKEN, "") } +pub(crate) fn is_sanitized_token(txt: &str) -> bool { + txt == SANITIZED_TOKEN +} + #[derive(PartialEq, Eq, Debug)] pub(crate) enum NodeText { Replaced, @@ -48,7 +53,8 @@ impl<'larger, 'smaller> From> for SanitizedCompletionP where 'larger: 'smaller, { - fn from(params: CompletionParams<'larger>) -> Self { + fn from(mut params: CompletionParams<'larger>) -> Self { + params.text = params.text.to_ascii_lowercase(); if cursor_inbetween_nodes(¶ms.text, params.position) || cursor_prepared_to_write_token_after_last_node(¶ms.text, params.position) || cursor_before_semicolon(params.tree, params.position) @@ -116,10 +122,6 @@ where tree: Cow::Borrowed(params.tree), } } - - pub fn is_sanitized_token(txt: &str) -> bool { - txt == SANITIZED_TOKEN - } } /// Checks if the cursor is positioned inbetween two SQL nodes. @@ -255,21 +257,56 @@ fn cursor_between_parentheses(sql: &str, position: TextSize) -> bool { .find(|c| !c.is_whitespace()) .unwrap_or_default(); - let before_matches = before == ',' || before == '('; - let after_matches = after == ',' || after == ')'; + // (.. 
and |) + let after_and_keyword = &sql[position.saturating_sub(4)..position] == "and " && after == ')'; + let after_eq_sign = before == '=' && after == ')'; + + let head_of_list = before == '(' && after == ','; + let end_of_list = before == ',' && after == ')'; + let between_list_items = before == ',' && after == ','; - before_matches && after_matches + head_of_list || end_of_list || between_list_items || after_and_keyword || after_eq_sign } #[cfg(test)] mod tests { + use pgt_schema_cache::SchemaCache; use pgt_text_size::TextSize; - use crate::sanitization::{ - cursor_before_semicolon, cursor_between_parentheses, cursor_inbetween_nodes, - cursor_on_a_dot, cursor_prepared_to_write_token_after_last_node, + use crate::{ + CompletionParams, SanitizedCompletionParams, + sanitization::{ + cursor_before_semicolon, cursor_between_parentheses, cursor_inbetween_nodes, + cursor_on_a_dot, cursor_prepared_to_write_token_after_last_node, + }, }; + #[test] + fn should_lowercase_everything_except_replaced_token() { + let input = "SELECT FROM users WHERE ts = NOW();"; + + let position = TextSize::new(7); + let cache = SchemaCache::default(); + + let mut ts = tree_sitter::Parser::new(); + ts.set_language(tree_sitter_sql::language()).unwrap(); + let tree = ts.parse(input, None).unwrap(); + + let params = CompletionParams { + position, + schema: &cache, + text: input.into(), + tree: &tree, + }; + + let sanitized = SanitizedCompletionParams::from(params); + + assert_eq!( + sanitized.text, + "select REPLACED_TOKEN from users where ts = now();" + ); + } + #[test] fn test_cursor_inbetween_nodes() { // note: two spaces between select and from. 
@@ -412,5 +449,22 @@ mod tests { "insert into instruments (name) values (a_function(name, ))", TextSize::new(56) )); + + // will sanitize after = + assert!(cursor_between_parentheses( + // create policy my_pol on users using (id = |), + "create policy my_pol on users using (id = )", + TextSize::new(42) + )); + + // will sanitize after and + assert!(cursor_between_parentheses( + // create policy my_pol on users using (id = 1 and |), + "create policy my_pol on users using (id = 1 and )", + TextSize::new(48) + )); + + // does not break if sql is really short + assert!(!cursor_between_parentheses("(a)", TextSize::new(2))); } } diff --git a/crates/pgt_completions/src/test_helper.rs b/crates/pgt_completions/src/test_helper.rs index 937c11af0..e6c347614 100644 --- a/crates/pgt_completions/src/test_helper.rs +++ b/crates/pgt_completions/src/test_helper.rs @@ -1,50 +1,22 @@ -use std::fmt::Display; - use pgt_schema_cache::SchemaCache; -use pgt_test_utils::test_database::get_new_test_db; -use sqlx::Executor; +use pgt_test_utils::QueryWithCursorPosition; +use sqlx::{Executor, PgPool}; use crate::{CompletionItem, CompletionItemKind, CompletionParams, complete}; -pub static CURSOR_POS: char = '€'; - -#[derive(Clone)] -pub struct InputQuery { - sql: String, - position: usize, -} - -impl From<&str> for InputQuery { - fn from(value: &str) -> Self { - let position = value - .find(CURSOR_POS) - .expect("Insert Cursor Position into your Query."); - - InputQuery { - sql: value.replace(CURSOR_POS, "").trim().to_string(), - position, - } - } -} - -impl Display for InputQuery { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "{}", self.sql) - } -} - pub(crate) async fn get_test_deps( - setup: &str, - input: InputQuery, + setup: Option<&str>, + input: QueryWithCursorPosition, + test_db: &PgPool, ) -> (tree_sitter::Tree, pgt_schema_cache::SchemaCache) { - let test_db = get_new_test_db().await; - - test_db - .execute(setup) - .await - .expect("Failed to 
execute setup query"); + if let Some(setup) = setup { + test_db + .execute(setup) + .await + .expect("Failed to execute setup query"); + } - let schema_cache = SchemaCache::load(&test_db) + let schema_cache = SchemaCache::load(test_db) .await .expect("Failed to load Schema Cache"); @@ -63,7 +35,7 @@ pub(crate) async fn get_test_deps( #[allow(dead_code)] pub(crate) async fn test_against_connection_string( conn_str: &str, - input: InputQuery, + input: QueryWithCursorPosition, ) -> (tree_sitter::Tree, pgt_schema_cache::SchemaCache) { let pool = sqlx::PgPool::connect(conn_str) .await @@ -83,16 +55,12 @@ pub(crate) async fn test_against_connection_string( (tree, schema_cache) } -pub(crate) fn get_text_and_position(q: InputQuery) -> (usize, String) { - (q.position, q.sql) -} - pub(crate) fn get_test_params<'a>( tree: &'a tree_sitter::Tree, schema_cache: &'a pgt_schema_cache::SchemaCache, - sql: InputQuery, + sql: QueryWithCursorPosition, ) -> CompletionParams<'a> { - let (position, text) = get_text_and_position(sql); + let (position, text) = sql.get_text_and_position(); CompletionParams { position: (position as u32).into(), @@ -102,46 +70,6 @@ pub(crate) fn get_test_params<'a>( } } -#[cfg(test)] -mod tests { - use crate::test_helper::CURSOR_POS; - - use super::InputQuery; - - #[test] - fn input_query_should_extract_correct_position() { - struct TestCase { - query: String, - expected_pos: usize, - expected_sql_len: usize, - } - - let cases = vec![ - TestCase { - query: format!("select * from{}", CURSOR_POS), - expected_pos: 13, - expected_sql_len: 13, - }, - TestCase { - query: format!("{}select * from", CURSOR_POS), - expected_pos: 0, - expected_sql_len: 13, - }, - TestCase { - query: format!("select {} from", CURSOR_POS), - expected_pos: 7, - expected_sql_len: 12, - }, - ]; - - for case in cases { - let query = InputQuery::from(case.query.as_str()); - assert_eq!(query.position, case.expected_pos); - assert_eq!(query.sql.len(), case.expected_sql_len); - } - } -} - 
#[derive(Debug, PartialEq, Eq)] pub(crate) enum CompletionAssertion { Label(String), @@ -206,9 +134,10 @@ impl CompletionAssertion { pub(crate) async fn assert_complete_results( query: &str, assertions: Vec, - setup: &str, + setup: Option<&str>, + pool: &PgPool, ) { - let (tree, cache) = get_test_deps(setup, query.into()).await; + let (tree, cache) = get_test_deps(setup, query.into(), pool).await; let params = get_test_params(&tree, &cache, query.into()); let items = complete(params); @@ -241,8 +170,8 @@ pub(crate) async fn assert_complete_results( }); } -pub(crate) async fn assert_no_complete_results(query: &str, setup: &str) { - let (tree, cache) = get_test_deps(setup, query.into()).await; +pub(crate) async fn assert_no_complete_results(query: &str, setup: Option<&str>, pool: &PgPool) { + let (tree, cache) = get_test_deps(setup, query.into(), pool).await; let params = get_test_params(&tree, &cache, query.into()); let items = complete(params); diff --git a/crates/pgt_configuration/Cargo.toml b/crates/pgt_configuration/Cargo.toml index 61da458b3..3bd685fa5 100644 --- a/crates/pgt_configuration/Cargo.toml +++ b/crates/pgt_configuration/Cargo.toml @@ -16,6 +16,7 @@ biome_deserialize = { workspace = true, features = ["schema"] } biome_deserialize_macros = { workspace = true } bpaf = { workspace = true } indexmap = { workspace = true } +oxc_resolver = { workspace = true } pgt_analyse = { workspace = true } pgt_analyser = { workspace = true } pgt_console = { workspace = true } diff --git a/crates/pgt_configuration/src/analyser/linter/rules.rs b/crates/pgt_configuration/src/analyser/linter/rules.rs index 14d796bf8..d45199b07 100644 --- a/crates/pgt_configuration/src/analyser/linter/rules.rs +++ b/crates/pgt_configuration/src/analyser/linter/rules.rs @@ -65,10 +65,9 @@ impl Rules { } #[doc = r" Given a category coming from [Diagnostic](pgt_diagnostics::Diagnostic), this function returns"] #[doc = r" the [Severity](pgt_diagnostics::Severity) associated to the rule, if the 
configuration changed it."] - #[doc = r" If the severity is off or not set, then the function returns the default severity of the rule:"] - #[doc = r" [Severity::Error] for recommended rules and [Severity::Warning] for other rules."] - #[doc = r""] - #[doc = r" If not, the function returns [None]."] + #[doc = r" If the severity is off or not set, then the function returns the default severity of the rule,"] + #[doc = r" which is configured at the rule definition."] + #[doc = r" The function can return `None` if the rule is not properly configured."] pub fn get_severity_from_code(&self, category: &Category) -> Option { let mut split_code = category.name().split('/'); let _lint = split_code.next(); @@ -82,16 +81,7 @@ impl Rules { .as_ref() .and_then(|group| group.get_rule_configuration(rule_name)) .filter(|(level, _)| !matches!(level, RulePlainConfiguration::Off)) - .map_or_else( - || { - if Safety::is_recommended_rule(rule_name) { - Severity::Error - } else { - Severity::Warning - } - }, - |(level, _)| level.into(), - ), + .map_or_else(|| Safety::severity(rule_name), |(level, _)| level.into()), }; Some(severity) } @@ -131,6 +121,14 @@ impl Rules { } enabled_rules.difference(&disabled_rules).copied().collect() } + #[doc = r" It returns the disabled rules by configuration."] + pub fn as_disabled_rules(&self) -> FxHashSet> { + let mut disabled_rules = FxHashSet::default(); + if let Some(group) = self.safety.as_ref() { + disabled_rules.extend(&group.get_disabled_rules()); + } + disabled_rules + } } #[derive(Clone, Debug, Default, Deserialize, Eq, Merge, PartialEq, Serialize)] #[cfg_attr(feature = "schema", derive(JsonSchema))] @@ -150,33 +148,41 @@ pub struct Safety { #[doc = "Dropping a column may break existing clients."] #[serde(skip_serializing_if = "Option::is_none")] pub ban_drop_column: Option>, + #[doc = "Dropping a database may break existing clients (and everything else, really)."] + #[serde(skip_serializing_if = "Option::is_none")] + pub ban_drop_database: 
Option>, #[doc = "Dropping a NOT NULL constraint may break existing clients."] #[serde(skip_serializing_if = "Option::is_none")] pub ban_drop_not_null: Option>, #[doc = "Dropping a table may break existing clients."] #[serde(skip_serializing_if = "Option::is_none")] pub ban_drop_table: Option>, + #[doc = "Using TRUNCATE's CASCADE option will truncate any tables that are also foreign-keyed to the specified tables."] + #[serde(skip_serializing_if = "Option::is_none")] + pub ban_truncate_cascade: Option>, } impl Safety { const GROUP_NAME: &'static str = "safety"; pub(crate) const GROUP_RULES: &'static [&'static str] = &[ "addingRequiredField", "banDropColumn", + "banDropDatabase", "banDropNotNull", "banDropTable", + "banTruncateCascade", ]; - const RECOMMENDED_RULES: &'static [&'static str] = - &["banDropColumn", "banDropNotNull", "banDropTable"]; const RECOMMENDED_RULES_AS_FILTERS: &'static [RuleFilter<'static>] = &[ RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[1]), - RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[2]), RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[3]), + RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[4]), ]; const ALL_RULES_AS_FILTERS: &'static [RuleFilter<'static>] = &[ RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[0]), RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[1]), RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[2]), RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[3]), + RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[4]), + RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[5]), ]; #[doc = r" Retrieves the recommended rules"] pub(crate) fn is_recommended_true(&self) -> bool { @@ -203,16 +209,26 @@ impl Safety { index_set.insert(RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[1])); } } - if let Some(rule) = self.ban_drop_not_null.as_ref() { + if let Some(rule) = self.ban_drop_database.as_ref() { if rule.is_enabled() { index_set.insert(RuleFilter::Rule(Self::GROUP_NAME, 
Self::GROUP_RULES[2])); } } - if let Some(rule) = self.ban_drop_table.as_ref() { + if let Some(rule) = self.ban_drop_not_null.as_ref() { if rule.is_enabled() { index_set.insert(RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[3])); } } + if let Some(rule) = self.ban_drop_table.as_ref() { + if rule.is_enabled() { + index_set.insert(RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[4])); + } + } + if let Some(rule) = self.ban_truncate_cascade.as_ref() { + if rule.is_enabled() { + index_set.insert(RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[5])); + } + } index_set } pub(crate) fn get_disabled_rules(&self) -> FxHashSet> { @@ -227,26 +243,32 @@ impl Safety { index_set.insert(RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[1])); } } - if let Some(rule) = self.ban_drop_not_null.as_ref() { + if let Some(rule) = self.ban_drop_database.as_ref() { if rule.is_disabled() { index_set.insert(RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[2])); } } - if let Some(rule) = self.ban_drop_table.as_ref() { + if let Some(rule) = self.ban_drop_not_null.as_ref() { if rule.is_disabled() { index_set.insert(RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[3])); } } + if let Some(rule) = self.ban_drop_table.as_ref() { + if rule.is_disabled() { + index_set.insert(RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[4])); + } + } + if let Some(rule) = self.ban_truncate_cascade.as_ref() { + if rule.is_disabled() { + index_set.insert(RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[5])); + } + } index_set } #[doc = r" Checks if, given a rule name, matches one of the rules contained in this category"] pub(crate) fn has_rule(rule_name: &str) -> Option<&'static str> { Some(Self::GROUP_RULES[Self::GROUP_RULES.binary_search(&rule_name).ok()?]) } - #[doc = r" Checks if, given a rule name, it is marked as recommended"] - pub(crate) fn is_recommended_rule(rule_name: &str) -> bool { - Self::RECOMMENDED_RULES.contains(&rule_name) - } pub(crate) fn 
recommended_rules_as_filters() -> &'static [RuleFilter<'static>] { Self::RECOMMENDED_RULES_AS_FILTERS } @@ -268,6 +290,17 @@ impl Safety { enabled_rules.extend(Self::recommended_rules_as_filters()); } } + pub(crate) fn severity(rule_name: &str) -> Severity { + match rule_name { + "addingRequiredField" => Severity::Error, + "banDropColumn" => Severity::Warning, + "banDropDatabase" => Severity::Warning, + "banDropNotNull" => Severity::Warning, + "banDropTable" => Severity::Warning, + "banTruncateCascade" => Severity::Error, + _ => unreachable!(), + } + } pub(crate) fn get_rule_configuration( &self, rule_name: &str, @@ -281,6 +314,10 @@ impl Safety { .ban_drop_column .as_ref() .map(|conf| (conf.level(), conf.get_options())), + "banDropDatabase" => self + .ban_drop_database + .as_ref() + .map(|conf| (conf.level(), conf.get_options())), "banDropNotNull" => self .ban_drop_not_null .as_ref() @@ -289,6 +326,10 @@ impl Safety { .ban_drop_table .as_ref() .map(|conf| (conf.level(), conf.get_options())), + "banTruncateCascade" => self + .ban_truncate_cascade + .as_ref() + .map(|conf| (conf.level(), conf.get_options())), _ => None, } } diff --git a/crates/pgt_configuration/src/diagnostics.rs b/crates/pgt_configuration/src/diagnostics.rs index dc835ed72..79fd77142 100644 --- a/crates/pgt_configuration/src/diagnostics.rs +++ b/crates/pgt_configuration/src/diagnostics.rs @@ -1,5 +1,7 @@ use pgt_console::fmt::Display; use pgt_console::{MarkupBuf, markup}; +use pgt_diagnostics::adapters::ResolveError; + use pgt_diagnostics::{Advices, Diagnostic, Error, LogCategory, MessageAndDescription, Visit}; use serde::{Deserialize, Serialize}; use std::fmt::{Debug, Formatter}; @@ -21,6 +23,12 @@ pub enum ConfigurationDiagnostic { /// Thrown when the pattern inside the `ignore` field errors InvalidIgnorePattern(InvalidIgnorePattern), + + /// Thrown when there's something wrong with the files specified inside `"extends"` + CantLoadExtendFile(CantLoadExtendFile), + + /// Thrown when a 
configuration file can't be resolved from `node_modules` + CantResolve(CantResolve), } impl ConfigurationDiagnostic { @@ -72,6 +80,18 @@ impl ConfigurationDiagnostic { message: MessageAndDescription::from(markup! {{message}}.to_owned()), }) } + + pub fn cant_resolve(path: impl Display, source: oxc_resolver::ResolveError) -> Self { + Self::CantResolve(CantResolve { + message: MessageAndDescription::from( + markup! { + "Failed to resolve the configuration from "{{path}} + } + .to_owned(), + ), + source: Some(Error::from(ResolveError::from(source))), + }) + } } impl Debug for ConfigurationDiagnostic { @@ -168,3 +188,36 @@ pub struct CantResolve { #[source] source: Option, } + +#[derive(Debug, Serialize, Deserialize, Diagnostic)] +#[diagnostic( + category = "configuration", + severity = Error, +)] +pub struct CantLoadExtendFile { + #[location(resource)] + file_path: String, + #[message] + #[description] + message: MessageAndDescription, + + #[verbose_advice] + verbose_advice: ConfigurationAdvices, +} + +impl CantLoadExtendFile { + pub fn new(file_path: impl Into, message: impl Display) -> Self { + Self { + file_path: file_path.into(), + message: MessageAndDescription::from(markup! {{message}}.to_owned()), + verbose_advice: ConfigurationAdvices::default(), + } + } + + pub fn with_verbose_advice(mut self, messsage: impl Display) -> Self { + self.verbose_advice + .messages + .push(markup! 
{{messsage}}.to_owned()); + self + } +} diff --git a/crates/pgt_configuration/src/lib.rs b/crates/pgt_configuration/src/lib.rs index fcf0b5c60..b862dce4f 100644 --- a/crates/pgt_configuration/src/lib.rs +++ b/crates/pgt_configuration/src/lib.rs @@ -22,6 +22,7 @@ pub use analyser::{ RulePlainConfiguration, RuleSelector, RuleWithFixOptions, RuleWithOptions, Rules, partial_linter_configuration, }; +use biome_deserialize::StringSet; use biome_deserialize_macros::{Merge, Partial}; use bpaf::Bpaf; use database::{ @@ -50,6 +51,10 @@ pub struct Configuration { #[partial(bpaf(hide))] pub schema: String, + /// A list of paths to other JSON files, used to extends the current configuration. + #[partial(bpaf(hide))] + pub extends: StringSet, + /// The configuration of the VCS integration #[partial(type, bpaf(external(partial_vcs_configuration), optional, hide_usage))] pub vcs: VcsConfiguration, @@ -85,6 +90,7 @@ impl PartialConfiguration { pub fn init() -> Self { Self { schema: Some(format!("https://pgtools.dev/schemas/{VERSION}/schema.json")), + extends: Some(StringSet::default()), files: Some(PartialFilesConfiguration { ignore: Some(Default::default()), ..Default::default() diff --git a/crates/pgt_diagnostics/Cargo.toml b/crates/pgt_diagnostics/Cargo.toml index 190b25f07..06c6f8dcd 100644 --- a/crates/pgt_diagnostics/Cargo.toml +++ b/crates/pgt_diagnostics/Cargo.toml @@ -15,6 +15,7 @@ version = "0.0.0" backtrace = "0.3.74" bpaf = { workspace = true } enumflags2 = { workspace = true } +oxc_resolver = { workspace = true } pgt_console = { workspace = true, features = ["serde"] } pgt_diagnostics_categories = { workspace = true, features = ["serde"] } pgt_diagnostics_macros = { workspace = true } diff --git a/crates/pgt_diagnostics/src/adapters.rs b/crates/pgt_diagnostics/src/adapters.rs index ca627d3b1..5c3dcdd58 100644 --- a/crates/pgt_diagnostics/src/adapters.rs +++ b/crates/pgt_diagnostics/src/adapters.rs @@ -134,3 +134,28 @@ impl Diagnostic for SerdeJsonError { 
fmt.write_markup(markup!({ AsConsoleDisplay(&self.error) })) } } + +#[derive(Debug)] +pub struct ResolveError { + error: oxc_resolver::ResolveError, +} + +impl From for ResolveError { + fn from(error: oxc_resolver::ResolveError) -> Self { + Self { error } + } +} + +impl Diagnostic for ResolveError { + fn category(&self) -> Option<&'static Category> { + Some(category!("internalError/io")) + } + + fn description(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(fmt, "{}", self.error) + } + + fn message(&self, fmt: &mut fmt::Formatter<'_>) -> io::Result<()> { + fmt.write_markup(markup!({ AsConsoleDisplay(&self.error) })) + } +} diff --git a/crates/pgt_diagnostics/src/diagnostic.rs b/crates/pgt_diagnostics/src/diagnostic.rs index 9a62ede15..3f365aed7 100644 --- a/crates/pgt_diagnostics/src/diagnostic.rs +++ b/crates/pgt_diagnostics/src/diagnostic.rs @@ -6,6 +6,7 @@ use std::{ str::FromStr, }; +use bpaf::Bpaf; use enumflags2::{BitFlags, bitflags, make_bitflags}; use serde::{Deserialize, Serialize}; @@ -115,7 +116,7 @@ pub trait Diagnostic: Debug { } #[derive( - Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize, Default, + Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize, Default, Bpaf, )] #[serde(rename_all = "camelCase")] #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] diff --git a/crates/pgt_diagnostics/src/display/message.rs b/crates/pgt_diagnostics/src/display/message.rs index 3cf9be3fd..20c039a9d 100644 --- a/crates/pgt_diagnostics/src/display/message.rs +++ b/crates/pgt_diagnostics/src/display/message.rs @@ -47,6 +47,15 @@ impl From for MessageAndDescription { } } +impl From<&str> for MessageAndDescription { + fn from(description: &str) -> Self { + Self { + message: markup! 
{ {description} }.to_owned(), + description: description.into(), + } + } +} + impl From for MessageAndDescription { fn from(message: MarkupBuf) -> Self { let description = markup_to_string(&message); diff --git a/crates/pgt_diagnostics/src/location.rs b/crates/pgt_diagnostics/src/location.rs index cbd8e6464..e17ace9c3 100644 --- a/crates/pgt_diagnostics/src/location.rs +++ b/crates/pgt_diagnostics/src/location.rs @@ -41,13 +41,13 @@ impl Eq for Location<'_> {} #[derive(Debug, Clone, Copy, Eq, PartialEq, Serialize, Deserialize)] #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] #[serde(rename_all = "camelCase")] -pub enum Resource

{ +pub enum Resource { /// The diagnostic is related to the content of the command line arguments. Argv, /// The diagnostic is related to the content of a memory buffer. Memory, /// The diagnostic is related to a file on the filesystem. - File(P), + File(Path), } impl

Resource

{ diff --git a/crates/pgt_diagnostics/src/serde.rs b/crates/pgt_diagnostics/src/serde.rs index 334bd4e9e..57ed3e280 100644 --- a/crates/pgt_diagnostics/src/serde.rs +++ b/crates/pgt_diagnostics/src/serde.rs @@ -164,6 +164,7 @@ impl From> for Location { #[serde(rename_all = "camelCase")] #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] #[cfg_attr(test, derive(Eq, PartialEq))] + struct Advices { advices: Vec, } @@ -250,7 +251,7 @@ impl super::Advices for Advices { #[derive(Clone, Debug, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] -#[cfg_attr(test, derive(Eq, PartialEq))] +#[cfg_attr(test, derive(PartialEq, Eq))] enum Advice { Log(LogCategory, MarkupBuf), List(Vec), diff --git a/crates/pgt_diagnostics_categories/src/categories.rs b/crates/pgt_diagnostics_categories/src/categories.rs index 8a91cfb53..14df90b9e 100644 --- a/crates/pgt_diagnostics_categories/src/categories.rs +++ b/crates/pgt_diagnostics_categories/src/categories.rs @@ -13,10 +13,12 @@ // must be between `define_categories! {\n` and `\n ;\n`. define_categories! 
{ - "lint/safety/addingRequiredField": "https://pglt.dev/linter/rules/adding-required-field", - "lint/safety/banDropColumn": "https://pglt.dev/linter/rules/ban-drop-column", - "lint/safety/banDropNotNull": "https://pglt.dev/linter/rules/ban-drop-not-null", - "lint/safety/banDropTable": "https://pglt.dev/linter/rules/ban-drop-table", + "lint/safety/addingRequiredField": "https://pgtools.dev/latest/rules/adding-required-field", + "lint/safety/banDropColumn": "https://pgtools.dev/latest/rules/ban-drop-column", + "lint/safety/banDropDatabase": "https://pgtools.dev/latest/rules/ban-drop-database", + "lint/safety/banDropNotNull": "https://pgtools.dev/latest/rules/ban-drop-not-null", + "lint/safety/banDropTable": "https://pgtools.dev/latest/rules/ban-drop-table", + "lint/safety/banTruncateCascade": "https://pgtools.dev/latest/rules/ban-truncate-cascade", // end lint rules ; // General categories @@ -30,6 +32,7 @@ define_categories! { "flags/invalid", "project", "typecheck", + "plpgsql_check", "internalError/panic", "syntax", "dummy", diff --git a/crates/pgt_fs/Cargo.toml b/crates/pgt_fs/Cargo.toml index 1e4a7b4ff..40478934c 100644 --- a/crates/pgt_fs/Cargo.toml +++ b/crates/pgt_fs/Cargo.toml @@ -15,6 +15,7 @@ version = "0.0.0" crossbeam = { workspace = true } directories = "5.0.1" enumflags2 = { workspace = true } +oxc_resolver = { workspace = true } parking_lot = { version = "0.12.3", features = ["arc_lock"] } pgt_diagnostics = { workspace = true } rayon = { workspace = true } diff --git a/crates/pgt_fs/src/fs.rs b/crates/pgt_fs/src/fs.rs index b73aef6e4..2bfd2e51b 100644 --- a/crates/pgt_fs/src/fs.rs +++ b/crates/pgt_fs/src/fs.rs @@ -1,6 +1,7 @@ use crate::{PathInterner, PgTPath}; pub use memory::{ErrorEntry, MemoryFileSystem}; pub use os::OsFileSystem; +use oxc_resolver::{Resolution, ResolveError}; use pgt_diagnostics::{Advices, Diagnostic, LogCategory, Visit, console}; use pgt_diagnostics::{Error, Severity}; use serde::{Deserialize, Serialize}; @@ -164,6 +165,12 @@ 
pub trait FileSystem: Send + Sync + RefUnwindSafe { fn get_changed_files(&self, base: &str) -> io::Result>; fn get_staged_files(&self) -> io::Result>; + + fn resolve_configuration( + &self, + specifier: &str, + path: &Path, + ) -> Result; } /// Result of the auto search @@ -355,6 +362,14 @@ where fn get_staged_files(&self) -> io::Result> { T::get_staged_files(self) } + + fn resolve_configuration( + &self, + specifier: &str, + path: &Path, + ) -> Result { + T::resolve_configuration(self, specifier, path) + } } #[derive(Debug, Diagnostic, Deserialize, Serialize)] diff --git a/crates/pgt_fs/src/fs/memory.rs b/crates/pgt_fs/src/fs/memory.rs index baffe0abf..a744e575f 100644 --- a/crates/pgt_fs/src/fs/memory.rs +++ b/crates/pgt_fs/src/fs/memory.rs @@ -1,3 +1,4 @@ +use oxc_resolver::{Resolution, ResolveError}; use rustc_hash::FxHashMap; use std::collections::hash_map::{Entry, IntoIter}; use std::io; @@ -227,6 +228,15 @@ impl FileSystem for MemoryFileSystem { Ok(cb()) } + + fn resolve_configuration( + &self, + _specifier: &str, + _path: &Path, + ) -> Result { + // not needed for the memory file system + todo!() + } } struct MemoryFile { diff --git a/crates/pgt_fs/src/fs/os.rs b/crates/pgt_fs/src/fs/os.rs index a2e40695d..5033f296f 100644 --- a/crates/pgt_fs/src/fs/os.rs +++ b/crates/pgt_fs/src/fs/os.rs @@ -5,9 +5,11 @@ use crate::{ FileSystem, PgTPath, fs::{TraversalContext, TraversalScope}, }; +use oxc_resolver::{Resolution, ResolveError, ResolveOptions, Resolver}; use pgt_diagnostics::{DiagnosticExt, Error, Severity, adapters::IoError}; use rayon::{Scope, scope}; use std::fs::{DirEntry, FileType}; +use std::panic::AssertUnwindSafe; use std::process::Command; use std::{ env, fs, @@ -21,12 +23,18 @@ const MAX_SYMLINK_DEPTH: u8 = 3; /// Implementation of [FileSystem] that directly calls through to the underlying OS pub struct OsFileSystem { pub working_directory: Option, + pub configuration_resolver: AssertUnwindSafe, } impl OsFileSystem { pub fn new(working_directory: 
PathBuf) -> Self { Self { working_directory: Some(working_directory), + configuration_resolver: AssertUnwindSafe(Resolver::new(ResolveOptions { + condition_names: vec!["node".to_string(), "import".to_string()], + extensions: vec![".json".to_string(), ".jsonc".to_string()], + ..ResolveOptions::default() + })), } } } @@ -35,6 +43,11 @@ impl Default for OsFileSystem { fn default() -> Self { Self { working_directory: env::current_dir().ok(), + configuration_resolver: AssertUnwindSafe(Resolver::new(ResolveOptions { + condition_names: vec!["node".to_string(), "import".to_string()], + extensions: vec![".json".to_string(), ".jsonc".to_string()], + ..ResolveOptions::default() + })), } } } @@ -116,6 +129,14 @@ impl FileSystem for OsFileSystem { .map(|l| l.to_string()) .collect()) } + + fn resolve_configuration( + &self, + specifier: &str, + path: &Path, + ) -> Result { + self.configuration_resolver.resolve(path, specifier) + } } struct OsFile { @@ -387,8 +408,6 @@ fn follow_symlink( path: &Path, ctx: &dyn TraversalContext, ) -> Result<(PathBuf, FileType), SymlinkExpansionError> { - tracing::info!("Translating symlink: {path:?}"); - let target_path = fs::read_link(path).map_err(|err| { ctx.push_diagnostic(IoError::from(err).with_file_path(path.to_string_lossy().to_string())); SymlinkExpansionError diff --git a/crates/pgt_lexer/Cargo.toml b/crates/pgt_lexer/Cargo.toml index 4b2185882..7f4ada43f 100644 --- a/crates/pgt_lexer/Cargo.toml +++ b/crates/pgt_lexer/Cargo.toml @@ -12,16 +12,12 @@ version = "0.0.0" [dependencies] -regex = "1.9.1" - -pg_query.workspace = true pgt_diagnostics.workspace = true pgt_lexer_codegen.workspace = true - -pgt_text_size.workspace = true +pgt_text_size.workspace = true +pgt_tokenizer.workspace = true [dev-dependencies] insta.workspace = true [lib] -doctest = false diff --git a/crates/pgt_lexer/README.md b/crates/pgt_lexer/README.md index ec61c7b26..57bdaa340 100644 --- a/crates/pgt_lexer/README.md +++ b/crates/pgt_lexer/README.md @@ -1,8 +1 @@ -# 
pgt_lexer - -The `pgt_lexer` crate exposes the `lex` method, which turns an SQL query text into a `Vec>`: the base for the `pg_parser` and most of pgtools's operations. - -A token is always of a certain `SyntaxKind` kind. That `SyntaxKind` enum is derived from `libpg_query`'s protobuf file. - -The SQL query text is mostly lexed using the `pg_query::scan` method (`pg_query` is just a Rust wrapper around `libpg_query`). -However, that method does not parse required whitespace tokens, so the `lex` method takes care of parsing those and merging them into the result. +Heavily inspired by and copied from [squawk_parser](https://github.com/sbdchd/squawk/tree/9acfecbbb7f3c7eedcbaf060e7b25f9afa136db3/crates/squawk_parser). Thanks for making all the hard work MIT-licensed! diff --git a/crates/pgt_lexer/src/codegen.rs b/crates/pgt_lexer/src/codegen.rs deleted file mode 100644 index 6c7505904..000000000 --- a/crates/pgt_lexer/src/codegen.rs +++ /dev/null @@ -1,3 +0,0 @@ -use pgt_lexer_codegen::lexer_codegen; - -lexer_codegen!(); diff --git a/crates/pgt_lexer/src/codegen/mod.rs b/crates/pgt_lexer/src/codegen/mod.rs new file mode 100644 index 000000000..c4e67bc5c --- /dev/null +++ b/crates/pgt_lexer/src/codegen/mod.rs @@ -0,0 +1 @@ +pub mod syntax_kind; diff --git a/crates/pgt_lexer/src/codegen/syntax_kind.rs b/crates/pgt_lexer/src/codegen/syntax_kind.rs new file mode 100644 index 000000000..f50398ec2 --- /dev/null +++ b/crates/pgt_lexer/src/codegen/syntax_kind.rs @@ -0,0 +1 @@ +pgt_lexer_codegen::syntax_kind_codegen!(); diff --git a/crates/pgt_lexer/src/diagnostics.rs b/crates/pgt_lexer/src/diagnostics.rs deleted file mode 100644 index 9516387ad..000000000 --- a/crates/pgt_lexer/src/diagnostics.rs +++ /dev/null @@ -1,67 +0,0 @@ -use pgt_diagnostics::{Diagnostic, MessageAndDescription}; -use pgt_text_size::TextRange; - -/// A specialized diagnostic for scan errors. -/// -/// Scan diagnostics are always **fatal errors**. 
-#[derive(Clone, Debug, Diagnostic, PartialEq)] -#[diagnostic(category = "syntax", severity = Fatal)] -pub struct ScanError { - /// The location where the error is occurred - #[location(span)] - span: Option, - #[message] - #[description] - pub message: MessageAndDescription, -} - -impl ScanError { - pub fn from_pg_query_err(err: pg_query::Error, input: &str) -> Vec { - let err_msg = err.to_string(); - let re = regex::Regex::new(r#"at or near "(.*?)""#).unwrap(); - let mut diagnostics = Vec::new(); - - for captures in re.captures_iter(&err_msg) { - if let Some(matched) = captures.get(1) { - let search_term = matched.as_str(); - for (idx, _) in input.match_indices(search_term) { - let from = idx; - let to = from + search_term.len(); - diagnostics.push(ScanError { - span: Some(TextRange::new( - from.try_into().unwrap(), - to.try_into().unwrap(), - )), - message: MessageAndDescription::from(err_msg.clone()), - }); - } - } - } - - if diagnostics.is_empty() { - diagnostics.push(ScanError { - span: None, - message: MessageAndDescription::from(err_msg), - }); - } - - diagnostics - } -} - -#[cfg(test)] -mod tests { - use crate::lex; - - #[test] - fn finds_all_occurrences() { - let input = - "select 1443ddwwd33djwdkjw13331333333333; select 1443ddwwd33djwdkjw13331333333333;"; - let diagnostics = lex(input).unwrap_err(); - assert_eq!(diagnostics.len(), 2); - assert_eq!(diagnostics[0].span.unwrap().start(), 7.into()); - assert_eq!(diagnostics[0].span.unwrap().end(), 39.into()); - assert_eq!(diagnostics[1].span.unwrap().start(), 48.into()); - assert_eq!(diagnostics[1].span.unwrap().end(), 80.into()); - } -} diff --git a/crates/pgt_lexer/src/lexed.rs b/crates/pgt_lexer/src/lexed.rs new file mode 100644 index 000000000..6f0a273f9 --- /dev/null +++ b/crates/pgt_lexer/src/lexed.rs @@ -0,0 +1,107 @@ +use pgt_diagnostics::{Diagnostic, MessageAndDescription}; +use pgt_text_size::TextRange; + +use crate::SyntaxKind; + +/// Internal error type used during lexing +#[derive(Debug, Clone)] 
+pub struct LexError { + pub msg: String, + pub token: u32, +} + +/// A specialized diagnostic for lex errors. +#[derive(Clone, Debug, Diagnostic, PartialEq)] +#[diagnostic(category = "syntax", severity = Error)] +pub struct LexDiagnostic { + /// The location where the error is occurred + #[location(span)] + pub span: TextRange, + #[message] + #[description] + pub message: MessageAndDescription, +} + +/// Result of lexing a string, providing access to tokens and diagnostics +pub struct Lexed<'a> { + pub(crate) text: &'a str, + pub(crate) kind: Vec, + pub(crate) start: Vec, + pub(crate) error: Vec, + pub(crate) line_ending_counts: Vec, +} + +impl Lexed<'_> { + /// Returns the number of tokens + pub fn len(&self) -> usize { + self.kind.len() + } + + /// Returns true if there are no tokens + pub fn is_empty(&self) -> bool { + self.len() == 0 + } + + /// Returns an iterator over token kinds + pub fn tokens(&self) -> impl Iterator + '_ { + self.kind.iter().copied() + } + + /// Returns the kind of token at the given index + pub fn kind(&self, idx: usize) -> SyntaxKind { + assert!( + idx < self.len(), + "expected index < {}, got {}", + self.len(), + idx + ); + self.kind[idx] + } + + /// Returns the number of line endings in the token at the given index + pub fn line_ending_count(&self, idx: usize) -> usize { + assert!( + idx < self.len(), + "expected index < {}, got {}", + self.len(), + idx + ); + assert!(self.kind(idx) == SyntaxKind::LINE_ENDING); + self.line_ending_counts[idx] + } + + /// Returns the text range of token at the given index + pub fn range(&self, idx: usize) -> TextRange { + self.text_range(idx) + } + + /// Returns the text of token at the given index + pub fn text(&self, idx: usize) -> &str { + self.range_text(idx..idx + 1) + } + + /// Returns all lexing errors with their text ranges + pub fn errors(&self) -> Vec { + self.error + .iter() + .map(|it| LexDiagnostic { + message: it.msg.as_str().into(), + span: self.text_range(it.token as usize), + }) + 
.collect() + } + + pub(crate) fn text_range(&self, i: usize) -> TextRange { + assert!(i < self.len()); + let lo = self.start[i]; + let hi = self.start[i + 1]; + TextRange::new(lo.into(), hi.into()) + } + + fn range_text(&self, r: std::ops::Range) -> &str { + assert!(r.start < r.end && r.end <= self.len()); + let lo = self.start[r.start] as usize; + let hi = self.start[r.end] as usize; + &self.text[lo..hi] + } +} diff --git a/crates/pgt_lexer/src/lexer.rs b/crates/pgt_lexer/src/lexer.rs new file mode 100644 index 000000000..3e6912295 --- /dev/null +++ b/crates/pgt_lexer/src/lexer.rs @@ -0,0 +1,221 @@ +use pgt_tokenizer::tokenize; + +use crate::SyntaxKind; +use crate::lexed::{LexError, Lexed}; + +/// Lexer that processes input text into tokens and diagnostics +pub struct Lexer<'a> { + text: &'a str, + kind: Vec, + start: Vec, + error: Vec, + offset: usize, + /// we store line ending counts outside of SyntaxKind because of the u16 represenation of SyntaxKind + line_ending_counts: Vec, +} + +impl<'a> Lexer<'a> { + /// Create a new lexer for the given text + pub fn new(text: &'a str) -> Self { + Self { + text, + kind: Vec::new(), + start: Vec::new(), + error: Vec::new(), + offset: 0, + line_ending_counts: Vec::new(), + } + } + + /// Lex the input text and return the result + pub fn lex(mut self) -> Lexed<'a> { + for token in tokenize(&self.text[self.offset..]) { + let token_text = &self.text[self.offset..][..token.len as usize]; + self.extend_token(&token.kind, token_text); + } + + // Add EOF token + self.push(SyntaxKind::EOF, 0, None, None); + + Lexed { + text: self.text, + kind: self.kind, + start: self.start, + error: self.error, + line_ending_counts: self.line_ending_counts, + } + } + + fn push( + &mut self, + kind: SyntaxKind, + len: usize, + err: Option<&str>, + line_ending_count: Option, + ) { + self.kind.push(kind); + self.start.push(self.offset as u32); + self.offset += len; + + assert!( + kind != SyntaxKind::LINE_ENDING || line_ending_count.is_some(), + "Line 
ending token must have a line ending count" + ); + + self.line_ending_counts.push(line_ending_count.unwrap_or(0)); + + if let Some(err) = err { + let token = (self.kind.len() - 1) as u32; + let msg = err.to_owned(); + self.error.push(LexError { msg, token }); + } + } + + fn extend_token(&mut self, kind: &pgt_tokenizer::TokenKind, token_text: &str) { + let mut err = ""; + let mut line_ending_count = None; + + let syntax_kind = { + match kind { + pgt_tokenizer::TokenKind::LineComment => SyntaxKind::COMMENT, + pgt_tokenizer::TokenKind::BlockComment { terminated } => { + if !terminated { + err = "Missing trailing `*/` symbols to terminate the block comment"; + } + SyntaxKind::COMMENT + } + pgt_tokenizer::TokenKind::Space => SyntaxKind::SPACE, + pgt_tokenizer::TokenKind::Tab => SyntaxKind::TAB, + pgt_tokenizer::TokenKind::LineEnding { count } => { + line_ending_count = Some(*count); + SyntaxKind::LINE_ENDING + } + pgt_tokenizer::TokenKind::VerticalTab => SyntaxKind::VERTICAL_TAB, + pgt_tokenizer::TokenKind::FormFeed => SyntaxKind::FORM_FEED, + pgt_tokenizer::TokenKind::Ident => { + SyntaxKind::from_keyword(token_text).unwrap_or(SyntaxKind::IDENT) + } + pgt_tokenizer::TokenKind::Literal { kind, .. 
} => { + self.extend_literal(token_text.len(), kind); + return; + } + pgt_tokenizer::TokenKind::Semi => SyntaxKind::SEMICOLON, + pgt_tokenizer::TokenKind::Comma => SyntaxKind::COMMA, + pgt_tokenizer::TokenKind::Dot => SyntaxKind::DOT, + pgt_tokenizer::TokenKind::OpenParen => SyntaxKind::L_PAREN, + pgt_tokenizer::TokenKind::CloseParen => SyntaxKind::R_PAREN, + pgt_tokenizer::TokenKind::OpenBracket => SyntaxKind::L_BRACK, + pgt_tokenizer::TokenKind::CloseBracket => SyntaxKind::R_BRACK, + pgt_tokenizer::TokenKind::At => SyntaxKind::AT, + pgt_tokenizer::TokenKind::Pound => SyntaxKind::POUND, + pgt_tokenizer::TokenKind::Tilde => SyntaxKind::TILDE, + pgt_tokenizer::TokenKind::Question => SyntaxKind::QUESTION, + pgt_tokenizer::TokenKind::Colon => SyntaxKind::COLON, + pgt_tokenizer::TokenKind::DoubleColon => SyntaxKind::DOUBLE_COLON, + pgt_tokenizer::TokenKind::Eq => SyntaxKind::EQ, + pgt_tokenizer::TokenKind::Bang => SyntaxKind::BANG, + pgt_tokenizer::TokenKind::Lt => SyntaxKind::L_ANGLE, + pgt_tokenizer::TokenKind::Gt => SyntaxKind::R_ANGLE, + pgt_tokenizer::TokenKind::Minus => SyntaxKind::MINUS, + pgt_tokenizer::TokenKind::And => SyntaxKind::AMP, + pgt_tokenizer::TokenKind::Or => SyntaxKind::PIPE, + pgt_tokenizer::TokenKind::Plus => SyntaxKind::PLUS, + pgt_tokenizer::TokenKind::Star => SyntaxKind::STAR, + pgt_tokenizer::TokenKind::Slash => SyntaxKind::SLASH, + pgt_tokenizer::TokenKind::Caret => SyntaxKind::CARET, + pgt_tokenizer::TokenKind::Percent => SyntaxKind::PERCENT, + pgt_tokenizer::TokenKind::Unknown => SyntaxKind::ERROR, + pgt_tokenizer::TokenKind::Backslash => SyntaxKind::BACKSLASH, + pgt_tokenizer::TokenKind::UnknownPrefix => { + err = "unknown literal prefix"; + SyntaxKind::IDENT + } + pgt_tokenizer::TokenKind::Eof => SyntaxKind::EOF, + pgt_tokenizer::TokenKind::Backtick => SyntaxKind::BACKTICK, + pgt_tokenizer::TokenKind::PositionalParam => SyntaxKind::POSITIONAL_PARAM, + pgt_tokenizer::TokenKind::NamedParam { kind } => { + match kind { + 
pgt_tokenizer::NamedParamKind::ColonIdentifier { terminated: false } => { + err = "Missing trailing \" to terminate the named parameter"; + } + pgt_tokenizer::NamedParamKind::ColonString { terminated: false } => { + err = "Missing trailing ' to terminate the named parameter"; + } + _ => {} + }; + SyntaxKind::POSITIONAL_PARAM + } + pgt_tokenizer::TokenKind::QuotedIdent { terminated } => { + if !terminated { + err = "Missing trailing \" to terminate the quoted identifier" + } + SyntaxKind::IDENT + } + } + }; + + let err = if err.is_empty() { None } else { Some(err) }; + self.push(syntax_kind, token_text.len(), err, line_ending_count); + } + + fn extend_literal(&mut self, len: usize, kind: &pgt_tokenizer::LiteralKind) { + let mut err = ""; + + let syntax_kind = match *kind { + pgt_tokenizer::LiteralKind::Int { empty_int, base: _ } => { + if empty_int { + err = "Missing digits after the integer base prefix"; + } + SyntaxKind::INT_NUMBER + } + pgt_tokenizer::LiteralKind::Float { + empty_exponent, + base: _, + } => { + if empty_exponent { + err = "Missing digits after the exponent symbol"; + } + SyntaxKind::FLOAT_NUMBER + } + pgt_tokenizer::LiteralKind::Str { terminated } => { + if !terminated { + err = "Missing trailing `'` symbol to terminate the string literal"; + } + SyntaxKind::STRING + } + pgt_tokenizer::LiteralKind::ByteStr { terminated } => { + if !terminated { + err = "Missing trailing `'` symbol to terminate the hex bit string literal"; + } + SyntaxKind::BYTE_STRING + } + pgt_tokenizer::LiteralKind::BitStr { terminated } => { + if !terminated { + err = "Missing trailing `'` symbol to terminate the bit string literal"; + } + SyntaxKind::BIT_STRING + } + pgt_tokenizer::LiteralKind::DollarQuotedString { terminated } => { + if !terminated { + err = "Unterminated dollar quoted string literal"; + } + SyntaxKind::DOLLAR_QUOTED_STRING + } + pgt_tokenizer::LiteralKind::UnicodeEscStr { terminated } => { + if !terminated { + err = "Missing trailing `'` symbol to terminate 
the unicode escape string literal"; + } + SyntaxKind::BYTE_STRING + } + pgt_tokenizer::LiteralKind::EscStr { terminated } => { + if !terminated { + err = "Missing trailing `'` symbol to terminate the escape string literal"; + } + SyntaxKind::ESC_STRING + } + }; + + let err = if err.is_empty() { None } else { Some(err) }; + self.push(syntax_kind, len, err, None); + } +} diff --git a/crates/pgt_lexer/src/lib.rs b/crates/pgt_lexer/src/lib.rs index 32bbdd42f..45fa6c6bf 100644 --- a/crates/pgt_lexer/src/lib.rs +++ b/crates/pgt_lexer/src/lib.rs @@ -1,293 +1,151 @@ mod codegen; -pub mod diagnostics; +mod lexed; +mod lexer; -use diagnostics::ScanError; -use pg_query::protobuf::{KeywordKind, ScanToken}; -use pgt_text_size::{TextLen, TextRange, TextSize}; -use regex::Regex; -use std::{collections::VecDeque, sync::LazyLock}; +pub use crate::codegen::syntax_kind::SyntaxKind; +pub use crate::lexed::{LexDiagnostic, Lexed}; +pub use crate::lexer::Lexer; -pub use crate::codegen::SyntaxKind; - -#[derive(Debug, Clone, PartialEq, Eq)] -pub enum TokenType { - Whitespace, - NoKeyword, - UnreservedKeyword, - ColNameKeyword, - TypeFuncNameKeyword, - ReservedKeyword, -} - -impl From<&ScanToken> for TokenType { - fn from(token: &ScanToken) -> TokenType { - match token.token { - // SqlComment | CComment - 275 | 276 => TokenType::Whitespace, - _ => match token.keyword_kind() { - KeywordKind::NoKeyword => TokenType::NoKeyword, - KeywordKind::UnreservedKeyword => TokenType::UnreservedKeyword, - KeywordKind::ColNameKeyword => TokenType::ColNameKeyword, - KeywordKind::TypeFuncNameKeyword => TokenType::TypeFuncNameKeyword, - KeywordKind::ReservedKeyword => TokenType::ReservedKeyword, - }, - } - } -} - -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct Token { - pub kind: SyntaxKind, - pub text: String, - pub span: TextRange, - pub token_type: TokenType, -} - -impl Token { - pub fn eof(pos: usize) -> Token { - Token { - kind: SyntaxKind::Eof, - text: "".to_string(), - span: 
TextRange::at(TextSize::try_from(pos).unwrap(), TextSize::from(0)), - token_type: TokenType::Whitespace, - } - } -} - -pub static WHITESPACE_TOKENS: &[SyntaxKind] = &[ - SyntaxKind::Whitespace, - SyntaxKind::Tab, - SyntaxKind::Newline, - SyntaxKind::SqlComment, - SyntaxKind::CComment, -]; - -static PATTERN_LEXER: LazyLock = LazyLock::new(|| { - #[cfg(windows)] - { - // On Windows, treat \r\n as a single newline token - Regex::new(r"(?P +)|(?P(\r\n|\n)+)|(?P\t+)").unwrap() - } - #[cfg(not(windows))] - { - // On other platforms, just check for \n - Regex::new(r"(?P +)|(?P\n+)|(?P\t+)").unwrap() - } -}); - -fn whitespace_tokens(input: &str) -> VecDeque { - let mut tokens = VecDeque::new(); - - for cap in PATTERN_LEXER.captures_iter(input) { - if let Some(whitespace) = cap.name("whitespace") { - tokens.push_back(Token { - token_type: TokenType::Whitespace, - kind: SyntaxKind::Whitespace, - text: whitespace.as_str().to_string(), - span: TextRange::new( - TextSize::from(u32::try_from(whitespace.start()).unwrap()), - TextSize::from(u32::try_from(whitespace.end()).unwrap()), - ), - }); - } else if let Some(newline) = cap.name("newline") { - tokens.push_back(Token { - token_type: TokenType::Whitespace, - kind: SyntaxKind::Newline, - text: newline.as_str().to_string(), - span: TextRange::new( - TextSize::from(u32::try_from(newline.start()).unwrap()), - TextSize::from(u32::try_from(newline.end()).unwrap()), - ), - }); - } else if let Some(tab) = cap.name("tab") { - tokens.push_back(Token { - token_type: TokenType::Whitespace, - kind: SyntaxKind::Tab, - text: tab.as_str().to_string(), - span: TextRange::new( - TextSize::from(u32::try_from(tab.start()).unwrap()), - TextSize::from(u32::try_from(tab.end()).unwrap()), - ), - }); - } else { - panic!("No match"); - }; - } - - tokens +/// Lex the input string into tokens and diagnostics +pub fn lex(input: &str) -> Lexed { + Lexer::new(input).lex() } -/// Turn a string of potentially valid sql code into a list of tokens, including 
their range in the source text. -/// -/// The implementation is primarily using libpg_querys `scan` method, and fills in the gaps with tokens that are not parsed by the library, e.g. whitespace. -pub fn lex(text: &str) -> Result, Vec> { - let mut whitespace_tokens = whitespace_tokens(text); - - // tokens from pg_query.rs - let mut pgt_query_tokens = match pg_query::scan(text) { - Ok(r) => r.tokens.into_iter().collect::>(), - Err(err) => return Err(ScanError::from_pg_query_err(err, text)), - }; - - // merge the two token lists - let mut tokens: Vec = Vec::new(); - let mut pos = TextSize::from(0); - - while pos < text.text_len() { - if !pgt_query_tokens.is_empty() - && TextSize::from(u32::try_from(pgt_query_tokens[0].start).unwrap()) == pos - { - let pgt_query_token = pgt_query_tokens.pop_front().unwrap(); - - // the lexer returns byte indices, so we need to slice - let token_text = &text[usize::try_from(pgt_query_token.start).unwrap() - ..usize::try_from(pgt_query_token.end).unwrap()]; - - let len = token_text.text_len(); - let has_whitespace = token_text.contains(" ") || token_text.contains("\n"); - tokens.push(Token { - token_type: TokenType::from(&pgt_query_token), - kind: SyntaxKind::from(&pgt_query_token), - text: token_text.to_string(), - span: TextRange::new( - TextSize::from(u32::try_from(pgt_query_token.start).unwrap()), - TextSize::from(u32::try_from(pgt_query_token.end).unwrap()), - ), - }); - pos += len; +#[cfg(test)] +mod tests { + use super::*; - if has_whitespace { - while !whitespace_tokens.is_empty() - && whitespace_tokens[0].span.start() < TextSize::from(u32::from(pos)) - { - whitespace_tokens.pop_front(); - } + #[test] + fn test_basic_lexing() { + let input = "SELECT * FROM users WHERE id = 1;"; + let lexed = lex(input); + + // Check we have tokens + assert!(!lexed.is_empty()); + + // Iterate over tokens and collect identifiers + let mut identifiers = Vec::new(); + for (idx, kind) in lexed.tokens().enumerate() { + if kind == SyntaxKind::IDENT { + 
identifiers.push((lexed.text(idx), lexed.range(idx))); } - - continue; } - if !whitespace_tokens.is_empty() - && whitespace_tokens[0].span.start() == TextSize::from(u32::from(pos)) - { - let whitespace_token = whitespace_tokens.pop_front().unwrap(); - let len = whitespace_token.text.text_len(); - tokens.push(whitespace_token); - pos += len; - continue; - } - - let usize_pos = usize::from(pos); - panic!( - "No token found at position {:?}: '{:?}'", - pos, - text.get(usize_pos..usize_pos + 1) - ); + // Should find at least "users" and "id" as identifiers + assert!(identifiers.len() >= 2); } - Ok(tokens) -} - -#[cfg(test)] -mod tests { - use super::*; - #[test] - fn test_special_chars() { - let input = "insert into c (name, full_name) values ('Å', 1);"; - let tokens = lex(input).unwrap(); - assert!(!tokens.is_empty()); + fn test_lexing_with_errors() { + let input = "SELECT 'unterminated string"; + let lexed = lex(input); + + // Should have tokens + assert!(!lexed.is_empty()); + + // Should have an error for unterminated string + let errors = lexed.errors(); + assert!(!errors.is_empty()); + // Check the error message exists + assert!(!errors[0].message.to_string().is_empty()); } #[test] - fn test_tab_tokens() { - let input = "select\t1"; - let tokens = lex(input).unwrap(); - assert_eq!(tokens[1].kind, SyntaxKind::Tab); + fn test_lexing_string_params_with_errors() { + let input = "SELECT :'unterminated string"; + let lexed = lex(input); + + // Should have tokens + assert!(!lexed.is_empty()); + + // Should have an error for unterminated string + let errors = lexed.errors(); + assert!(!errors.is_empty()); + // Check the error message exists + assert!(!errors[0].message.to_string().is_empty()); } #[test] - fn test_newline_tokens() { - let input = "select\n1"; - let tokens = lex(input).unwrap(); - assert_eq!(tokens[1].kind, SyntaxKind::Newline); + fn test_lexing_identifier_params_with_errors() { + let input = "SELECT :\"unterminated string"; + let lexed = lex(input); + + // 
Should have tokens + assert!(!lexed.is_empty()); + + // Should have an error for unterminated string + let errors = lexed.errors(); + assert!(!errors.is_empty()); + // Check the error message exists + assert!(!errors[0].message.to_string().is_empty()); } #[test] - fn test_consecutive_newlines() { - // Test with multiple consecutive newlines - #[cfg(windows)] - let input = "select\r\n\r\n1"; - #[cfg(not(windows))] - let input = "select\n\n1"; - - let tokens = lex(input).unwrap(); - - // Check that we have exactly one newline token between "select" and "1" - assert_eq!(tokens[0].kind, SyntaxKind::Select); - assert_eq!(tokens[1].kind, SyntaxKind::Newline); - assert_eq!(tokens[2].kind, SyntaxKind::Iconst); + fn test_token_ranges() { + let input = "SELECT id"; + let lexed = lex(input); + + // First token should be a keyword (SELECT gets parsed as a keyword) + let _first_kind = lexed.kind(0); + assert_eq!(u32::from(lexed.range(0).start()), 0); + assert_eq!(u32::from(lexed.range(0).end()), 6); + assert_eq!(lexed.text(0), "SELECT"); + + // Find the id token + for (idx, kind) in lexed.tokens().enumerate() { + if kind == SyntaxKind::IDENT && lexed.text(idx) == "id" { + assert_eq!(u32::from(lexed.range(idx).start()), 7); + assert_eq!(u32::from(lexed.range(idx).end()), 9); + } + } } #[test] - fn test_whitespace_tokens() { - let input = "select 1"; - let tokens = lex(input).unwrap(); - assert_eq!(tokens[1].kind, SyntaxKind::Whitespace); + fn test_empty_input() { + let input = ""; + let lexed = lex(input); + assert_eq!(lexed.len(), 1); + assert_eq!(lexed.kind(0), SyntaxKind::EOF); } #[test] - fn test_lexer() { - let input = "select 1; \n -- some comment \n select 2\t"; - - let tokens = lex(input).unwrap(); - let mut tokens_iter = tokens.iter(); - - let token = tokens_iter.next().unwrap(); - assert_eq!(token.kind, SyntaxKind::Select); - assert_eq!(token.text, "select"); - - let token = tokens_iter.next().unwrap(); - assert_eq!(token.kind, SyntaxKind::Whitespace); - - let token = 
tokens_iter.next().unwrap(); - assert_eq!(token.kind, SyntaxKind::Iconst); - assert_eq!(token.text, "1"); - - let token = tokens_iter.next().unwrap(); - assert_eq!(token.kind, SyntaxKind::Ascii59); - - let token = tokens_iter.next().unwrap(); - assert_eq!(token.kind, SyntaxKind::Whitespace); - - let token = tokens_iter.next().unwrap(); - assert_eq!(token.kind, SyntaxKind::Newline); - - let token = tokens_iter.next().unwrap(); - assert_eq!(token.kind, SyntaxKind::Whitespace); - - let token = tokens_iter.next().unwrap(); - assert_eq!(token.kind, SyntaxKind::SqlComment); - assert_eq!(token.text, "-- some comment "); - - let token = tokens_iter.next().unwrap(); - assert_eq!(token.kind, SyntaxKind::Newline); - - let token = tokens_iter.next().unwrap(); - assert_eq!(token.kind, SyntaxKind::Whitespace); - - let token = tokens_iter.next().unwrap(); - assert_eq!(token.kind, SyntaxKind::Select); - assert_eq!(token.text, "select"); - - let token = tokens_iter.next().unwrap(); - assert_eq!(token.kind, SyntaxKind::Whitespace); + fn test_whitespace_handling() { + let input = " SELECT \n id "; + let lexed = lex(input); + + // Collect non-whitespace tokens + let mut non_whitespace = Vec::new(); + for (idx, kind) in lexed.tokens().enumerate() { + if !matches!( + kind, + SyntaxKind::SPACE | SyntaxKind::TAB | SyntaxKind::LINE_ENDING | SyntaxKind::EOF + ) { + non_whitespace.push(lexed.text(idx)); + } + } - let token = tokens_iter.next().unwrap(); - assert_eq!(token.kind, SyntaxKind::Iconst); - assert_eq!(token.text, "2"); + assert_eq!(non_whitespace.len(), 2); // SELECT and id + } - let token = tokens_iter.next().unwrap(); - assert_eq!(token.kind, SyntaxKind::Tab); + #[test] + fn finds_lex_errors() { + // Test with unterminated block comment + let input = "/* unterminated comment"; + let lexed = lex(input); + let errors = lexed.errors(); + + // Should have error for unterminated block comment + assert!(!errors.is_empty()); + assert!(errors[0].message.to_string().contains("Missing 
trailing")); + assert!(errors[0].span.start() < errors[0].span.end()); + + // Test with unterminated string + let input2 = "SELECT 'unterminated string"; + let lexed2 = lex(input2); + let errors2 = lexed2.errors(); + + // Should have error for unterminated string + assert!(!errors2.is_empty()); + assert!(errors2[0].message.to_string().contains("Missing trailing")); } } diff --git a/crates/pgt_lexer_codegen/Cargo.toml b/crates/pgt_lexer_codegen/Cargo.toml index c58786462..b50465b07 100644 --- a/crates/pgt_lexer_codegen/Cargo.toml +++ b/crates/pgt_lexer_codegen/Cargo.toml @@ -10,12 +10,16 @@ name = "pgt_lexer_codegen" repository.workspace = true version = "0.0.0" - [dependencies] -pgt_query_proto_parser.workspace = true -proc-macro2.workspace = true -quote = "1.0.33" +anyhow = { workspace = true } +convert_case = { workspace = true } +proc-macro2.workspace = true +prost-reflect = { workspace = true } +protox = { workspace = true } +quote.workspace = true + +[build-dependencies] +ureq = "2.9" [lib] -doctest = false proc-macro = true diff --git a/crates/pgt_lexer_codegen/README.md b/crates/pgt_lexer_codegen/README.md index 843ac2f8f..57bdaa340 100644 --- a/crates/pgt_lexer_codegen/README.md +++ b/crates/pgt_lexer_codegen/README.md @@ -1,7 +1 @@ -# pgt_lexer_codegen - -This crate is responsible for reading `libpg_query`'s protobuf file and turning it into the Rust enum `SyntaxKind`. - -It does so by reading the file from the installed git submodule, parsing it with a protobuf parser, and using a procedural macro to generate the enum. - -Rust requires procedural macros to be defined in a different crate than where they're used, hence this \_codegen crate. +Heavily inspired by and copied from [squawk_parser](https://github.com/sbdchd/squawk/tree/9acfecbbb7f3c7eedcbaf060e7b25f9afa136db3/crates/squawk_parser). Thanks for making all the hard work MIT-licensed! 
diff --git a/crates/pgt_lexer_codegen/build.rs b/crates/pgt_lexer_codegen/build.rs new file mode 100644 index 000000000..70c9635d4 --- /dev/null +++ b/crates/pgt_lexer_codegen/build.rs @@ -0,0 +1,49 @@ +use std::env; +use std::fs; +use std::io::Write; +use std::path::PathBuf; + +// TODO make this selectable via feature flags +static LIBPG_QUERY_TAG: &str = "17-6.1.0"; + +/// Downloads the `kwlist.h` file from the specified version of `libpg_query` +fn main() -> Result<(), Box> { + let version = LIBPG_QUERY_TAG.to_string(); + + // Check for the postgres header file in the source tree first + let manifest_dir = PathBuf::from(env::var("CARGO_MANIFEST_DIR")?); + let headers_dir = manifest_dir.join("postgres").join(&version); + let kwlist_path = headers_dir.join("kwlist.h"); + + // Only download if the file doesn't exist + if !kwlist_path.exists() { + println!( + "cargo:warning=Downloading kwlist.h for libpg_query {}", + version + ); + + fs::create_dir_all(&headers_dir)?; + + let proto_url = format!( + "https://raw.githubusercontent.com/pganalyze/libpg_query/{}/src/postgres/include/parser/kwlist.h", + version + ); + + let response = ureq::get(&proto_url).call()?; + let content = response.into_string()?; + + let mut file = fs::File::create(&kwlist_path)?; + file.write_all(content.as_bytes())?; + + println!("cargo:warning=Successfully downloaded kwlist.h"); + } + + println!( + "cargo:rustc-env=PG_QUERY_KWLIST_PATH={}", + kwlist_path.display() + ); + + println!("cargo:rerun-if-changed={}", kwlist_path.display()); + + Ok(()) +} diff --git a/crates/pgt_lexer_codegen/postgres/17-6.1.0/kwlist.h b/crates/pgt_lexer_codegen/postgres/17-6.1.0/kwlist.h new file mode 100644 index 000000000..658d7ff6a --- /dev/null +++ b/crates/pgt_lexer_codegen/postgres/17-6.1.0/kwlist.h @@ -0,0 +1,518 @@ +/*------------------------------------------------------------------------- + * + * kwlist.h + * + * The keyword lists are kept in their own source files for use by + * automatic tools. 
The exact representation of a keyword is determined + * by the PG_KEYWORD macro, which is not defined in this file; it can + * be defined by the caller for special purposes. + * + * Portions Copyright (c) 1996-2024, PostgreSQL Global Development Group + * Portions Copyright (c) 1994, Regents of the University of California + * + * IDENTIFICATION + * src/include/parser/kwlist.h + * + *------------------------------------------------------------------------- + */ + +/* there is deliberately not an #ifndef KWLIST_H here */ + +/* + * List of keyword (name, token-value, category, bare-label-status) entries. + * + * Note: gen_keywordlist.pl requires the entries to appear in ASCII order. + */ + +/* name, value, category, is-bare-label */ +PG_KEYWORD("abort", ABORT_P, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("absent", ABSENT, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("absolute", ABSOLUTE_P, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("access", ACCESS, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("action", ACTION, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("add", ADD_P, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("admin", ADMIN, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("after", AFTER, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("aggregate", AGGREGATE, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("all", ALL, RESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("also", ALSO, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("alter", ALTER, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("always", ALWAYS, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("analyse", ANALYSE, RESERVED_KEYWORD, BARE_LABEL) /* British spelling */ +PG_KEYWORD("analyze", ANALYZE, RESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("and", AND, RESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("any", ANY, RESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("array", ARRAY, RESERVED_KEYWORD, AS_LABEL) +PG_KEYWORD("as", AS, RESERVED_KEYWORD, AS_LABEL) +PG_KEYWORD("asc", ASC, RESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("asensitive", ASENSITIVE, 
UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("assertion", ASSERTION, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("assignment", ASSIGNMENT, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("asymmetric", ASYMMETRIC, RESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("at", AT, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("atomic", ATOMIC, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("attach", ATTACH, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("attribute", ATTRIBUTE, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("authorization", AUTHORIZATION, TYPE_FUNC_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("backward", BACKWARD, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("before", BEFORE, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("begin", BEGIN_P, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("between", BETWEEN, COL_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("bigint", BIGINT, COL_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("binary", BINARY, TYPE_FUNC_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("bit", BIT, COL_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("boolean", BOOLEAN_P, COL_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("both", BOTH, RESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("breadth", BREADTH, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("by", BY, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("cache", CACHE, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("call", CALL, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("called", CALLED, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("cascade", CASCADE, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("cascaded", CASCADED, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("case", CASE, RESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("cast", CAST, RESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("catalog", CATALOG_P, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("chain", CHAIN, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("char", CHAR_P, COL_NAME_KEYWORD, AS_LABEL) +PG_KEYWORD("character", CHARACTER, COL_NAME_KEYWORD, AS_LABEL) +PG_KEYWORD("characteristics", CHARACTERISTICS, UNRESERVED_KEYWORD, BARE_LABEL) 
+PG_KEYWORD("check", CHECK, RESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("checkpoint", CHECKPOINT, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("class", CLASS, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("close", CLOSE, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("cluster", CLUSTER, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("coalesce", COALESCE, COL_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("collate", COLLATE, RESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("collation", COLLATION, TYPE_FUNC_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("column", COLUMN, RESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("columns", COLUMNS, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("comment", COMMENT, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("comments", COMMENTS, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("commit", COMMIT, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("committed", COMMITTED, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("compression", COMPRESSION, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("concurrently", CONCURRENTLY, TYPE_FUNC_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("conditional", CONDITIONAL, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("configuration", CONFIGURATION, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("conflict", CONFLICT, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("connection", CONNECTION, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("constraint", CONSTRAINT, RESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("constraints", CONSTRAINTS, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("content", CONTENT_P, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("continue", CONTINUE_P, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("conversion", CONVERSION_P, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("copy", COPY, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("cost", COST, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("create", CREATE, RESERVED_KEYWORD, AS_LABEL) +PG_KEYWORD("cross", CROSS, TYPE_FUNC_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("csv", CSV, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("cube", CUBE, 
UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("current", CURRENT_P, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("current_catalog", CURRENT_CATALOG, RESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("current_date", CURRENT_DATE, RESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("current_role", CURRENT_ROLE, RESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("current_schema", CURRENT_SCHEMA, TYPE_FUNC_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("current_time", CURRENT_TIME, RESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("current_timestamp", CURRENT_TIMESTAMP, RESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("current_user", CURRENT_USER, RESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("cursor", CURSOR, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("cycle", CYCLE, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("data", DATA_P, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("database", DATABASE, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("day", DAY_P, UNRESERVED_KEYWORD, AS_LABEL) +PG_KEYWORD("deallocate", DEALLOCATE, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("dec", DEC, COL_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("decimal", DECIMAL_P, COL_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("declare", DECLARE, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("default", DEFAULT, RESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("defaults", DEFAULTS, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("deferrable", DEFERRABLE, RESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("deferred", DEFERRED, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("definer", DEFINER, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("delete", DELETE_P, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("delimiter", DELIMITER, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("delimiters", DELIMITERS, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("depends", DEPENDS, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("depth", DEPTH, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("desc", DESC, RESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("detach", DETACH, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("dictionary", DICTIONARY, 
UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("disable", DISABLE_P, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("discard", DISCARD, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("distinct", DISTINCT, RESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("do", DO, RESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("document", DOCUMENT_P, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("domain", DOMAIN_P, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("double", DOUBLE_P, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("drop", DROP, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("each", EACH, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("else", ELSE, RESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("empty", EMPTY_P, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("enable", ENABLE_P, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("encoding", ENCODING, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("encrypted", ENCRYPTED, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("end", END_P, RESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("enum", ENUM_P, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("error", ERROR_P, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("escape", ESCAPE, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("event", EVENT, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("except", EXCEPT, RESERVED_KEYWORD, AS_LABEL) +PG_KEYWORD("exclude", EXCLUDE, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("excluding", EXCLUDING, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("exclusive", EXCLUSIVE, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("execute", EXECUTE, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("exists", EXISTS, COL_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("explain", EXPLAIN, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("expression", EXPRESSION, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("extension", EXTENSION, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("external", EXTERNAL, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("extract", EXTRACT, COL_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("false", FALSE_P, RESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("family", 
FAMILY, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("fetch", FETCH, RESERVED_KEYWORD, AS_LABEL) +PG_KEYWORD("filter", FILTER, UNRESERVED_KEYWORD, AS_LABEL) +PG_KEYWORD("finalize", FINALIZE, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("first", FIRST_P, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("float", FLOAT_P, COL_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("following", FOLLOWING, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("for", FOR, RESERVED_KEYWORD, AS_LABEL) +PG_KEYWORD("force", FORCE, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("foreign", FOREIGN, RESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("format", FORMAT, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("forward", FORWARD, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("freeze", FREEZE, TYPE_FUNC_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("from", FROM, RESERVED_KEYWORD, AS_LABEL) +PG_KEYWORD("full", FULL, TYPE_FUNC_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("function", FUNCTION, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("functions", FUNCTIONS, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("generated", GENERATED, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("global", GLOBAL, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("grant", GRANT, RESERVED_KEYWORD, AS_LABEL) +PG_KEYWORD("granted", GRANTED, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("greatest", GREATEST, COL_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("group", GROUP_P, RESERVED_KEYWORD, AS_LABEL) +PG_KEYWORD("grouping", GROUPING, COL_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("groups", GROUPS, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("handler", HANDLER, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("having", HAVING, RESERVED_KEYWORD, AS_LABEL) +PG_KEYWORD("header", HEADER_P, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("hold", HOLD, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("hour", HOUR_P, UNRESERVED_KEYWORD, AS_LABEL) +PG_KEYWORD("identity", IDENTITY_P, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("if", IF_P, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("ilike", ILIKE, 
TYPE_FUNC_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("immediate", IMMEDIATE, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("immutable", IMMUTABLE, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("implicit", IMPLICIT_P, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("import", IMPORT_P, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("in", IN_P, RESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("include", INCLUDE, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("including", INCLUDING, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("increment", INCREMENT, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("indent", INDENT, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("index", INDEX, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("indexes", INDEXES, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("inherit", INHERIT, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("inherits", INHERITS, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("initially", INITIALLY, RESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("inline", INLINE_P, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("inner", INNER_P, TYPE_FUNC_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("inout", INOUT, COL_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("input", INPUT_P, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("insensitive", INSENSITIVE, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("insert", INSERT, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("instead", INSTEAD, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("int", INT_P, COL_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("integer", INTEGER, COL_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("intersect", INTERSECT, RESERVED_KEYWORD, AS_LABEL) +PG_KEYWORD("interval", INTERVAL, COL_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("into", INTO, RESERVED_KEYWORD, AS_LABEL) +PG_KEYWORD("invoker", INVOKER, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("is", IS, TYPE_FUNC_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("isnull", ISNULL, TYPE_FUNC_NAME_KEYWORD, AS_LABEL) +PG_KEYWORD("isolation", ISOLATION, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("join", JOIN, TYPE_FUNC_NAME_KEYWORD, 
BARE_LABEL) +PG_KEYWORD("json", JSON, COL_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("json_array", JSON_ARRAY, COL_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("json_arrayagg", JSON_ARRAYAGG, COL_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("json_exists", JSON_EXISTS, COL_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("json_object", JSON_OBJECT, COL_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("json_objectagg", JSON_OBJECTAGG, COL_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("json_query", JSON_QUERY, COL_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("json_scalar", JSON_SCALAR, COL_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("json_serialize", JSON_SERIALIZE, COL_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("json_table", JSON_TABLE, COL_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("json_value", JSON_VALUE, COL_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("keep", KEEP, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("key", KEY, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("keys", KEYS, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("label", LABEL, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("language", LANGUAGE, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("large", LARGE_P, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("last", LAST_P, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("lateral", LATERAL_P, RESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("leading", LEADING, RESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("leakproof", LEAKPROOF, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("least", LEAST, COL_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("left", LEFT, TYPE_FUNC_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("level", LEVEL, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("like", LIKE, TYPE_FUNC_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("limit", LIMIT, RESERVED_KEYWORD, AS_LABEL) +PG_KEYWORD("listen", LISTEN, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("load", LOAD, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("local", LOCAL, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("localtime", LOCALTIME, RESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("localtimestamp", LOCALTIMESTAMP, RESERVED_KEYWORD, 
BARE_LABEL) +PG_KEYWORD("location", LOCATION, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("lock", LOCK_P, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("locked", LOCKED, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("logged", LOGGED, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("mapping", MAPPING, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("match", MATCH, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("matched", MATCHED, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("materialized", MATERIALIZED, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("maxvalue", MAXVALUE, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("merge", MERGE, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("merge_action", MERGE_ACTION, COL_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("method", METHOD, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("minute", MINUTE_P, UNRESERVED_KEYWORD, AS_LABEL) +PG_KEYWORD("minvalue", MINVALUE, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("mode", MODE, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("month", MONTH_P, UNRESERVED_KEYWORD, AS_LABEL) +PG_KEYWORD("move", MOVE, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("name", NAME_P, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("names", NAMES, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("national", NATIONAL, COL_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("natural", NATURAL, TYPE_FUNC_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("nchar", NCHAR, COL_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("nested", NESTED, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("new", NEW, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("next", NEXT, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("nfc", NFC, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("nfd", NFD, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("nfkc", NFKC, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("nfkd", NFKD, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("no", NO, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("none", NONE, COL_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("normalize", NORMALIZE, COL_NAME_KEYWORD, BARE_LABEL) 
+PG_KEYWORD("normalized", NORMALIZED, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("not", NOT, RESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("nothing", NOTHING, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("notify", NOTIFY, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("notnull", NOTNULL, TYPE_FUNC_NAME_KEYWORD, AS_LABEL) +PG_KEYWORD("nowait", NOWAIT, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("null", NULL_P, RESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("nullif", NULLIF, COL_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("nulls", NULLS_P, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("numeric", NUMERIC, COL_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("object", OBJECT_P, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("of", OF, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("off", OFF, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("offset", OFFSET, RESERVED_KEYWORD, AS_LABEL) +PG_KEYWORD("oids", OIDS, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("old", OLD, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("omit", OMIT, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("on", ON, RESERVED_KEYWORD, AS_LABEL) +PG_KEYWORD("only", ONLY, RESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("operator", OPERATOR, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("option", OPTION, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("options", OPTIONS, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("or", OR, RESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("order", ORDER, RESERVED_KEYWORD, AS_LABEL) +PG_KEYWORD("ordinality", ORDINALITY, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("others", OTHERS, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("out", OUT_P, COL_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("outer", OUTER_P, TYPE_FUNC_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("over", OVER, UNRESERVED_KEYWORD, AS_LABEL) +PG_KEYWORD("overlaps", OVERLAPS, TYPE_FUNC_NAME_KEYWORD, AS_LABEL) +PG_KEYWORD("overlay", OVERLAY, COL_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("overriding", OVERRIDING, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("owned", OWNED, UNRESERVED_KEYWORD, BARE_LABEL) 
+PG_KEYWORD("owner", OWNER, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("parallel", PARALLEL, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("parameter", PARAMETER, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("parser", PARSER, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("partial", PARTIAL, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("partition", PARTITION, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("passing", PASSING, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("password", PASSWORD, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("path", PATH, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("placing", PLACING, RESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("plan", PLAN, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("plans", PLANS, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("policy", POLICY, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("position", POSITION, COL_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("preceding", PRECEDING, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("precision", PRECISION, COL_NAME_KEYWORD, AS_LABEL) +PG_KEYWORD("prepare", PREPARE, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("prepared", PREPARED, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("preserve", PRESERVE, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("primary", PRIMARY, RESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("prior", PRIOR, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("privileges", PRIVILEGES, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("procedural", PROCEDURAL, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("procedure", PROCEDURE, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("procedures", PROCEDURES, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("program", PROGRAM, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("publication", PUBLICATION, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("quote", QUOTE, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("quotes", QUOTES, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("range", RANGE, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("read", READ, UNRESERVED_KEYWORD, BARE_LABEL) 
+PG_KEYWORD("real", REAL, COL_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("reassign", REASSIGN, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("recheck", RECHECK, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("recursive", RECURSIVE, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("ref", REF_P, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("references", REFERENCES, RESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("referencing", REFERENCING, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("refresh", REFRESH, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("reindex", REINDEX, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("relative", RELATIVE_P, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("release", RELEASE, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("rename", RENAME, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("repeatable", REPEATABLE, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("replace", REPLACE, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("replica", REPLICA, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("reset", RESET, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("restart", RESTART, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("restrict", RESTRICT, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("return", RETURN, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("returning", RETURNING, RESERVED_KEYWORD, AS_LABEL) +PG_KEYWORD("returns", RETURNS, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("revoke", REVOKE, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("right", RIGHT, TYPE_FUNC_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("role", ROLE, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("rollback", ROLLBACK, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("rollup", ROLLUP, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("routine", ROUTINE, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("routines", ROUTINES, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("row", ROW, COL_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("rows", ROWS, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("rule", RULE, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("savepoint", SAVEPOINT, 
UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("scalar", SCALAR, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("schema", SCHEMA, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("schemas", SCHEMAS, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("scroll", SCROLL, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("search", SEARCH, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("second", SECOND_P, UNRESERVED_KEYWORD, AS_LABEL) +PG_KEYWORD("security", SECURITY, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("select", SELECT, RESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("sequence", SEQUENCE, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("sequences", SEQUENCES, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("serializable", SERIALIZABLE, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("server", SERVER, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("session", SESSION, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("session_user", SESSION_USER, RESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("set", SET, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("setof", SETOF, COL_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("sets", SETS, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("share", SHARE, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("show", SHOW, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("similar", SIMILAR, TYPE_FUNC_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("simple", SIMPLE, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("skip", SKIP, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("smallint", SMALLINT, COL_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("snapshot", SNAPSHOT, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("some", SOME, RESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("source", SOURCE, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("sql", SQL_P, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("stable", STABLE, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("standalone", STANDALONE_P, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("start", START, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("statement", STATEMENT, UNRESERVED_KEYWORD, BARE_LABEL) 
+PG_KEYWORD("statistics", STATISTICS, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("stdin", STDIN, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("stdout", STDOUT, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("storage", STORAGE, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("stored", STORED, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("strict", STRICT_P, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("string", STRING_P, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("strip", STRIP_P, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("subscription", SUBSCRIPTION, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("substring", SUBSTRING, COL_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("support", SUPPORT, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("symmetric", SYMMETRIC, RESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("sysid", SYSID, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("system", SYSTEM_P, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("system_user", SYSTEM_USER, RESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("table", TABLE, RESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("tables", TABLES, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("tablesample", TABLESAMPLE, TYPE_FUNC_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("tablespace", TABLESPACE, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("target", TARGET, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("temp", TEMP, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("template", TEMPLATE, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("temporary", TEMPORARY, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("text", TEXT_P, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("then", THEN, RESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("ties", TIES, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("time", TIME, COL_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("timestamp", TIMESTAMP, COL_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("to", TO, RESERVED_KEYWORD, AS_LABEL) +PG_KEYWORD("trailing", TRAILING, RESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("transaction", TRANSACTION, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("transform", 
TRANSFORM, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("treat", TREAT, COL_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("trigger", TRIGGER, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("trim", TRIM, COL_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("true", TRUE_P, RESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("truncate", TRUNCATE, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("trusted", TRUSTED, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("type", TYPE_P, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("types", TYPES_P, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("uescape", UESCAPE, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("unbounded", UNBOUNDED, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("uncommitted", UNCOMMITTED, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("unconditional", UNCONDITIONAL, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("unencrypted", UNENCRYPTED, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("union", UNION, RESERVED_KEYWORD, AS_LABEL) +PG_KEYWORD("unique", UNIQUE, RESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("unknown", UNKNOWN, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("unlisten", UNLISTEN, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("unlogged", UNLOGGED, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("until", UNTIL, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("update", UPDATE, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("user", USER, RESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("using", USING, RESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("vacuum", VACUUM, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("valid", VALID, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("validate", VALIDATE, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("validator", VALIDATOR, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("value", VALUE_P, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("values", VALUES, COL_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("varchar", VARCHAR, COL_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("variadic", VARIADIC, RESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("varying", VARYING, UNRESERVED_KEYWORD, AS_LABEL) 
+PG_KEYWORD("verbose", VERBOSE, TYPE_FUNC_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("version", VERSION_P, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("view", VIEW, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("views", VIEWS, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("volatile", VOLATILE, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("when", WHEN, RESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("where", WHERE, RESERVED_KEYWORD, AS_LABEL) +PG_KEYWORD("whitespace", WHITESPACE_P, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("window", WINDOW, RESERVED_KEYWORD, AS_LABEL) +PG_KEYWORD("with", WITH, RESERVED_KEYWORD, AS_LABEL) +PG_KEYWORD("within", WITHIN, UNRESERVED_KEYWORD, AS_LABEL) +PG_KEYWORD("without", WITHOUT, UNRESERVED_KEYWORD, AS_LABEL) +PG_KEYWORD("work", WORK, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("wrapper", WRAPPER, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("write", WRITE, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("xml", XML_P, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("xmlattributes", XMLATTRIBUTES, COL_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("xmlconcat", XMLCONCAT, COL_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("xmlelement", XMLELEMENT, COL_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("xmlexists", XMLEXISTS, COL_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("xmlforest", XMLFOREST, COL_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("xmlnamespaces", XMLNAMESPACES, COL_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("xmlparse", XMLPARSE, COL_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("xmlpi", XMLPI, COL_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("xmlroot", XMLROOT, COL_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("xmlserialize", XMLSERIALIZE, COL_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("xmltable", XMLTABLE, COL_NAME_KEYWORD, BARE_LABEL) +PG_KEYWORD("year", YEAR_P, UNRESERVED_KEYWORD, AS_LABEL) +PG_KEYWORD("yes", YES_P, UNRESERVED_KEYWORD, BARE_LABEL) +PG_KEYWORD("zone", ZONE, UNRESERVED_KEYWORD, BARE_LABEL) diff --git a/crates/pgt_lexer_codegen/src/keywords.rs b/crates/pgt_lexer_codegen/src/keywords.rs new file mode 
100644 index 000000000..f0104c8d3 --- /dev/null +++ b/crates/pgt_lexer_codegen/src/keywords.rs @@ -0,0 +1,43 @@ +// from https://github.com/sbdchd/squawk/blob/ac9f90c3b2be8d2c46fd5454eb48975afd268dbe/crates/xtask/src/keywords.rs +use anyhow::{Context, Ok, Result}; +use std::path; + +fn parse_header() -> Result> { + // use the environment variable set by the build script to locate the kwlist.h file + let kwlist_file = path::PathBuf::from(env!("PG_QUERY_KWLIST_PATH")); + let data = std::fs::read_to_string(kwlist_file).context("Failed to read kwlist.h")?; + + let mut keywords = Vec::new(); + + for line in data.lines() { + if line.starts_with("PG_KEYWORD") { + let line = line + .split(&['(', ')']) + .nth(1) + .context("Invalid kwlist.h structure")?; + + let row_items: Vec<&str> = line.split(',').collect(); + + match row_items[..] { + [name, _value, _category, _is_bare_label] => { + let name = name.trim().replace('\"', ""); + keywords.push(name); + } + _ => anyhow::bail!("Problem reading kwlist.h row"), + } + } + } + + Ok(keywords) +} + +pub(crate) struct KeywordKinds { + pub(crate) all_keywords: Vec, +} + +pub(crate) fn keyword_kinds() -> Result { + let mut all_keywords = parse_header()?; + all_keywords.sort(); + + Ok(KeywordKinds { all_keywords }) +} diff --git a/crates/pgt_lexer_codegen/src/lib.rs b/crates/pgt_lexer_codegen/src/lib.rs index 8f492e4b4..b620b6a63 100644 --- a/crates/pgt_lexer_codegen/src/lib.rs +++ b/crates/pgt_lexer_codegen/src/lib.rs @@ -1,29 +1,9 @@ +mod keywords; mod syntax_kind; -use pgt_query_proto_parser::ProtoParser; -use quote::quote; -use std::{env, path, path::Path}; +use syntax_kind::syntax_kind_mod; #[proc_macro] -pub fn lexer_codegen(_item: proc_macro::TokenStream) -> proc_macro::TokenStream { - let parser = ProtoParser::new(&proto_file_path()); - let proto_file = parser.parse(); - - let syntax_kind = syntax_kind::syntax_kind_mod(&proto_file); - - quote! 
{ - use pg_query::{protobuf, protobuf::ScanToken, protobuf::Token, NodeEnum, NodeRef}; - - #syntax_kind - } - .into() -} - -fn proto_file_path() -> path::PathBuf { - Path::new(env!("CARGO_MANIFEST_DIR")) - .ancestors() - .nth(2) - .unwrap() - .join("libpg_query/protobuf/pg_query.proto") - .to_path_buf() +pub fn syntax_kind_codegen(_input: proc_macro::TokenStream) -> proc_macro::TokenStream { + syntax_kind_mod().into() } diff --git a/crates/pgt_lexer_codegen/src/syntax_kind.rs b/crates/pgt_lexer_codegen/src/syntax_kind.rs index 091b1e022..3a0054374 100644 --- a/crates/pgt_lexer_codegen/src/syntax_kind.rs +++ b/crates/pgt_lexer_codegen/src/syntax_kind.rs @@ -1,111 +1,122 @@ -use std::collections::HashSet; - -use pgt_query_proto_parser::{Node, ProtoFile, Token}; -use proc_macro2::{Ident, Literal}; +use convert_case::{Case, Casing}; +use proc_macro2::TokenStream; use quote::{format_ident, quote}; -pub fn syntax_kind_mod(proto_file: &ProtoFile) -> proc_macro2::TokenStream { - let custom_node_names = custom_node_names(); - let custom_node_identifiers = custom_node_identifiers(&custom_node_names); - - let node_identifiers = node_identifiers(&proto_file.nodes); +use crate::keywords::{KeywordKinds, keyword_kinds}; + +const WHITESPACE: &[&str] = &[ + "SPACE", // " " + "TAB", // "\t" + "VERTICAL_TAB", // "\x0B" + "FORM_FEED", // "\x0C" + "LINE_ENDING", // "\n" or "\r" in any combination +]; + +const PUNCT: &[(&str, &str)] = &[ + ("$", "DOLLAR"), + (";", "SEMICOLON"), + (",", "COMMA"), + ("(", "L_PAREN"), + (")", "R_PAREN"), + ("[", "L_BRACK"), + ("]", "R_BRACK"), + ("<", "L_ANGLE"), + (">", "R_ANGLE"), + ("@", "AT"), + ("#", "POUND"), + ("~", "TILDE"), + ("?", "QUESTION"), + ("&", "AMP"), + ("|", "PIPE"), + ("+", "PLUS"), + ("*", "STAR"), + ("/", "SLASH"), + ("\\", "BACKSLASH"), + ("^", "CARET"), + ("%", "PERCENT"), + ("_", "UNDERSCORE"), + (".", "DOT"), + (":", "COLON"), + ("::", "DOUBLE_COLON"), + ("=", "EQ"), + ("!", "BANG"), + ("-", "MINUS"), + ("`", "BACKTICK"), +]; + 
+const EXTRA: &[&str] = &["POSITIONAL_PARAM", "NAMED_PARAM", "ERROR", "COMMENT", "EOF"]; + +const LITERALS: &[&str] = &[ + "BIT_STRING", + "BYTE_STRING", + "DOLLAR_QUOTED_STRING", + "ESC_STRING", + "FLOAT_NUMBER", + "INT_NUMBER", + "NULL", + "STRING", + "IDENT", +]; + +pub fn syntax_kind_mod() -> proc_macro2::TokenStream { + let keywords = keyword_kinds().expect("Failed to get keyword kinds"); + + let KeywordKinds { all_keywords, .. } = keywords; + + let mut enum_variants: Vec = Vec::new(); + let mut from_kw_match_arms: Vec = Vec::new(); + + // collect keywords + for kw in &all_keywords { + if kw.to_uppercase().contains("WHITESPACE") { + continue; // Skip whitespace as it is handled separately + } - let token_identifiers = token_identifiers(&proto_file.tokens); - let token_value_literals = token_value_literals(&proto_file.tokens); + let kind_ident = format_ident!("{}_KW", kw.to_case(Case::UpperSnake)); - let syntax_kind_from_impl = - syntax_kind_from_impl(&node_identifiers, &token_identifiers, &token_value_literals); + enum_variants.push(quote! { #kind_ident }); + from_kw_match_arms.push(quote! { + #kw => Some(SyntaxKind::#kind_ident) + }); + } - let mut enum_variants = HashSet::new(); - enum_variants.extend(&custom_node_identifiers); - enum_variants.extend(&node_identifiers); - enum_variants.extend(&token_identifiers); - let unique_enum_variants = enum_variants.into_iter().collect::>(); + // collect extra keywords + EXTRA.iter().for_each(|&name| { + let variant_name = format_ident!("{}", name); + enum_variants.push(quote! { #variant_name }); + }); + + // collect whitespace variants + WHITESPACE.iter().for_each(|&name| { + let variant_name = format_ident!("{}", name); + enum_variants.push(quote! { #variant_name }); + }); + + // collect punctuations + PUNCT.iter().for_each(|&(_ascii_name, variant)| { + let variant_name = format_ident!("{}", variant); + enum_variants.push(quote! 
{ #variant_name }); + }); + + // collect literals + LITERALS.iter().for_each(|&name| { + let variant_name = format_ident!("{}", name); + enum_variants.push(quote! { #variant_name }); + }); quote! { - /// An u32 enum of all valid syntax elements (nodes and tokens) of the postgres - /// sql dialect, and a few custom ones that are not parsed by pg_query.rs, such - /// as `Whitespace`. #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] - #[repr(u32)] + #[repr(u16)] pub enum SyntaxKind { - #(#unique_enum_variants),*, - } - - #syntax_kind_from_impl - } -} - -fn custom_node_names() -> Vec<&'static str> { - vec![ - "SourceFile", - "Comment", - "Whitespace", - "Newline", - "Tab", - "Stmt", - "Eof", - ] -} - -fn custom_node_identifiers(custom_node_names: &[&str]) -> Vec { - custom_node_names - .iter() - .map(|&node_name| format_ident!("{}", node_name)) - .collect() -} - -fn node_identifiers(nodes: &[Node]) -> Vec { - nodes - .iter() - .map(|node| format_ident!("{}", &node.name)) - .collect() -} - -fn token_identifiers(tokens: &[Token]) -> Vec { - tokens - .iter() - .map(|token| format_ident!("{}", &token.name)) - .collect() -} - -fn token_value_literals(tokens: &[Token]) -> Vec { - tokens - .iter() - .map(|token| Literal::i32_unsuffixed(token.value)) - .collect() -} - -fn syntax_kind_from_impl( - node_identifiers: &[Ident], - token_identifiers: &[Ident], - token_value_literals: &[Literal], -) -> proc_macro2::TokenStream { - quote! 
{ - /// Converts a `pg_query` node to a `SyntaxKind` - impl From<&NodeEnum> for SyntaxKind { - fn from(node: &NodeEnum) -> SyntaxKind { - match node { - #(NodeEnum::#node_identifiers(_) => SyntaxKind::#node_identifiers),* - } - } - - } - - impl From for SyntaxKind { - fn from(token: Token) -> SyntaxKind { - match i32::from(token) { - #(#token_value_literals => SyntaxKind::#token_identifiers),*, - _ => panic!("Unknown token: {:?}", token), - } - } + #(#enum_variants),*, } - impl From<&ScanToken> for SyntaxKind { - fn from(token: &ScanToken) -> SyntaxKind { - match token.token { - #(#token_value_literals => SyntaxKind::#token_identifiers),*, - _ => panic!("Unknown token: {:?}", token.token), + impl SyntaxKind { + pub(crate) fn from_keyword(ident: &str) -> Option { + let lower_ident = ident.to_ascii_lowercase(); + match lower_ident.as_str() { + #(#from_kw_match_arms),*, + _ => None } } } diff --git a/crates/pgt_lsp/src/capabilities.rs b/crates/pgt_lsp/src/capabilities.rs index acfc60edc..3b473eb73 100644 --- a/crates/pgt_lsp/src/capabilities.rs +++ b/crates/pgt_lsp/src/capabilities.rs @@ -1,4 +1,5 @@ use crate::adapters::{PositionEncoding, WideEncoding, negotiated_encoding}; +use crate::handlers::code_actions::command_id; use pgt_workspace::features::code_actions::CommandActionCategory; use strum::IntoEnumIterator; use tower_lsp::lsp_types::{ @@ -7,8 +8,6 @@ use tower_lsp::lsp_types::{ TextDocumentSyncOptions, TextDocumentSyncSaveOptions, WorkDoneProgressOptions, }; -use crate::handlers::code_actions::command_id; - /// The capabilities to send from server as part of [`InitializeResult`] /// /// [`InitializeResult`]: lspower::lsp::InitializeResult @@ -54,7 +53,6 @@ pub(crate) fn server_capabilities(capabilities: &ClientCapabilities) -> ServerCa commands: CommandActionCategory::iter() .map(|c| command_id(&c)) .collect::>(), - ..Default::default() }), document_formatting_provider: None, diff --git a/crates/pgt_lsp/src/handlers/completions.rs 
b/crates/pgt_lsp/src/handlers/completions.rs index 7e901c799..4a035fcf8 100644 --- a/crates/pgt_lsp/src/handlers/completions.rs +++ b/crates/pgt_lsp/src/handlers/completions.rs @@ -76,5 +76,6 @@ fn to_lsp_types_completion_item_kind( pgt_completions::CompletionItemKind::Column => lsp_types::CompletionItemKind::FIELD, pgt_completions::CompletionItemKind::Schema => lsp_types::CompletionItemKind::CLASS, pgt_completions::CompletionItemKind::Policy => lsp_types::CompletionItemKind::CONSTANT, + pgt_completions::CompletionItemKind::Role => lsp_types::CompletionItemKind::CONSTANT, } } diff --git a/crates/pgt_lsp/src/handlers/text_document.rs b/crates/pgt_lsp/src/handlers/text_document.rs index 63250ef5a..1c5a9a115 100644 --- a/crates/pgt_lsp/src/handlers/text_document.rs +++ b/crates/pgt_lsp/src/handlers/text_document.rs @@ -1,13 +1,10 @@ -use crate::adapters::from_lsp; -use crate::{ - diagnostics::LspError, documents::Document, session::Session, utils::apply_document_changes, -}; +use crate::{documents::Document, session::Session, utils::apply_document_changes}; use anyhow::Result; use pgt_workspace::workspace::{ - ChangeFileParams, ChangeParams, CloseFileParams, GetFileContentParams, OpenFileParams, + ChangeFileParams, CloseFileParams, GetFileContentParams, OpenFileParams, }; use tower_lsp::lsp_types; -use tracing::error; +use tracing::{error, field}; /// Handler for `textDocument/didOpen` LSP notification #[tracing::instrument(level = "debug", skip(session), err)] @@ -37,51 +34,39 @@ pub(crate) async fn did_open( Ok(()) } -// Handler for `textDocument/didChange` LSP notification -#[tracing::instrument(level = "debug", skip(session), err)] +/// Handler for `textDocument/didChange` LSP notification +#[tracing::instrument(level = "debug", skip_all, fields(url = field::display(¶ms.text_document.uri), version = params.text_document.version), err)] pub(crate) async fn did_change( session: &Session, params: lsp_types::DidChangeTextDocumentParams, -) -> Result<(), LspError> { +) 
-> Result<()> { let url = params.text_document.uri; let version = params.text_document.version; let pgt_path = session.file_path(&url)?; - let old_doc = session.document(&url)?; let old_text = session.workspace.get_file_content(GetFileContentParams { path: pgt_path.clone(), })?; - - let start = params - .content_changes - .iter() - .rev() - .position(|change| change.range.is_none()) - .map_or(0, |idx| params.content_changes.len() - idx - 1); + tracing::trace!("old document: {:?}", old_text); + tracing::trace!("content changes: {:?}", params.content_changes); let text = apply_document_changes( session.position_encoding(), old_text, - ¶ms.content_changes[start..], + params.content_changes, ); + tracing::trace!("new document: {:?}", text); + + session.insert_document(url.clone(), Document::new(version, &text)); + session.workspace.change_file(ChangeFileParams { path: pgt_path, version, - changes: params.content_changes[start..] - .iter() - .map(|c| ChangeParams { - range: c.range.and_then(|r| { - from_lsp::text_range(&old_doc.line_index, r, session.position_encoding()).ok() - }), - text: c.text.clone(), - }) - .collect(), + content: text, })?; - session.insert_document(url.clone(), Document::new(version, &text)); - if let Err(err) = session.update_diagnostics(url).await { error!("Failed to update diagnostics: {}", err); } diff --git a/crates/pgt_lsp/src/server.rs b/crates/pgt_lsp/src/server.rs index 4c05c0e4d..6420c5113 100644 --- a/crates/pgt_lsp/src/server.rs +++ b/crates/pgt_lsp/src/server.rs @@ -1,10 +1,13 @@ use crate::capabilities::server_capabilities; use crate::handlers; -use crate::session::{CapabilitySet, CapabilityStatus, Session, SessionHandle, SessionKey}; +use crate::session::{ + CapabilitySet, CapabilityStatus, ClientInformation, Session, SessionHandle, SessionKey, +}; use crate::utils::{into_lsp_error, panic_to_lsp_error}; use futures::FutureExt; use futures::future::ready; use pgt_fs::{ConfigName, FileSystem, OsFileSystem}; +use 
pgt_workspace::workspace::{RegisterProjectFolderParams, UnregisterProjectFolderParams}; use pgt_workspace::{DynRef, Workspace, workspace}; use rustc_hash::FxHashMap; use serde_json::json; @@ -107,6 +110,10 @@ impl LanguageServer for LSPServer { self.session.initialize( params.capabilities, + params.client_info.map(|client_info| ClientInformation { + name: client_info.name, + version: client_info.version, + }), params.root_uri, params.workspace_folders, ); @@ -217,6 +224,47 @@ impl LanguageServer for LSPServer { .ok(); } + async fn did_change_workspace_folders(&self, params: DidChangeWorkspaceFoldersParams) { + for removed in ¶ms.event.removed { + if let Ok(project_path) = self.session.file_path(&removed.uri) { + let result = self + .session + .workspace + .unregister_project_folder(UnregisterProjectFolderParams { path: project_path }) + .map_err(into_lsp_error); + + if let Err(err) = result { + error!("Failed to remove project from the workspace: {}", err); + self.session + .client + .log_message(MessageType::ERROR, err) + .await; + } + } + } + + for added in ¶ms.event.added { + if let Ok(project_path) = self.session.file_path(&added.uri) { + let result = self + .session + .workspace + .register_project_folder(RegisterProjectFolderParams { + path: Some(project_path.to_path_buf()), + set_as_current_workspace: true, + }) + .map_err(into_lsp_error); + + if let Err(err) = result { + error!("Failed to add project to the workspace: {}", err); + self.session + .client + .log_message(MessageType::ERROR, err) + .await; + } + } + } + } + #[tracing::instrument(level = "trace", skip_all)] async fn completion(&self, params: CompletionParams) -> LspResult> { match handlers::completions::get_completions(&self.session, params) { @@ -398,6 +446,8 @@ impl ServerFactory { workspace_method!(builder, close_file); workspace_method!(builder, pull_diagnostics); workspace_method!(builder, get_completions); + workspace_method!(builder, register_project_folder); + workspace_method!(builder, 
unregister_project_folder); let (service, socket) = builder.finish(); ServerConnection { socket, service } diff --git a/crates/pgt_lsp/src/session.rs b/crates/pgt_lsp/src/session.rs index 7ccf2babf..ede0469f2 100644 --- a/crates/pgt_lsp/src/session.rs +++ b/crates/pgt_lsp/src/session.rs @@ -10,11 +10,11 @@ use pgt_analyse::RuleCategoriesBuilder; use pgt_configuration::{ConfigurationPathHint, PartialConfiguration}; use pgt_diagnostics::{DiagnosticExt, Error}; use pgt_fs::{FileSystem, PgTPath}; +use pgt_workspace::PartialConfigurationExt; use pgt_workspace::Workspace; use pgt_workspace::configuration::{LoadedConfiguration, load_configuration}; use pgt_workspace::features; -use pgt_workspace::settings::PartialConfigurationExt; -use pgt_workspace::workspace::UpdateSettingsParams; +use pgt_workspace::workspace::{RegisterProjectFolderParams, UpdateSettingsParams}; use pgt_workspace::{DynRef, WorkspaceError}; use rustc_hash::FxHashMap; use serde_json::Value; @@ -31,6 +31,16 @@ use tower_lsp::lsp_types::{MessageType, Registration}; use tower_lsp::lsp_types::{Unregistration, WorkspaceFolder}; use tracing::{error, info}; +pub(crate) struct ClientInformation { + #[allow(dead_code)] + /// The name of the client + pub(crate) name: String, + + #[allow(dead_code)] + /// The version of the client + pub(crate) version: Option, +} + /// Key, uniquely identifying a LSP session. 
#[derive(Clone, Copy, Eq, PartialEq, Hash, Debug)] pub(crate) struct SessionKey(pub u64); @@ -68,6 +78,8 @@ pub(crate) struct Session { struct InitializeParams { /// The capabilities provided by the client as part of [`lsp_types::InitializeParams`] client_capabilities: lsp_types::ClientCapabilities, + #[allow(dead_code)] + client_information: Option, root_uri: Option, #[allow(unused)] workspace_folders: Option>, @@ -164,11 +176,13 @@ impl Session { pub(crate) fn initialize( &self, client_capabilities: lsp_types::ClientCapabilities, + client_information: Option, root_uri: Option, workspace_folders: Option>, ) { let result = self.initialize_params.set(InitializeParams { client_capabilities, + client_information, root_uri, workspace_folders, }); @@ -446,6 +460,8 @@ impl Session { info!("Configuration loaded successfully from disk."); info!("Update workspace settings."); + let fs = &self.fs; + if let Some(ws_configuration) = extra_config { fs_configuration.merge_with(ws_configuration); } @@ -455,6 +471,31 @@ impl Session { match result { Ok((vcs_base_path, gitignore_matches)) => { + let register_result = + if let ConfigurationPathHint::FromWorkspace(path) = &base_path { + // We don't need the key + self.workspace + .register_project_folder(RegisterProjectFolderParams { + path: Some(path.clone()), + // This is naive, but we don't know if the user has a file already open or not, so we register every project as the current one. 
+ // The correct one is actually set when the LSP calls `textDocument/didOpen` + set_as_current_workspace: true, + }) + .err() + } else { + self.workspace + .register_project_folder(RegisterProjectFolderParams { + path: fs.working_directory(), + set_as_current_workspace: true, + }) + .err() + }; + if let Some(error) = register_result { + error!("Failed to register the project folder: {}", error); + self.client.log_message(MessageType::ERROR, &error).await; + return ConfigurationStatus::Error; + } + let result = self.workspace.update_settings(UpdateSettingsParams { workspace_directory: self.fs.working_directory(), configuration: fs_configuration, diff --git a/crates/pgt_lsp/src/utils.rs b/crates/pgt_lsp/src/utils.rs index 92059b66c..8361cf08e 100644 --- a/crates/pgt_lsp/src/utils.rs +++ b/crates/pgt_lsp/src/utils.rs @@ -1,5 +1,6 @@ +use crate::adapters::from_lsp::text_range; use crate::adapters::line_index::LineIndex; -use crate::adapters::{PositionEncoding, from_lsp, to_lsp}; +use crate::adapters::{PositionEncoding, to_lsp}; use anyhow::{Context, Result, ensure}; use pgt_console::MarkupBuf; use pgt_console::fmt::Termcolor; @@ -10,8 +11,8 @@ use pgt_text_size::{TextRange, TextSize}; use std::any::Any; use std::borrow::Cow; use std::fmt::{Debug, Display}; -use std::io; use std::ops::{Add, Range}; +use std::{io, mem}; use tower_lsp::jsonrpc::Error as LspError; use tower_lsp::lsp_types; use tower_lsp::lsp_types::{self as lsp, CodeDescription, Url}; @@ -183,7 +184,7 @@ pub(crate) fn panic_to_lsp_error(err: Box) -> LspError { pub(crate) fn apply_document_changes( position_encoding: PositionEncoding, current_content: String, - content_changes: &[lsp_types::TextDocumentContentChangeEvent], + mut content_changes: Vec, ) -> String { // Skip to the last full document change, as it invalidates all previous changes anyways. 
let mut start = content_changes @@ -192,12 +193,12 @@ pub(crate) fn apply_document_changes( .position(|change| change.range.is_none()) .map_or(0, |idx| content_changes.len() - idx - 1); - let mut text: String = match content_changes.get(start) { + let mut text: String = match content_changes.get_mut(start) { // peek at the first content change as an optimization Some(lsp_types::TextDocumentContentChangeEvent { range: None, text, .. }) => { - let text = text.clone(); + let text = mem::take(text); start += 1; // The only change is a full document update @@ -225,12 +226,11 @@ pub(crate) fn apply_document_changes( line_index = LineIndex::new(&text); } index_valid = range.start.line; - if let Ok(range) = from_lsp::text_range(&line_index, range, position_encoding) { + if let Ok(range) = text_range(&line_index, range, position_encoding) { text.replace_range(Range::::from(range), &change.text); } } } - text } diff --git a/crates/pgt_lsp/tests/server.rs b/crates/pgt_lsp/tests/server.rs index 581ea1fe0..63953590b 100644 --- a/crates/pgt_lsp/tests/server.rs +++ b/crates/pgt_lsp/tests/server.rs @@ -3,6 +3,7 @@ use anyhow::Error; use anyhow::Result; use anyhow::bail; use biome_deserialize::Merge; +use biome_deserialize::StringSet; use futures::Sink; use futures::SinkExt; use futures::Stream; @@ -13,13 +14,13 @@ use pgt_configuration::database::PartialDatabaseConfiguration; use pgt_fs::MemoryFileSystem; use pgt_lsp::LSPServer; use pgt_lsp::ServerFactory; -use pgt_test_utils::test_database::get_new_test_db; use pgt_workspace::DynRef; use serde::Serialize; use serde::de::DeserializeOwned; use serde_json::Value; use serde_json::{from_value, to_value}; use sqlx::Executor; +use sqlx::PgPool; use std::any::type_name; use std::fmt::Display; use std::time::Duration; @@ -40,6 +41,7 @@ use tower_lsp::lsp_types::Position; use tower_lsp::lsp_types::Range; use tower_lsp::lsp_types::TextDocumentPositionParams; use tower_lsp::lsp_types::WorkDoneProgressParams; +use 
tower_lsp::lsp_types::WorkspaceFolder; use tower_lsp::lsp_types::{ ClientCapabilities, DidChangeConfigurationParams, DidChangeTextDocumentParams, DidCloseTextDocumentParams, DidOpenTextDocumentParams, InitializeResult, InitializedParams, @@ -164,6 +166,42 @@ impl Server { Ok(()) } + /// It creates two workspaces, one at folder `test_one` and the other in `test_two`. + /// + /// Hence, the two roots will be `/workspace/test_one` and `/workspace/test_two` + #[allow(deprecated)] + async fn initialize_workspaces(&mut self) -> Result<()> { + let _res: InitializeResult = self + .request( + "initialize", + "_init", + InitializeParams { + process_id: None, + root_path: None, + root_uri: Some(url!("/")), + initialization_options: None, + capabilities: ClientCapabilities::default(), + trace: None, + workspace_folders: Some(vec![ + WorkspaceFolder { + name: "test_one".to_string(), + uri: url!("test_one"), + }, + WorkspaceFolder { + name: "test_two".to_string(), + uri: url!("test_two"), + }, + ]), + client_info: None, + locale: None, + }, + ) + .await? + .context("initialize returned None")?; + + Ok(()) + } + /// Basic implementation of the `initialized` notification for tests async fn initialized(&mut self) -> Result<()> { self.notify("initialized", InitializedParams {}).await @@ -204,13 +242,18 @@ impl Server { } /// Opens a document with given contents and given name. 
The name must contain the extension too - async fn open_named_document(&mut self, text: impl Display, document_name: Url) -> Result<()> { + async fn open_named_document( + &mut self, + text: impl Display, + document_name: Url, + language: impl Display, + ) -> Result<()> { self.notify( "textDocument/didOpen", DidOpenTextDocumentParams { text_document: TextDocumentItem { uri: document_name, - language_id: String::from("sql"), + language_id: language.to_string(), version: 0, text: text.to_string(), }, @@ -230,24 +273,31 @@ impl Server { .await } - async fn change_document( + async fn change_named_document( &mut self, + uri: Url, version: i32, content_changes: Vec, ) -> Result<()> { self.notify( "textDocument/didChange", DidChangeTextDocumentParams { - text_document: VersionedTextDocumentIdentifier { - uri: url!("document.sql"), - version, - }, + text_document: VersionedTextDocumentIdentifier { uri, version }, content_changes, }, ) .await } + async fn change_document( + &mut self, + version: i32, + content_changes: Vec, + ) -> Result<()> { + self.change_named_document(url!("document.sql"), version, content_changes) + .await + } + #[allow(unused)] async fn close_document(&mut self) -> Result<()> { self.notify( @@ -345,11 +395,10 @@ async fn basic_lifecycle() -> Result<()> { Ok(()) } -#[tokio::test] -async fn test_database_connection() -> Result<()> { +#[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] +async fn test_database_connection(test_db: PgPool) -> Result<()> { let factory = ServerFactory::default(); let mut fs = MemoryFileSystem::default(); - let test_db = get_new_test_db().await; let setup = r#" create table public.users ( @@ -457,11 +506,10 @@ async fn server_shutdown() -> Result<()> { Ok(()) } -#[tokio::test] -async fn test_completions() -> Result<()> { +#[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] +async fn test_completions(test_db: PgPool) -> Result<()> { let factory = ServerFactory::default(); let mut fs = MemoryFileSystem::default(); - let 
test_db = get_new_test_db().await; let setup = r#" create table public.users ( @@ -558,11 +606,10 @@ async fn test_completions() -> Result<()> { Ok(()) } -#[tokio::test] -async fn test_issue_271() -> Result<()> { +#[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] +async fn test_issue_271(test_db: PgPool) -> Result<()> { let factory = ServerFactory::default(); let mut fs = MemoryFileSystem::default(); - let test_db = get_new_test_db().await; let setup = r#" create table public.users ( @@ -760,11 +807,10 @@ async fn test_issue_271() -> Result<()> { Ok(()) } -#[tokio::test] -async fn test_execute_statement() -> Result<()> { +#[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] +async fn test_execute_statement(test_db: PgPool) -> Result<()> { let factory = ServerFactory::default(); let mut fs = MemoryFileSystem::default(); - let test_db = get_new_test_db().await; let database = test_db .connect_options() @@ -835,7 +881,7 @@ async fn test_execute_statement() -> Result<()> { let doc_url = url!("test.sql"); server - .open_named_document(doc_content.to_string(), doc_url.clone()) + .open_named_document(doc_content.to_string(), doc_url.clone(), "sql") .await?; let code_actions_response = server @@ -899,11 +945,10 @@ async fn test_execute_statement() -> Result<()> { Ok(()) } -#[tokio::test] -async fn test_issue_281() -> Result<()> { +#[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] +async fn test_issue_281(test_db: PgPool) -> Result<()> { let factory = ServerFactory::default(); let mut fs = MemoryFileSystem::default(); - let test_db = get_new_test_db().await; let setup = r#" create table public.users ( @@ -983,11 +1028,10 @@ async fn test_issue_281() -> Result<()> { Ok(()) } -#[tokio::test] -async fn test_issue_303() -> Result<()> { +#[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] +async fn test_issue_303(test_db: PgPool) -> Result<()> { let factory = ServerFactory::default(); let mut fs = MemoryFileSystem::default(); - let test_db = 
get_new_test_db().await; let setup = r#" create table public.users ( @@ -1113,3 +1157,1106 @@ async fn test_issue_303() -> Result<()> { Ok(()) } + +#[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] +async fn multiple_projects(test_db: PgPool) -> Result<()> { + let factory = ServerFactory::default(); + let mut fs = MemoryFileSystem::default(); + + let setup = r#" + create table public.users ( + id serial primary key, + name varchar(255) not null + ); + "#; + + test_db + .execute(setup) + .await + .expect("Failed to setup test database"); + + // Setup configurations + // - test_one with db connection + let mut conf_with_db = PartialConfiguration::init(); + conf_with_db.merge_with(PartialConfiguration { + db: Some(PartialDatabaseConfiguration { + database: Some( + test_db + .connect_options() + .get_database() + .unwrap() + .to_string(), + ), + ..Default::default() + }), + ..Default::default() + }); + fs.insert( + url!("test_one/postgrestools.jsonc").to_file_path().unwrap(), + serde_json::to_string_pretty(&conf_with_db).unwrap(), + ); + + // -- test_two without db connection + let mut conf_without_db = PartialConfiguration::init(); + conf_without_db.merge_with(PartialConfiguration { + db: Some(PartialDatabaseConfiguration { + disable_connection: Some(true), + ..Default::default() + }), + ..Default::default() + }); + fs.insert( + url!("test_two/postgrestools.jsonc").to_file_path().unwrap(), + serde_json::to_string_pretty(&conf_without_db).unwrap(), + ); + + let (service, client) = factory + .create_with_fs(None, DynRef::Owned(Box::new(fs))) + .into_inner(); + + let (stream, sink) = client.split(); + let mut server = Server::new(service); + + let (sender, _) = channel(CHANNEL_BUFFER_SIZE); + let reader = tokio::spawn(client_handler(stream, sink, sender)); + + server.initialize_workspaces().await?; + server.initialized().await?; + + server.load_configuration().await?; + + // do the same change in both workspaces and request completions in both workspaces + + server 
+ .open_named_document( + "select from public.users;\n", + url!("test_one/document.sql"), + "sql", + ) + .await?; + + server + .change_named_document( + url!("test_one/document.sql"), + 3, + vec![TextDocumentContentChangeEvent { + range: Some(Range { + start: Position { + line: 0, + character: 7, + }, + end: Position { + line: 0, + character: 7, + }, + }), + range_length: Some(0), + text: " ".to_string(), + }], + ) + .await?; + + let res_ws_one = server + .get_completion(CompletionParams { + work_done_progress_params: WorkDoneProgressParams::default(), + partial_result_params: PartialResultParams::default(), + context: None, + text_document_position: TextDocumentPositionParams { + text_document: TextDocumentIdentifier { + uri: url!("test_one/document.sql"), + }, + position: Position { + line: 0, + character: 8, + }, + }, + }) + .await? + .unwrap(); + + server + .open_named_document( + "select from public.users;\n", + url!("test_two/document.sql"), + "sql", + ) + .await?; + + server + .change_named_document( + url!("test_two/document.sql"), + 3, + vec![TextDocumentContentChangeEvent { + range: Some(Range { + start: Position { + line: 0, + character: 7, + }, + end: Position { + line: 0, + character: 7, + }, + }), + range_length: Some(0), + text: " ".to_string(), + }], + ) + .await?; + + let res_ws_two = server + .get_completion(CompletionParams { + work_done_progress_params: WorkDoneProgressParams::default(), + partial_result_params: PartialResultParams::default(), + context: None, + text_document_position: TextDocumentPositionParams { + text_document: TextDocumentIdentifier { + uri: url!("test_two/document.sql"), + }, + position: Position { + line: 0, + character: 8, + }, + }, + }) + .await? 
+ .unwrap(); + + // only the first one has a db connection and should return completion items + assert!(!match res_ws_one { + CompletionResponse::Array(a) => a.is_empty(), + CompletionResponse::List(l) => l.items.is_empty(), + }); + assert!(match res_ws_two { + CompletionResponse::Array(a) => a.is_empty(), + CompletionResponse::List(l) => l.items.is_empty(), + }); + + server.shutdown().await?; + reader.abort(); + + Ok(()) +} + +#[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] +async fn extends_config(test_db: PgPool) -> Result<()> { + let factory = ServerFactory::default(); + let mut fs = MemoryFileSystem::default(); + + let setup = r#" + create table public.extends_config_test ( + id serial primary key, + name varchar(255) not null + ); + "#; + + test_db + .execute(setup) + .await + .expect("Failed to setup test database"); + + // shared config with default db connection + let conf_with_db = PartialConfiguration::init(); + fs.insert( + url!("postgrestools.jsonc").to_file_path().unwrap(), + serde_json::to_string_pretty(&conf_with_db).unwrap(), + ); + + let relative_path = if cfg!(windows) { + "..\\postgrestools.jsonc" + } else { + "../postgrestools.jsonc" + }; + + // test_one extends the shared config but sets our test db + let mut conf_with_db = PartialConfiguration::init(); + conf_with_db.merge_with(PartialConfiguration { + extends: Some(StringSet::from_iter([relative_path.to_string()])), + db: Some(PartialDatabaseConfiguration { + database: Some( + test_db + .connect_options() + .get_database() + .unwrap() + .to_string(), + ), + ..Default::default() + }), + ..Default::default() + }); + + fs.insert( + url!("test_one/postgrestools.jsonc").to_file_path().unwrap(), + serde_json::to_string_pretty(&conf_with_db).unwrap(), + ); + + // test_two extends it but keeps the default one + let mut conf_without_db = PartialConfiguration::init(); + conf_without_db.merge_with(PartialConfiguration { + extends: Some(StringSet::from_iter([relative_path.to_string()])), + 
..Default::default() + }); + fs.insert( + url!("test_two/postgrestools.jsonc").to_file_path().unwrap(), + serde_json::to_string_pretty(&conf_without_db).unwrap(), + ); + + let (service, client) = factory + .create_with_fs(None, DynRef::Owned(Box::new(fs))) + .into_inner(); + + let (stream, sink) = client.split(); + let mut server = Server::new(service); + + let (sender, _) = channel(CHANNEL_BUFFER_SIZE); + let reader = tokio::spawn(client_handler(stream, sink, sender)); + + server.initialize_workspaces().await?; + server.initialized().await?; + + server.load_configuration().await?; + + server + .open_named_document( + "select from public.extends_config_test;\n", + url!("test_one/document.sql"), + "sql", + ) + .await?; + + server + .change_named_document( + url!("test_one/document.sql"), + 3, + vec![TextDocumentContentChangeEvent { + range: Some(Range { + start: Position { + line: 0, + character: 7, + }, + end: Position { + line: 0, + character: 7, + }, + }), + range_length: Some(0), + text: " ".to_string(), + }], + ) + .await?; + + let res_ws_one = server + .get_completion(CompletionParams { + work_done_progress_params: WorkDoneProgressParams::default(), + partial_result_params: PartialResultParams::default(), + context: None, + text_document_position: TextDocumentPositionParams { + text_document: TextDocumentIdentifier { + uri: url!("test_one/document.sql"), + }, + position: Position { + line: 0, + character: 8, + }, + }, + }) + .await? 
+ .unwrap(); + + server + .open_named_document( + "select from public.users;\n", + url!("test_two/document.sql"), + "sql", + ) + .await?; + + server + .change_named_document( + url!("test_two/document.sql"), + 3, + vec![TextDocumentContentChangeEvent { + range: Some(Range { + start: Position { + line: 0, + character: 7, + }, + end: Position { + line: 0, + character: 7, + }, + }), + range_length: Some(0), + text: " ".to_string(), + }], + ) + .await?; + + let res_ws_two = server + .get_completion(CompletionParams { + work_done_progress_params: WorkDoneProgressParams::default(), + partial_result_params: PartialResultParams::default(), + context: None, + text_document_position: TextDocumentPositionParams { + text_document: TextDocumentIdentifier { + uri: url!("test_two/document.sql"), + }, + position: Position { + line: 0, + character: 8, + }, + }, + }) + .await? + .unwrap(); + + let items_one = match res_ws_one { + CompletionResponse::Array(ref a) => a, + CompletionResponse::List(ref l) => &l.items, + }; + + // test one should have our test db connection and should return the completion items for the `extends_config_test` table + assert!(items_one.iter().any(|item| { + item.label_details.clone().is_some_and(|details| { + details + .description + .is_some_and(|desc| desc.contains("public.extends_config_test")) + }) + })); + + let items_two = match res_ws_two { + CompletionResponse::Array(ref a) => a, + CompletionResponse::List(ref l) => &l.items, + }; + + // test two should not have a db connection and should not return the completion items for the `extends_config_test` table + assert!(!items_two.iter().any(|item| { + item.label_details.clone().is_some_and(|details| { + details + .description + .is_some_and(|desc| desc.contains("public.extends_config_test")) + }) + })); + + server.shutdown().await?; + reader.abort(); + + Ok(()) +} + +#[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] +async fn test_multiple_content_changes_single_request(test_db: PgPool) -> 
Result<()> { + let factory = ServerFactory::default(); + let mut fs = MemoryFileSystem::default(); + + let setup = r#" + create table public.campaign_contact_list ( + id serial primary key, + contact_list_id integer + ); + + create table public.contact_list ( + id serial primary key, + name varchar(255) + ); + + create table public.journey_node_contact_list ( + id serial primary key, + contact_list_id integer + ); + "#; + + test_db + .execute(setup) + .await + .expect("Failed to setup test database"); + + let mut conf = PartialConfiguration::init(); + conf.merge_with(PartialConfiguration { + db: Some(PartialDatabaseConfiguration { + database: Some( + test_db + .connect_options() + .get_database() + .unwrap() + .to_string(), + ), + ..Default::default() + }), + ..Default::default() + }); + fs.insert( + url!("postgrestools.jsonc").to_file_path().unwrap(), + serde_json::to_string_pretty(&conf).unwrap(), + ); + + let (service, client) = factory + .create_with_fs(None, DynRef::Owned(Box::new(fs))) + .into_inner(); + + let (stream, sink) = client.split(); + let mut server = Server::new(service); + + let (sender, mut receiver) = channel(CHANNEL_BUFFER_SIZE); + let reader = tokio::spawn(client_handler(stream, sink, sender)); + + server.initialize().await?; + server.initialized().await?; + + server.load_configuration().await?; + + // Open document with initial content that matches the log trace + let initial_content = r#" + + + +ALTER TABLE ONLY "public"."campaign_contact_list" + ADD CONSTRAINT "campaign_contact_list_contact_list_id_fkey" FOREIGN KEY ("contact_list_id") REFERENCES "public"."contact_list"("id") ON UPDATE RESTRICT ON DELETE CASCADE; +"#; + + server.open_document(initial_content).await?; + + // Apply multiple content changes in a single request, similar to the log trace + // This simulates changing "campaign" to "journey_node" in two places simultaneously + server + .change_document( + 4, + vec![ + // First change: line 4, character 27-35 (changing "campaign" 
to "journey_node") + TextDocumentContentChangeEvent { + range: Some(Range { + start: Position { + line: 4, + character: 27, + }, + end: Position { + line: 4, + character: 35, + }, + }), + range_length: Some(8), + text: "journey_node".to_string(), + }, + // Second change: line 5, character 20-28 (changing "campaign" to "journey_node") + TextDocumentContentChangeEvent { + range: Some(Range { + start: Position { + line: 5, + character: 20, + }, + end: Position { + line: 5, + character: 28, + }, + }), + range_length: Some(8), + text: "journey_node".to_string(), + }, + ], + ) + .await?; + + // make sure there is no diagnostics + let notification = tokio::time::timeout(Duration::from_secs(2), async { + loop { + match receiver.next().await { + Some(ServerNotification::PublishDiagnostics(msg)) => { + if !msg.diagnostics.is_empty() { + return true; + } + } + _ => continue, + } + } + }) + .await + .is_ok(); + + assert!(!notification, "did not expect diagnostics"); + + server.shutdown().await?; + reader.abort(); + + Ok(()) +} + +#[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] +async fn test_plpgsql(test_db: PgPool) -> Result<()> { + let factory = ServerFactory::default(); + let mut fs = MemoryFileSystem::default(); + + let mut conf = PartialConfiguration::init(); + conf.merge_with(PartialConfiguration { + db: Some(PartialDatabaseConfiguration { + database: Some( + test_db + .connect_options() + .get_database() + .unwrap() + .to_string(), + ), + ..Default::default() + }), + ..Default::default() + }); + fs.insert( + url!("postgrestools.jsonc").to_file_path().unwrap(), + serde_json::to_string_pretty(&conf).unwrap(), + ); + + let (service, client) = factory + .create_with_fs(None, DynRef::Owned(Box::new(fs))) + .into_inner(); + + let (stream, sink) = client.split(); + let mut server = Server::new(service); + + let (sender, mut receiver) = channel(CHANNEL_BUFFER_SIZE); + let reader = tokio::spawn(client_handler(stream, sink, sender)); + + server.initialize().await?; + 
server.initialized().await?; + + server.load_configuration().await?; + + let initial_content = r#" +create function test_organisation_id () + returns setof text + language plpgsql + security invoker + as $$ + declre + v_organisation_id uuid; +begin + return next is(private.organisation_id(), v_organisation_id, 'should return organisation_id of token'); +end +$$; +"#; + + server.open_document(initial_content).await?; + + let got_notification = tokio::time::timeout(Duration::from_secs(5), async { + loop { + match receiver.next().await { + Some(ServerNotification::PublishDiagnostics(msg)) => { + if msg.diagnostics.iter().any(|d| { + d.message + .contains("Invalid statement: syntax error at or near \"declre\"") + && d.range + == Range { + start: Position { + line: 5, + character: 9, + }, + end: Position { + line: 11, + character: 0, + }, + } + }) { + return true; + } + } + _ => continue, + } + } + }) + .await + .is_ok(); + + assert!( + got_notification, + "expected diagnostics for invalid declare statement" + ); + + server.shutdown().await?; + reader.abort(); + + Ok(()) +} + +#[tokio::test] +async fn test_crash_on_delete_character() -> Result<()> { + let factory = ServerFactory::default(); + let (service, client) = factory.create(None).into_inner(); + let (stream, sink) = client.split(); + let mut server = Server::new(service); + + let (sender, _) = channel(CHANNEL_BUFFER_SIZE); + let reader = tokio::spawn(client_handler(stream, sink, sender)); + + server.initialize().await?; + server.initialized().await?; + + // Open document with initial CREATE INDEX statement - exactly as in log + let initial_content = "\n\n\n\nCREATE INDEX \"idx_analytics_read_ratio\" ON \"public\".\"message\" USING \"btree\" (\"inbox_id\", \"timestamp\") INCLUDE (\"status\") WHERE (\"is_inbound\" = false);\n"; + + server.open_document(initial_content).await?; + + // Add a space after false (position 148 from the log) + server + .change_document( + 3, + vec![TextDocumentContentChangeEvent { + 
range: Some(Range { + start: Position { + line: 4, + character: 148, + }, + end: Position { + line: 4, + character: 148, + }, + }), + range_length: Some(0), + text: " ".to_string(), + }], + ) + .await?; + + // Follow the exact sequence from the logfile + // Type character by character in exact order + + // Version 4: "a" at 149 + server + .change_document( + 4, + vec![TextDocumentContentChangeEvent { + range: Some(Range { + start: Position { + line: 4, + character: 149, + }, + end: Position { + line: 4, + character: 149, + }, + }), + range_length: Some(0), + text: "a".to_string(), + }], + ) + .await?; + + // Version 5: "n" at 150 + server + .change_document( + 5, + vec![TextDocumentContentChangeEvent { + range: Some(Range { + start: Position { + line: 4, + character: 150, + }, + end: Position { + line: 4, + character: 150, + }, + }), + range_length: Some(0), + text: "n".to_string(), + }], + ) + .await?; + + // Version 6: "d" at 151 + server + .change_document( + 6, + vec![TextDocumentContentChangeEvent { + range: Some(Range { + start: Position { + line: 4, + character: 151, + }, + end: Position { + line: 4, + character: 151, + }, + }), + range_length: Some(0), + text: "d".to_string(), + }], + ) + .await?; + + // Version 7: " " at 152 + server + .change_document( + 7, + vec![TextDocumentContentChangeEvent { + range: Some(Range { + start: Position { + line: 4, + character: 152, + }, + end: Position { + line: 4, + character: 152, + }, + }), + range_length: Some(0), + text: " ".to_string(), + }], + ) + .await?; + + // Version 8: "c" at 153 + server + .change_document( + 8, + vec![TextDocumentContentChangeEvent { + range: Some(Range { + start: Position { + line: 4, + character: 153, + }, + end: Position { + line: 4, + character: 153, + }, + }), + range_length: Some(0), + text: "c".to_string(), + }], + ) + .await?; + + // Version 10: "h" at 154 and "a" at 155 (two changes in one version) + server + .change_document( + 10, + vec![ + TextDocumentContentChangeEvent { + 
range: Some(Range { + start: Position { + line: 4, + character: 154, + }, + end: Position { + line: 4, + character: 154, + }, + }), + range_length: Some(0), + text: "h".to_string(), + }, + TextDocumentContentChangeEvent { + range: Some(Range { + start: Position { + line: 4, + character: 155, + }, + end: Position { + line: 4, + character: 155, + }, + }), + range_length: Some(0), + text: "a".to_string(), + }, + ], + ) + .await?; + + // Version 11: "n" at 156 + server + .change_document( + 11, + vec![TextDocumentContentChangeEvent { + range: Some(Range { + start: Position { + line: 4, + character: 156, + }, + end: Position { + line: 4, + character: 156, + }, + }), + range_length: Some(0), + text: "n".to_string(), + }], + ) + .await?; + + // Version 12: "n" at 157 + server + .change_document( + 12, + vec![TextDocumentContentChangeEvent { + range: Some(Range { + start: Position { + line: 4, + character: 157, + }, + end: Position { + line: 4, + character: 157, + }, + }), + range_length: Some(0), + text: "n".to_string(), + }], + ) + .await?; + + // Version 13: "e" at 158 + server + .change_document( + 13, + vec![TextDocumentContentChangeEvent { + range: Some(Range { + start: Position { + line: 4, + character: 158, + }, + end: Position { + line: 4, + character: 158, + }, + }), + range_length: Some(0), + text: "e".to_string(), + }], + ) + .await?; + + // Version 14: "l" at 159 + server + .change_document( + 14, + vec![TextDocumentContentChangeEvent { + range: Some(Range { + start: Position { + line: 4, + character: 159, + }, + end: Position { + line: 4, + character: 159, + }, + }), + range_length: Some(0), + text: "l".to_string(), + }], + ) + .await?; + + // Version 15: "_" at 160 + server + .change_document( + 15, + vec![TextDocumentContentChangeEvent { + range: Some(Range { + start: Position { + line: 4, + character: 160, + }, + end: Position { + line: 4, + character: 160, + }, + }), + range_length: Some(0), + text: "_".to_string(), + }], + ) + .await?; + + // Version 16: 
"t" at 161 + server + .change_document( + 16, + vec![TextDocumentContentChangeEvent { + range: Some(Range { + start: Position { + line: 4, + character: 161, + }, + end: Position { + line: 4, + character: 161, + }, + }), + range_length: Some(0), + text: "t".to_string(), + }], + ) + .await?; + + // Version 17: "y" at 162 + server + .change_document( + 17, + vec![TextDocumentContentChangeEvent { + range: Some(Range { + start: Position { + line: 4, + character: 162, + }, + end: Position { + line: 4, + character: 162, + }, + }), + range_length: Some(0), + text: "y".to_string(), + }], + ) + .await?; + + // Version 18: "p" at 163 + server + .change_document( + 18, + vec![TextDocumentContentChangeEvent { + range: Some(Range { + start: Position { + line: 4, + character: 163, + }, + end: Position { + line: 4, + character: 163, + }, + }), + range_length: Some(0), + text: "p".to_string(), + }], + ) + .await?; + + // Version 19: "e" at 164 + server + .change_document( + 19, + vec![TextDocumentContentChangeEvent { + range: Some(Range { + start: Position { + line: 4, + character: 164, + }, + end: Position { + line: 4, + character: 164, + }, + }), + range_length: Some(0), + text: "e".to_string(), + }], + ) + .await?; + + // Version 20: " " at 165 + server + .change_document( + 20, + vec![TextDocumentContentChangeEvent { + range: Some(Range { + start: Position { + line: 4, + character: 165, + }, + end: Position { + line: 4, + character: 165, + }, + }), + range_length: Some(0), + text: " ".to_string(), + }], + ) + .await?; + + // Now we should have: "WHERE ("is_inbound" = false and channel_type )" + + // Version 21: Paste the problematic text with double single quotes + server + .change_document( + 21, + vec![TextDocumentContentChangeEvent { + range: Some(Range { + start: Position { + line: 4, + character: 166, + }, + end: Position { + line: 4, + character: 166, + }, + }), + range_length: Some(0), + text: "channel_type not in (''postal'', ''sms'')".to_string(), + }], + ) + .await?; 
+ + // Delete "channel_type" + server + .change_document( + 22, + vec![TextDocumentContentChangeEvent { + range: Some(Range { + start: Position { + line: 4, + character: 166, + }, + end: Position { + line: 4, + character: 178, + }, + }), + range_length: Some(12), + text: "".to_string(), + }], + ) + .await?; + + // Delete one more character + server + .change_document( + 23, + vec![TextDocumentContentChangeEvent { + range: Some(Range { + start: Position { + line: 4, + character: 166, + }, + end: Position { + line: 4, + character: 167, + }, + }), + range_length: Some(1), + text: "".to_string(), + }], + ) + .await?; + + // This final delete should trigger the panic + let result = server + .change_document( + 24, + vec![TextDocumentContentChangeEvent { + range: Some(Range { + start: Position { + line: 4, + character: 175, + }, + end: Position { + line: 4, + character: 176, + }, + }), + range_length: Some(1), + text: "".to_string(), + }], + ) + .await; + + assert!(result.is_ok()); + + reader.abort(); + + Ok(()) +} diff --git a/crates/pgt_plpgsql_check/Cargo.toml b/crates/pgt_plpgsql_check/Cargo.toml new file mode 100644 index 000000000..75d1a52b3 --- /dev/null +++ b/crates/pgt_plpgsql_check/Cargo.toml @@ -0,0 +1,30 @@ +[package] +authors.workspace = true +categories.workspace = true +description = "" +edition.workspace = true +homepage.workspace = true +keywords.workspace = true +license.workspace = true +name = "pgt_plpgsql_check" +repository.workspace = true +version = "0.0.0" + + +[dependencies] +pgt_console = { workspace = true } +pgt_diagnostics = { workspace = true } +pgt_query = { workspace = true } +pgt_query_ext = { workspace = true } +pgt_schema_cache = { workspace = true } +pgt_text_size = { workspace = true } +regex = { workspace = true } +serde = { workspace = true } +serde_json = { workspace = true } +sqlx = { workspace = true } +tree-sitter = { workspace = true } + +[dev-dependencies] +pgt_test_utils = { workspace = true } + +[lib] diff --git 
a/crates/pgt_plpgsql_check/src/diagnostics.rs b/crates/pgt_plpgsql_check/src/diagnostics.rs new file mode 100644 index 000000000..a0daec13d --- /dev/null +++ b/crates/pgt_plpgsql_check/src/diagnostics.rs @@ -0,0 +1,245 @@ +use std::io; + +use pgt_console::markup; +use pgt_diagnostics::{Advices, Diagnostic, LogCategory, MessageAndDescription, Severity, Visit}; +use pgt_text_size::TextRange; + +use crate::{PlpgSqlCheckIssue, PlpgSqlCheckResult}; + +/// Find the first occurrence of target text that is not within string literals +fn find_text_outside_strings(text: &str, target: &str) -> Option { + let text_lower = text.to_lowercase(); + let target_lower = target.to_lowercase(); + let mut in_string = false; + let mut quote_char = '\0'; + let bytes = text_lower.as_bytes(); + let mut i = 0; + + while i < bytes.len() { + let ch = bytes[i] as char; + + if !in_string { + // Check if we're starting a string literal + if ch == '\'' || ch == '"' { + in_string = true; + quote_char = ch; + } else { + // Check if we found our target at this position + if text_lower[i..].starts_with(&target_lower) { + // Check if this is a complete word (not part of another identifier) + let is_word_start = + i == 0 || !bytes[i - 1].is_ascii_alphanumeric() && bytes[i - 1] != b'_'; + let target_end = i + target_lower.len(); + let is_word_end = target_end >= bytes.len() + || (!bytes[target_end].is_ascii_alphanumeric() + && bytes[target_end] != b'_'); + + if is_word_start && is_word_end { + return Some(i); + } + } + } + } else { + // We're inside a string literal + if ch == quote_char { + // Check if it's escaped (look for double quotes/apostrophes) + if i + 1 < bytes.len() && bytes[i + 1] as char == quote_char { + // Skip the escaped quote + i += 1; + } else { + // End of string literal + in_string = false; + quote_char = '\0'; + } + } + } + + i += 1; + } + + None +} + +/// A specialized diagnostic for plpgsql_check. 
+#[derive(Clone, Debug, Diagnostic)] +#[diagnostic(category = "plpgsql_check")] +pub struct PlPgSqlCheckDiagnostic { + #[location(span)] + pub span: Option, + #[description] + #[message] + pub message: MessageAndDescription, + #[advice] + pub advices: PlPgSqlCheckAdvices, + #[severity] + pub severity: Severity, +} + +#[derive(Debug, Clone)] +pub struct PlPgSqlCheckAdvices { + pub code: Option, + /// the relation (table or view) where the issue was found, if applicable + /// only applicable for trigger functions + pub relation: Option, +} + +impl Advices for PlPgSqlCheckAdvices { + fn record(&self, visitor: &mut dyn Visit) -> io::Result<()> { + // Show the error code if available + if let Some(code) = &self.code { + visitor.record_log( + LogCategory::Error, + &markup! { "SQL State: " {code} }, + )?; + } + + // Show relation information if available + if let Some(relation) = &self.relation { + visitor.record_log( + LogCategory::Info, + &markup! { "Relation: " {relation} }, + )?; + } + + Ok(()) + } +} + +/// Convert plpgsql_check results into diagnostics with optional relation info for triggers +pub fn create_diagnostics_from_check_result( + result: &PlpgSqlCheckResult, + fn_body: &str, + offset: usize, + relation: Option, +) -> Vec { + result + .issues + .iter() + .map(|issue| { + let severity = match issue.level.as_str() { + "error" => Severity::Error, + "warning" => Severity::Warning, + "notice" => Severity::Hint, + _ => Severity::Information, + }; + + PlPgSqlCheckDiagnostic { + message: issue.message.clone().into(), + severity, + span: resolve_span(issue, fn_body, offset), + advices: PlPgSqlCheckAdvices { + code: issue.sql_state.clone(), + relation: relation.clone(), + }, + } + }) + .collect() +} + +fn resolve_span(issue: &PlpgSqlCheckIssue, fn_body: &str, offset: usize) -> Option { + let stmt = match issue.statement.as_ref() { + Some(s) => s, + None => { + return Some(TextRange::new( + (offset as u32).into(), + ((offset + fn_body.len()) as u32).into(), + )); + } 
+ }; + + let line_number = stmt + .line_number + .parse::() + .expect("Expected line number to be a valid usize"); + + let text = &stmt.text; + + // calculate the offset to the target line + let line_offset: usize = fn_body + .lines() + .take(line_number - 1) + .map(|line| line.len() + 1) // +1 for newline + .sum(); + + // find the position within the target line + let line = fn_body.lines().nth(line_number - 1)?; + let start = line + .to_lowercase() + .find(&text.to_lowercase()) + .unwrap_or_else(|| { + line.char_indices() + .find_map(|(i, c)| if !c.is_whitespace() { Some(i) } else { None }) + .unwrap_or(0) + }); + + let stmt_offset = line_offset + start; + + if let Some(q) = &issue.query { + // first find the query within the fn body *after* stmt_offset, ignoring string literals + let query_start = find_text_outside_strings(&fn_body[stmt_offset..], &q.text) + .map(|pos| pos + stmt_offset); + + // the position is *within* the query text + let pos = q + .position + .parse::() + .expect("Expected query position to be a valid usize") + - 1; // -1 because the position is 1-based + + let start = query_start? 
+ pos; + + // the range of the diagnostics is the token that `pos` is on + // Find the end of the current token by looking for whitespace or SQL delimiters + let remaining = &fn_body[start..]; + let end = remaining + .char_indices() + .find(|(_, c)| { + c.is_whitespace() || matches!(c, ',' | ';' | ')' | '(' | '=' | '<' | '>') + }) + .map(|(i, _c)| { + i // just the token end, don't include delimiters + }) + .unwrap_or(remaining.len()); + + return Some(TextRange::new( + ((offset + start) as u32).into(), + ((offset + start + end) as u32).into(), + )); + } + + // if no query is present, the end range covers + // - if text is "IF" or "ELSIF", then until the next "THEN" + // - TODO: check "LOOP", "CASE", "WHILE", "EXPECTION" and others + // - else: until the next semicolon or end of line + + if text.to_uppercase() == "IF" || text.to_uppercase() == "ELSIF" { + // Find the position of the next "THEN" after the statement + let remaining = &fn_body[stmt_offset..]; + if let Some(then_pos) = remaining.to_uppercase().find("THEN") { + let end = then_pos + "THEN".len(); + return Some(TextRange::new( + ((offset + stmt_offset) as u32).into(), + ((offset + stmt_offset + end) as u32).into(), + )); + } + } + + // if no specific end is found, use the next semicolon or the end of the line + let remaining = &fn_body[stmt_offset..]; + let end = remaining + .char_indices() + .find(|(_, c)| matches!(c, ';' | '\n' | '\r')) + .map(|(i, c)| { + if c == ';' { + i + 1 // include the semicolon + } else { + i // just the end of the line + } + }) + .unwrap_or(remaining.len()); + + Some(TextRange::new( + ((offset + stmt_offset) as u32).into(), + ((offset + stmt_offset + end) as u32).into(), + )) +} diff --git a/crates/pgt_plpgsql_check/src/lib.rs b/crates/pgt_plpgsql_check/src/lib.rs new file mode 100644 index 000000000..05e2f5709 --- /dev/null +++ b/crates/pgt_plpgsql_check/src/lib.rs @@ -0,0 +1,794 @@ +mod diagnostics; + +pub use diagnostics::PlPgSqlCheckDiagnostic; +use 
diagnostics::create_diagnostics_from_check_result; +use pgt_query::protobuf::CreateFunctionStmt; +use regex::Regex; +use serde::Deserialize; +pub use sqlx::postgres::PgSeverity; +use sqlx::{Acquire, PgPool, Postgres, Transaction}; + +#[derive(Debug)] +pub struct PlPgSqlCheckParams<'a> { + pub conn: &'a PgPool, + pub sql: &'a str, + pub ast: &'a pgt_query::NodeEnum, + pub schema_cache: &'a pgt_schema_cache::SchemaCache, +} + +#[derive(Debug, Deserialize)] +pub struct PlpgSqlCheckResult { + pub function: String, + pub issues: Vec, +} + +#[derive(Debug, Deserialize)] +pub struct PlpgSqlCheckIssue { + pub level: String, + pub message: String, + pub statement: Option, + pub query: Option, + #[serde(rename = "sqlState")] + pub sql_state: Option, +} + +#[derive(Debug, Deserialize)] +pub struct Statement { + #[serde(rename = "lineNumber")] + pub line_number: String, + pub text: String, +} + +#[derive(Debug, Deserialize)] +pub struct Query { + pub position: String, + pub text: String, +} + +/// check if the given node is a plpgsql function that should be checked +fn should_check_function<'a>( + ast: &'a pgt_query::NodeEnum, + schema_cache: &pgt_schema_cache::SchemaCache, +) -> Option<&'a CreateFunctionStmt> { + let create_fn = match ast { + pgt_query::NodeEnum::CreateFunctionStmt(stmt) => stmt, + _ => return None, + }; + + if pgt_query_ext::utils::find_option_value(create_fn, "language") != Some("plpgsql".to_string()) + { + return None; + } + + if !schema_cache + .extensions + .iter() + .any(|e| e.name == "plpgsql_check") + { + return None; + } + + Some(create_fn) +} + +/// check if a function is a trigger function +fn is_trigger_function(create_fn: &CreateFunctionStmt) -> bool { + create_fn + .return_type + .as_ref() + .map(|n| { + matches!( + pgt_query_ext::utils::parse_name(&n.names), + Some((None, name)) if name == "trigger" + ) + }) + .unwrap_or(false) +} + +/// build the function identifier string used by plpgsql_check +fn build_function_identifier( + create_fn: 
&CreateFunctionStmt, + fn_schema: &Option, + fn_name: &str, +) -> String { + let args = create_fn + .parameters + .iter() + .filter_map(|arg| { + let node = match &arg.node { + Some(pgt_query::NodeEnum::FunctionParameter(n)) => n, + _ => return None, + }; + let type_name_node = node.arg_type.as_ref()?; + let type_name = match pgt_query_ext::utils::parse_name(&type_name_node.names) { + Some((schema, name)) => match schema { + Some(s) => format!("{}.{}", s, name), + None => name, + }, + None => return None, + }; + + if !type_name_node.array_bounds.is_empty() { + Some(format!("{}[]", type_name)) + } else { + Some(type_name) + } + }) + .collect::>(); + + let fn_qualified_name = match fn_schema { + Some(schema) => format!("{}.{}", schema, fn_name), + None => fn_name.to_string(), + }; + + if args.is_empty() { + fn_qualified_name + } else { + format!("{}({})", fn_qualified_name, args.join(", ")) + } +} + +pub async fn check_plpgsql( + params: PlPgSqlCheckParams<'_>, +) -> Result, sqlx::Error> { + let create_fn = match should_check_function(params.ast, params.schema_cache) { + Some(stmt) => stmt, + None => return Ok(vec![]), + }; + + let (fn_schema, fn_name) = match pgt_query_ext::utils::parse_name(&create_fn.funcname) { + Some(n) => n, + None => return Ok(vec![]), + }; + + let fn_identifier = build_function_identifier(create_fn, &fn_schema, &fn_name); + + let fn_body = pgt_query_ext::utils::find_option_value(create_fn, "as") + .ok_or_else(|| sqlx::Error::Protocol("Failed to find function body".to_string()))?; + let offset = params + .sql + .find(&fn_body) + .ok_or_else(|| sqlx::Error::Protocol("Failed to find function body in SQL".to_string()))?; + let is_trigger = is_trigger_function(create_fn); + + let mut conn = params.conn.acquire().await?; + conn.close_on_drop(); + + let mut tx: Transaction<'_, Postgres> = conn.begin().await?; + + // disable function body checking to rely on plpgsql_check + sqlx::query("SET LOCAL check_function_bodies = off") + .execute(&mut *tx) + 
.await?; + + // make sure we run with "or replace" + let sql_with_replace = if !create_fn.replace { + let re = Regex::new(r"(?i)\bCREATE\s+FUNCTION\b").unwrap(); + re.replace(params.sql, "CREATE OR REPLACE FUNCTION") + .to_string() + } else { + params.sql.to_string() + }; + + // create the function - this should always succeed + sqlx::query(&sql_with_replace).execute(&mut *tx).await?; + + // run plpgsql_check and collect results with their relations + let results_with_relations: Vec<(String, Option)> = if is_trigger { + let mut results = Vec::new(); + + for trigger in params.schema_cache.triggers.iter() { + if trigger.proc_name == fn_name + && (fn_schema.is_none() || fn_schema.as_deref() == Some(&trigger.proc_schema)) + { + let relation = format!("{}.{}", trigger.table_schema, trigger.table_name); + + let result: Option = sqlx::query_scalar(&format!( + "select plpgsql_check_function('{}', '{}', format := 'json')", + fn_identifier, relation + )) + .fetch_optional(&mut *tx) + .await? + .flatten(); + + if let Some(result) = result { + results.push((result, Some(relation))); + } + } + } + + results + } else { + let result: Option = sqlx::query_scalar(&format!( + "select plpgsql_check_function('{}', format := 'json')", + fn_identifier + )) + .fetch_optional(&mut *tx) + .await? 
+ .flatten(); + + if let Some(result) = result { + vec![(result, None)] + } else { + vec![] + } + }; + + tx.rollback().await?; + + // Parse results and create diagnostics + let mut diagnostics = Vec::new(); + for (result_json, relation) in results_with_relations { + let check_result: PlpgSqlCheckResult = serde_json::from_str(&result_json).map_err(|e| { + sqlx::Error::Protocol(format!("Failed to parse plpgsql_check result: {}", e)) + })?; + + let mut result_diagnostics = + create_diagnostics_from_check_result(&check_result, &fn_body, offset, relation); + diagnostics.append(&mut result_diagnostics); + } + + Ok(diagnostics) +} + +#[cfg(all(test, not(target_os = "windows")))] +mod tests { + use sqlx::{Executor, PgPool}; + + /// Test helper to run plpgsql_check and return diagnostics with span text + async fn run_plpgsql_check_test( + test_db: &PgPool, + setup_sql: &str, + create_fn_sql: &str, + ) -> Result<(Vec, Vec>), Box> + { + test_db.execute(setup_sql).await?; + + let ast = pgt_query::parse(create_fn_sql)? 
+ .into_root() + .ok_or("Failed to parse SQL root")?; + let schema_cache = pgt_schema_cache::SchemaCache::load(test_db).await?; + + let diagnostics = super::check_plpgsql(super::PlPgSqlCheckParams { + conn: test_db, + sql: create_fn_sql, + ast: &ast, + schema_cache: &schema_cache, + }) + .await?; + + let span_texts = diagnostics + .iter() + .map(|diag| { + diag.span.as_ref().map(|s| { + let start = usize::from(s.start()); + let end = usize::from(s.end()); + create_fn_sql[start..end].to_string() + }) + }) + .collect(); + + Ok((diagnostics, span_texts)) + } + + #[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] + async fn test_plpgsql_check_if_expr(test_db: PgPool) { + let setup = r#" + create extension if not exists plpgsql_check; + + CREATE TABLE t1(a int, b int); + "#; + + let create_fn_sql = r#" + CREATE OR REPLACE FUNCTION public.f1() + RETURNS void + LANGUAGE plpgsql + AS $function$ + declare r t1 := (select t1 from t1 where a = 1); + BEGIN + if r.c is null or + true is false + then -- there is bug - table t1 missing "c" column + RAISE NOTICE 'c is null'; + end if; + END; + $function$; + "#; + + let (diagnostics, span_texts) = run_plpgsql_check_test(&test_db, setup, create_fn_sql) + .await + .expect("Failed to run plpgsql_check test"); + + assert_eq!(diagnostics.len(), 1); + assert!(matches!( + diagnostics[0].severity, + pgt_diagnostics::Severity::Error + )); + assert_eq!( + span_texts[0].as_deref(), + Some("if r.c is null or\n true is false\n then") + ); + } + + #[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] + async fn test_plpgsql_check_missing_var(test_db: PgPool) { + let setup = r#" + create extension if not exists plpgsql_check; + + CREATE TABLE t1(a int, b int); + "#; + + let create_fn_sql = r#" + CREATE OR REPLACE FUNCTION public.f1() + RETURNS void + LANGUAGE plpgsql + AS $function$ + BEGIN + SELECT 1 from t1 where a = v_c; + END; + $function$; + "#; + + let (diagnostics, span_texts) = run_plpgsql_check_test(&test_db, setup, create_fn_sql) + 
.await + .expect("Failed to run plpgsql_check test"); + assert_eq!(diagnostics.len(), 1); + assert!(matches!( + diagnostics[0].severity, + pgt_diagnostics::Severity::Error + )); + assert_eq!(span_texts[0].as_deref(), Some("v_c")); + } + + #[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] + async fn test_plpgsql_check_missing_col_if_stmt(test_db: PgPool) { + let setup = r#" + create extension if not exists plpgsql_check; + + CREATE TABLE t1(a int, b int); + "#; + + let create_fn_sql = r#" + CREATE OR REPLACE FUNCTION public.f1() + RETURNS void + LANGUAGE plpgsql + AS $function$ + BEGIN + if (select c from t1 where id = 1) is null then -- there is bug - table t1 missing "c" column + RAISE NOTICE 'c is null'; + end if; + END; + $function$; + "#; + + let (diagnostics, span_texts) = run_plpgsql_check_test(&test_db, setup, create_fn_sql) + .await + .expect("Failed to run plpgsql_check test"); + assert_eq!(diagnostics.len(), 1); + assert!(matches!( + diagnostics[0].severity, + pgt_diagnostics::Severity::Error + )); + assert_eq!(span_texts[0].as_deref(), Some("c")); + } + + #[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] + async fn test_plpgsql_check(test_db: PgPool) { + let setup = r#" + create extension if not exists plpgsql_check; + + CREATE TABLE t1(a int, b int); + "#; + + let create_fn_sql = r#" + CREATE OR REPLACE FUNCTION public.f1() + RETURNS void + LANGUAGE plpgsql + AS $function$ + DECLARE r record; + BEGIN + FOR r IN SELECT * FROM t1 + LOOP + RAISE NOTICE '%', r.c; -- there is bug - table t1 missing "c" column + END LOOP; + END; + $function$; + "#; + + let (diagnostics, span_texts) = run_plpgsql_check_test(&test_db, setup, create_fn_sql) + .await + .expect("Failed to run plpgsql_check test"); + + assert_eq!(diagnostics.len(), 1); + assert!(matches!( + diagnostics[0].severity, + pgt_diagnostics::Severity::Error + )); + assert_eq!(span_texts[0].as_deref(), Some("RAISE NOTICE '%', r.c;")); + } + + #[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] 
+ async fn test_plpgsql_check_stacked_diagnostics(test_db: PgPool) { + let setup = r#" + create extension if not exists plpgsql_check; + "#; + + let create_fn_sql = r#" + create or replace function fxtest() + returns void as $$ + declare + v_sqlstate text; + v_message text; + v_context text; + begin + get stacked diagnostics + v_sqlstate = returned_sqlstate, + v_message = message_text, + v_context = pg_exception_context; + end; + $$ language plpgsql; + "#; + + let (diagnostics, span_texts) = run_plpgsql_check_test(&test_db, setup, create_fn_sql) + .await + .expect("Failed to run plpgsql_check test"); + + assert!(!diagnostics.is_empty()); + assert!(matches!( + diagnostics[0].severity, + pgt_diagnostics::Severity::Error + )); + assert_eq!(span_texts[0].as_deref(), Some("get stacked diagnostics")); + } + + #[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] + async fn test_plpgsql_check_constant_refcursor(test_db: PgPool) { + let setup = r#" + create extension if not exists plpgsql_check; + create table rc_test(a int); + "#; + + let create_fn_sql = r#" + create function return_constant_refcursor() returns refcursor as $$ + declare + rc constant refcursor; + begin + open rc for select a from rc_test; + return rc; + end + $$ language plpgsql; + "#; + + let (diagnostics, span_texts) = run_plpgsql_check_test(&test_db, setup, create_fn_sql) + .await + .expect("Failed to run plpgsql_check test"); + + assert!(!diagnostics.is_empty()); + assert!(matches!( + diagnostics[0].severity, + pgt_diagnostics::Severity::Error + )); + assert_eq!( + span_texts[0].as_deref(), + Some("open rc for select a from rc_test;") + ); + } + + #[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] + async fn test_plpgsql_check_constant_assignment(test_db: PgPool) { + let setup = r#" + create extension if not exists plpgsql_check; + + create procedure p1(a int, out b int) + as $$ + begin + b := a + 10; + end; + $$ language plpgsql; + "#; + + let create_fn_sql = r#" + create function f1() + returns 
void as $$ + declare b constant int; + begin + call p1(10, b); + end; + $$ language plpgsql; + "#; + + let (diagnostics, span_texts) = run_plpgsql_check_test(&test_db, setup, create_fn_sql) + .await + .expect("Failed to run plpgsql_check test"); + + assert!(!diagnostics.is_empty()); + assert!(matches!( + diagnostics[0].severity, + pgt_diagnostics::Severity::Error + )); + assert_eq!(span_texts[0].as_deref(), Some("call p1(10, b);")); + } + + #[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] + async fn test_plpgsql_check_missing_procedure(test_db: PgPool) { + let setup = r#" + create extension if not exists plpgsql_check; + "#; + + let create_fn_sql = r#" + create function f1() + returns void as $$ + declare b constant int; + begin + call p1(10, b); + end; + $$ language plpgsql; + "#; + + let (diagnostics, span_texts) = run_plpgsql_check_test(&test_db, setup, create_fn_sql) + .await + .expect("Failed to run plpgsql_check test"); + + assert!(!diagnostics.is_empty()); + assert!(matches!( + diagnostics[0].severity, + pgt_diagnostics::Severity::Error + )); + assert_eq!(span_texts[0].as_deref(), Some("p1")); + } + + #[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] + async fn test_plpgsql_check_dml_in_stable_function(test_db: PgPool) { + let setup = r#" + create extension if not exists plpgsql_check; + create table t1(a int, b int); + "#; + + let create_fn_sql = r#" + create function f1() + returns void as $$ + begin + if false then + insert into t1 values(10,20); + update t1 set a = 10; + delete from t1; + end if; + end; + $$ language plpgsql stable; + "#; + + let (diagnostics, span_texts) = run_plpgsql_check_test(&test_db, setup, create_fn_sql) + .await + .expect("Failed to run plpgsql_check test"); + + assert_eq!(diagnostics.len(), 1); + assert!(span_texts[0].is_some()); + + assert_eq!(diagnostics[0].advices.code.as_deref(), Some("0A000")); + } + + #[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] + async fn 
test_plpgsql_check_record_field_assignment(test_db: PgPool) { + let setup = r#" + create extension if not exists plpgsql_check; + + create function g1() returns table(a int, b int) as $$ + begin + return query select 1, 2; + end; + $$ language plpgsql; + "#; + + let create_fn_sql = r#" + create or replace function f1() + returns void as $$ + declare r record; + begin + for r in select * from g1() + loop + r.c := 20; + end loop; + end; + $$ language plpgsql; + "#; + + let (diagnostics, span_texts) = run_plpgsql_check_test(&test_db, setup, create_fn_sql) + .await + .expect("Failed to run plpgsql_check test"); + + assert!(!diagnostics.is_empty()); + assert!(matches!( + diagnostics[0].severity, + pgt_diagnostics::Severity::Error + )); + assert!(span_texts[0].is_some()); + } + + #[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] + async fn test_plpgsql_check_trigger_basic(test_db: PgPool) { + let setup = r#" + create extension if not exists plpgsql_check; + + CREATE TABLE users( + id serial primary key, + name text not null, + email text + ); + + CREATE OR REPLACE FUNCTION public.log_user_changes() + RETURNS trigger + LANGUAGE plpgsql + AS $function$ + BEGIN + -- Intentional error: referencing non-existent column + INSERT INTO audit_log(table_name, changed_id, old_email, new_email) + VALUES ('users', NEW.id, OLD.email, NEW.email); + RETURN NEW; + END; + $function$; + + CREATE TRIGGER trg_users_audit + AFTER UPDATE ON users + FOR EACH ROW + EXECUTE FUNCTION public.log_user_changes(); + "#; + + let create_fn_sql = r#" + CREATE OR REPLACE FUNCTION public.log_user_changes() + RETURNS trigger + LANGUAGE plpgsql + AS $function$ + BEGIN + -- Intentional error: referencing non-existent column + INSERT INTO audit_log(table_name, changed_id, old_email, new_email) + VALUES ('users', NEW.id, OLD.email, NEW.email); + RETURN NEW; + END; + $function$; + "#; + + let (diagnostics, span_texts) = run_plpgsql_check_test(&test_db, setup, create_fn_sql) + .await + .expect("Failed to run 
plpgsql_check test"); + + assert!(!diagnostics.is_empty()); + assert!(matches!( + diagnostics[0].severity, + pgt_diagnostics::Severity::Error + )); + assert!(diagnostics[0].advices.relation.is_some()); + assert_eq!( + diagnostics[0].advices.relation.as_deref(), + Some("public.users") + ); + assert_eq!(span_texts[0].as_deref(), Some("audit_log")); + } + + #[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] + async fn test_plpgsql_check_trigger_missing_column(test_db: PgPool) { + let setup = r#" + create extension if not exists plpgsql_check; + + CREATE TABLE products( + id serial primary key, + name text not null, + price numeric(10,2) + ); + + CREATE OR REPLACE FUNCTION public.validate_product() + RETURNS trigger + LANGUAGE plpgsql + AS $function$ + BEGIN + -- Error: referencing non-existent column + IF NEW.category IS NULL THEN + RAISE EXCEPTION 'Category is required'; + END IF; + RETURN NEW; + END; + $function$; + + CREATE TRIGGER trg_product_validation + BEFORE INSERT OR UPDATE ON products + FOR EACH ROW + EXECUTE FUNCTION public.validate_product(); + "#; + + let create_fn_sql = r#" + CREATE OR REPLACE FUNCTION public.validate_product() + RETURNS trigger + LANGUAGE plpgsql + AS $function$ + BEGIN + -- Error: referencing non-existent column + IF NEW.category IS NULL THEN + RAISE EXCEPTION 'Category is required'; + END IF; + RETURN NEW; + END; + $function$; + "#; + + let (diagnostics, span_texts) = run_plpgsql_check_test(&test_db, setup, create_fn_sql) + .await + .expect("Failed to run plpgsql_check test"); + + assert!(!diagnostics.is_empty()); + assert!(matches!( + diagnostics[0].severity, + pgt_diagnostics::Severity::Error + )); + assert!(span_texts[0].as_deref().unwrap().contains("category")); + assert_eq!( + diagnostics[0].advices.relation.as_deref(), + Some("public.products") + ); + } + + #[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] + async fn test_plpgsql_check_trigger_multiple_tables(test_db: PgPool) { + let setup = r#" + create extension if 
not exists plpgsql_check; + + CREATE TABLE table_a( + id serial primary key, + name text + ); + + CREATE TABLE table_b( + id serial primary key, + description text + ); + + CREATE OR REPLACE FUNCTION public.generic_audit() + RETURNS trigger + LANGUAGE plpgsql + AS $function$ + BEGIN + -- Error: referencing column that doesn't exist in both tables + INSERT INTO audit_log(table_name, record_id, old_status) + VALUES (TG_TABLE_NAME, NEW.id, OLD.status); + RETURN NEW; + END; + $function$; + + CREATE TRIGGER trg_audit_a + AFTER UPDATE ON table_a + FOR EACH ROW + EXECUTE FUNCTION public.generic_audit(); + + CREATE TRIGGER trg_audit_b + AFTER UPDATE ON table_b + FOR EACH ROW + EXECUTE FUNCTION public.generic_audit(); + "#; + + let create_fn_sql = r#" + CREATE OR REPLACE FUNCTION public.generic_audit() + RETURNS trigger + LANGUAGE plpgsql + AS $function$ + BEGIN + -- Error: referencing column that doesn't exist in both tables + INSERT INTO audit_log(table_name, record_id, old_status) + VALUES (TG_TABLE_NAME, NEW.id, OLD.status); + RETURN NEW; + END; + $function$; + "#; + + let (diagnostics, _span_texts) = run_plpgsql_check_test(&test_db, setup, create_fn_sql) + .await + .expect("Failed to run plpgsql_check test"); + + assert!(!diagnostics.is_empty()); + assert!(diagnostics.len() >= 2); + + let relations: Vec<_> = diagnostics + .iter() + .filter_map(|d| d.advices.relation.as_ref()) + .collect(); + assert!(relations.contains(&&"public.table_a".to_string())); + assert!(relations.contains(&&"public.table_b".to_string())); + } +} diff --git a/crates/pgt_query/Cargo.toml b/crates/pgt_query/Cargo.toml new file mode 100644 index 000000000..881b1b800 --- /dev/null +++ b/crates/pgt_query/Cargo.toml @@ -0,0 +1,36 @@ +[package] +authors.workspace = true +categories.workspace = true +description = "" +edition.workspace = true +homepage.workspace = true +keywords.workspace = true +license.workspace = true +name = "pgt_query" +repository.workspace = true +version = "0.0.0" + 
+[dependencies] +prost = { workspace = true } +thiserror = { workspace = true } + +pgt_query_macros = { workspace = true } + + +[features] +default = ["postgres-17"] +postgres-15 = [] +postgres-16 = [] +postgres-17 = [] + +[build-dependencies] +bindgen = "0.72.0" +cc = "1.0.83" +clippy = { version = "0.0.302", optional = true } +fs_extra = "1.2.0" +glob = "0.3.1" +prost-build = "0.13.5" +which = "6.0.0" + +[dev-dependencies] +easy-parallel = "3.2.0" diff --git a/crates/pgt_query/build.rs b/crates/pgt_query/build.rs new file mode 100644 index 000000000..292b3af28 --- /dev/null +++ b/crates/pgt_query/build.rs @@ -0,0 +1,260 @@ +#![cfg_attr(feature = "clippy", feature(plugin))] +#![cfg_attr(feature = "clippy", plugin(clippy))] + +use fs_extra::dir::CopyOptions; +use glob::glob; +use std::env; +use std::path::PathBuf; +use std::process::Command; + +static LIBRARY_NAME: &str = "pg_query"; +static LIBPG_QUERY_REPO: &str = "https://github.com/pganalyze/libpg_query.git"; +fn get_libpg_query_tag() -> &'static str { + #[cfg(feature = "postgres-15")] + return "15-5.3.0"; + #[cfg(feature = "postgres-16")] + return "16-6.1.0"; + #[cfg(feature = "postgres-17")] + return "17-6.1.0"; +} + +fn main() -> Result<(), Box> { + let libpg_query_tag = get_libpg_query_tag(); + let out_dir = PathBuf::from(env::var("OUT_DIR")?); + let vendor_dir = out_dir.join("vendor"); + let libpg_query_dir = vendor_dir.join("libpg_query").join(libpg_query_tag); + let stamp_file = libpg_query_dir.join(".stamp"); + + let src_dir = PathBuf::from(env::var("CARGO_MANIFEST_DIR")?).join("src"); + let target = env::var("TARGET").unwrap(); + let is_emscripten = target.contains("emscripten"); + + // Configure cargo through stdout + println!("cargo:rustc-link-search=native={}", out_dir.display()); + println!("cargo:rustc-link-lib=static={LIBRARY_NAME}"); + + // Clone libpg_query if not already present + if !stamp_file.exists() { + println!("cargo:warning=Cloning libpg_query {}", libpg_query_tag); + + // Create 
vendor directory + std::fs::create_dir_all(&vendor_dir)?; + + // Clone the repository with partial clone for faster download + let status = Command::new("git") + .args([ + "clone", + "--filter=blob:none", + "--depth", + "1", + "--branch", + libpg_query_tag, + LIBPG_QUERY_REPO, + libpg_query_dir.to_str().unwrap(), + ]) + .status()?; + + if !status.success() { + return Err("Failed to clone libpg_query".into()); + } + + // Create stamp file + std::fs::File::create(&stamp_file)?; + } + + // Tell cargo to rerun if the stamp file is deleted + println!("cargo:rerun-if-changed={}", stamp_file.display()); + + // Copy necessary files to OUT_DIR for compilation + let out_header_path = out_dir.join(LIBRARY_NAME).with_extension("h"); + let out_protobuf_path = out_dir.join("protobuf"); + + let source_paths = vec![ + libpg_query_dir.join(LIBRARY_NAME).with_extension("h"), + libpg_query_dir.join("Makefile"), + libpg_query_dir.join("src"), + libpg_query_dir.join("protobuf"), + libpg_query_dir.join("vendor"), + ]; + + let copy_options = CopyOptions { + overwrite: true, + ..CopyOptions::default() + }; + + fs_extra::copy_items(&source_paths, &out_dir, ©_options)?; + + // Compile the C library. 
+ let mut build = cc::Build::new(); + + // Configure for Emscripten if needed + if is_emscripten { + // Use emcc as the compiler instead of gcc/clang + build.compiler("emcc"); + // Use emar as the archiver instead of ar + build.archiver("emar"); + // Note: We don't add WASM-specific flags here as this creates a static library + // The final linking flags should be added when building the final WASM module + } + + build + .files( + glob(out_dir.join("src/*.c").to_str().unwrap()) + .unwrap() + .map(|p| p.unwrap()), + ) + .files( + glob(out_dir.join("src/postgres/*.c").to_str().unwrap()) + .unwrap() + .map(|p| p.unwrap()), + ) + .file(out_dir.join("vendor/protobuf-c/protobuf-c.c")) + .file(out_dir.join("vendor/xxhash/xxhash.c")) + .file(out_dir.join("protobuf/pg_query.pb-c.c")) + .include(out_dir.join(".")) + .include(out_dir.join("./vendor")) + .include(out_dir.join("./src/postgres/include")) + .include(out_dir.join("./src/include")) + .warnings(false); // Avoid unnecessary warnings, as they are already considered as part of libpg_query development + if env::var("PROFILE").unwrap() == "debug" || env::var("DEBUG").unwrap() == "1" { + build.define("USE_ASSERT_CHECKING", None); + } + if target.contains("windows") && !is_emscripten { + build.include(out_dir.join("./src/postgres/include/port/win32")); + if target.contains("msvc") { + build.include(out_dir.join("./src/postgres/include/port/win32_msvc")); + } + } + build.compile(LIBRARY_NAME); + + // Generate bindings for Rust + let mut bindgen_builder = bindgen::Builder::default() + .header(out_header_path.to_str().ok_or("Invalid header path")?) 
+ // Allowlist only the functions we need + .allowlist_function("pg_query_parse_protobuf") + .allowlist_function("pg_query_scan") + .allowlist_function("pg_query_deparse_protobuf") + .allowlist_function("pg_query_normalize") + .allowlist_function("pg_query_fingerprint") + .allowlist_function("pg_query_split_with_parser") + .allowlist_function("pg_query_split_with_scanner") + .allowlist_function("pg_query_parse_plpgsql") + .allowlist_function("pg_query_free_protobuf_parse_result") + .allowlist_function("pg_query_free_scan_result") + .allowlist_function("pg_query_free_deparse_result") + .allowlist_function("pg_query_free_normalize_result") + .allowlist_function("pg_query_free_fingerprint_result") + .allowlist_function("pg_query_free_split_result") + .allowlist_function("pg_query_free_plpgsql_parse_result") + // Allowlist the types used by these functions + .allowlist_type("PgQueryProtobufParseResult") + .allowlist_type("PgQueryScanResult") + .allowlist_type("PgQueryError") + .allowlist_type("PgQueryProtobuf") + .allowlist_type("PgQueryDeparseResult") + .allowlist_type("PgQueryNormalizeResult") + .allowlist_type("PgQueryFingerprintResult") + .allowlist_type("PgQuerySplitResult") + .allowlist_type("PgQuerySplitStmt") + // Also generate bindings for size_t since it's used in PgQueryProtobuf + .allowlist_type("size_t") + .allowlist_var("PG_VERSION_NUM"); + + // Configure bindgen for Emscripten target + if is_emscripten { + // Tell bindgen to generate bindings for the wasm32 target + bindgen_builder = bindgen_builder.clang_arg("--target=wasm32-unknown-emscripten"); + + // Add emscripten sysroot includes + // First try to use EMSDK environment variable (set in CI and when sourcing emsdk_env.sh) + if let Ok(emsdk) = env::var("EMSDK") { + bindgen_builder = bindgen_builder.clang_arg(format!( + "-I{}/upstream/emscripten/cache/sysroot/include", + emsdk + )); + } else { + // Fallback to the default path if EMSDK is not set + bindgen_builder = + 
bindgen_builder.clang_arg("-I/emsdk/upstream/emscripten/cache/sysroot/include"); + } + + // Ensure we have the basic C standard library headers + bindgen_builder = bindgen_builder.clang_arg("-D__EMSCRIPTEN__"); + + // Use environment variable if set (from our justfile) + if let Ok(extra_args) = env::var("BINDGEN_EXTRA_CLANG_ARGS") { + for arg in extra_args.split_whitespace() { + bindgen_builder = bindgen_builder.clang_arg(arg); + } + } + } + + let bindings = bindgen_builder + .generate() + .map_err(|_| "Unable to generate bindings")?; + + let bindings_path = out_dir.join("bindings.rs"); + bindings.write_to_file(&bindings_path)?; + + // For WASM/emscripten builds, manually add the function declarations + // since bindgen sometimes misses them due to preprocessor conditions + if is_emscripten { + let mut bindings_content = std::fs::read_to_string(&bindings_path)?; + + // Check if we need to add the extern "C" block + if !bindings_content.contains("extern \"C\"") { + bindings_content.push_str("\nextern \"C\" {\n"); + bindings_content.push_str(" pub fn pg_query_scan(input: *const ::std::os::raw::c_char) -> PgQueryScanResult;\n"); + bindings_content.push_str(" pub fn pg_query_parse_protobuf(input: *const ::std::os::raw::c_char) -> PgQueryProtobufParseResult;\n"); + bindings_content.push_str(" pub fn pg_query_parse_plpgsql(input: *const ::std::os::raw::c_char) -> PgQueryPlpgsqlParseResult;\n"); + bindings_content.push_str(" pub fn pg_query_deparse_protobuf(protobuf: PgQueryProtobuf) -> PgQueryDeparseResult;\n"); + bindings_content.push_str(" pub fn pg_query_normalize(input: *const ::std::os::raw::c_char) -> PgQueryNormalizeResult;\n"); + bindings_content.push_str(" pub fn pg_query_fingerprint(input: *const ::std::os::raw::c_char) -> PgQueryFingerprintResult;\n"); + bindings_content.push_str(" pub fn pg_query_split_with_parser(input: *const ::std::os::raw::c_char) -> PgQuerySplitResult;\n"); + bindings_content.push_str(" pub fn pg_query_split_with_scanner(input: *const 
::std::os::raw::c_char) -> PgQuerySplitResult;\n"); + bindings_content + .push_str(" pub fn pg_query_free_scan_result(result: PgQueryScanResult);\n"); + bindings_content.push_str(" pub fn pg_query_free_protobuf_parse_result(result: PgQueryProtobufParseResult);\n"); + bindings_content.push_str(" pub fn pg_query_free_plpgsql_parse_result(result: PgQueryPlpgsqlParseResult);\n"); + bindings_content.push_str( + " pub fn pg_query_free_deparse_result(result: PgQueryDeparseResult);\n", + ); + bindings_content.push_str( + " pub fn pg_query_free_normalize_result(result: PgQueryNormalizeResult);\n", + ); + bindings_content.push_str( + " pub fn pg_query_free_fingerprint_result(result: PgQueryFingerprintResult);\n", + ); + bindings_content + .push_str(" pub fn pg_query_free_split_result(result: PgQuerySplitResult);\n"); + bindings_content.push_str("}\n"); + + std::fs::write(&bindings_path, bindings_content)?; + } + } + + let protoc_exists = Command::new("protoc").arg("--version").status().is_ok(); + if protoc_exists { + println!("generating protobuf bindings"); + // HACK: Set OUT_DIR to src/ so that the generated protobuf file is copied to src/protobuf.rs + unsafe { + env::set_var("OUT_DIR", &src_dir); + } + + prost_build::compile_protos( + &[&out_protobuf_path.join(LIBRARY_NAME).with_extension("proto")], + &[&out_protobuf_path], + )?; + + std::fs::rename(src_dir.join("pg_query.rs"), src_dir.join("protobuf.rs"))?; + + // Reset OUT_DIR to the original value + unsafe { + env::set_var("OUT_DIR", &out_dir); + } + } else { + println!("skipping protobuf generation"); + } + + Ok(()) +} diff --git a/crates/pgt_query/examples/api_example.rs b/crates/pgt_query/examples/api_example.rs new file mode 100644 index 000000000..d71b1c0fe --- /dev/null +++ b/crates/pgt_query/examples/api_example.rs @@ -0,0 +1,42 @@ +use pgt_query::{NodeRef, parse}; + +fn main() { + let mut result = parse("SELECT * FROM users WHERE id IN (SELECT id FROM admins)").unwrap(); + + // Immutable access + { + let stmts 
= result.stmts(); + let stmt = stmts.first().unwrap(); + + // nodes() returns a Vec + let all_nodes = stmt.nodes(); + println!("Total nodes in AST: {}", all_nodes.len()); + + // Can still iterate with iter() + let select_count = stmt + .iter() + .filter(|n| matches!(n, NodeRef::SelectStmt(_))) + .count(); + println!("Number of SELECT statements: {}", select_count); + } + + // Mutable access - no cloning needed! + { + let mut stmts = result.stmts_mut(); + if let Some(stmt) = stmts.first_mut() { + // Now we can iterate mutably without cloning + for mut_node in stmt.iter_mut() { + // Modify nodes here if needed + if let pgt_query::NodeMut::SelectStmt(_select) = mut_node { + println!("Found a SELECT statement to modify"); + // You can modify _select here + } + } + } + } + + // Alternative: using root_mut() for single statement queries + if let Some(root) = result.root_mut() { + println!("Root node type: {:?}", std::mem::discriminant(root)); + } +} diff --git a/crates/pgt_query/src/deparse.rs b/crates/pgt_query/src/deparse.rs new file mode 100644 index 000000000..91f3d4503 --- /dev/null +++ b/crates/pgt_query/src/deparse.rs @@ -0,0 +1,93 @@ +use std::ffi::CStr; +use std::os::raw::c_char; + +use crate::bindings::*; +use crate::error::*; +use crate::protobuf; + +use prost::Message; + +/// Converts a parsed tree back into a string. 
+/// +/// # Example +/// +/// ```rust +/// use pgt_query::{parse, NodeEnum, NodeRef}; +/// +/// let result = parse("INSERT INTO other (name) SELECT name FROM contacts"); +/// let result = result.unwrap(); +/// let stmts = result.stmts(); +/// let insert = stmts.first().unwrap(); +/// assert!(matches!(insert, NodeEnum::InsertStmt(_))); +/// let select = insert.iter().find(|n| matches!(n, NodeRef::SelectStmt(_))).unwrap(); +/// +/// // The entire parse result can be deparsed: +/// assert_eq!(result.deparse().unwrap(), "INSERT INTO other (name) SELECT name FROM contacts"); +/// // Or an individual node can be deparsed: +/// assert_eq!(insert.deparse().unwrap(), "INSERT INTO other (name) SELECT name FROM contacts"); +/// assert_eq!(select.deparse().unwrap(), "SELECT name FROM contacts"); +/// ``` +/// +/// Note that this function will panic if called on a node not defined in `deparseStmt` +pub fn deparse(protobuf: &protobuf::ParseResult) -> Result { + let buffer = protobuf.encode_to_vec(); + let len = buffer.len(); + let data = buffer.as_ptr() as *const c_char as *mut c_char; + let protobuf = PgQueryProtobuf { data, len }; + let result = unsafe { pg_query_deparse_protobuf(protobuf) }; + + let deparse_result = if !result.error.is_null() { + let message = unsafe { CStr::from_ptr((*result.error).message) } + .to_string_lossy() + .to_string(); + Err(Error::Parse(message)) + } else { + let query = unsafe { CStr::from_ptr(result.query) } + .to_string_lossy() + .to_string(); + Ok(query) + }; + + unsafe { pg_query_free_deparse_result(result) }; + deparse_result +} + +#[cfg(test)] +mod tests { + use crate::parse; + + fn assert_deparse(input: &str, output: &str) { + let result = parse(input).unwrap(); + assert_eq!(result.deparse().unwrap(), output); + } + + #[test] + fn it_deparses_select() { + let query = "SELECT a AS b FROM x WHERE y = 5 AND z = y"; + assert_deparse(query, query); + } + + #[test] + fn it_deparses_select_with_empty_target_list() { + let query = "SELECT FROM x 
WHERE y = 5 AND z = y"; + assert_deparse(query, query); + } + + #[test] + fn it_deparses_select_with_schema() { + let query = "SELECT a AS b FROM public.x WHERE y = 5 AND z = y"; + assert_deparse(query, query); + } + + #[test] + fn it_deparses_select_with_distinct() { + let query = "SELECT DISTINCT a, b, * FROM c WHERE d = e"; + assert_deparse(query, query); + } + + #[test] + fn it_deparses_select_with_distinct_on() { + let query = "SELECT DISTINCT ON (a) a, b FROM c"; + assert_deparse(query, query); + } +} diff --git a/crates/pgt_query/src/error.rs b/crates/pgt_query/src/error.rs new file mode 100644 index 000000000..50845b44c --- /dev/null +++ b/crates/pgt_query/src/error.rs @@ -0,0 +1,23 @@ +use thiserror::Error; + +/// Error structure representing the basic error scenarios for `pg_query`. +#[derive(Debug, Error, Eq, PartialEq)] +pub enum Error { + #[error("Invalid statement format: {0}")] + Conversion(#[from] std::ffi::NulError), + #[error("Error decoding result: {0}")] + Decode(#[from] prost::DecodeError), + #[error("Invalid statement: {0}")] + Parse(String), + #[error("Error parsing JSON: {0}")] + InvalidJson(String), + #[error("Invalid pointer")] + InvalidPointer, + #[error("Error scanning: {0}")] + Scan(String), + #[error("Error splitting: {0}")] + Split(String), +} + +/// Convenient Result alias for returning `pg_query::Error`. +pub type Result = core::result::Result; diff --git a/crates/pgt_query/src/fingerprint.rs b/crates/pgt_query/src/fingerprint.rs new file mode 100644 index 000000000..127b6ca6a --- /dev/null +++ b/crates/pgt_query/src/fingerprint.rs @@ -0,0 +1,359 @@ +use std::ffi::{CStr, CString}; + +use crate::bindings::*; +use crate::error::*; + +/// Represents the resulting fingerprint containing both the raw integer form as well as the +/// corresponding 16 character hex value. +pub struct Fingerprint { + pub value: u64, + pub hex: String, +} + +/// Fingerprints the given SQL statement. 
Useful for comparing parse trees across different implementations +/// of `libpg_query`. +/// +/// # Example +/// +/// ```rust +/// let result = pgt_query::fingerprint("SELECT * FROM contacts WHERE name='Paul'"); +/// assert!(result.is_ok()); +/// let result = result.unwrap(); +/// assert_eq!(result.hex, "0e2581a461ece536"); +/// ``` +pub fn fingerprint(statement: &str) -> Result { + let input = CString::new(statement)?; + let result = unsafe { pg_query_fingerprint(input.as_ptr()) }; + let fingerprint = if !result.error.is_null() { + let message = unsafe { CStr::from_ptr((*result.error).message) } + .to_string_lossy() + .to_string(); + Err(Error::Parse(message)) + } else { + let hex = unsafe { CStr::from_ptr(result.fingerprint_str) }; + Ok(Fingerprint { + value: result.fingerprint, + hex: hex.to_string_lossy().to_string(), + }) + }; + unsafe { pg_query_free_fingerprint_result(result) }; + fingerprint +} + +#[cfg(test)] +mod tests { + use crate::{Error, fingerprint}; + + #[test] + fn it_can_fingerprint_a_simple_statement() { + let result = + fingerprint("SELECT * FROM contacts.person WHERE id IN (1, 2, 3, 4);").unwrap(); + assert_eq!(result.hex, "643d2a3c294ab8a7"); + } + + #[test] + fn it_will_error_on_invalid_input() { + let error = fingerprint("CREATE RANDOM ix_test ON contacts.person;") + .err() + .unwrap(); + assert_eq!( + error, + Error::Parse("syntax error at or near \"RANDOM\"".into()) + ); + } + + #[test] + fn it_works_for_multi_statement_queries() { + let q1 = "SET x=$1; SELECT A"; + let q2 = "SET x=$1; SELECT a"; + assert_eq!(fingerprint(q1).unwrap().hex, fingerprint(q2).unwrap().hex); + + let q1 = "SET x=$1; SELECT A"; + let q2 = "SELECT a"; + assert_ne!(fingerprint(q1).unwrap().hex, fingerprint(q2).unwrap().hex); + } + + #[test] + fn it_ignores_aliases() { + let q1 = "SELECT a AS b"; + let q2 = "SELECT a AS c"; + assert_eq!(fingerprint(q1).unwrap().hex, fingerprint(q2).unwrap().hex); + + let q1 = "SELECT a"; + let q2 = "SELECT a AS c"; + 
assert_eq!(fingerprint(q1).unwrap().hex, fingerprint(q2).unwrap().hex); + + let q1 = "SELECT * FROM a AS b"; + let q2 = "SELECT * FROM a AS c"; + assert_eq!(fingerprint(q1).unwrap().hex, fingerprint(q2).unwrap().hex); + + let q1 = "SELECT * FROM a"; + let q2 = "SELECT * FROM a AS c"; + assert_eq!(fingerprint(q1).unwrap().hex, fingerprint(q2).unwrap().hex); + + let q1 = "SELECT * FROM (SELECT * FROM x AS y) AS a"; + let q2 = "SELECT * FROM (SELECT * FROM x AS z) AS b"; + assert_eq!(fingerprint(q1).unwrap().hex, fingerprint(q2).unwrap().hex); + + let q1 = "SELECT a AS b UNION SELECT x AS y"; + let q2 = "SELECT a AS c UNION SELECT x AS z"; + assert_eq!(fingerprint(q1).unwrap().hex, fingerprint(q2).unwrap().hex); + } + + #[test] + fn it_ignores_param_references() { + let q1 = "SELECT $1"; + let q2 = "SELECT $2"; + assert_eq!(fingerprint(q1).unwrap().hex, fingerprint(q2).unwrap().hex); + } + + #[test] + fn it_ignores_select_target_list_ordering() { + let q1 = "SELECT a, b FROM x"; + let q2 = "SELECT b, a FROM x"; + assert_eq!(fingerprint(q1).unwrap().hex, fingerprint(q2).unwrap().hex); + let q1 = "SELECT $1, b FROM x"; + let q2 = "SELECT b, $1 FROM x"; + assert_eq!(fingerprint(q1).unwrap().hex, fingerprint(q2).unwrap().hex); + let q1 = "SELECT $1, $2, b FROM x"; + let q2 = "SELECT $1, b, $2 FROM x"; + assert_eq!(fingerprint(q1).unwrap().hex, fingerprint(q2).unwrap().hex); + + // Testing uniqueness + let q1 = "SELECT a, c FROM x"; + let q2 = "SELECT b, a FROM x"; + assert_ne!(fingerprint(q1).unwrap().hex, fingerprint(q2).unwrap().hex); + let q1 = "SELECT b FROM x"; + let q2 = "SELECT b, a FROM x"; + assert_ne!(fingerprint(q1).unwrap().hex, fingerprint(q2).unwrap().hex); + } + + #[test] + fn it_ignores_insert_col_ordering() { + let q1 = "INSERT INTO test (a, b) VALUES ($1, $2)"; + let q2 = "INSERT INTO test (b, a) VALUES ($1, $2)"; + assert_eq!(fingerprint(q1).unwrap().hex, fingerprint(q2).unwrap().hex); + + // Testing uniqueness + let q1 = "INSERT INTO test (a, c) VALUES 
($1, $2)"; + let q2 = "INSERT INTO test (b, a) VALUES ($1, $2)"; + assert_ne!(fingerprint(q1).unwrap().hex, fingerprint(q2).unwrap().hex); + let q1 = "INSERT INTO test (b) VALUES ($1, $2)"; + let q2 = "INSERT INTO test (b, a) VALUES ($1, $2)"; + assert_ne!(fingerprint(q1).unwrap().hex, fingerprint(q2).unwrap().hex); + } + + #[test] + fn it_ignores_in_list_size() { + let q1 = "SELECT * FROM x WHERE y IN ($1, $2, $3)"; + let q2 = "SELECT * FROM x WHERE y IN ($1)"; + assert_eq!(fingerprint(q1).unwrap().hex, fingerprint(q2).unwrap().hex); + + let q1 = "SELECT * FROM x WHERE y IN ( $1::uuid, $2::uuid, $3::uuid )"; + let q2 = "SELECT * FROM x WHERE y IN ( $1::uuid )"; + assert_eq!(fingerprint(q1).unwrap().hex, fingerprint(q2).unwrap().hex); + } + + #[test] + fn it_works() { + let result = fingerprint("SELECT 1").unwrap(); + assert_eq!(result.hex, "50fde20626009aba"); + + let result = fingerprint("SELECT 2").unwrap(); + assert_eq!(result.hex, "50fde20626009aba"); + + let result = fingerprint("SELECT $1").unwrap(); + assert_eq!(result.hex, "50fde20626009aba"); + + let result = fingerprint("SELECT 1; SELECT a FROM b").unwrap(); + assert_eq!(result.hex, "3efa3b10d558d06d"); + + let result = fingerprint("SELECT COUNT(DISTINCT id), * FROM targets WHERE something IS NOT NULL AND elsewhere::interval < now()").unwrap(); + assert_eq!(result.hex, "26b6553101185d22"); + + let result = fingerprint("INSERT INTO test (a, b) VALUES ($1, $2)").unwrap(); + assert_eq!(result.hex, "51e63b8083b48bdd"); + + let result = fingerprint("INSERT INTO test (b, a) VALUES ($1, $2)").unwrap(); + assert_eq!(result.hex, "51e63b8083b48bdd"); + + let result = fingerprint( + "INSERT INTO test (a, b) VALUES (ARRAY[$1, $2, $3, $4], $5::timestamptz), (ARRAY[$6, $7, $8, $9], $10::timestamptz), ($11, $12::timestamptz)", + ) + .unwrap(); + assert_eq!(result.hex, "4dfdd5260cac5acf"); + + let result = fingerprint("SELECT b AS x, a AS y FROM z").unwrap(); + assert_eq!(result.hex, "1a8bf5d7614de3a5"); + + let result 
= fingerprint("SELECT * FROM x WHERE y = $1").unwrap(); + assert_eq!(result.hex, "4ff39426bd074231"); + + let result = fingerprint("SELECT * FROM x WHERE y = ANY ($1)").unwrap(); + assert_eq!(result.hex, "4ff39426bd074231"); + + let result = fingerprint("SELECT * FROM x WHERE y IN ($1)").unwrap(); + assert_eq!(result.hex, "4ff39426bd074231"); + + let result = fingerprint("SELECT * FROM x WHERE y IN ($1, $2, $3)").unwrap(); + assert_eq!(result.hex, "4ff39426bd074231"); + + let result = fingerprint("SELECT * FROM x WHERE y IN ( $1::uuid )").unwrap(); + assert_eq!(result.hex, "4ff39426bd074231"); + + let result = + fingerprint("SELECT * FROM x WHERE y IN ( $1::uuid, $2::uuid, $3::uuid )").unwrap(); + assert_eq!(result.hex, "4ff39426bd074231"); + + let result = fingerprint("PREPARE a123 AS SELECT a").unwrap(); + assert_eq!(result.hex, "9b5e6ead8be993e8"); + + let result = fingerprint("EXECUTE a123").unwrap(); + assert_eq!(result.hex, "44ef1d2beabd53e8"); + + let result = fingerprint("DEALLOCATE a123").unwrap(); + assert_eq!(result.hex, "d8a65a814fbc5f95"); + + let result = fingerprint("DEALLOCATE ALL").unwrap(); + assert_eq!(result.hex, "2debfb8745df64a7"); + + let result = fingerprint("EXPLAIN ANALYZE SELECT a").unwrap(); + assert_eq!(result.hex, "82845c1b5c6102e5"); + + let result = + fingerprint("WITH a AS (SELECT * FROM x WHERE x.y = $1 AND x.z = 1) SELECT * FROM a") + .unwrap(); + assert_eq!(result.hex, "6831e38bbb3dd18c"); + + let result = + fingerprint("CREATE TABLE types (a float(2), b float(49), c NUMERIC(2, 3), d character(4), e char(5), f varchar(6), g character varying(7))") + .unwrap(); + assert_eq!(result.hex, "008d6ba4aa0f4c6e"); + + let result = + fingerprint("CREATE VIEW view_a (a, b) AS WITH RECURSIVE view_a (a, b) AS (SELECT * FROM a(1)) SELECT \"a\", \"b\" FROM \"view_a\"").unwrap(); + assert_eq!(result.hex, "c6ef6b9f498feda4"); + + let result = fingerprint("VACUUM FULL my_table").unwrap(); + assert_eq!(result.hex, "fdf2f4127644f4d8"); + + let 
result = fingerprint("SELECT * FROM x AS a, y AS b").unwrap(); + assert_eq!(result.hex, "4e9acae841dae228"); + + let result = fingerprint("SELECT * FROM y AS a, x AS b").unwrap(); + assert_eq!(result.hex, "4e9acae841dae228"); + + let result = fingerprint("SELECT x AS a, y AS b FROM x").unwrap(); + assert_eq!(result.hex, "65dff5f5e9a643ad"); + + let result = fingerprint("SELECT y AS a, x AS b FROM x").unwrap(); + assert_eq!(result.hex, "65dff5f5e9a643ad"); + + let result = fingerprint("SELECT x, y FROM z").unwrap(); + assert_eq!(result.hex, "330267237da5535f"); + + let result = fingerprint("SELECT y, x FROM z").unwrap(); + assert_eq!(result.hex, "330267237da5535f"); + + let result = fingerprint("INSERT INTO films (code, title, did) VALUES ('UA502', 'Bananas', 105), ('T_601', 'Yojimbo', DEFAULT)").unwrap(); + assert_eq!(result.hex, "459fdc70778b841e"); + + let result = + fingerprint("INSERT INTO films (code, title, did) VALUES ($1, $2, $3)").unwrap(); + assert_eq!(result.hex, "459fdc70778b841e"); + + let result = fingerprint("SELECT * FROM a").unwrap(); + assert_eq!(result.hex, "fcf44da7b597ef43"); + + let result = fingerprint("SELECT * FROM a AS b").unwrap(); + assert_eq!(result.hex, "fcf44da7b597ef43"); + + let result = + fingerprint("UPDATE users SET one_thing = $1, second_thing = $2 WHERE users.id = $1") + .unwrap(); + assert_eq!(result.hex, "a0ea386c1cfd1e69"); + + let result = + fingerprint("UPDATE users SET something_else = $1 WHERE users.id = $1").unwrap(); + assert_eq!(result.hex, "3172bc3e0d631d55"); + + let result = fingerprint("UPDATE users SET something_else = (SELECT a FROM x WHERE uid = users.id LIMIT 1) WHERE users.id = $1").unwrap(); + assert_eq!(result.hex, "f1127a8b91fbecbf"); + + let result = fingerprint("SAVEPOINT some_id").unwrap(); + assert_eq!(result.hex, "8ebd566ea1bf947b"); + + let result = fingerprint("RELEASE some_id").unwrap(); + assert_eq!(result.hex, "60d618658252d2af"); + + let result = fingerprint("PREPARE TRANSACTION 
'some_id'").unwrap(); + assert_eq!(result.hex, "d993959a33d627d4"); + + let result = fingerprint("START TRANSACTION READ WRITE").unwrap(); + assert_eq!(result.hex, "4ca25828c835d55a"); + + let result = + fingerprint("DECLARE cursor_123 CURSOR FOR SELECT * FROM test WHERE id = 123").unwrap(); + assert_eq!(result.hex, "d2bec62d2a7ec7cb"); + + let result = fingerprint("FETCH 1000 FROM cursor_123").unwrap(); + assert_eq!(result.hex, "37f4d2f6a957ae48"); + + let result = fingerprint("CLOSE cursor_123").unwrap(); + assert_eq!(result.hex, "2c7963684fc2bad9"); + + let result = fingerprint("-- nothing").unwrap(); + assert_eq!(result.hex, "d8d13f8b2da6c9ad"); + + let result = fingerprint("CREATE FOREIGN TABLE ft1 () SERVER no_server").unwrap(); + assert_eq!(result.hex, "74481c4af7c76be1"); + + let result = fingerprint("UPDATE x SET a = 1, b = 2, c = 3").unwrap(); + assert_eq!(result.hex, "fd5c248c0e642ce4"); + + let result = fingerprint("UPDATE x SET z = now()").unwrap(); + assert_eq!(result.hex, "a222eaabaa1e7cb1"); + + let result = fingerprint( + "CREATE TEMPORARY TABLE my_temp_table (test_id integer NOT NULL) ON COMMIT DROP", + ) + .unwrap(); + assert_eq!(result.hex, "1407ed5c5bb00967"); + + let result = fingerprint("CREATE TEMPORARY TABLE my_temp_table AS SELECT 1").unwrap(); + assert_eq!(result.hex, "695ebe73a3abc45c"); + + let result = fingerprint("SELECT INTERVAL (0) $2").unwrap(); + assert_eq!(result.hex, "50fde20626009aba"); + + let result = fingerprint("SELECT INTERVAL (2) $2").unwrap(); + assert_eq!(result.hex, "50fde20626009aba"); + + let result = fingerprint("SELECT * FROM t WHERE t.a IN (1, 2) AND t.b = 3").unwrap(); + assert_eq!(result.hex, "346aea01be9173b6"); + + let result = fingerprint("SELECT * FROM t WHERE t.b = 3 AND t.a IN (1, 2)").unwrap(); + assert_eq!(result.hex, "346aea01be9173b6"); + + let result = fingerprint("SELECT * FROM t WHERE a && '[1,2]'").unwrap(); + assert_eq!(result.hex, "673f199f13dfe665"); + + let result = fingerprint("SELECT * FROM t 
WHERE a && '[1,2]'::int4range").unwrap(); + assert_eq!(result.hex, "673f199f13dfe665"); + + let result = fingerprint("SELECT * FROM t_20210301_x").unwrap(); + assert_eq!(result.hex, "6f8169980cd70a25"); + + let result = fingerprint("SELECT * FROM t_20210302_x").unwrap(); + assert_eq!(result.hex, "6f8169980cd70a25"); + + let result = fingerprint("SELECT * FROM t_20210302_y").unwrap(); + assert_eq!(result.hex, "d357dac4a24fcf1b"); + + let result = fingerprint("SELECT * FROM t_1").unwrap(); + assert_eq!(result.hex, "018bd9230646143e"); + + let result = fingerprint("SELECT * FROM t_2").unwrap(); + assert_eq!(result.hex, "3f1444da570c1a66"); + } +} diff --git a/crates/pgt_query/src/iter_mut.rs b/crates/pgt_query/src/iter_mut.rs new file mode 100644 index 000000000..fe5e88068 --- /dev/null +++ b/crates/pgt_query/src/iter_mut.rs @@ -0,0 +1 @@ +pgt_query_macros::iter_mut_codegen!(); diff --git a/crates/pgt_query/src/iter_ref.rs b/crates/pgt_query/src/iter_ref.rs new file mode 100644 index 000000000..6ac4f220a --- /dev/null +++ b/crates/pgt_query/src/iter_ref.rs @@ -0,0 +1 @@ +pgt_query_macros::iter_ref_codegen!(); diff --git a/crates/pgt_query/src/lib.rs b/crates/pgt_query/src/lib.rs new file mode 100644 index 000000000..e89817196 --- /dev/null +++ b/crates/pgt_query/src/lib.rs @@ -0,0 +1,91 @@ +mod deparse; +mod error; +mod fingerprint; +mod iter_mut; +mod iter_ref; +mod node_enum; +mod node_mut; +mod node_ref; +mod node_structs; +mod normalize; +mod parse; +mod plpgsql; +mod scan; +mod split; + +pub use deparse::*; +pub use error::*; +pub use fingerprint::*; +pub use iter_mut::*; +pub use iter_ref::*; +pub use node_enum::*; +pub use node_mut::*; +pub use node_ref::*; +pub use normalize::*; +pub use parse::*; +pub use plpgsql::*; +pub use scan::*; +pub use split::*; + +pub use protobuf::Node; + +// Include the generated bindings with 2024 edition compatibility +#[allow(non_upper_case_globals)] +#[allow(non_camel_case_types)] +#[allow(non_snake_case)] +#[allow(dead_code)] 
+#[allow(improper_ctypes)] +#[allow(unsafe_op_in_unsafe_fn)] +mod bindings { + include!(concat!(env!("OUT_DIR"), "/bindings.rs")); +} + +// Include the generated protobuf code +#[allow(clippy::all)] +pub mod protobuf { + include!(concat!(env!("CARGO_MANIFEST_DIR"), "/src/protobuf.rs")); +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn it_does_not_error_when_run_in_parallel() { + use easy_parallel::Parallel; + + let mut queries = vec![]; + for _ in 0..100 { + queries.push( + r#" + SELECT * FROM "t0" + JOIN "t1" ON (1) JOIN "t2" ON (1) JOIN "t3" ON (1) JOIN "t4" ON (1) JOIN "t5" ON (1) + JOIN "t6" ON (1) JOIN "t7" ON (1) JOIN "t8" ON (1) JOIN "t9" ON (1) JOIN "t10" ON (1) + JOIN "t11" ON (1) JOIN "t12" ON (1) JOIN "t13" ON (1) JOIN "t14" ON (1) JOIN "t15" ON (1) + JOIN "t16" ON (1) JOIN "t17" ON (1) JOIN "t18" ON (1) JOIN "t19" ON (1) JOIN "t20" ON (1) + JOIN "t21" ON (1) JOIN "t22" ON (1) JOIN "t23" ON (1) JOIN "t24" ON (1) JOIN "t25" ON (1) + JOIN "t26" ON (1) JOIN "t27" ON (1) JOIN "t28" ON (1) JOIN "t29" ON (1) + "#, + ); + queries.push( + " + SELECT memory_total_bytes, memory_free_bytes, memory_pagecache_bytes, memory_buffers_bytes, memory_applications_bytes, + (memory_swap_total_bytes - memory_swap_free_bytes) AS swap, date_part($0, s.collected_at) AS collected_at + FROM snapshots s JOIN system_snapshots ON (snapshot_id = s.id) + WHERE s.database_id = $0 AND s.collected_at BETWEEN $0 AND $0 + ORDER BY collected_at + ", + ); + } + + Parallel::new() + .each(queries, |query| { + for _ in 0..100 { + let _result = parse(query).unwrap(); + fingerprint(query).unwrap(); + normalize(query).unwrap(); + } + }) + .run(); + } +} diff --git a/crates/pgt_query/src/node_enum.rs b/crates/pgt_query/src/node_enum.rs new file mode 100644 index 000000000..5d5b6bf71 --- /dev/null +++ b/crates/pgt_query/src/node_enum.rs @@ -0,0 +1,33 @@ +use crate::*; + +use protobuf::Node; +pub use protobuf::node::Node as NodeEnum; + +pgt_query_macros::node_enum_codegen!(); + +impl 
NodeEnum { + pub fn deparse(&self) -> Result { + crate::deparse(&protobuf::ParseResult { + version: crate::bindings::PG_VERSION_NUM as i32, + stmts: vec![protobuf::RawStmt { + stmt: Some(Box::new(Node { + node: Some(self.clone()), + })), + stmt_location: 0, + stmt_len: 0, + }], + }) + } + + pub fn nodes(&self) -> Vec> { + self.iter().collect() + } + + pub fn iter(&self) -> NodeRefIterator<'_> { + NodeRefIterator::new(self.to_ref()) + } + + pub fn iter_mut(&mut self) -> NodeMutIterator { + NodeMutIterator::new(self.to_mut()) + } +} diff --git a/crates/pgt_query/src/node_mut.rs b/crates/pgt_query/src/node_mut.rs new file mode 100644 index 000000000..f2da254b2 --- /dev/null +++ b/crates/pgt_query/src/node_mut.rs @@ -0,0 +1,26 @@ +use protobuf::Node; + +pgt_query_macros::node_mut_codegen!(); + +impl NodeMut { + pub fn deparse(&self) -> Result { + crate::deparse(&protobuf::ParseResult { + version: crate::bindings::PG_VERSION_NUM as i32, + stmts: vec![protobuf::RawStmt { + stmt: Some(Box::new(Node { + node: Some(self.to_enum()?), + })), + stmt_location: 0, + stmt_len: 0, + }], + }) + } + + pub fn nodes_mut(&self) -> Vec { + self.iter_mut().collect() + } + + pub fn iter_mut(&self) -> NodeMutIterator { + NodeMutIterator::new(*self) + } +} diff --git a/crates/pgt_query/src/node_ref.rs b/crates/pgt_query/src/node_ref.rs new file mode 100644 index 000000000..603913cb7 --- /dev/null +++ b/crates/pgt_query/src/node_ref.rs @@ -0,0 +1,26 @@ +use protobuf::Node; + +pgt_query_macros::node_ref_codegen!(); + +impl<'a> NodeRef<'a> { + pub fn deparse(&self) -> Result { + crate::deparse(&protobuf::ParseResult { + version: crate::bindings::PG_VERSION_NUM as i32, + stmts: vec![protobuf::RawStmt { + stmt: Some(Box::new(Node { + node: Some(self.to_enum()), + })), + stmt_location: 0, + stmt_len: 0, + }], + }) + } + + pub fn nodes(&self) -> Vec> { + self.iter().collect() + } + + pub fn iter(&self) -> NodeRefIterator<'a> { + NodeRefIterator::new(*self) + } +} diff --git 
a/crates/pgt_query/src/node_structs.rs b/crates/pgt_query/src/node_structs.rs new file mode 100644 index 000000000..8b81c98e5 --- /dev/null +++ b/crates/pgt_query/src/node_structs.rs @@ -0,0 +1,16 @@ +use protobuf::Node; + +pgt_query_macros::node_structs_codegen!(); + +impl Node { + pub fn deparse(&self) -> Result { + crate::deparse(&protobuf::ParseResult { + version: crate::bindings::PG_VERSION_NUM as i32, + stmts: vec![protobuf::RawStmt { + stmt: Some(Box::new(self.clone())), + stmt_location: 0, + stmt_len: 0, + }], + }) + } +} diff --git a/crates/pgt_query/src/normalize.rs b/crates/pgt_query/src/normalize.rs new file mode 100644 index 000000000..71ff683c7 --- /dev/null +++ b/crates/pgt_query/src/normalize.rs @@ -0,0 +1,136 @@ +use std::ffi::{CStr, CString}; + +use crate::bindings::*; +use crate::error::*; + +/// Normalizes the given SQL statement, returning a parametized version. +/// +/// # Example +/// +/// ```rust +/// let result = pgt_query::normalize("SELECT * FROM contacts WHERE name='Paul'"); +/// assert!(result.is_ok()); +/// let result = result.unwrap(); +/// assert_eq!(result, "SELECT * FROM contacts WHERE name=$1"); +/// ``` +pub fn normalize(statement: &str) -> Result { + let input = CString::new(statement).unwrap(); + let result = unsafe { pg_query_normalize(input.as_ptr()) }; + let normalized_query = if !result.error.is_null() { + let message = unsafe { CStr::from_ptr((*result.error).message) } + .to_string_lossy() + .to_string(); + Err(Error::Parse(message)) + } else { + let n = unsafe { CStr::from_ptr(result.normalized_query) }; + Ok(n.to_string_lossy().to_string()) + }; + unsafe { pg_query_free_normalize_result(result) }; + normalized_query +} + +#[cfg(test)] +mod tests { + use crate::{Error, normalize}; + + #[test] + fn it_normalizes_simple_query() { + let result = normalize("SELECT 1").unwrap(); + assert_eq!(result, "SELECT $1"); + } + + #[test] + fn it_normalizes_in() { + let result = + normalize("SELECT 1 FROM x WHERE y = 12561 AND z = '124' 
AND b IN (1, 2, 3)").unwrap(); + assert_eq!( + result, + "SELECT $1 FROM x WHERE y = $2 AND z = $3 AND b IN ($4, $5, $6)" + ); + } + + #[test] + fn it_errors_on_invalid_input() { + let error = normalize("CREATE RANDOM ix_test ON contacts.person;") + .err() + .unwrap(); + assert_eq!( + error, + Error::Parse("syntax error at or near \"RANDOM\"".into()) + ); + } + + #[test] + fn it_normalizes_subselects() { + let result = + normalize("SELECT 1 FROM x WHERE y = (SELECT 123 FROM a WHERE z = 'bla')").unwrap(); + assert_eq!( + result, + "SELECT $1 FROM x WHERE y = (SELECT $2 FROM a WHERE z = $3)" + ); + } + + #[test] + fn it_normalizes_any() { + let result = normalize("SELECT * FROM x WHERE y = ANY(array[1, 2])").unwrap(); + assert_eq!(result, "SELECT * FROM x WHERE y = ANY(array[$1, $2])"); + + let result = normalize("SELECT * FROM x WHERE y = ANY(SELECT 1)").unwrap(); + assert_eq!(result, "SELECT * FROM x WHERE y = ANY(SELECT $1)"); + } + + #[test] + fn it_normalizes_complicated_strings() { + let result = normalize("SELECT U&'d\\0061t\\+000061' FROM x").unwrap(); + assert_eq!(result, "SELECT $1 FROM x"); + + let result = normalize("SELECT u&'d\\0061t\\+000061' FROM x").unwrap(); + assert_eq!(result, "SELECT $1 FROM x"); + + let result = normalize("SELECT * FROM x WHERE z NOT LIKE E'abc'AND TRUE").unwrap(); + assert_eq!(result, "SELECT * FROM x WHERE z NOT LIKE $1AND $2"); + + let result = normalize("SELECT U&'d\\0061t\\+000061'-- comment\nFROM x").unwrap(); + assert_eq!(result, "SELECT $1-- comment\nFROM x"); + } + + #[test] + fn it_normalizes_copy() { + let result = normalize("COPY (SELECT * FROM t WHERE id IN ('1', '2')) TO STDOUT").unwrap(); + assert_eq!( + result, + "COPY (SELECT * FROM t WHERE id IN ($1, $2)) TO STDOUT" + ); + } + + #[test] + fn it_normalizes_set() { + let result = normalize("SET test=123").unwrap(); + assert_eq!(result, "SET test=$1"); + + let result = normalize("SET CLIENT_ENCODING = UTF8").unwrap(); + assert_eq!(result, "SET CLIENT_ENCODING = 
$1"); + } + + #[test] + fn it_does_not_error_on_deallocate() { + let result = normalize("DEALLOCATE bla; SELECT 1").unwrap(); + assert_eq!(result, "DEALLOCATE bla; SELECT $1"); + } + + #[test] + fn it_normalizes_explain() { + let result = normalize("EXPLAIN SELECT x FROM y WHERE z = 1").unwrap(); + assert_eq!(result, "EXPLAIN SELECT x FROM y WHERE z = $1"); + } + + #[test] + fn it_normalizes_declare_curson() { + let result = + normalize("DECLARE cursor_b CURSOR FOR SELECT * FROM databases WHERE id = 23").unwrap(); + assert_eq!( + result, + "DECLARE cursor_b CURSOR FOR SELECT * FROM databases WHERE id = $1" + ); + } +} diff --git a/crates/pgt_query/src/parse.rs b/crates/pgt_query/src/parse.rs new file mode 100644 index 000000000..5853dfbc2 --- /dev/null +++ b/crates/pgt_query/src/parse.rs @@ -0,0 +1,149 @@ +use std::ffi::{CStr, CString}; + +use crate::NodeEnum; +use crate::bindings::*; +use crate::error::*; +use crate::protobuf; + +use prost::Message; + +/// Parses the given SQL statement into the given abstract syntax tree. 
+/// +/// # Example +/// +/// ```rust +/// use pgt_query::parse; +/// +/// let result = parse("SELECT * FROM contacts"); +/// assert!(result.is_ok()); +/// let result = result.unwrap(); +/// assert_eq!(result.protobuf.stmts.len(), 1); +/// ``` +pub fn parse(statement: &str) -> Result { + let input = CString::new(statement)?; + let result = unsafe { pg_query_parse_protobuf(input.as_ptr()) }; + let parse_result = if !result.error.is_null() { + let message = unsafe { CStr::from_ptr((*result.error).message) } + .to_string_lossy() + .to_string(); + Err(Error::Parse(message)) + } else { + let data = unsafe { + std::slice::from_raw_parts( + result.parse_tree.data as *const u8, + result.parse_tree.len as usize, + ) + }; + let stderr = unsafe { CStr::from_ptr(result.stderr_buffer) } + .to_string_lossy() + .to_string(); + protobuf::ParseResult::decode(data) + .map_err(Error::Decode) + .map(|result| ParseResult::new(result, stderr)) + }; + unsafe { pg_query_free_protobuf_parse_result(result) }; + parse_result +} + +/// The result of parsing a SQL query +#[derive(Debug)] +pub struct ParseResult { + /// The parsed protobuf result + pub protobuf: protobuf::ParseResult, + /// Warnings captured during parsing + pub warnings: Vec, +} + +impl ParseResult { + /// Create a new ParseResult + pub fn new(protobuf: protobuf::ParseResult, stderr: String) -> Self { + let warnings = stderr + .lines() + .filter_map(|l| { + if l.starts_with("WARNING") { + Some(l.trim().into()) + } else { + None + } + }) + .collect(); + + Self { protobuf, warnings } + } + + pub fn deparse(&self) -> Result { + crate::deparse(&self.protobuf) + } + + pub fn stmts(&self) -> Vec<&NodeEnum> { + self.protobuf + .stmts + .iter() + .filter_map(|s| s.stmt.as_ref().and_then(|s| s.node.as_ref())) + .collect() + } + + pub fn stmts_mut(&mut self) -> Vec<&mut NodeEnum> { + self.protobuf + .stmts + .iter_mut() + .filter_map(|s| s.stmt.as_mut().and_then(|s| s.node.as_mut())) + .collect() + } + + /// Returns a reference to the 
root node of the parse tree. + /// + /// Returns None if there is not exactly one statement in the parse result. + pub fn root(&self) -> Option<&NodeEnum> { + if self.protobuf.stmts.len() != 1 { + return None; + } + + // Get the first (and only) statement + let raw_stmt = &self.protobuf.stmts[0]; + + // Navigate: RawStmt -> Node -> NodeEnum + raw_stmt.stmt.as_ref().and_then(|stmt| stmt.node.as_ref()) + } + + /// Consumes the ParseResult and returns the root node of the parse tree. + /// + /// Returns None if there is not exactly one statement in the parse result. + /// This method avoids cloning by taking ownership of the ParseResult. + pub fn into_root(self) -> Option { + if self.protobuf.stmts.len() != 1 { + return None; + } + + // Extract the first (and only) statement by taking ownership + let raw_stmt = self.protobuf.stmts.into_iter().next()?; + + // Navigate: RawStmt -> Node -> NodeEnum + raw_stmt.stmt.and_then(|stmt| stmt.node) + } + + /// Returns a mutable reference to the root node of the parse tree. + /// + /// Returns None if there is not exactly one statement in the parse result. + pub fn root_mut(&mut self) -> Option<&mut NodeEnum> { + if self.protobuf.stmts.len() != 1 { + return None; + } + + // Get the first (and only) statement + let raw_stmt = &mut self.protobuf.stmts[0]; + + // Navigate: RawStmt -> Node -> NodeEnum + raw_stmt.stmt.as_mut().and_then(|stmt| stmt.node.as_mut()) + } +} + +#[cfg(test)] +mod tests { + use crate::parse; + + #[test] + fn it_parses_parameter_queries() { + assert!(parse("select $0 + $1 + $2 + $3 + $4 + $5").is_ok()); + } +} diff --git a/crates/pgt_query/src/plpgsql.rs b/crates/pgt_query/src/plpgsql.rs new file mode 100644 index 000000000..fbaa9694b --- /dev/null +++ b/crates/pgt_query/src/plpgsql.rs @@ -0,0 +1,38 @@ +use std::ffi::{CStr, CString}; + +use crate::bindings::*; +use crate::error::*; + +/// An experimental API which parses a PLPGSQL function. 
This currently drops the returned +/// structure and returns only a Result<()>. +/// +/// # Example +/// +/// ```rust +/// let result = pgt_query::parse_plpgsql(" +/// CREATE OR REPLACE FUNCTION cs_fmt_browser_version(v_name varchar, v_version varchar) +/// RETURNS varchar AS $$ +/// BEGIN +/// IF v_version IS NULL THEN +/// RETURN v_name; +/// END IF; +/// RETURN v_name || '/' || v_version; +/// END; +/// $$ LANGUAGE plpgsql; +/// "); +/// assert!(result.is_ok()); +/// ``` +pub fn parse_plpgsql(stmt: &str) -> Result<()> { + let input = CString::new(stmt)?; + let result = unsafe { pg_query_parse_plpgsql(input.as_ptr()) }; + let structure = if !result.error.is_null() { + let message = unsafe { CStr::from_ptr((*result.error).message) } + .to_string_lossy() + .to_string(); + Err(Error::Parse(message)) + } else { + Ok(()) + }; + unsafe { pg_query_free_plpgsql_parse_result(result) }; + structure +} diff --git a/crates/pgt_query/src/protobuf.rs b/crates/pgt_query/src/protobuf.rs new file mode 100644 index 000000000..c47bfe52b --- /dev/null +++ b/crates/pgt_query/src/protobuf.rs @@ -0,0 +1,8846 @@ +// This file is @generated by prost-build. 
+#[derive(Clone, PartialEq, ::prost::Message)] +pub struct ParseResult { + #[prost(int32, tag = "1")] + pub version: i32, + #[prost(message, repeated, tag = "2")] + pub stmts: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct ScanResult { + #[prost(int32, tag = "1")] + pub version: i32, + #[prost(message, repeated, tag = "2")] + pub tokens: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct Node { + #[prost( + oneof = "node::Node", + tags = "1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268" + )] + pub node: ::core::option::Option, +} +/// Nested message and enum types in `Node`. 
+pub mod node { + #[derive(Clone, PartialEq, ::prost::Oneof)] + pub enum Node { + #[prost(message, tag = "1")] + Alias(super::Alias), + #[prost(message, tag = "2")] + RangeVar(super::RangeVar), + #[prost(message, tag = "3")] + TableFunc(::prost::alloc::boxed::Box), + #[prost(message, tag = "4")] + IntoClause(::prost::alloc::boxed::Box), + #[prost(message, tag = "5")] + Var(::prost::alloc::boxed::Box), + #[prost(message, tag = "6")] + Param(::prost::alloc::boxed::Box), + #[prost(message, tag = "7")] + Aggref(::prost::alloc::boxed::Box), + #[prost(message, tag = "8")] + GroupingFunc(::prost::alloc::boxed::Box), + #[prost(message, tag = "9")] + WindowFunc(::prost::alloc::boxed::Box), + #[prost(message, tag = "10")] + WindowFuncRunCondition( + ::prost::alloc::boxed::Box, + ), + #[prost(message, tag = "11")] + MergeSupportFunc(::prost::alloc::boxed::Box), + #[prost(message, tag = "12")] + SubscriptingRef(::prost::alloc::boxed::Box), + #[prost(message, tag = "13")] + FuncExpr(::prost::alloc::boxed::Box), + #[prost(message, tag = "14")] + NamedArgExpr(::prost::alloc::boxed::Box), + #[prost(message, tag = "15")] + OpExpr(::prost::alloc::boxed::Box), + #[prost(message, tag = "16")] + DistinctExpr(::prost::alloc::boxed::Box), + #[prost(message, tag = "17")] + NullIfExpr(::prost::alloc::boxed::Box), + #[prost(message, tag = "18")] + ScalarArrayOpExpr(::prost::alloc::boxed::Box), + #[prost(message, tag = "19")] + BoolExpr(::prost::alloc::boxed::Box), + #[prost(message, tag = "20")] + SubLink(::prost::alloc::boxed::Box), + #[prost(message, tag = "21")] + SubPlan(::prost::alloc::boxed::Box), + #[prost(message, tag = "22")] + AlternativeSubPlan(::prost::alloc::boxed::Box), + #[prost(message, tag = "23")] + FieldSelect(::prost::alloc::boxed::Box), + #[prost(message, tag = "24")] + FieldStore(::prost::alloc::boxed::Box), + #[prost(message, tag = "25")] + RelabelType(::prost::alloc::boxed::Box), + #[prost(message, tag = "26")] + CoerceViaIo(::prost::alloc::boxed::Box), + 
#[prost(message, tag = "27")] + ArrayCoerceExpr(::prost::alloc::boxed::Box), + #[prost(message, tag = "28")] + ConvertRowtypeExpr(::prost::alloc::boxed::Box), + #[prost(message, tag = "29")] + CollateExpr(::prost::alloc::boxed::Box), + #[prost(message, tag = "30")] + CaseExpr(::prost::alloc::boxed::Box), + #[prost(message, tag = "31")] + CaseWhen(::prost::alloc::boxed::Box), + #[prost(message, tag = "32")] + CaseTestExpr(::prost::alloc::boxed::Box), + #[prost(message, tag = "33")] + ArrayExpr(::prost::alloc::boxed::Box), + #[prost(message, tag = "34")] + RowExpr(::prost::alloc::boxed::Box), + #[prost(message, tag = "35")] + RowCompareExpr(::prost::alloc::boxed::Box), + #[prost(message, tag = "36")] + CoalesceExpr(::prost::alloc::boxed::Box), + #[prost(message, tag = "37")] + MinMaxExpr(::prost::alloc::boxed::Box), + #[prost(message, tag = "38")] + SqlvalueFunction(::prost::alloc::boxed::Box), + #[prost(message, tag = "39")] + XmlExpr(::prost::alloc::boxed::Box), + #[prost(message, tag = "40")] + JsonFormat(super::JsonFormat), + #[prost(message, tag = "41")] + JsonReturning(super::JsonReturning), + #[prost(message, tag = "42")] + JsonValueExpr(::prost::alloc::boxed::Box), + #[prost(message, tag = "43")] + JsonConstructorExpr(::prost::alloc::boxed::Box), + #[prost(message, tag = "44")] + JsonIsPredicate(::prost::alloc::boxed::Box), + #[prost(message, tag = "45")] + JsonBehavior(::prost::alloc::boxed::Box), + #[prost(message, tag = "46")] + JsonExpr(::prost::alloc::boxed::Box), + #[prost(message, tag = "47")] + JsonTablePath(super::JsonTablePath), + #[prost(message, tag = "48")] + JsonTablePathScan(::prost::alloc::boxed::Box), + #[prost(message, tag = "49")] + JsonTableSiblingJoin(::prost::alloc::boxed::Box), + #[prost(message, tag = "50")] + NullTest(::prost::alloc::boxed::Box), + #[prost(message, tag = "51")] + BooleanTest(::prost::alloc::boxed::Box), + #[prost(message, tag = "52")] + MergeAction(::prost::alloc::boxed::Box), + #[prost(message, tag = "53")] + 
CoerceToDomain(::prost::alloc::boxed::Box), + #[prost(message, tag = "54")] + CoerceToDomainValue(::prost::alloc::boxed::Box), + #[prost(message, tag = "55")] + SetToDefault(::prost::alloc::boxed::Box), + #[prost(message, tag = "56")] + CurrentOfExpr(::prost::alloc::boxed::Box), + #[prost(message, tag = "57")] + NextValueExpr(::prost::alloc::boxed::Box), + #[prost(message, tag = "58")] + InferenceElem(::prost::alloc::boxed::Box), + #[prost(message, tag = "59")] + TargetEntry(::prost::alloc::boxed::Box), + #[prost(message, tag = "60")] + RangeTblRef(super::RangeTblRef), + #[prost(message, tag = "61")] + JoinExpr(::prost::alloc::boxed::Box), + #[prost(message, tag = "62")] + FromExpr(::prost::alloc::boxed::Box), + #[prost(message, tag = "63")] + OnConflictExpr(::prost::alloc::boxed::Box), + #[prost(message, tag = "64")] + Query(::prost::alloc::boxed::Box), + #[prost(message, tag = "65")] + TypeName(super::TypeName), + #[prost(message, tag = "66")] + ColumnRef(super::ColumnRef), + #[prost(message, tag = "67")] + ParamRef(super::ParamRef), + #[prost(message, tag = "68")] + AExpr(::prost::alloc::boxed::Box), + #[prost(message, tag = "69")] + TypeCast(::prost::alloc::boxed::Box), + #[prost(message, tag = "70")] + CollateClause(::prost::alloc::boxed::Box), + #[prost(message, tag = "71")] + RoleSpec(super::RoleSpec), + #[prost(message, tag = "72")] + FuncCall(::prost::alloc::boxed::Box), + #[prost(message, tag = "73")] + AStar(super::AStar), + #[prost(message, tag = "74")] + AIndices(::prost::alloc::boxed::Box), + #[prost(message, tag = "75")] + AIndirection(::prost::alloc::boxed::Box), + #[prost(message, tag = "76")] + AArrayExpr(super::AArrayExpr), + #[prost(message, tag = "77")] + ResTarget(::prost::alloc::boxed::Box), + #[prost(message, tag = "78")] + MultiAssignRef(::prost::alloc::boxed::Box), + #[prost(message, tag = "79")] + SortBy(::prost::alloc::boxed::Box), + #[prost(message, tag = "80")] + WindowDef(::prost::alloc::boxed::Box), + #[prost(message, tag = "81")] + 
RangeSubselect(::prost::alloc::boxed::Box), + #[prost(message, tag = "82")] + RangeFunction(super::RangeFunction), + #[prost(message, tag = "83")] + RangeTableFunc(::prost::alloc::boxed::Box), + #[prost(message, tag = "84")] + RangeTableFuncCol(::prost::alloc::boxed::Box), + #[prost(message, tag = "85")] + RangeTableSample(::prost::alloc::boxed::Box), + #[prost(message, tag = "86")] + ColumnDef(::prost::alloc::boxed::Box), + #[prost(message, tag = "87")] + TableLikeClause(super::TableLikeClause), + #[prost(message, tag = "88")] + IndexElem(::prost::alloc::boxed::Box), + #[prost(message, tag = "89")] + DefElem(::prost::alloc::boxed::Box), + #[prost(message, tag = "90")] + LockingClause(super::LockingClause), + #[prost(message, tag = "91")] + XmlSerialize(::prost::alloc::boxed::Box), + #[prost(message, tag = "92")] + PartitionElem(::prost::alloc::boxed::Box), + #[prost(message, tag = "93")] + PartitionSpec(super::PartitionSpec), + #[prost(message, tag = "94")] + PartitionBoundSpec(super::PartitionBoundSpec), + #[prost(message, tag = "95")] + PartitionRangeDatum(::prost::alloc::boxed::Box), + #[prost(message, tag = "96")] + SinglePartitionSpec(super::SinglePartitionSpec), + #[prost(message, tag = "97")] + PartitionCmd(super::PartitionCmd), + #[prost(message, tag = "98")] + RangeTblEntry(::prost::alloc::boxed::Box), + #[prost(message, tag = "99")] + RtepermissionInfo(super::RtePermissionInfo), + #[prost(message, tag = "100")] + RangeTblFunction(::prost::alloc::boxed::Box), + #[prost(message, tag = "101")] + TableSampleClause(::prost::alloc::boxed::Box), + #[prost(message, tag = "102")] + WithCheckOption(::prost::alloc::boxed::Box), + #[prost(message, tag = "103")] + SortGroupClause(super::SortGroupClause), + #[prost(message, tag = "104")] + GroupingSet(super::GroupingSet), + #[prost(message, tag = "105")] + WindowClause(::prost::alloc::boxed::Box), + #[prost(message, tag = "106")] + RowMarkClause(super::RowMarkClause), + #[prost(message, tag = "107")] + 
WithClause(super::WithClause), + #[prost(message, tag = "108")] + InferClause(::prost::alloc::boxed::Box), + #[prost(message, tag = "109")] + OnConflictClause(::prost::alloc::boxed::Box), + #[prost(message, tag = "110")] + CtesearchClause(super::CteSearchClause), + #[prost(message, tag = "111")] + CtecycleClause(::prost::alloc::boxed::Box), + #[prost(message, tag = "112")] + CommonTableExpr(::prost::alloc::boxed::Box), + #[prost(message, tag = "113")] + MergeWhenClause(::prost::alloc::boxed::Box), + #[prost(message, tag = "114")] + TriggerTransition(super::TriggerTransition), + #[prost(message, tag = "115")] + JsonOutput(super::JsonOutput), + #[prost(message, tag = "116")] + JsonArgument(::prost::alloc::boxed::Box), + #[prost(message, tag = "117")] + JsonFuncExpr(::prost::alloc::boxed::Box), + #[prost(message, tag = "118")] + JsonTablePathSpec(::prost::alloc::boxed::Box), + #[prost(message, tag = "119")] + JsonTable(::prost::alloc::boxed::Box), + #[prost(message, tag = "120")] + JsonTableColumn(::prost::alloc::boxed::Box), + #[prost(message, tag = "121")] + JsonKeyValue(::prost::alloc::boxed::Box), + #[prost(message, tag = "122")] + JsonParseExpr(::prost::alloc::boxed::Box), + #[prost(message, tag = "123")] + JsonScalarExpr(::prost::alloc::boxed::Box), + #[prost(message, tag = "124")] + JsonSerializeExpr(::prost::alloc::boxed::Box), + #[prost(message, tag = "125")] + JsonObjectConstructor(super::JsonObjectConstructor), + #[prost(message, tag = "126")] + JsonArrayConstructor(super::JsonArrayConstructor), + #[prost(message, tag = "127")] + JsonArrayQueryConstructor( + ::prost::alloc::boxed::Box, + ), + #[prost(message, tag = "128")] + JsonAggConstructor(::prost::alloc::boxed::Box), + #[prost(message, tag = "129")] + JsonObjectAgg(::prost::alloc::boxed::Box), + #[prost(message, tag = "130")] + JsonArrayAgg(::prost::alloc::boxed::Box), + #[prost(message, tag = "131")] + RawStmt(::prost::alloc::boxed::Box), + #[prost(message, tag = "132")] + 
InsertStmt(::prost::alloc::boxed::Box), + #[prost(message, tag = "133")] + DeleteStmt(::prost::alloc::boxed::Box), + #[prost(message, tag = "134")] + UpdateStmt(::prost::alloc::boxed::Box), + #[prost(message, tag = "135")] + MergeStmt(::prost::alloc::boxed::Box), + #[prost(message, tag = "136")] + SelectStmt(::prost::alloc::boxed::Box), + #[prost(message, tag = "137")] + SetOperationStmt(::prost::alloc::boxed::Box), + #[prost(message, tag = "138")] + ReturnStmt(::prost::alloc::boxed::Box), + #[prost(message, tag = "139")] + PlassignStmt(::prost::alloc::boxed::Box), + #[prost(message, tag = "140")] + CreateSchemaStmt(super::CreateSchemaStmt), + #[prost(message, tag = "141")] + AlterTableStmt(super::AlterTableStmt), + #[prost(message, tag = "142")] + ReplicaIdentityStmt(super::ReplicaIdentityStmt), + #[prost(message, tag = "143")] + AlterTableCmd(::prost::alloc::boxed::Box), + #[prost(message, tag = "144")] + AlterCollationStmt(super::AlterCollationStmt), + #[prost(message, tag = "145")] + AlterDomainStmt(::prost::alloc::boxed::Box), + #[prost(message, tag = "146")] + GrantStmt(super::GrantStmt), + #[prost(message, tag = "147")] + ObjectWithArgs(super::ObjectWithArgs), + #[prost(message, tag = "148")] + AccessPriv(super::AccessPriv), + #[prost(message, tag = "149")] + GrantRoleStmt(super::GrantRoleStmt), + #[prost(message, tag = "150")] + AlterDefaultPrivilegesStmt(super::AlterDefaultPrivilegesStmt), + #[prost(message, tag = "151")] + CopyStmt(::prost::alloc::boxed::Box), + #[prost(message, tag = "152")] + VariableSetStmt(super::VariableSetStmt), + #[prost(message, tag = "153")] + VariableShowStmt(super::VariableShowStmt), + #[prost(message, tag = "154")] + CreateStmt(super::CreateStmt), + #[prost(message, tag = "155")] + Constraint(::prost::alloc::boxed::Box), + #[prost(message, tag = "156")] + CreateTableSpaceStmt(super::CreateTableSpaceStmt), + #[prost(message, tag = "157")] + DropTableSpaceStmt(super::DropTableSpaceStmt), + #[prost(message, tag = "158")] + 
AlterTableSpaceOptionsStmt(super::AlterTableSpaceOptionsStmt), + #[prost(message, tag = "159")] + AlterTableMoveAllStmt(super::AlterTableMoveAllStmt), + #[prost(message, tag = "160")] + CreateExtensionStmt(super::CreateExtensionStmt), + #[prost(message, tag = "161")] + AlterExtensionStmt(super::AlterExtensionStmt), + #[prost(message, tag = "162")] + AlterExtensionContentsStmt( + ::prost::alloc::boxed::Box, + ), + #[prost(message, tag = "163")] + CreateFdwStmt(super::CreateFdwStmt), + #[prost(message, tag = "164")] + AlterFdwStmt(super::AlterFdwStmt), + #[prost(message, tag = "165")] + CreateForeignServerStmt(super::CreateForeignServerStmt), + #[prost(message, tag = "166")] + AlterForeignServerStmt(super::AlterForeignServerStmt), + #[prost(message, tag = "167")] + CreateForeignTableStmt(super::CreateForeignTableStmt), + #[prost(message, tag = "168")] + CreateUserMappingStmt(super::CreateUserMappingStmt), + #[prost(message, tag = "169")] + AlterUserMappingStmt(super::AlterUserMappingStmt), + #[prost(message, tag = "170")] + DropUserMappingStmt(super::DropUserMappingStmt), + #[prost(message, tag = "171")] + ImportForeignSchemaStmt(super::ImportForeignSchemaStmt), + #[prost(message, tag = "172")] + CreatePolicyStmt(::prost::alloc::boxed::Box), + #[prost(message, tag = "173")] + AlterPolicyStmt(::prost::alloc::boxed::Box), + #[prost(message, tag = "174")] + CreateAmStmt(super::CreateAmStmt), + #[prost(message, tag = "175")] + CreateTrigStmt(::prost::alloc::boxed::Box), + #[prost(message, tag = "176")] + CreateEventTrigStmt(super::CreateEventTrigStmt), + #[prost(message, tag = "177")] + AlterEventTrigStmt(super::AlterEventTrigStmt), + #[prost(message, tag = "178")] + CreatePlangStmt(super::CreatePLangStmt), + #[prost(message, tag = "179")] + CreateRoleStmt(super::CreateRoleStmt), + #[prost(message, tag = "180")] + AlterRoleStmt(super::AlterRoleStmt), + #[prost(message, tag = "181")] + AlterRoleSetStmt(super::AlterRoleSetStmt), + #[prost(message, tag = "182")] + 
DropRoleStmt(super::DropRoleStmt), + #[prost(message, tag = "183")] + CreateSeqStmt(super::CreateSeqStmt), + #[prost(message, tag = "184")] + AlterSeqStmt(super::AlterSeqStmt), + #[prost(message, tag = "185")] + DefineStmt(super::DefineStmt), + #[prost(message, tag = "186")] + CreateDomainStmt(::prost::alloc::boxed::Box), + #[prost(message, tag = "187")] + CreateOpClassStmt(super::CreateOpClassStmt), + #[prost(message, tag = "188")] + CreateOpClassItem(super::CreateOpClassItem), + #[prost(message, tag = "189")] + CreateOpFamilyStmt(super::CreateOpFamilyStmt), + #[prost(message, tag = "190")] + AlterOpFamilyStmt(super::AlterOpFamilyStmt), + #[prost(message, tag = "191")] + DropStmt(super::DropStmt), + #[prost(message, tag = "192")] + TruncateStmt(super::TruncateStmt), + #[prost(message, tag = "193")] + CommentStmt(::prost::alloc::boxed::Box), + #[prost(message, tag = "194")] + SecLabelStmt(::prost::alloc::boxed::Box), + #[prost(message, tag = "195")] + DeclareCursorStmt(::prost::alloc::boxed::Box), + #[prost(message, tag = "196")] + ClosePortalStmt(super::ClosePortalStmt), + #[prost(message, tag = "197")] + FetchStmt(super::FetchStmt), + #[prost(message, tag = "198")] + IndexStmt(::prost::alloc::boxed::Box), + #[prost(message, tag = "199")] + CreateStatsStmt(super::CreateStatsStmt), + #[prost(message, tag = "200")] + StatsElem(::prost::alloc::boxed::Box), + #[prost(message, tag = "201")] + AlterStatsStmt(::prost::alloc::boxed::Box), + #[prost(message, tag = "202")] + CreateFunctionStmt(::prost::alloc::boxed::Box), + #[prost(message, tag = "203")] + FunctionParameter(::prost::alloc::boxed::Box), + #[prost(message, tag = "204")] + AlterFunctionStmt(super::AlterFunctionStmt), + #[prost(message, tag = "205")] + DoStmt(super::DoStmt), + #[prost(message, tag = "206")] + InlineCodeBlock(super::InlineCodeBlock), + #[prost(message, tag = "207")] + CallStmt(::prost::alloc::boxed::Box), + #[prost(message, tag = "208")] + CallContext(super::CallContext), + #[prost(message, tag 
= "209")] + RenameStmt(::prost::alloc::boxed::Box), + #[prost(message, tag = "210")] + AlterObjectDependsStmt( + ::prost::alloc::boxed::Box, + ), + #[prost(message, tag = "211")] + AlterObjectSchemaStmt(::prost::alloc::boxed::Box), + #[prost(message, tag = "212")] + AlterOwnerStmt(::prost::alloc::boxed::Box), + #[prost(message, tag = "213")] + AlterOperatorStmt(super::AlterOperatorStmt), + #[prost(message, tag = "214")] + AlterTypeStmt(super::AlterTypeStmt), + #[prost(message, tag = "215")] + RuleStmt(::prost::alloc::boxed::Box), + #[prost(message, tag = "216")] + NotifyStmt(super::NotifyStmt), + #[prost(message, tag = "217")] + ListenStmt(super::ListenStmt), + #[prost(message, tag = "218")] + UnlistenStmt(super::UnlistenStmt), + #[prost(message, tag = "219")] + TransactionStmt(super::TransactionStmt), + #[prost(message, tag = "220")] + CompositeTypeStmt(super::CompositeTypeStmt), + #[prost(message, tag = "221")] + CreateEnumStmt(super::CreateEnumStmt), + #[prost(message, tag = "222")] + CreateRangeStmt(super::CreateRangeStmt), + #[prost(message, tag = "223")] + AlterEnumStmt(super::AlterEnumStmt), + #[prost(message, tag = "224")] + ViewStmt(::prost::alloc::boxed::Box), + #[prost(message, tag = "225")] + LoadStmt(super::LoadStmt), + #[prost(message, tag = "226")] + CreatedbStmt(super::CreatedbStmt), + #[prost(message, tag = "227")] + AlterDatabaseStmt(super::AlterDatabaseStmt), + #[prost(message, tag = "228")] + AlterDatabaseRefreshCollStmt(super::AlterDatabaseRefreshCollStmt), + #[prost(message, tag = "229")] + AlterDatabaseSetStmt(super::AlterDatabaseSetStmt), + #[prost(message, tag = "230")] + DropdbStmt(super::DropdbStmt), + #[prost(message, tag = "231")] + AlterSystemStmt(super::AlterSystemStmt), + #[prost(message, tag = "232")] + ClusterStmt(super::ClusterStmt), + #[prost(message, tag = "233")] + VacuumStmt(super::VacuumStmt), + #[prost(message, tag = "234")] + VacuumRelation(super::VacuumRelation), + #[prost(message, tag = "235")] + 
ExplainStmt(::prost::alloc::boxed::Box), + #[prost(message, tag = "236")] + CreateTableAsStmt(::prost::alloc::boxed::Box), + #[prost(message, tag = "237")] + RefreshMatViewStmt(super::RefreshMatViewStmt), + #[prost(message, tag = "238")] + CheckPointStmt(super::CheckPointStmt), + #[prost(message, tag = "239")] + DiscardStmt(super::DiscardStmt), + #[prost(message, tag = "240")] + LockStmt(super::LockStmt), + #[prost(message, tag = "241")] + ConstraintsSetStmt(super::ConstraintsSetStmt), + #[prost(message, tag = "242")] + ReindexStmt(super::ReindexStmt), + #[prost(message, tag = "243")] + CreateConversionStmt(super::CreateConversionStmt), + #[prost(message, tag = "244")] + CreateCastStmt(super::CreateCastStmt), + #[prost(message, tag = "245")] + CreateTransformStmt(super::CreateTransformStmt), + #[prost(message, tag = "246")] + PrepareStmt(::prost::alloc::boxed::Box), + #[prost(message, tag = "247")] + ExecuteStmt(super::ExecuteStmt), + #[prost(message, tag = "248")] + DeallocateStmt(super::DeallocateStmt), + #[prost(message, tag = "249")] + DropOwnedStmt(super::DropOwnedStmt), + #[prost(message, tag = "250")] + ReassignOwnedStmt(super::ReassignOwnedStmt), + #[prost(message, tag = "251")] + AlterTsdictionaryStmt(super::AlterTsDictionaryStmt), + #[prost(message, tag = "252")] + AlterTsconfigurationStmt(super::AlterTsConfigurationStmt), + #[prost(message, tag = "253")] + PublicationTable(::prost::alloc::boxed::Box), + #[prost(message, tag = "254")] + PublicationObjSpec(::prost::alloc::boxed::Box), + #[prost(message, tag = "255")] + CreatePublicationStmt(super::CreatePublicationStmt), + #[prost(message, tag = "256")] + AlterPublicationStmt(super::AlterPublicationStmt), + #[prost(message, tag = "257")] + CreateSubscriptionStmt(super::CreateSubscriptionStmt), + #[prost(message, tag = "258")] + AlterSubscriptionStmt(super::AlterSubscriptionStmt), + #[prost(message, tag = "259")] + DropSubscriptionStmt(super::DropSubscriptionStmt), + #[prost(message, tag = "260")] + 
Integer(super::Integer), + #[prost(message, tag = "261")] + Float(super::Float), + #[prost(message, tag = "262")] + Boolean(super::Boolean), + #[prost(message, tag = "263")] + String(super::String), + #[prost(message, tag = "264")] + BitString(super::BitString), + #[prost(message, tag = "265")] + List(super::List), + #[prost(message, tag = "266")] + IntList(super::IntList), + #[prost(message, tag = "267")] + OidList(super::OidList), + #[prost(message, tag = "268")] + AConst(super::AConst), + } +} +#[derive(Clone, Copy, PartialEq, ::prost::Message)] +pub struct Integer { + /// machine integer + #[prost(int32, tag = "1")] + pub ival: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct Float { + /// string + #[prost(string, tag = "1")] + pub fval: ::prost::alloc::string::String, +} +#[derive(Clone, Copy, PartialEq, ::prost::Message)] +pub struct Boolean { + #[prost(bool, tag = "1")] + pub boolval: bool, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct String { + /// string + #[prost(string, tag = "1")] + pub sval: ::prost::alloc::string::String, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct BitString { + /// string + #[prost(string, tag = "1")] + pub bsval: ::prost::alloc::string::String, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct List { + #[prost(message, repeated, tag = "1")] + pub items: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct OidList { + #[prost(message, repeated, tag = "1")] + pub items: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct IntList { + #[prost(message, repeated, tag = "1")] + pub items: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct AConst { + #[prost(bool, tag = "10")] + pub isnull: bool, + #[prost(int32, tag = "11")] + pub location: i32, + #[prost(oneof = "a_const::Val", tags = "1, 2, 3, 4, 5")] + pub val: ::core::option::Option, +} +/// Nested message and enum 
types in `A_Const`. +pub mod a_const { + #[derive(Clone, PartialEq, ::prost::Oneof)] + pub enum Val { + #[prost(message, tag = "1")] + Ival(super::Integer), + #[prost(message, tag = "2")] + Fval(super::Float), + #[prost(message, tag = "3")] + Boolval(super::Boolean), + #[prost(message, tag = "4")] + Sval(super::String), + #[prost(message, tag = "5")] + Bsval(super::BitString), + } +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct Alias { + #[prost(string, tag = "1")] + pub aliasname: ::prost::alloc::string::String, + #[prost(message, repeated, tag = "2")] + pub colnames: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct RangeVar { + #[prost(string, tag = "1")] + pub catalogname: ::prost::alloc::string::String, + #[prost(string, tag = "2")] + pub schemaname: ::prost::alloc::string::String, + #[prost(string, tag = "3")] + pub relname: ::prost::alloc::string::String, + #[prost(bool, tag = "4")] + pub inh: bool, + #[prost(string, tag = "5")] + pub relpersistence: ::prost::alloc::string::String, + #[prost(message, optional, tag = "6")] + pub alias: ::core::option::Option, + #[prost(int32, tag = "7")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct TableFunc { + #[prost(enumeration = "TableFuncType", tag = "1")] + pub functype: i32, + #[prost(message, repeated, tag = "2")] + pub ns_uris: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "3")] + pub ns_names: ::prost::alloc::vec::Vec, + #[prost(message, optional, boxed, tag = "4")] + pub docexpr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, boxed, tag = "5")] + pub rowexpr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, repeated, tag = "6")] + pub colnames: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "7")] + pub coltypes: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "8")] + pub coltypmods: ::prost::alloc::vec::Vec, + #[prost(message, 
repeated, tag = "9")] + pub colcollations: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "10")] + pub colexprs: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "11")] + pub coldefexprs: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "12")] + pub colvalexprs: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "13")] + pub passingvalexprs: ::prost::alloc::vec::Vec, + #[prost(uint64, repeated, tag = "14")] + pub notnulls: ::prost::alloc::vec::Vec, + #[prost(message, optional, boxed, tag = "15")] + pub plan: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(int32, tag = "16")] + pub ordinalitycol: i32, + #[prost(int32, tag = "17")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct IntoClause { + #[prost(message, optional, tag = "1")] + pub rel: ::core::option::Option, + #[prost(message, repeated, tag = "2")] + pub col_names: ::prost::alloc::vec::Vec, + #[prost(string, tag = "3")] + pub access_method: ::prost::alloc::string::String, + #[prost(message, repeated, tag = "4")] + pub options: ::prost::alloc::vec::Vec, + #[prost(enumeration = "OnCommitAction", tag = "5")] + pub on_commit: i32, + #[prost(string, tag = "6")] + pub table_space_name: ::prost::alloc::string::String, + #[prost(message, optional, boxed, tag = "7")] + pub view_query: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(bool, tag = "8")] + pub skip_data: bool, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct Var { + #[prost(message, optional, boxed, tag = "1")] + pub xpr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(int32, tag = "2")] + pub varno: i32, + #[prost(int32, tag = "3")] + pub varattno: i32, + #[prost(uint32, tag = "4")] + pub vartype: u32, + #[prost(int32, tag = "5")] + pub vartypmod: i32, + #[prost(uint32, tag = "6")] + pub varcollid: u32, + #[prost(uint64, repeated, tag = "7")] + pub varnullingrels: ::prost::alloc::vec::Vec, + #[prost(uint32, tag = "8")] 
+ pub varlevelsup: u32, + #[prost(int32, tag = "9")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct Param { + #[prost(message, optional, boxed, tag = "1")] + pub xpr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(enumeration = "ParamKind", tag = "2")] + pub paramkind: i32, + #[prost(int32, tag = "3")] + pub paramid: i32, + #[prost(uint32, tag = "4")] + pub paramtype: u32, + #[prost(int32, tag = "5")] + pub paramtypmod: i32, + #[prost(uint32, tag = "6")] + pub paramcollid: u32, + #[prost(int32, tag = "7")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct Aggref { + #[prost(message, optional, boxed, tag = "1")] + pub xpr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(uint32, tag = "2")] + pub aggfnoid: u32, + #[prost(uint32, tag = "3")] + pub aggtype: u32, + #[prost(uint32, tag = "4")] + pub aggcollid: u32, + #[prost(uint32, tag = "5")] + pub inputcollid: u32, + #[prost(message, repeated, tag = "6")] + pub aggargtypes: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "7")] + pub aggdirectargs: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "8")] + pub args: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "9")] + pub aggorder: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "10")] + pub aggdistinct: ::prost::alloc::vec::Vec, + #[prost(message, optional, boxed, tag = "11")] + pub aggfilter: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(bool, tag = "12")] + pub aggstar: bool, + #[prost(bool, tag = "13")] + pub aggvariadic: bool, + #[prost(string, tag = "14")] + pub aggkind: ::prost::alloc::string::String, + #[prost(uint32, tag = "15")] + pub agglevelsup: u32, + #[prost(enumeration = "AggSplit", tag = "16")] + pub aggsplit: i32, + #[prost(int32, tag = "17")] + pub aggno: i32, + #[prost(int32, tag = "18")] + pub aggtransno: i32, + #[prost(int32, tag = "19")] + pub location: i32, +} +#[derive(Clone, 
PartialEq, ::prost::Message)] +pub struct GroupingFunc { + #[prost(message, optional, boxed, tag = "1")] + pub xpr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, repeated, tag = "2")] + pub args: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "3")] + pub refs: ::prost::alloc::vec::Vec, + #[prost(uint32, tag = "4")] + pub agglevelsup: u32, + #[prost(int32, tag = "5")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct WindowFunc { + #[prost(message, optional, boxed, tag = "1")] + pub xpr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(uint32, tag = "2")] + pub winfnoid: u32, + #[prost(uint32, tag = "3")] + pub wintype: u32, + #[prost(uint32, tag = "4")] + pub wincollid: u32, + #[prost(uint32, tag = "5")] + pub inputcollid: u32, + #[prost(message, repeated, tag = "6")] + pub args: ::prost::alloc::vec::Vec, + #[prost(message, optional, boxed, tag = "7")] + pub aggfilter: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, repeated, tag = "8")] + pub run_condition: ::prost::alloc::vec::Vec, + #[prost(uint32, tag = "9")] + pub winref: u32, + #[prost(bool, tag = "10")] + pub winstar: bool, + #[prost(bool, tag = "11")] + pub winagg: bool, + #[prost(int32, tag = "12")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct WindowFuncRunCondition { + #[prost(message, optional, boxed, tag = "1")] + pub xpr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(uint32, tag = "2")] + pub opno: u32, + #[prost(uint32, tag = "3")] + pub inputcollid: u32, + #[prost(bool, tag = "4")] + pub wfunc_left: bool, + #[prost(message, optional, boxed, tag = "5")] + pub arg: ::core::option::Option<::prost::alloc::boxed::Box>, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct MergeSupportFunc { + #[prost(message, optional, boxed, tag = "1")] + pub xpr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(uint32, tag = "2")] + pub 
msftype: u32, + #[prost(uint32, tag = "3")] + pub msfcollid: u32, + #[prost(int32, tag = "4")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct SubscriptingRef { + #[prost(message, optional, boxed, tag = "1")] + pub xpr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(uint32, tag = "2")] + pub refcontainertype: u32, + #[prost(uint32, tag = "3")] + pub refelemtype: u32, + #[prost(uint32, tag = "4")] + pub refrestype: u32, + #[prost(int32, tag = "5")] + pub reftypmod: i32, + #[prost(uint32, tag = "6")] + pub refcollid: u32, + #[prost(message, repeated, tag = "7")] + pub refupperindexpr: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "8")] + pub reflowerindexpr: ::prost::alloc::vec::Vec, + #[prost(message, optional, boxed, tag = "9")] + pub refexpr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, boxed, tag = "10")] + pub refassgnexpr: ::core::option::Option<::prost::alloc::boxed::Box>, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct FuncExpr { + #[prost(message, optional, boxed, tag = "1")] + pub xpr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(uint32, tag = "2")] + pub funcid: u32, + #[prost(uint32, tag = "3")] + pub funcresulttype: u32, + #[prost(bool, tag = "4")] + pub funcretset: bool, + #[prost(bool, tag = "5")] + pub funcvariadic: bool, + #[prost(enumeration = "CoercionForm", tag = "6")] + pub funcformat: i32, + #[prost(uint32, tag = "7")] + pub funccollid: u32, + #[prost(uint32, tag = "8")] + pub inputcollid: u32, + #[prost(message, repeated, tag = "9")] + pub args: ::prost::alloc::vec::Vec, + #[prost(int32, tag = "10")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct NamedArgExpr { + #[prost(message, optional, boxed, tag = "1")] + pub xpr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, boxed, tag = "2")] + pub arg: ::core::option::Option<::prost::alloc::boxed::Box>, 
+ #[prost(string, tag = "3")] + pub name: ::prost::alloc::string::String, + #[prost(int32, tag = "4")] + pub argnumber: i32, + #[prost(int32, tag = "5")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct OpExpr { + #[prost(message, optional, boxed, tag = "1")] + pub xpr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(uint32, tag = "2")] + pub opno: u32, + #[prost(uint32, tag = "3")] + pub opresulttype: u32, + #[prost(bool, tag = "4")] + pub opretset: bool, + #[prost(uint32, tag = "5")] + pub opcollid: u32, + #[prost(uint32, tag = "6")] + pub inputcollid: u32, + #[prost(message, repeated, tag = "7")] + pub args: ::prost::alloc::vec::Vec, + #[prost(int32, tag = "8")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct DistinctExpr { + #[prost(message, optional, boxed, tag = "1")] + pub xpr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(uint32, tag = "2")] + pub opno: u32, + #[prost(uint32, tag = "3")] + pub opresulttype: u32, + #[prost(bool, tag = "4")] + pub opretset: bool, + #[prost(uint32, tag = "5")] + pub opcollid: u32, + #[prost(uint32, tag = "6")] + pub inputcollid: u32, + #[prost(message, repeated, tag = "7")] + pub args: ::prost::alloc::vec::Vec, + #[prost(int32, tag = "8")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct NullIfExpr { + #[prost(message, optional, boxed, tag = "1")] + pub xpr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(uint32, tag = "2")] + pub opno: u32, + #[prost(uint32, tag = "3")] + pub opresulttype: u32, + #[prost(bool, tag = "4")] + pub opretset: bool, + #[prost(uint32, tag = "5")] + pub opcollid: u32, + #[prost(uint32, tag = "6")] + pub inputcollid: u32, + #[prost(message, repeated, tag = "7")] + pub args: ::prost::alloc::vec::Vec, + #[prost(int32, tag = "8")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct ScalarArrayOpExpr { + #[prost(message, optional, 
boxed, tag = "1")] + pub xpr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(uint32, tag = "2")] + pub opno: u32, + #[prost(bool, tag = "3")] + pub use_or: bool, + #[prost(uint32, tag = "4")] + pub inputcollid: u32, + #[prost(message, repeated, tag = "5")] + pub args: ::prost::alloc::vec::Vec, + #[prost(int32, tag = "6")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct BoolExpr { + #[prost(message, optional, boxed, tag = "1")] + pub xpr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(enumeration = "BoolExprType", tag = "2")] + pub boolop: i32, + #[prost(message, repeated, tag = "3")] + pub args: ::prost::alloc::vec::Vec, + #[prost(int32, tag = "4")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct SubLink { + #[prost(message, optional, boxed, tag = "1")] + pub xpr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(enumeration = "SubLinkType", tag = "2")] + pub sub_link_type: i32, + #[prost(int32, tag = "3")] + pub sub_link_id: i32, + #[prost(message, optional, boxed, tag = "4")] + pub testexpr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, repeated, tag = "5")] + pub oper_name: ::prost::alloc::vec::Vec, + #[prost(message, optional, boxed, tag = "6")] + pub subselect: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(int32, tag = "7")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct SubPlan { + #[prost(message, optional, boxed, tag = "1")] + pub xpr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(enumeration = "SubLinkType", tag = "2")] + pub sub_link_type: i32, + #[prost(message, optional, boxed, tag = "3")] + pub testexpr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, repeated, tag = "4")] + pub param_ids: ::prost::alloc::vec::Vec, + #[prost(int32, tag = "5")] + pub plan_id: i32, + #[prost(string, tag = "6")] + pub plan_name: 
::prost::alloc::string::String, + #[prost(uint32, tag = "7")] + pub first_col_type: u32, + #[prost(int32, tag = "8")] + pub first_col_typmod: i32, + #[prost(uint32, tag = "9")] + pub first_col_collation: u32, + #[prost(bool, tag = "10")] + pub use_hash_table: bool, + #[prost(bool, tag = "11")] + pub unknown_eq_false: bool, + #[prost(bool, tag = "12")] + pub parallel_safe: bool, + #[prost(message, repeated, tag = "13")] + pub set_param: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "14")] + pub par_param: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "15")] + pub args: ::prost::alloc::vec::Vec, + #[prost(double, tag = "16")] + pub startup_cost: f64, + #[prost(double, tag = "17")] + pub per_call_cost: f64, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct AlternativeSubPlan { + #[prost(message, optional, boxed, tag = "1")] + pub xpr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, repeated, tag = "2")] + pub subplans: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct FieldSelect { + #[prost(message, optional, boxed, tag = "1")] + pub xpr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, boxed, tag = "2")] + pub arg: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(int32, tag = "3")] + pub fieldnum: i32, + #[prost(uint32, tag = "4")] + pub resulttype: u32, + #[prost(int32, tag = "5")] + pub resulttypmod: i32, + #[prost(uint32, tag = "6")] + pub resultcollid: u32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct FieldStore { + #[prost(message, optional, boxed, tag = "1")] + pub xpr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, boxed, tag = "2")] + pub arg: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, repeated, tag = "3")] + pub newvals: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "4")] + pub fieldnums: ::prost::alloc::vec::Vec, + 
#[prost(uint32, tag = "5")] + pub resulttype: u32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct RelabelType { + #[prost(message, optional, boxed, tag = "1")] + pub xpr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, boxed, tag = "2")] + pub arg: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(uint32, tag = "3")] + pub resulttype: u32, + #[prost(int32, tag = "4")] + pub resulttypmod: i32, + #[prost(uint32, tag = "5")] + pub resultcollid: u32, + #[prost(enumeration = "CoercionForm", tag = "6")] + pub relabelformat: i32, + #[prost(int32, tag = "7")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct CoerceViaIo { + #[prost(message, optional, boxed, tag = "1")] + pub xpr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, boxed, tag = "2")] + pub arg: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(uint32, tag = "3")] + pub resulttype: u32, + #[prost(uint32, tag = "4")] + pub resultcollid: u32, + #[prost(enumeration = "CoercionForm", tag = "5")] + pub coerceformat: i32, + #[prost(int32, tag = "6")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct ArrayCoerceExpr { + #[prost(message, optional, boxed, tag = "1")] + pub xpr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, boxed, tag = "2")] + pub arg: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, boxed, tag = "3")] + pub elemexpr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(uint32, tag = "4")] + pub resulttype: u32, + #[prost(int32, tag = "5")] + pub resulttypmod: i32, + #[prost(uint32, tag = "6")] + pub resultcollid: u32, + #[prost(enumeration = "CoercionForm", tag = "7")] + pub coerceformat: i32, + #[prost(int32, tag = "8")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct ConvertRowtypeExpr { + #[prost(message, optional, boxed, 
tag = "1")] + pub xpr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, boxed, tag = "2")] + pub arg: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(uint32, tag = "3")] + pub resulttype: u32, + #[prost(enumeration = "CoercionForm", tag = "4")] + pub convertformat: i32, + #[prost(int32, tag = "5")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct CollateExpr { + #[prost(message, optional, boxed, tag = "1")] + pub xpr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, boxed, tag = "2")] + pub arg: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(uint32, tag = "3")] + pub coll_oid: u32, + #[prost(int32, tag = "4")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct CaseExpr { + #[prost(message, optional, boxed, tag = "1")] + pub xpr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(uint32, tag = "2")] + pub casetype: u32, + #[prost(uint32, tag = "3")] + pub casecollid: u32, + #[prost(message, optional, boxed, tag = "4")] + pub arg: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, repeated, tag = "5")] + pub args: ::prost::alloc::vec::Vec, + #[prost(message, optional, boxed, tag = "6")] + pub defresult: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(int32, tag = "7")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct CaseWhen { + #[prost(message, optional, boxed, tag = "1")] + pub xpr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, boxed, tag = "2")] + pub expr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, boxed, tag = "3")] + pub result: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(int32, tag = "4")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct CaseTestExpr { + #[prost(message, optional, boxed, tag = "1")] + 
pub xpr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(uint32, tag = "2")] + pub type_id: u32, + #[prost(int32, tag = "3")] + pub type_mod: i32, + #[prost(uint32, tag = "4")] + pub collation: u32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct ArrayExpr { + #[prost(message, optional, boxed, tag = "1")] + pub xpr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(uint32, tag = "2")] + pub array_typeid: u32, + #[prost(uint32, tag = "3")] + pub array_collid: u32, + #[prost(uint32, tag = "4")] + pub element_typeid: u32, + #[prost(message, repeated, tag = "5")] + pub elements: ::prost::alloc::vec::Vec, + #[prost(bool, tag = "6")] + pub multidims: bool, + #[prost(int32, tag = "7")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct RowExpr { + #[prost(message, optional, boxed, tag = "1")] + pub xpr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, repeated, tag = "2")] + pub args: ::prost::alloc::vec::Vec, + #[prost(uint32, tag = "3")] + pub row_typeid: u32, + #[prost(enumeration = "CoercionForm", tag = "4")] + pub row_format: i32, + #[prost(message, repeated, tag = "5")] + pub colnames: ::prost::alloc::vec::Vec, + #[prost(int32, tag = "6")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct RowCompareExpr { + #[prost(message, optional, boxed, tag = "1")] + pub xpr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(enumeration = "RowCompareType", tag = "2")] + pub rctype: i32, + #[prost(message, repeated, tag = "3")] + pub opnos: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "4")] + pub opfamilies: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "5")] + pub inputcollids: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "6")] + pub largs: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "7")] + pub rargs: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, ::prost::Message)] 
+pub struct CoalesceExpr { + #[prost(message, optional, boxed, tag = "1")] + pub xpr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(uint32, tag = "2")] + pub coalescetype: u32, + #[prost(uint32, tag = "3")] + pub coalescecollid: u32, + #[prost(message, repeated, tag = "4")] + pub args: ::prost::alloc::vec::Vec, + #[prost(int32, tag = "5")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct MinMaxExpr { + #[prost(message, optional, boxed, tag = "1")] + pub xpr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(uint32, tag = "2")] + pub minmaxtype: u32, + #[prost(uint32, tag = "3")] + pub minmaxcollid: u32, + #[prost(uint32, tag = "4")] + pub inputcollid: u32, + #[prost(enumeration = "MinMaxOp", tag = "5")] + pub op: i32, + #[prost(message, repeated, tag = "6")] + pub args: ::prost::alloc::vec::Vec, + #[prost(int32, tag = "7")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct SqlValueFunction { + #[prost(message, optional, boxed, tag = "1")] + pub xpr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(enumeration = "SqlValueFunctionOp", tag = "2")] + pub op: i32, + #[prost(uint32, tag = "3")] + pub r#type: u32, + #[prost(int32, tag = "4")] + pub typmod: i32, + #[prost(int32, tag = "5")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct XmlExpr { + #[prost(message, optional, boxed, tag = "1")] + pub xpr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(enumeration = "XmlExprOp", tag = "2")] + pub op: i32, + #[prost(string, tag = "3")] + pub name: ::prost::alloc::string::String, + #[prost(message, repeated, tag = "4")] + pub named_args: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "5")] + pub arg_names: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "6")] + pub args: ::prost::alloc::vec::Vec, + #[prost(enumeration = "XmlOptionType", tag = "7")] + pub xmloption: i32, + #[prost(bool, tag = 
"8")] + pub indent: bool, + #[prost(uint32, tag = "9")] + pub r#type: u32, + #[prost(int32, tag = "10")] + pub typmod: i32, + #[prost(int32, tag = "11")] + pub location: i32, +} +#[derive(Clone, Copy, PartialEq, ::prost::Message)] +pub struct JsonFormat { + #[prost(enumeration = "JsonFormatType", tag = "1")] + pub format_type: i32, + #[prost(enumeration = "JsonEncoding", tag = "2")] + pub encoding: i32, + #[prost(int32, tag = "3")] + pub location: i32, +} +#[derive(Clone, Copy, PartialEq, ::prost::Message)] +pub struct JsonReturning { + #[prost(message, optional, tag = "1")] + pub format: ::core::option::Option, + #[prost(uint32, tag = "2")] + pub typid: u32, + #[prost(int32, tag = "3")] + pub typmod: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct JsonValueExpr { + #[prost(message, optional, boxed, tag = "1")] + pub raw_expr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, boxed, tag = "2")] + pub formatted_expr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, tag = "3")] + pub format: ::core::option::Option, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct JsonConstructorExpr { + #[prost(message, optional, boxed, tag = "1")] + pub xpr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(enumeration = "JsonConstructorType", tag = "2")] + pub r#type: i32, + #[prost(message, repeated, tag = "3")] + pub args: ::prost::alloc::vec::Vec, + #[prost(message, optional, boxed, tag = "4")] + pub func: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, boxed, tag = "5")] + pub coercion: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, tag = "6")] + pub returning: ::core::option::Option, + #[prost(bool, tag = "7")] + pub absent_on_null: bool, + #[prost(bool, tag = "8")] + pub unique: bool, + #[prost(int32, tag = "9")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct 
JsonIsPredicate { + #[prost(message, optional, boxed, tag = "1")] + pub expr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, tag = "2")] + pub format: ::core::option::Option, + #[prost(enumeration = "JsonValueType", tag = "3")] + pub item_type: i32, + #[prost(bool, tag = "4")] + pub unique_keys: bool, + #[prost(int32, tag = "5")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct JsonBehavior { + #[prost(enumeration = "JsonBehaviorType", tag = "1")] + pub btype: i32, + #[prost(message, optional, boxed, tag = "2")] + pub expr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(bool, tag = "3")] + pub coerce: bool, + #[prost(int32, tag = "4")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct JsonExpr { + #[prost(message, optional, boxed, tag = "1")] + pub xpr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(enumeration = "JsonExprOp", tag = "2")] + pub op: i32, + #[prost(string, tag = "3")] + pub column_name: ::prost::alloc::string::String, + #[prost(message, optional, boxed, tag = "4")] + pub formatted_expr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, tag = "5")] + pub format: ::core::option::Option, + #[prost(message, optional, boxed, tag = "6")] + pub path_spec: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, tag = "7")] + pub returning: ::core::option::Option, + #[prost(message, repeated, tag = "8")] + pub passing_names: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "9")] + pub passing_values: ::prost::alloc::vec::Vec, + #[prost(message, optional, boxed, tag = "10")] + pub on_empty: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, boxed, tag = "11")] + pub on_error: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(bool, tag = "12")] + pub use_io_coercion: bool, + #[prost(bool, tag = "13")] + pub 
use_json_coercion: bool, + #[prost(enumeration = "JsonWrapper", tag = "14")] + pub wrapper: i32, + #[prost(bool, tag = "15")] + pub omit_quotes: bool, + #[prost(uint32, tag = "16")] + pub collation: u32, + #[prost(int32, tag = "17")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct JsonTablePath { + #[prost(string, tag = "1")] + pub name: ::prost::alloc::string::String, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct JsonTablePathScan { + #[prost(message, optional, boxed, tag = "1")] + pub plan: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, tag = "2")] + pub path: ::core::option::Option, + #[prost(bool, tag = "3")] + pub error_on_error: bool, + #[prost(message, optional, boxed, tag = "4")] + pub child: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(int32, tag = "5")] + pub col_min: i32, + #[prost(int32, tag = "6")] + pub col_max: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct JsonTableSiblingJoin { + #[prost(message, optional, boxed, tag = "1")] + pub plan: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, boxed, tag = "2")] + pub lplan: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, boxed, tag = "3")] + pub rplan: ::core::option::Option<::prost::alloc::boxed::Box>, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct NullTest { + #[prost(message, optional, boxed, tag = "1")] + pub xpr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, boxed, tag = "2")] + pub arg: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(enumeration = "NullTestType", tag = "3")] + pub nulltesttype: i32, + #[prost(bool, tag = "4")] + pub argisrow: bool, + #[prost(int32, tag = "5")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct BooleanTest { + #[prost(message, optional, boxed, tag = "1")] + pub xpr: 
::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, boxed, tag = "2")] + pub arg: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(enumeration = "BoolTestType", tag = "3")] + pub booltesttype: i32, + #[prost(int32, tag = "4")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct MergeAction { + #[prost(enumeration = "MergeMatchKind", tag = "1")] + pub match_kind: i32, + #[prost(enumeration = "CmdType", tag = "2")] + pub command_type: i32, + #[prost(enumeration = "OverridingKind", tag = "3")] + pub r#override: i32, + #[prost(message, optional, boxed, tag = "4")] + pub qual: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, repeated, tag = "5")] + pub target_list: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "6")] + pub update_colnos: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct CoerceToDomain { + #[prost(message, optional, boxed, tag = "1")] + pub xpr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, boxed, tag = "2")] + pub arg: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(uint32, tag = "3")] + pub resulttype: u32, + #[prost(int32, tag = "4")] + pub resulttypmod: i32, + #[prost(uint32, tag = "5")] + pub resultcollid: u32, + #[prost(enumeration = "CoercionForm", tag = "6")] + pub coercionformat: i32, + #[prost(int32, tag = "7")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct CoerceToDomainValue { + #[prost(message, optional, boxed, tag = "1")] + pub xpr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(uint32, tag = "2")] + pub type_id: u32, + #[prost(int32, tag = "3")] + pub type_mod: i32, + #[prost(uint32, tag = "4")] + pub collation: u32, + #[prost(int32, tag = "5")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct SetToDefault { + #[prost(message, optional, boxed, tag = "1")] + pub 
xpr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(uint32, tag = "2")] + pub type_id: u32, + #[prost(int32, tag = "3")] + pub type_mod: i32, + #[prost(uint32, tag = "4")] + pub collation: u32, + #[prost(int32, tag = "5")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct CurrentOfExpr { + #[prost(message, optional, boxed, tag = "1")] + pub xpr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(uint32, tag = "2")] + pub cvarno: u32, + #[prost(string, tag = "3")] + pub cursor_name: ::prost::alloc::string::String, + #[prost(int32, tag = "4")] + pub cursor_param: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct NextValueExpr { + #[prost(message, optional, boxed, tag = "1")] + pub xpr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(uint32, tag = "2")] + pub seqid: u32, + #[prost(uint32, tag = "3")] + pub type_id: u32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct InferenceElem { + #[prost(message, optional, boxed, tag = "1")] + pub xpr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, boxed, tag = "2")] + pub expr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(uint32, tag = "3")] + pub infercollid: u32, + #[prost(uint32, tag = "4")] + pub inferopclass: u32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct TargetEntry { + #[prost(message, optional, boxed, tag = "1")] + pub xpr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, boxed, tag = "2")] + pub expr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(int32, tag = "3")] + pub resno: i32, + #[prost(string, tag = "4")] + pub resname: ::prost::alloc::string::String, + #[prost(uint32, tag = "5")] + pub ressortgroupref: u32, + #[prost(uint32, tag = "6")] + pub resorigtbl: u32, + #[prost(int32, tag = "7")] + pub resorigcol: i32, + #[prost(bool, tag = "8")] + pub resjunk: bool, +} +#[derive(Clone, Copy, 
PartialEq, ::prost::Message)] +pub struct RangeTblRef { + #[prost(int32, tag = "1")] + pub rtindex: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct JoinExpr { + #[prost(enumeration = "JoinType", tag = "1")] + pub jointype: i32, + #[prost(bool, tag = "2")] + pub is_natural: bool, + #[prost(message, optional, boxed, tag = "3")] + pub larg: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, boxed, tag = "4")] + pub rarg: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, repeated, tag = "5")] + pub using_clause: ::prost::alloc::vec::Vec, + #[prost(message, optional, tag = "6")] + pub join_using_alias: ::core::option::Option, + #[prost(message, optional, boxed, tag = "7")] + pub quals: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, tag = "8")] + pub alias: ::core::option::Option, + #[prost(int32, tag = "9")] + pub rtindex: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct FromExpr { + #[prost(message, repeated, tag = "1")] + pub fromlist: ::prost::alloc::vec::Vec, + #[prost(message, optional, boxed, tag = "2")] + pub quals: ::core::option::Option<::prost::alloc::boxed::Box>, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct OnConflictExpr { + #[prost(enumeration = "OnConflictAction", tag = "1")] + pub action: i32, + #[prost(message, repeated, tag = "2")] + pub arbiter_elems: ::prost::alloc::vec::Vec, + #[prost(message, optional, boxed, tag = "3")] + pub arbiter_where: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(uint32, tag = "4")] + pub constraint: u32, + #[prost(message, repeated, tag = "5")] + pub on_conflict_set: ::prost::alloc::vec::Vec, + #[prost(message, optional, boxed, tag = "6")] + pub on_conflict_where: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(int32, tag = "7")] + pub excl_rel_index: i32, + #[prost(message, repeated, tag = "8")] + pub excl_rel_tlist: ::prost::alloc::vec::Vec, +} 
+#[derive(Clone, PartialEq, ::prost::Message)] +pub struct Query { + #[prost(enumeration = "CmdType", tag = "1")] + pub command_type: i32, + #[prost(enumeration = "QuerySource", tag = "2")] + pub query_source: i32, + #[prost(bool, tag = "3")] + pub can_set_tag: bool, + #[prost(message, optional, boxed, tag = "4")] + pub utility_stmt: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(int32, tag = "5")] + pub result_relation: i32, + #[prost(bool, tag = "6")] + pub has_aggs: bool, + #[prost(bool, tag = "7")] + pub has_window_funcs: bool, + #[prost(bool, tag = "8")] + pub has_target_srfs: bool, + #[prost(bool, tag = "9")] + pub has_sub_links: bool, + #[prost(bool, tag = "10")] + pub has_distinct_on: bool, + #[prost(bool, tag = "11")] + pub has_recursive: bool, + #[prost(bool, tag = "12")] + pub has_modifying_cte: bool, + #[prost(bool, tag = "13")] + pub has_for_update: bool, + #[prost(bool, tag = "14")] + pub has_row_security: bool, + #[prost(bool, tag = "15")] + pub is_return: bool, + #[prost(message, repeated, tag = "16")] + pub cte_list: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "17")] + pub rtable: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "18")] + pub rteperminfos: ::prost::alloc::vec::Vec, + #[prost(message, optional, boxed, tag = "19")] + pub jointree: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, repeated, tag = "20")] + pub merge_action_list: ::prost::alloc::vec::Vec, + #[prost(int32, tag = "21")] + pub merge_target_relation: i32, + #[prost(message, optional, boxed, tag = "22")] + pub merge_join_condition: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, repeated, tag = "23")] + pub target_list: ::prost::alloc::vec::Vec, + #[prost(enumeration = "OverridingKind", tag = "24")] + pub r#override: i32, + #[prost(message, optional, boxed, tag = "25")] + pub on_conflict: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, repeated, tag = "26")] + pub 
returning_list: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "27")] + pub group_clause: ::prost::alloc::vec::Vec, + #[prost(bool, tag = "28")] + pub group_distinct: bool, + #[prost(message, repeated, tag = "29")] + pub grouping_sets: ::prost::alloc::vec::Vec, + #[prost(message, optional, boxed, tag = "30")] + pub having_qual: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, repeated, tag = "31")] + pub window_clause: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "32")] + pub distinct_clause: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "33")] + pub sort_clause: ::prost::alloc::vec::Vec, + #[prost(message, optional, boxed, tag = "34")] + pub limit_offset: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, boxed, tag = "35")] + pub limit_count: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(enumeration = "LimitOption", tag = "36")] + pub limit_option: i32, + #[prost(message, repeated, tag = "37")] + pub row_marks: ::prost::alloc::vec::Vec, + #[prost(message, optional, boxed, tag = "38")] + pub set_operations: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, repeated, tag = "39")] + pub constraint_deps: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "40")] + pub with_check_options: ::prost::alloc::vec::Vec, + #[prost(int32, tag = "41")] + pub stmt_location: i32, + #[prost(int32, tag = "42")] + pub stmt_len: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct TypeName { + #[prost(message, repeated, tag = "1")] + pub names: ::prost::alloc::vec::Vec, + #[prost(uint32, tag = "2")] + pub type_oid: u32, + #[prost(bool, tag = "3")] + pub setof: bool, + #[prost(bool, tag = "4")] + pub pct_type: bool, + #[prost(message, repeated, tag = "5")] + pub typmods: ::prost::alloc::vec::Vec, + #[prost(int32, tag = "6")] + pub typemod: i32, + #[prost(message, repeated, tag = "7")] + pub array_bounds: ::prost::alloc::vec::Vec, 
+ #[prost(int32, tag = "8")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct ColumnRef { + #[prost(message, repeated, tag = "1")] + pub fields: ::prost::alloc::vec::Vec, + #[prost(int32, tag = "2")] + pub location: i32, +} +#[derive(Clone, Copy, PartialEq, ::prost::Message)] +pub struct ParamRef { + #[prost(int32, tag = "1")] + pub number: i32, + #[prost(int32, tag = "2")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct AExpr { + #[prost(enumeration = "AExprKind", tag = "1")] + pub kind: i32, + #[prost(message, repeated, tag = "2")] + pub name: ::prost::alloc::vec::Vec, + #[prost(message, optional, boxed, tag = "3")] + pub lexpr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, boxed, tag = "4")] + pub rexpr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(int32, tag = "5")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct TypeCast { + #[prost(message, optional, boxed, tag = "1")] + pub arg: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, tag = "2")] + pub type_name: ::core::option::Option, + #[prost(int32, tag = "3")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct CollateClause { + #[prost(message, optional, boxed, tag = "1")] + pub arg: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, repeated, tag = "2")] + pub collname: ::prost::alloc::vec::Vec, + #[prost(int32, tag = "3")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct RoleSpec { + #[prost(enumeration = "RoleSpecType", tag = "1")] + pub roletype: i32, + #[prost(string, tag = "2")] + pub rolename: ::prost::alloc::string::String, + #[prost(int32, tag = "3")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct FuncCall { + #[prost(message, repeated, tag = "1")] + pub funcname: ::prost::alloc::vec::Vec, + 
#[prost(message, repeated, tag = "2")] + pub args: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "3")] + pub agg_order: ::prost::alloc::vec::Vec, + #[prost(message, optional, boxed, tag = "4")] + pub agg_filter: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, boxed, tag = "5")] + pub over: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(bool, tag = "6")] + pub agg_within_group: bool, + #[prost(bool, tag = "7")] + pub agg_star: bool, + #[prost(bool, tag = "8")] + pub agg_distinct: bool, + #[prost(bool, tag = "9")] + pub func_variadic: bool, + #[prost(enumeration = "CoercionForm", tag = "10")] + pub funcformat: i32, + #[prost(int32, tag = "11")] + pub location: i32, +} +#[derive(Clone, Copy, PartialEq, ::prost::Message)] +pub struct AStar {} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct AIndices { + #[prost(bool, tag = "1")] + pub is_slice: bool, + #[prost(message, optional, boxed, tag = "2")] + pub lidx: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, boxed, tag = "3")] + pub uidx: ::core::option::Option<::prost::alloc::boxed::Box>, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct AIndirection { + #[prost(message, optional, boxed, tag = "1")] + pub arg: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, repeated, tag = "2")] + pub indirection: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct AArrayExpr { + #[prost(message, repeated, tag = "1")] + pub elements: ::prost::alloc::vec::Vec, + #[prost(int32, tag = "2")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct ResTarget { + #[prost(string, tag = "1")] + pub name: ::prost::alloc::string::String, + #[prost(message, repeated, tag = "2")] + pub indirection: ::prost::alloc::vec::Vec, + #[prost(message, optional, boxed, tag = "3")] + pub val: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(int32, 
tag = "4")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct MultiAssignRef { + #[prost(message, optional, boxed, tag = "1")] + pub source: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(int32, tag = "2")] + pub colno: i32, + #[prost(int32, tag = "3")] + pub ncolumns: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct SortBy { + #[prost(message, optional, boxed, tag = "1")] + pub node: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(enumeration = "SortByDir", tag = "2")] + pub sortby_dir: i32, + #[prost(enumeration = "SortByNulls", tag = "3")] + pub sortby_nulls: i32, + #[prost(message, repeated, tag = "4")] + pub use_op: ::prost::alloc::vec::Vec, + #[prost(int32, tag = "5")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct WindowDef { + #[prost(string, tag = "1")] + pub name: ::prost::alloc::string::String, + #[prost(string, tag = "2")] + pub refname: ::prost::alloc::string::String, + #[prost(message, repeated, tag = "3")] + pub partition_clause: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "4")] + pub order_clause: ::prost::alloc::vec::Vec, + #[prost(int32, tag = "5")] + pub frame_options: i32, + #[prost(message, optional, boxed, tag = "6")] + pub start_offset: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, boxed, tag = "7")] + pub end_offset: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(int32, tag = "8")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct RangeSubselect { + #[prost(bool, tag = "1")] + pub lateral: bool, + #[prost(message, optional, boxed, tag = "2")] + pub subquery: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, tag = "3")] + pub alias: ::core::option::Option, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct RangeFunction { + #[prost(bool, tag = "1")] + pub lateral: bool, + #[prost(bool, tag 
= "2")] + pub ordinality: bool, + #[prost(bool, tag = "3")] + pub is_rowsfrom: bool, + #[prost(message, repeated, tag = "4")] + pub functions: ::prost::alloc::vec::Vec, + #[prost(message, optional, tag = "5")] + pub alias: ::core::option::Option, + #[prost(message, repeated, tag = "6")] + pub coldeflist: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct RangeTableFunc { + #[prost(bool, tag = "1")] + pub lateral: bool, + #[prost(message, optional, boxed, tag = "2")] + pub docexpr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, boxed, tag = "3")] + pub rowexpr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, repeated, tag = "4")] + pub namespaces: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "5")] + pub columns: ::prost::alloc::vec::Vec, + #[prost(message, optional, tag = "6")] + pub alias: ::core::option::Option, + #[prost(int32, tag = "7")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct RangeTableFuncCol { + #[prost(string, tag = "1")] + pub colname: ::prost::alloc::string::String, + #[prost(message, optional, tag = "2")] + pub type_name: ::core::option::Option, + #[prost(bool, tag = "3")] + pub for_ordinality: bool, + #[prost(bool, tag = "4")] + pub is_not_null: bool, + #[prost(message, optional, boxed, tag = "5")] + pub colexpr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, boxed, tag = "6")] + pub coldefexpr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(int32, tag = "7")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct RangeTableSample { + #[prost(message, optional, boxed, tag = "1")] + pub relation: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, repeated, tag = "2")] + pub method: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "3")] + pub args: ::prost::alloc::vec::Vec, + #[prost(message, 
optional, boxed, tag = "4")] + pub repeatable: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(int32, tag = "5")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct ColumnDef { + #[prost(string, tag = "1")] + pub colname: ::prost::alloc::string::String, + #[prost(message, optional, tag = "2")] + pub type_name: ::core::option::Option, + #[prost(string, tag = "3")] + pub compression: ::prost::alloc::string::String, + #[prost(int32, tag = "4")] + pub inhcount: i32, + #[prost(bool, tag = "5")] + pub is_local: bool, + #[prost(bool, tag = "6")] + pub is_not_null: bool, + #[prost(bool, tag = "7")] + pub is_from_type: bool, + #[prost(string, tag = "8")] + pub storage: ::prost::alloc::string::String, + #[prost(string, tag = "9")] + pub storage_name: ::prost::alloc::string::String, + #[prost(message, optional, boxed, tag = "10")] + pub raw_default: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, boxed, tag = "11")] + pub cooked_default: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(string, tag = "12")] + pub identity: ::prost::alloc::string::String, + #[prost(message, optional, tag = "13")] + pub identity_sequence: ::core::option::Option, + #[prost(string, tag = "14")] + pub generated: ::prost::alloc::string::String, + #[prost(message, optional, boxed, tag = "15")] + pub coll_clause: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(uint32, tag = "16")] + pub coll_oid: u32, + #[prost(message, repeated, tag = "17")] + pub constraints: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "18")] + pub fdwoptions: ::prost::alloc::vec::Vec, + #[prost(int32, tag = "19")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct TableLikeClause { + #[prost(message, optional, tag = "1")] + pub relation: ::core::option::Option, + #[prost(uint32, tag = "2")] + pub options: u32, + #[prost(uint32, tag = "3")] + pub relation_oid: u32, +} 
+#[derive(Clone, PartialEq, ::prost::Message)] +pub struct IndexElem { + #[prost(string, tag = "1")] + pub name: ::prost::alloc::string::String, + #[prost(message, optional, boxed, tag = "2")] + pub expr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(string, tag = "3")] + pub indexcolname: ::prost::alloc::string::String, + #[prost(message, repeated, tag = "4")] + pub collation: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "5")] + pub opclass: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "6")] + pub opclassopts: ::prost::alloc::vec::Vec, + #[prost(enumeration = "SortByDir", tag = "7")] + pub ordering: i32, + #[prost(enumeration = "SortByNulls", tag = "8")] + pub nulls_ordering: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct DefElem { + #[prost(string, tag = "1")] + pub defnamespace: ::prost::alloc::string::String, + #[prost(string, tag = "2")] + pub defname: ::prost::alloc::string::String, + #[prost(message, optional, boxed, tag = "3")] + pub arg: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(enumeration = "DefElemAction", tag = "4")] + pub defaction: i32, + #[prost(int32, tag = "5")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct LockingClause { + #[prost(message, repeated, tag = "1")] + pub locked_rels: ::prost::alloc::vec::Vec, + #[prost(enumeration = "LockClauseStrength", tag = "2")] + pub strength: i32, + #[prost(enumeration = "LockWaitPolicy", tag = "3")] + pub wait_policy: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct XmlSerialize { + #[prost(enumeration = "XmlOptionType", tag = "1")] + pub xmloption: i32, + #[prost(message, optional, boxed, tag = "2")] + pub expr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, tag = "3")] + pub type_name: ::core::option::Option, + #[prost(bool, tag = "4")] + pub indent: bool, + #[prost(int32, tag = "5")] + pub location: i32, +} +#[derive(Clone, 
PartialEq, ::prost::Message)] +pub struct PartitionElem { + #[prost(string, tag = "1")] + pub name: ::prost::alloc::string::String, + #[prost(message, optional, boxed, tag = "2")] + pub expr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, repeated, tag = "3")] + pub collation: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "4")] + pub opclass: ::prost::alloc::vec::Vec, + #[prost(int32, tag = "5")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct PartitionSpec { + #[prost(enumeration = "PartitionStrategy", tag = "1")] + pub strategy: i32, + #[prost(message, repeated, tag = "2")] + pub part_params: ::prost::alloc::vec::Vec, + #[prost(int32, tag = "3")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct PartitionBoundSpec { + #[prost(string, tag = "1")] + pub strategy: ::prost::alloc::string::String, + #[prost(bool, tag = "2")] + pub is_default: bool, + #[prost(int32, tag = "3")] + pub modulus: i32, + #[prost(int32, tag = "4")] + pub remainder: i32, + #[prost(message, repeated, tag = "5")] + pub listdatums: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "6")] + pub lowerdatums: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "7")] + pub upperdatums: ::prost::alloc::vec::Vec, + #[prost(int32, tag = "8")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct PartitionRangeDatum { + #[prost(enumeration = "PartitionRangeDatumKind", tag = "1")] + pub kind: i32, + #[prost(message, optional, boxed, tag = "2")] + pub value: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(int32, tag = "3")] + pub location: i32, +} +#[derive(Clone, Copy, PartialEq, ::prost::Message)] +pub struct SinglePartitionSpec {} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct PartitionCmd { + #[prost(message, optional, tag = "1")] + pub name: ::core::option::Option, + #[prost(message, optional, tag = "2")] + pub bound: 
::core::option::Option, + #[prost(bool, tag = "3")] + pub concurrent: bool, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct RangeTblEntry { + #[prost(message, optional, tag = "1")] + pub alias: ::core::option::Option, + #[prost(message, optional, tag = "2")] + pub eref: ::core::option::Option, + #[prost(enumeration = "RteKind", tag = "3")] + pub rtekind: i32, + #[prost(uint32, tag = "4")] + pub relid: u32, + #[prost(bool, tag = "5")] + pub inh: bool, + #[prost(string, tag = "6")] + pub relkind: ::prost::alloc::string::String, + #[prost(int32, tag = "7")] + pub rellockmode: i32, + #[prost(uint32, tag = "8")] + pub perminfoindex: u32, + #[prost(message, optional, boxed, tag = "9")] + pub tablesample: ::core::option::Option< + ::prost::alloc::boxed::Box, + >, + #[prost(message, optional, boxed, tag = "10")] + pub subquery: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(bool, tag = "11")] + pub security_barrier: bool, + #[prost(enumeration = "JoinType", tag = "12")] + pub jointype: i32, + #[prost(int32, tag = "13")] + pub joinmergedcols: i32, + #[prost(message, repeated, tag = "14")] + pub joinaliasvars: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "15")] + pub joinleftcols: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "16")] + pub joinrightcols: ::prost::alloc::vec::Vec, + #[prost(message, optional, tag = "17")] + pub join_using_alias: ::core::option::Option, + #[prost(message, repeated, tag = "18")] + pub functions: ::prost::alloc::vec::Vec, + #[prost(bool, tag = "19")] + pub funcordinality: bool, + #[prost(message, optional, boxed, tag = "20")] + pub tablefunc: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, repeated, tag = "21")] + pub values_lists: ::prost::alloc::vec::Vec, + #[prost(string, tag = "22")] + pub ctename: ::prost::alloc::string::String, + #[prost(uint32, tag = "23")] + pub ctelevelsup: u32, + #[prost(bool, tag = "24")] + pub self_reference: bool, + #[prost(message, 
repeated, tag = "25")] + pub coltypes: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "26")] + pub coltypmods: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "27")] + pub colcollations: ::prost::alloc::vec::Vec, + #[prost(string, tag = "28")] + pub enrname: ::prost::alloc::string::String, + #[prost(double, tag = "29")] + pub enrtuples: f64, + #[prost(bool, tag = "30")] + pub lateral: bool, + #[prost(bool, tag = "31")] + pub in_from_cl: bool, + #[prost(message, repeated, tag = "32")] + pub security_quals: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct RtePermissionInfo { + #[prost(uint32, tag = "1")] + pub relid: u32, + #[prost(bool, tag = "2")] + pub inh: bool, + #[prost(uint64, tag = "3")] + pub required_perms: u64, + #[prost(uint32, tag = "4")] + pub check_as_user: u32, + #[prost(uint64, repeated, tag = "5")] + pub selected_cols: ::prost::alloc::vec::Vec, + #[prost(uint64, repeated, tag = "6")] + pub inserted_cols: ::prost::alloc::vec::Vec, + #[prost(uint64, repeated, tag = "7")] + pub updated_cols: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct RangeTblFunction { + #[prost(message, optional, boxed, tag = "1")] + pub funcexpr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(int32, tag = "2")] + pub funccolcount: i32, + #[prost(message, repeated, tag = "3")] + pub funccolnames: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "4")] + pub funccoltypes: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "5")] + pub funccoltypmods: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "6")] + pub funccolcollations: ::prost::alloc::vec::Vec, + #[prost(uint64, repeated, tag = "7")] + pub funcparams: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct TableSampleClause { + #[prost(uint32, tag = "1")] + pub tsmhandler: u32, + #[prost(message, repeated, tag = "2")] + pub args: 
::prost::alloc::vec::Vec, + #[prost(message, optional, boxed, tag = "3")] + pub repeatable: ::core::option::Option<::prost::alloc::boxed::Box>, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct WithCheckOption { + #[prost(enumeration = "WcoKind", tag = "1")] + pub kind: i32, + #[prost(string, tag = "2")] + pub relname: ::prost::alloc::string::String, + #[prost(string, tag = "3")] + pub polname: ::prost::alloc::string::String, + #[prost(message, optional, boxed, tag = "4")] + pub qual: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(bool, tag = "5")] + pub cascaded: bool, +} +#[derive(Clone, Copy, PartialEq, ::prost::Message)] +pub struct SortGroupClause { + #[prost(uint32, tag = "1")] + pub tle_sort_group_ref: u32, + #[prost(uint32, tag = "2")] + pub eqop: u32, + #[prost(uint32, tag = "3")] + pub sortop: u32, + #[prost(bool, tag = "4")] + pub nulls_first: bool, + #[prost(bool, tag = "5")] + pub hashable: bool, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct GroupingSet { + #[prost(enumeration = "GroupingSetKind", tag = "1")] + pub kind: i32, + #[prost(message, repeated, tag = "2")] + pub content: ::prost::alloc::vec::Vec, + #[prost(int32, tag = "3")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct WindowClause { + #[prost(string, tag = "1")] + pub name: ::prost::alloc::string::String, + #[prost(string, tag = "2")] + pub refname: ::prost::alloc::string::String, + #[prost(message, repeated, tag = "3")] + pub partition_clause: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "4")] + pub order_clause: ::prost::alloc::vec::Vec, + #[prost(int32, tag = "5")] + pub frame_options: i32, + #[prost(message, optional, boxed, tag = "6")] + pub start_offset: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, boxed, tag = "7")] + pub end_offset: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(uint32, tag = "8")] + pub start_in_range_func: u32, + 
#[prost(uint32, tag = "9")] + pub end_in_range_func: u32, + #[prost(uint32, tag = "10")] + pub in_range_coll: u32, + #[prost(bool, tag = "11")] + pub in_range_asc: bool, + #[prost(bool, tag = "12")] + pub in_range_nulls_first: bool, + #[prost(uint32, tag = "13")] + pub winref: u32, + #[prost(bool, tag = "14")] + pub copied_order: bool, +} +#[derive(Clone, Copy, PartialEq, ::prost::Message)] +pub struct RowMarkClause { + #[prost(uint32, tag = "1")] + pub rti: u32, + #[prost(enumeration = "LockClauseStrength", tag = "2")] + pub strength: i32, + #[prost(enumeration = "LockWaitPolicy", tag = "3")] + pub wait_policy: i32, + #[prost(bool, tag = "4")] + pub pushed_down: bool, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct WithClause { + #[prost(message, repeated, tag = "1")] + pub ctes: ::prost::alloc::vec::Vec, + #[prost(bool, tag = "2")] + pub recursive: bool, + #[prost(int32, tag = "3")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct InferClause { + #[prost(message, repeated, tag = "1")] + pub index_elems: ::prost::alloc::vec::Vec, + #[prost(message, optional, boxed, tag = "2")] + pub where_clause: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(string, tag = "3")] + pub conname: ::prost::alloc::string::String, + #[prost(int32, tag = "4")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct OnConflictClause { + #[prost(enumeration = "OnConflictAction", tag = "1")] + pub action: i32, + #[prost(message, optional, boxed, tag = "2")] + pub infer: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, repeated, tag = "3")] + pub target_list: ::prost::alloc::vec::Vec, + #[prost(message, optional, boxed, tag = "4")] + pub where_clause: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(int32, tag = "5")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct CteSearchClause { + #[prost(message, repeated, tag = "1")] + pub 
search_col_list: ::prost::alloc::vec::Vec, + #[prost(bool, tag = "2")] + pub search_breadth_first: bool, + #[prost(string, tag = "3")] + pub search_seq_column: ::prost::alloc::string::String, + #[prost(int32, tag = "4")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct CteCycleClause { + #[prost(message, repeated, tag = "1")] + pub cycle_col_list: ::prost::alloc::vec::Vec, + #[prost(string, tag = "2")] + pub cycle_mark_column: ::prost::alloc::string::String, + #[prost(message, optional, boxed, tag = "3")] + pub cycle_mark_value: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, boxed, tag = "4")] + pub cycle_mark_default: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(string, tag = "5")] + pub cycle_path_column: ::prost::alloc::string::String, + #[prost(int32, tag = "6")] + pub location: i32, + #[prost(uint32, tag = "7")] + pub cycle_mark_type: u32, + #[prost(int32, tag = "8")] + pub cycle_mark_typmod: i32, + #[prost(uint32, tag = "9")] + pub cycle_mark_collation: u32, + #[prost(uint32, tag = "10")] + pub cycle_mark_neop: u32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct CommonTableExpr { + #[prost(string, tag = "1")] + pub ctename: ::prost::alloc::string::String, + #[prost(message, repeated, tag = "2")] + pub aliascolnames: ::prost::alloc::vec::Vec, + #[prost(enumeration = "CteMaterialize", tag = "3")] + pub ctematerialized: i32, + #[prost(message, optional, boxed, tag = "4")] + pub ctequery: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, tag = "5")] + pub search_clause: ::core::option::Option, + #[prost(message, optional, boxed, tag = "6")] + pub cycle_clause: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(int32, tag = "7")] + pub location: i32, + #[prost(bool, tag = "8")] + pub cterecursive: bool, + #[prost(int32, tag = "9")] + pub cterefcount: i32, + #[prost(message, repeated, tag = "10")] + pub ctecolnames: 
::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "11")] + pub ctecoltypes: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "12")] + pub ctecoltypmods: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "13")] + pub ctecolcollations: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct MergeWhenClause { + #[prost(enumeration = "MergeMatchKind", tag = "1")] + pub match_kind: i32, + #[prost(enumeration = "CmdType", tag = "2")] + pub command_type: i32, + #[prost(enumeration = "OverridingKind", tag = "3")] + pub r#override: i32, + #[prost(message, optional, boxed, tag = "4")] + pub condition: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, repeated, tag = "5")] + pub target_list: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "6")] + pub values: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct TriggerTransition { + #[prost(string, tag = "1")] + pub name: ::prost::alloc::string::String, + #[prost(bool, tag = "2")] + pub is_new: bool, + #[prost(bool, tag = "3")] + pub is_table: bool, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct JsonOutput { + #[prost(message, optional, tag = "1")] + pub type_name: ::core::option::Option, + #[prost(message, optional, tag = "2")] + pub returning: ::core::option::Option, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct JsonArgument { + #[prost(message, optional, boxed, tag = "1")] + pub val: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(string, tag = "2")] + pub name: ::prost::alloc::string::String, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct JsonFuncExpr { + #[prost(enumeration = "JsonExprOp", tag = "1")] + pub op: i32, + #[prost(string, tag = "2")] + pub column_name: ::prost::alloc::string::String, + #[prost(message, optional, boxed, tag = "3")] + pub context_item: ::core::option::Option<::prost::alloc::boxed::Box>, + 
#[prost(message, optional, boxed, tag = "4")] + pub pathspec: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, repeated, tag = "5")] + pub passing: ::prost::alloc::vec::Vec, + #[prost(message, optional, tag = "6")] + pub output: ::core::option::Option, + #[prost(message, optional, boxed, tag = "7")] + pub on_empty: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, boxed, tag = "8")] + pub on_error: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(enumeration = "JsonWrapper", tag = "9")] + pub wrapper: i32, + #[prost(enumeration = "JsonQuotes", tag = "10")] + pub quotes: i32, + #[prost(int32, tag = "11")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct JsonTablePathSpec { + #[prost(message, optional, boxed, tag = "1")] + pub string: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(string, tag = "2")] + pub name: ::prost::alloc::string::String, + #[prost(int32, tag = "3")] + pub name_location: i32, + #[prost(int32, tag = "4")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct JsonTable { + #[prost(message, optional, boxed, tag = "1")] + pub context_item: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, boxed, tag = "2")] + pub pathspec: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, repeated, tag = "3")] + pub passing: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "4")] + pub columns: ::prost::alloc::vec::Vec, + #[prost(message, optional, boxed, tag = "5")] + pub on_error: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, tag = "6")] + pub alias: ::core::option::Option, + #[prost(bool, tag = "7")] + pub lateral: bool, + #[prost(int32, tag = "8")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct JsonTableColumn { + #[prost(enumeration = "JsonTableColumnType", tag = "1")] + pub coltype: i32, 
+ #[prost(string, tag = "2")] + pub name: ::prost::alloc::string::String, + #[prost(message, optional, tag = "3")] + pub type_name: ::core::option::Option, + #[prost(message, optional, boxed, tag = "4")] + pub pathspec: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, tag = "5")] + pub format: ::core::option::Option, + #[prost(enumeration = "JsonWrapper", tag = "6")] + pub wrapper: i32, + #[prost(enumeration = "JsonQuotes", tag = "7")] + pub quotes: i32, + #[prost(message, repeated, tag = "8")] + pub columns: ::prost::alloc::vec::Vec, + #[prost(message, optional, boxed, tag = "9")] + pub on_empty: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, boxed, tag = "10")] + pub on_error: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(int32, tag = "11")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct JsonKeyValue { + #[prost(message, optional, boxed, tag = "1")] + pub key: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, boxed, tag = "2")] + pub value: ::core::option::Option<::prost::alloc::boxed::Box>, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct JsonParseExpr { + #[prost(message, optional, boxed, tag = "1")] + pub expr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, tag = "2")] + pub output: ::core::option::Option, + #[prost(bool, tag = "3")] + pub unique_keys: bool, + #[prost(int32, tag = "4")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct JsonScalarExpr { + #[prost(message, optional, boxed, tag = "1")] + pub expr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, tag = "2")] + pub output: ::core::option::Option, + #[prost(int32, tag = "3")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct JsonSerializeExpr { + #[prost(message, optional, boxed, tag = "1")] + pub expr: 
::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, tag = "2")] + pub output: ::core::option::Option, + #[prost(int32, tag = "3")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct JsonObjectConstructor { + #[prost(message, repeated, tag = "1")] + pub exprs: ::prost::alloc::vec::Vec, + #[prost(message, optional, tag = "2")] + pub output: ::core::option::Option, + #[prost(bool, tag = "3")] + pub absent_on_null: bool, + #[prost(bool, tag = "4")] + pub unique: bool, + #[prost(int32, tag = "5")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct JsonArrayConstructor { + #[prost(message, repeated, tag = "1")] + pub exprs: ::prost::alloc::vec::Vec, + #[prost(message, optional, tag = "2")] + pub output: ::core::option::Option, + #[prost(bool, tag = "3")] + pub absent_on_null: bool, + #[prost(int32, tag = "4")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct JsonArrayQueryConstructor { + #[prost(message, optional, boxed, tag = "1")] + pub query: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, tag = "2")] + pub output: ::core::option::Option, + #[prost(message, optional, tag = "3")] + pub format: ::core::option::Option, + #[prost(bool, tag = "4")] + pub absent_on_null: bool, + #[prost(int32, tag = "5")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct JsonAggConstructor { + #[prost(message, optional, tag = "1")] + pub output: ::core::option::Option, + #[prost(message, optional, boxed, tag = "2")] + pub agg_filter: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, repeated, tag = "3")] + pub agg_order: ::prost::alloc::vec::Vec, + #[prost(message, optional, boxed, tag = "4")] + pub over: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(int32, tag = "5")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct JsonObjectAgg 
{ + #[prost(message, optional, boxed, tag = "1")] + pub constructor: ::core::option::Option< + ::prost::alloc::boxed::Box, + >, + #[prost(message, optional, boxed, tag = "2")] + pub arg: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(bool, tag = "3")] + pub absent_on_null: bool, + #[prost(bool, tag = "4")] + pub unique: bool, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct JsonArrayAgg { + #[prost(message, optional, boxed, tag = "1")] + pub constructor: ::core::option::Option< + ::prost::alloc::boxed::Box, + >, + #[prost(message, optional, boxed, tag = "2")] + pub arg: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(bool, tag = "3")] + pub absent_on_null: bool, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct RawStmt { + #[prost(message, optional, boxed, tag = "1")] + pub stmt: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(int32, tag = "2")] + pub stmt_location: i32, + #[prost(int32, tag = "3")] + pub stmt_len: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct InsertStmt { + #[prost(message, optional, tag = "1")] + pub relation: ::core::option::Option, + #[prost(message, repeated, tag = "2")] + pub cols: ::prost::alloc::vec::Vec, + #[prost(message, optional, boxed, tag = "3")] + pub select_stmt: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, boxed, tag = "4")] + pub on_conflict_clause: ::core::option::Option< + ::prost::alloc::boxed::Box, + >, + #[prost(message, repeated, tag = "5")] + pub returning_list: ::prost::alloc::vec::Vec, + #[prost(message, optional, tag = "6")] + pub with_clause: ::core::option::Option, + #[prost(enumeration = "OverridingKind", tag = "7")] + pub r#override: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct DeleteStmt { + #[prost(message, optional, tag = "1")] + pub relation: ::core::option::Option, + #[prost(message, repeated, tag = "2")] + pub using_clause: ::prost::alloc::vec::Vec, + 
#[prost(message, optional, boxed, tag = "3")] + pub where_clause: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, repeated, tag = "4")] + pub returning_list: ::prost::alloc::vec::Vec, + #[prost(message, optional, tag = "5")] + pub with_clause: ::core::option::Option, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct UpdateStmt { + #[prost(message, optional, tag = "1")] + pub relation: ::core::option::Option, + #[prost(message, repeated, tag = "2")] + pub target_list: ::prost::alloc::vec::Vec, + #[prost(message, optional, boxed, tag = "3")] + pub where_clause: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, repeated, tag = "4")] + pub from_clause: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "5")] + pub returning_list: ::prost::alloc::vec::Vec, + #[prost(message, optional, tag = "6")] + pub with_clause: ::core::option::Option, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct MergeStmt { + #[prost(message, optional, tag = "1")] + pub relation: ::core::option::Option, + #[prost(message, optional, boxed, tag = "2")] + pub source_relation: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, boxed, tag = "3")] + pub join_condition: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, repeated, tag = "4")] + pub merge_when_clauses: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "5")] + pub returning_list: ::prost::alloc::vec::Vec, + #[prost(message, optional, tag = "6")] + pub with_clause: ::core::option::Option, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct SelectStmt { + #[prost(message, repeated, tag = "1")] + pub distinct_clause: ::prost::alloc::vec::Vec, + #[prost(message, optional, boxed, tag = "2")] + pub into_clause: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, repeated, tag = "3")] + pub target_list: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "4")] + pub 
from_clause: ::prost::alloc::vec::Vec, + #[prost(message, optional, boxed, tag = "5")] + pub where_clause: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, repeated, tag = "6")] + pub group_clause: ::prost::alloc::vec::Vec, + #[prost(bool, tag = "7")] + pub group_distinct: bool, + #[prost(message, optional, boxed, tag = "8")] + pub having_clause: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, repeated, tag = "9")] + pub window_clause: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "10")] + pub values_lists: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "11")] + pub sort_clause: ::prost::alloc::vec::Vec, + #[prost(message, optional, boxed, tag = "12")] + pub limit_offset: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, boxed, tag = "13")] + pub limit_count: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(enumeration = "LimitOption", tag = "14")] + pub limit_option: i32, + #[prost(message, repeated, tag = "15")] + pub locking_clause: ::prost::alloc::vec::Vec, + #[prost(message, optional, tag = "16")] + pub with_clause: ::core::option::Option, + #[prost(enumeration = "SetOperation", tag = "17")] + pub op: i32, + #[prost(bool, tag = "18")] + pub all: bool, + #[prost(message, optional, boxed, tag = "19")] + pub larg: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, boxed, tag = "20")] + pub rarg: ::core::option::Option<::prost::alloc::boxed::Box>, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct SetOperationStmt { + #[prost(enumeration = "SetOperation", tag = "1")] + pub op: i32, + #[prost(bool, tag = "2")] + pub all: bool, + #[prost(message, optional, boxed, tag = "3")] + pub larg: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, boxed, tag = "4")] + pub rarg: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, repeated, tag = "5")] + pub col_types: 
::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "6")] + pub col_typmods: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "7")] + pub col_collations: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "8")] + pub group_clauses: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct ReturnStmt { + #[prost(message, optional, boxed, tag = "1")] + pub returnval: ::core::option::Option<::prost::alloc::boxed::Box>, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct PlAssignStmt { + #[prost(string, tag = "1")] + pub name: ::prost::alloc::string::String, + #[prost(message, repeated, tag = "2")] + pub indirection: ::prost::alloc::vec::Vec, + #[prost(int32, tag = "3")] + pub nnames: i32, + #[prost(message, optional, boxed, tag = "4")] + pub val: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(int32, tag = "5")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct CreateSchemaStmt { + #[prost(string, tag = "1")] + pub schemaname: ::prost::alloc::string::String, + #[prost(message, optional, tag = "2")] + pub authrole: ::core::option::Option, + #[prost(message, repeated, tag = "3")] + pub schema_elts: ::prost::alloc::vec::Vec, + #[prost(bool, tag = "4")] + pub if_not_exists: bool, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct AlterTableStmt { + #[prost(message, optional, tag = "1")] + pub relation: ::core::option::Option, + #[prost(message, repeated, tag = "2")] + pub cmds: ::prost::alloc::vec::Vec, + #[prost(enumeration = "ObjectType", tag = "3")] + pub objtype: i32, + #[prost(bool, tag = "4")] + pub missing_ok: bool, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct ReplicaIdentityStmt { + #[prost(string, tag = "1")] + pub identity_type: ::prost::alloc::string::String, + #[prost(string, tag = "2")] + pub name: ::prost::alloc::string::String, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct AlterTableCmd { + 
#[prost(enumeration = "AlterTableType", tag = "1")] + pub subtype: i32, + #[prost(string, tag = "2")] + pub name: ::prost::alloc::string::String, + #[prost(int32, tag = "3")] + pub num: i32, + #[prost(message, optional, tag = "4")] + pub newowner: ::core::option::Option, + #[prost(message, optional, boxed, tag = "5")] + pub def: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(enumeration = "DropBehavior", tag = "6")] + pub behavior: i32, + #[prost(bool, tag = "7")] + pub missing_ok: bool, + #[prost(bool, tag = "8")] + pub recurse: bool, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct AlterCollationStmt { + #[prost(message, repeated, tag = "1")] + pub collname: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct AlterDomainStmt { + #[prost(string, tag = "1")] + pub subtype: ::prost::alloc::string::String, + #[prost(message, repeated, tag = "2")] + pub type_name: ::prost::alloc::vec::Vec, + #[prost(string, tag = "3")] + pub name: ::prost::alloc::string::String, + #[prost(message, optional, boxed, tag = "4")] + pub def: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(enumeration = "DropBehavior", tag = "5")] + pub behavior: i32, + #[prost(bool, tag = "6")] + pub missing_ok: bool, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct GrantStmt { + #[prost(bool, tag = "1")] + pub is_grant: bool, + #[prost(enumeration = "GrantTargetType", tag = "2")] + pub targtype: i32, + #[prost(enumeration = "ObjectType", tag = "3")] + pub objtype: i32, + #[prost(message, repeated, tag = "4")] + pub objects: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "5")] + pub privileges: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "6")] + pub grantees: ::prost::alloc::vec::Vec, + #[prost(bool, tag = "7")] + pub grant_option: bool, + #[prost(message, optional, tag = "8")] + pub grantor: ::core::option::Option, + #[prost(enumeration = "DropBehavior", tag = "9")] + pub behavior: 
i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct ObjectWithArgs { + #[prost(message, repeated, tag = "1")] + pub objname: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "2")] + pub objargs: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "3")] + pub objfuncargs: ::prost::alloc::vec::Vec, + #[prost(bool, tag = "4")] + pub args_unspecified: bool, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct AccessPriv { + #[prost(string, tag = "1")] + pub priv_name: ::prost::alloc::string::String, + #[prost(message, repeated, tag = "2")] + pub cols: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct GrantRoleStmt { + #[prost(message, repeated, tag = "1")] + pub granted_roles: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "2")] + pub grantee_roles: ::prost::alloc::vec::Vec, + #[prost(bool, tag = "3")] + pub is_grant: bool, + #[prost(message, repeated, tag = "4")] + pub opt: ::prost::alloc::vec::Vec, + #[prost(message, optional, tag = "5")] + pub grantor: ::core::option::Option, + #[prost(enumeration = "DropBehavior", tag = "6")] + pub behavior: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct AlterDefaultPrivilegesStmt { + #[prost(message, repeated, tag = "1")] + pub options: ::prost::alloc::vec::Vec, + #[prost(message, optional, tag = "2")] + pub action: ::core::option::Option, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct CopyStmt { + #[prost(message, optional, tag = "1")] + pub relation: ::core::option::Option, + #[prost(message, optional, boxed, tag = "2")] + pub query: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, repeated, tag = "3")] + pub attlist: ::prost::alloc::vec::Vec, + #[prost(bool, tag = "4")] + pub is_from: bool, + #[prost(bool, tag = "5")] + pub is_program: bool, + #[prost(string, tag = "6")] + pub filename: ::prost::alloc::string::String, + #[prost(message, repeated, tag = "7")] + pub options: 
::prost::alloc::vec::Vec, + #[prost(message, optional, boxed, tag = "8")] + pub where_clause: ::core::option::Option<::prost::alloc::boxed::Box>, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct VariableSetStmt { + #[prost(enumeration = "VariableSetKind", tag = "1")] + pub kind: i32, + #[prost(string, tag = "2")] + pub name: ::prost::alloc::string::String, + #[prost(message, repeated, tag = "3")] + pub args: ::prost::alloc::vec::Vec, + #[prost(bool, tag = "4")] + pub is_local: bool, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct VariableShowStmt { + #[prost(string, tag = "1")] + pub name: ::prost::alloc::string::String, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct CreateStmt { + #[prost(message, optional, tag = "1")] + pub relation: ::core::option::Option, + #[prost(message, repeated, tag = "2")] + pub table_elts: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "3")] + pub inh_relations: ::prost::alloc::vec::Vec, + #[prost(message, optional, tag = "4")] + pub partbound: ::core::option::Option, + #[prost(message, optional, tag = "5")] + pub partspec: ::core::option::Option, + #[prost(message, optional, tag = "6")] + pub of_typename: ::core::option::Option, + #[prost(message, repeated, tag = "7")] + pub constraints: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "8")] + pub options: ::prost::alloc::vec::Vec, + #[prost(enumeration = "OnCommitAction", tag = "9")] + pub oncommit: i32, + #[prost(string, tag = "10")] + pub tablespacename: ::prost::alloc::string::String, + #[prost(string, tag = "11")] + pub access_method: ::prost::alloc::string::String, + #[prost(bool, tag = "12")] + pub if_not_exists: bool, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct Constraint { + #[prost(enumeration = "ConstrType", tag = "1")] + pub contype: i32, + #[prost(string, tag = "2")] + pub conname: ::prost::alloc::string::String, + #[prost(bool, tag = "3")] + pub deferrable: bool, + #[prost(bool, tag = 
"4")] + pub initdeferred: bool, + #[prost(bool, tag = "5")] + pub skip_validation: bool, + #[prost(bool, tag = "6")] + pub initially_valid: bool, + #[prost(bool, tag = "7")] + pub is_no_inherit: bool, + #[prost(message, optional, boxed, tag = "8")] + pub raw_expr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(string, tag = "9")] + pub cooked_expr: ::prost::alloc::string::String, + #[prost(string, tag = "10")] + pub generated_when: ::prost::alloc::string::String, + #[prost(int32, tag = "11")] + pub inhcount: i32, + #[prost(bool, tag = "12")] + pub nulls_not_distinct: bool, + #[prost(message, repeated, tag = "13")] + pub keys: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "14")] + pub including: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "15")] + pub exclusions: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "16")] + pub options: ::prost::alloc::vec::Vec, + #[prost(string, tag = "17")] + pub indexname: ::prost::alloc::string::String, + #[prost(string, tag = "18")] + pub indexspace: ::prost::alloc::string::String, + #[prost(bool, tag = "19")] + pub reset_default_tblspc: bool, + #[prost(string, tag = "20")] + pub access_method: ::prost::alloc::string::String, + #[prost(message, optional, boxed, tag = "21")] + pub where_clause: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, tag = "22")] + pub pktable: ::core::option::Option, + #[prost(message, repeated, tag = "23")] + pub fk_attrs: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "24")] + pub pk_attrs: ::prost::alloc::vec::Vec, + #[prost(string, tag = "25")] + pub fk_matchtype: ::prost::alloc::string::String, + #[prost(string, tag = "26")] + pub fk_upd_action: ::prost::alloc::string::String, + #[prost(string, tag = "27")] + pub fk_del_action: ::prost::alloc::string::String, + #[prost(message, repeated, tag = "28")] + pub fk_del_set_cols: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "29")] + pub 
old_conpfeqop: ::prost::alloc::vec::Vec, + #[prost(uint32, tag = "30")] + pub old_pktable_oid: u32, + #[prost(int32, tag = "31")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct CreateTableSpaceStmt { + #[prost(string, tag = "1")] + pub tablespacename: ::prost::alloc::string::String, + #[prost(message, optional, tag = "2")] + pub owner: ::core::option::Option, + #[prost(string, tag = "3")] + pub location: ::prost::alloc::string::String, + #[prost(message, repeated, tag = "4")] + pub options: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct DropTableSpaceStmt { + #[prost(string, tag = "1")] + pub tablespacename: ::prost::alloc::string::String, + #[prost(bool, tag = "2")] + pub missing_ok: bool, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct AlterTableSpaceOptionsStmt { + #[prost(string, tag = "1")] + pub tablespacename: ::prost::alloc::string::String, + #[prost(message, repeated, tag = "2")] + pub options: ::prost::alloc::vec::Vec, + #[prost(bool, tag = "3")] + pub is_reset: bool, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct AlterTableMoveAllStmt { + #[prost(string, tag = "1")] + pub orig_tablespacename: ::prost::alloc::string::String, + #[prost(enumeration = "ObjectType", tag = "2")] + pub objtype: i32, + #[prost(message, repeated, tag = "3")] + pub roles: ::prost::alloc::vec::Vec, + #[prost(string, tag = "4")] + pub new_tablespacename: ::prost::alloc::string::String, + #[prost(bool, tag = "5")] + pub nowait: bool, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct CreateExtensionStmt { + #[prost(string, tag = "1")] + pub extname: ::prost::alloc::string::String, + #[prost(bool, tag = "2")] + pub if_not_exists: bool, + #[prost(message, repeated, tag = "3")] + pub options: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct AlterExtensionStmt { + #[prost(string, tag = "1")] + pub extname: 
::prost::alloc::string::String, + #[prost(message, repeated, tag = "2")] + pub options: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct AlterExtensionContentsStmt { + #[prost(string, tag = "1")] + pub extname: ::prost::alloc::string::String, + #[prost(int32, tag = "2")] + pub action: i32, + #[prost(enumeration = "ObjectType", tag = "3")] + pub objtype: i32, + #[prost(message, optional, boxed, tag = "4")] + pub object: ::core::option::Option<::prost::alloc::boxed::Box>, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct CreateFdwStmt { + #[prost(string, tag = "1")] + pub fdwname: ::prost::alloc::string::String, + #[prost(message, repeated, tag = "2")] + pub func_options: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "3")] + pub options: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct AlterFdwStmt { + #[prost(string, tag = "1")] + pub fdwname: ::prost::alloc::string::String, + #[prost(message, repeated, tag = "2")] + pub func_options: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "3")] + pub options: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct CreateForeignServerStmt { + #[prost(string, tag = "1")] + pub servername: ::prost::alloc::string::String, + #[prost(string, tag = "2")] + pub servertype: ::prost::alloc::string::String, + #[prost(string, tag = "3")] + pub version: ::prost::alloc::string::String, + #[prost(string, tag = "4")] + pub fdwname: ::prost::alloc::string::String, + #[prost(bool, tag = "5")] + pub if_not_exists: bool, + #[prost(message, repeated, tag = "6")] + pub options: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct AlterForeignServerStmt { + #[prost(string, tag = "1")] + pub servername: ::prost::alloc::string::String, + #[prost(string, tag = "2")] + pub version: ::prost::alloc::string::String, + #[prost(message, repeated, tag = "3")] + pub options: 
::prost::alloc::vec::Vec, + #[prost(bool, tag = "4")] + pub has_version: bool, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct CreateForeignTableStmt { + #[prost(message, optional, tag = "1")] + pub base_stmt: ::core::option::Option, + #[prost(string, tag = "2")] + pub servername: ::prost::alloc::string::String, + #[prost(message, repeated, tag = "3")] + pub options: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct CreateUserMappingStmt { + #[prost(message, optional, tag = "1")] + pub user: ::core::option::Option, + #[prost(string, tag = "2")] + pub servername: ::prost::alloc::string::String, + #[prost(bool, tag = "3")] + pub if_not_exists: bool, + #[prost(message, repeated, tag = "4")] + pub options: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct AlterUserMappingStmt { + #[prost(message, optional, tag = "1")] + pub user: ::core::option::Option, + #[prost(string, tag = "2")] + pub servername: ::prost::alloc::string::String, + #[prost(message, repeated, tag = "3")] + pub options: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct DropUserMappingStmt { + #[prost(message, optional, tag = "1")] + pub user: ::core::option::Option, + #[prost(string, tag = "2")] + pub servername: ::prost::alloc::string::String, + #[prost(bool, tag = "3")] + pub missing_ok: bool, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct ImportForeignSchemaStmt { + #[prost(string, tag = "1")] + pub server_name: ::prost::alloc::string::String, + #[prost(string, tag = "2")] + pub remote_schema: ::prost::alloc::string::String, + #[prost(string, tag = "3")] + pub local_schema: ::prost::alloc::string::String, + #[prost(enumeration = "ImportForeignSchemaType", tag = "4")] + pub list_type: i32, + #[prost(message, repeated, tag = "5")] + pub table_list: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "6")] + pub options: ::prost::alloc::vec::Vec, +} 
+#[derive(Clone, PartialEq, ::prost::Message)] +pub struct CreatePolicyStmt { + #[prost(string, tag = "1")] + pub policy_name: ::prost::alloc::string::String, + #[prost(message, optional, tag = "2")] + pub table: ::core::option::Option, + #[prost(string, tag = "3")] + pub cmd_name: ::prost::alloc::string::String, + #[prost(bool, tag = "4")] + pub permissive: bool, + #[prost(message, repeated, tag = "5")] + pub roles: ::prost::alloc::vec::Vec, + #[prost(message, optional, boxed, tag = "6")] + pub qual: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, boxed, tag = "7")] + pub with_check: ::core::option::Option<::prost::alloc::boxed::Box>, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct AlterPolicyStmt { + #[prost(string, tag = "1")] + pub policy_name: ::prost::alloc::string::String, + #[prost(message, optional, tag = "2")] + pub table: ::core::option::Option, + #[prost(message, repeated, tag = "3")] + pub roles: ::prost::alloc::vec::Vec, + #[prost(message, optional, boxed, tag = "4")] + pub qual: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, boxed, tag = "5")] + pub with_check: ::core::option::Option<::prost::alloc::boxed::Box>, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct CreateAmStmt { + #[prost(string, tag = "1")] + pub amname: ::prost::alloc::string::String, + #[prost(message, repeated, tag = "2")] + pub handler_name: ::prost::alloc::vec::Vec, + #[prost(string, tag = "3")] + pub amtype: ::prost::alloc::string::String, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct CreateTrigStmt { + #[prost(bool, tag = "1")] + pub replace: bool, + #[prost(bool, tag = "2")] + pub isconstraint: bool, + #[prost(string, tag = "3")] + pub trigname: ::prost::alloc::string::String, + #[prost(message, optional, tag = "4")] + pub relation: ::core::option::Option, + #[prost(message, repeated, tag = "5")] + pub funcname: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag 
= "6")] + pub args: ::prost::alloc::vec::Vec, + #[prost(bool, tag = "7")] + pub row: bool, + #[prost(int32, tag = "8")] + pub timing: i32, + #[prost(int32, tag = "9")] + pub events: i32, + #[prost(message, repeated, tag = "10")] + pub columns: ::prost::alloc::vec::Vec, + #[prost(message, optional, boxed, tag = "11")] + pub when_clause: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, repeated, tag = "12")] + pub transition_rels: ::prost::alloc::vec::Vec, + #[prost(bool, tag = "13")] + pub deferrable: bool, + #[prost(bool, tag = "14")] + pub initdeferred: bool, + #[prost(message, optional, tag = "15")] + pub constrrel: ::core::option::Option, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct CreateEventTrigStmt { + #[prost(string, tag = "1")] + pub trigname: ::prost::alloc::string::String, + #[prost(string, tag = "2")] + pub eventname: ::prost::alloc::string::String, + #[prost(message, repeated, tag = "3")] + pub whenclause: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "4")] + pub funcname: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct AlterEventTrigStmt { + #[prost(string, tag = "1")] + pub trigname: ::prost::alloc::string::String, + #[prost(string, tag = "2")] + pub tgenabled: ::prost::alloc::string::String, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct CreatePLangStmt { + #[prost(bool, tag = "1")] + pub replace: bool, + #[prost(string, tag = "2")] + pub plname: ::prost::alloc::string::String, + #[prost(message, repeated, tag = "3")] + pub plhandler: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "4")] + pub plinline: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "5")] + pub plvalidator: ::prost::alloc::vec::Vec, + #[prost(bool, tag = "6")] + pub pltrusted: bool, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct CreateRoleStmt { + #[prost(enumeration = "RoleStmtType", tag = "1")] + pub stmt_type: i32, + 
#[prost(string, tag = "2")] + pub role: ::prost::alloc::string::String, + #[prost(message, repeated, tag = "3")] + pub options: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct AlterRoleStmt { + #[prost(message, optional, tag = "1")] + pub role: ::core::option::Option, + #[prost(message, repeated, tag = "2")] + pub options: ::prost::alloc::vec::Vec, + #[prost(int32, tag = "3")] + pub action: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct AlterRoleSetStmt { + #[prost(message, optional, tag = "1")] + pub role: ::core::option::Option, + #[prost(string, tag = "2")] + pub database: ::prost::alloc::string::String, + #[prost(message, optional, tag = "3")] + pub setstmt: ::core::option::Option, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct DropRoleStmt { + #[prost(message, repeated, tag = "1")] + pub roles: ::prost::alloc::vec::Vec, + #[prost(bool, tag = "2")] + pub missing_ok: bool, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct CreateSeqStmt { + #[prost(message, optional, tag = "1")] + pub sequence: ::core::option::Option, + #[prost(message, repeated, tag = "2")] + pub options: ::prost::alloc::vec::Vec, + #[prost(uint32, tag = "3")] + pub owner_id: u32, + #[prost(bool, tag = "4")] + pub for_identity: bool, + #[prost(bool, tag = "5")] + pub if_not_exists: bool, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct AlterSeqStmt { + #[prost(message, optional, tag = "1")] + pub sequence: ::core::option::Option, + #[prost(message, repeated, tag = "2")] + pub options: ::prost::alloc::vec::Vec, + #[prost(bool, tag = "3")] + pub for_identity: bool, + #[prost(bool, tag = "4")] + pub missing_ok: bool, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct DefineStmt { + #[prost(enumeration = "ObjectType", tag = "1")] + pub kind: i32, + #[prost(bool, tag = "2")] + pub oldstyle: bool, + #[prost(message, repeated, tag = "3")] + pub defnames: ::prost::alloc::vec::Vec, + 
#[prost(message, repeated, tag = "4")] + pub args: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "5")] + pub definition: ::prost::alloc::vec::Vec, + #[prost(bool, tag = "6")] + pub if_not_exists: bool, + #[prost(bool, tag = "7")] + pub replace: bool, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct CreateDomainStmt { + #[prost(message, repeated, tag = "1")] + pub domainname: ::prost::alloc::vec::Vec, + #[prost(message, optional, tag = "2")] + pub type_name: ::core::option::Option, + #[prost(message, optional, boxed, tag = "3")] + pub coll_clause: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, repeated, tag = "4")] + pub constraints: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct CreateOpClassStmt { + #[prost(message, repeated, tag = "1")] + pub opclassname: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "2")] + pub opfamilyname: ::prost::alloc::vec::Vec, + #[prost(string, tag = "3")] + pub amname: ::prost::alloc::string::String, + #[prost(message, optional, tag = "4")] + pub datatype: ::core::option::Option, + #[prost(message, repeated, tag = "5")] + pub items: ::prost::alloc::vec::Vec, + #[prost(bool, tag = "6")] + pub is_default: bool, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct CreateOpClassItem { + #[prost(int32, tag = "1")] + pub itemtype: i32, + #[prost(message, optional, tag = "2")] + pub name: ::core::option::Option, + #[prost(int32, tag = "3")] + pub number: i32, + #[prost(message, repeated, tag = "4")] + pub order_family: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "5")] + pub class_args: ::prost::alloc::vec::Vec, + #[prost(message, optional, tag = "6")] + pub storedtype: ::core::option::Option, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct CreateOpFamilyStmt { + #[prost(message, repeated, tag = "1")] + pub opfamilyname: ::prost::alloc::vec::Vec, + #[prost(string, tag = "2")] + pub amname: 
::prost::alloc::string::String, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct AlterOpFamilyStmt { + #[prost(message, repeated, tag = "1")] + pub opfamilyname: ::prost::alloc::vec::Vec, + #[prost(string, tag = "2")] + pub amname: ::prost::alloc::string::String, + #[prost(bool, tag = "3")] + pub is_drop: bool, + #[prost(message, repeated, tag = "4")] + pub items: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct DropStmt { + #[prost(message, repeated, tag = "1")] + pub objects: ::prost::alloc::vec::Vec, + #[prost(enumeration = "ObjectType", tag = "2")] + pub remove_type: i32, + #[prost(enumeration = "DropBehavior", tag = "3")] + pub behavior: i32, + #[prost(bool, tag = "4")] + pub missing_ok: bool, + #[prost(bool, tag = "5")] + pub concurrent: bool, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct TruncateStmt { + #[prost(message, repeated, tag = "1")] + pub relations: ::prost::alloc::vec::Vec, + #[prost(bool, tag = "2")] + pub restart_seqs: bool, + #[prost(enumeration = "DropBehavior", tag = "3")] + pub behavior: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct CommentStmt { + #[prost(enumeration = "ObjectType", tag = "1")] + pub objtype: i32, + #[prost(message, optional, boxed, tag = "2")] + pub object: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(string, tag = "3")] + pub comment: ::prost::alloc::string::String, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct SecLabelStmt { + #[prost(enumeration = "ObjectType", tag = "1")] + pub objtype: i32, + #[prost(message, optional, boxed, tag = "2")] + pub object: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(string, tag = "3")] + pub provider: ::prost::alloc::string::String, + #[prost(string, tag = "4")] + pub label: ::prost::alloc::string::String, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct DeclareCursorStmt { + #[prost(string, tag = "1")] + pub portalname: 
::prost::alloc::string::String, + #[prost(int32, tag = "2")] + pub options: i32, + #[prost(message, optional, boxed, tag = "3")] + pub query: ::core::option::Option<::prost::alloc::boxed::Box>, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct ClosePortalStmt { + #[prost(string, tag = "1")] + pub portalname: ::prost::alloc::string::String, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct FetchStmt { + #[prost(enumeration = "FetchDirection", tag = "1")] + pub direction: i32, + #[prost(int64, tag = "2")] + pub how_many: i64, + #[prost(string, tag = "3")] + pub portalname: ::prost::alloc::string::String, + #[prost(bool, tag = "4")] + pub ismove: bool, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct IndexStmt { + #[prost(string, tag = "1")] + pub idxname: ::prost::alloc::string::String, + #[prost(message, optional, tag = "2")] + pub relation: ::core::option::Option, + #[prost(string, tag = "3")] + pub access_method: ::prost::alloc::string::String, + #[prost(string, tag = "4")] + pub table_space: ::prost::alloc::string::String, + #[prost(message, repeated, tag = "5")] + pub index_params: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "6")] + pub index_including_params: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "7")] + pub options: ::prost::alloc::vec::Vec, + #[prost(message, optional, boxed, tag = "8")] + pub where_clause: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, repeated, tag = "9")] + pub exclude_op_names: ::prost::alloc::vec::Vec, + #[prost(string, tag = "10")] + pub idxcomment: ::prost::alloc::string::String, + #[prost(uint32, tag = "11")] + pub index_oid: u32, + #[prost(uint32, tag = "12")] + pub old_number: u32, + #[prost(uint32, tag = "13")] + pub old_create_subid: u32, + #[prost(uint32, tag = "14")] + pub old_first_relfilelocator_subid: u32, + #[prost(bool, tag = "15")] + pub unique: bool, + #[prost(bool, tag = "16")] + pub nulls_not_distinct: bool, + 
#[prost(bool, tag = "17")] + pub primary: bool, + #[prost(bool, tag = "18")] + pub isconstraint: bool, + #[prost(bool, tag = "19")] + pub deferrable: bool, + #[prost(bool, tag = "20")] + pub initdeferred: bool, + #[prost(bool, tag = "21")] + pub transformed: bool, + #[prost(bool, tag = "22")] + pub concurrent: bool, + #[prost(bool, tag = "23")] + pub if_not_exists: bool, + #[prost(bool, tag = "24")] + pub reset_default_tblspc: bool, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct CreateStatsStmt { + #[prost(message, repeated, tag = "1")] + pub defnames: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "2")] + pub stat_types: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "3")] + pub exprs: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "4")] + pub relations: ::prost::alloc::vec::Vec, + #[prost(string, tag = "5")] + pub stxcomment: ::prost::alloc::string::String, + #[prost(bool, tag = "6")] + pub transformed: bool, + #[prost(bool, tag = "7")] + pub if_not_exists: bool, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct StatsElem { + #[prost(string, tag = "1")] + pub name: ::prost::alloc::string::String, + #[prost(message, optional, boxed, tag = "2")] + pub expr: ::core::option::Option<::prost::alloc::boxed::Box>, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct AlterStatsStmt { + #[prost(message, repeated, tag = "1")] + pub defnames: ::prost::alloc::vec::Vec, + #[prost(message, optional, boxed, tag = "2")] + pub stxstattarget: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(bool, tag = "3")] + pub missing_ok: bool, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct CreateFunctionStmt { + #[prost(bool, tag = "1")] + pub is_procedure: bool, + #[prost(bool, tag = "2")] + pub replace: bool, + #[prost(message, repeated, tag = "3")] + pub funcname: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "4")] + pub parameters: ::prost::alloc::vec::Vec, + 
#[prost(message, optional, tag = "5")] + pub return_type: ::core::option::Option, + #[prost(message, repeated, tag = "6")] + pub options: ::prost::alloc::vec::Vec, + #[prost(message, optional, boxed, tag = "7")] + pub sql_body: ::core::option::Option<::prost::alloc::boxed::Box>, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct FunctionParameter { + #[prost(string, tag = "1")] + pub name: ::prost::alloc::string::String, + #[prost(message, optional, tag = "2")] + pub arg_type: ::core::option::Option, + #[prost(enumeration = "FunctionParameterMode", tag = "3")] + pub mode: i32, + #[prost(message, optional, boxed, tag = "4")] + pub defexpr: ::core::option::Option<::prost::alloc::boxed::Box>, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct AlterFunctionStmt { + #[prost(enumeration = "ObjectType", tag = "1")] + pub objtype: i32, + #[prost(message, optional, tag = "2")] + pub func: ::core::option::Option, + #[prost(message, repeated, tag = "3")] + pub actions: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct DoStmt { + #[prost(message, repeated, tag = "1")] + pub args: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct InlineCodeBlock { + #[prost(string, tag = "1")] + pub source_text: ::prost::alloc::string::String, + #[prost(uint32, tag = "2")] + pub lang_oid: u32, + #[prost(bool, tag = "3")] + pub lang_is_trusted: bool, + #[prost(bool, tag = "4")] + pub atomic: bool, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct CallStmt { + #[prost(message, optional, boxed, tag = "1")] + pub funccall: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, boxed, tag = "2")] + pub funcexpr: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, repeated, tag = "3")] + pub outargs: ::prost::alloc::vec::Vec, +} +#[derive(Clone, Copy, PartialEq, ::prost::Message)] +pub struct CallContext { + #[prost(bool, tag = "1")] + pub atomic: 
bool, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct RenameStmt { + #[prost(enumeration = "ObjectType", tag = "1")] + pub rename_type: i32, + #[prost(enumeration = "ObjectType", tag = "2")] + pub relation_type: i32, + #[prost(message, optional, tag = "3")] + pub relation: ::core::option::Option, + #[prost(message, optional, boxed, tag = "4")] + pub object: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(string, tag = "5")] + pub subname: ::prost::alloc::string::String, + #[prost(string, tag = "6")] + pub newname: ::prost::alloc::string::String, + #[prost(enumeration = "DropBehavior", tag = "7")] + pub behavior: i32, + #[prost(bool, tag = "8")] + pub missing_ok: bool, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct AlterObjectDependsStmt { + #[prost(enumeration = "ObjectType", tag = "1")] + pub object_type: i32, + #[prost(message, optional, tag = "2")] + pub relation: ::core::option::Option, + #[prost(message, optional, boxed, tag = "3")] + pub object: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, tag = "4")] + pub extname: ::core::option::Option, + #[prost(bool, tag = "5")] + pub remove: bool, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct AlterObjectSchemaStmt { + #[prost(enumeration = "ObjectType", tag = "1")] + pub object_type: i32, + #[prost(message, optional, tag = "2")] + pub relation: ::core::option::Option, + #[prost(message, optional, boxed, tag = "3")] + pub object: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(string, tag = "4")] + pub newschema: ::prost::alloc::string::String, + #[prost(bool, tag = "5")] + pub missing_ok: bool, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct AlterOwnerStmt { + #[prost(enumeration = "ObjectType", tag = "1")] + pub object_type: i32, + #[prost(message, optional, tag = "2")] + pub relation: ::core::option::Option, + #[prost(message, optional, boxed, tag = "3")] + pub object: 
::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, tag = "4")] + pub newowner: ::core::option::Option, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct AlterOperatorStmt { + #[prost(message, optional, tag = "1")] + pub opername: ::core::option::Option, + #[prost(message, repeated, tag = "2")] + pub options: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct AlterTypeStmt { + #[prost(message, repeated, tag = "1")] + pub type_name: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "2")] + pub options: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct RuleStmt { + #[prost(message, optional, tag = "1")] + pub relation: ::core::option::Option, + #[prost(string, tag = "2")] + pub rulename: ::prost::alloc::string::String, + #[prost(message, optional, boxed, tag = "3")] + pub where_clause: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(enumeration = "CmdType", tag = "4")] + pub event: i32, + #[prost(bool, tag = "5")] + pub instead: bool, + #[prost(message, repeated, tag = "6")] + pub actions: ::prost::alloc::vec::Vec, + #[prost(bool, tag = "7")] + pub replace: bool, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct NotifyStmt { + #[prost(string, tag = "1")] + pub conditionname: ::prost::alloc::string::String, + #[prost(string, tag = "2")] + pub payload: ::prost::alloc::string::String, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct ListenStmt { + #[prost(string, tag = "1")] + pub conditionname: ::prost::alloc::string::String, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct UnlistenStmt { + #[prost(string, tag = "1")] + pub conditionname: ::prost::alloc::string::String, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct TransactionStmt { + #[prost(enumeration = "TransactionStmtKind", tag = "1")] + pub kind: i32, + #[prost(message, repeated, tag = "2")] + pub options: 
::prost::alloc::vec::Vec, + #[prost(string, tag = "3")] + pub savepoint_name: ::prost::alloc::string::String, + #[prost(string, tag = "4")] + pub gid: ::prost::alloc::string::String, + #[prost(bool, tag = "5")] + pub chain: bool, + #[prost(int32, tag = "6")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct CompositeTypeStmt { + #[prost(message, optional, tag = "1")] + pub typevar: ::core::option::Option, + #[prost(message, repeated, tag = "2")] + pub coldeflist: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct CreateEnumStmt { + #[prost(message, repeated, tag = "1")] + pub type_name: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "2")] + pub vals: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct CreateRangeStmt { + #[prost(message, repeated, tag = "1")] + pub type_name: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "2")] + pub params: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct AlterEnumStmt { + #[prost(message, repeated, tag = "1")] + pub type_name: ::prost::alloc::vec::Vec, + #[prost(string, tag = "2")] + pub old_val: ::prost::alloc::string::String, + #[prost(string, tag = "3")] + pub new_val: ::prost::alloc::string::String, + #[prost(string, tag = "4")] + pub new_val_neighbor: ::prost::alloc::string::String, + #[prost(bool, tag = "5")] + pub new_val_is_after: bool, + #[prost(bool, tag = "6")] + pub skip_if_new_val_exists: bool, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct ViewStmt { + #[prost(message, optional, tag = "1")] + pub view: ::core::option::Option, + #[prost(message, repeated, tag = "2")] + pub aliases: ::prost::alloc::vec::Vec, + #[prost(message, optional, boxed, tag = "3")] + pub query: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(bool, tag = "4")] + pub replace: bool, + #[prost(message, repeated, tag = "5")] + pub options: 
::prost::alloc::vec::Vec, + #[prost(enumeration = "ViewCheckOption", tag = "6")] + pub with_check_option: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct LoadStmt { + #[prost(string, tag = "1")] + pub filename: ::prost::alloc::string::String, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct CreatedbStmt { + #[prost(string, tag = "1")] + pub dbname: ::prost::alloc::string::String, + #[prost(message, repeated, tag = "2")] + pub options: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct AlterDatabaseStmt { + #[prost(string, tag = "1")] + pub dbname: ::prost::alloc::string::String, + #[prost(message, repeated, tag = "2")] + pub options: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct AlterDatabaseRefreshCollStmt { + #[prost(string, tag = "1")] + pub dbname: ::prost::alloc::string::String, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct AlterDatabaseSetStmt { + #[prost(string, tag = "1")] + pub dbname: ::prost::alloc::string::String, + #[prost(message, optional, tag = "2")] + pub setstmt: ::core::option::Option, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct DropdbStmt { + #[prost(string, tag = "1")] + pub dbname: ::prost::alloc::string::String, + #[prost(bool, tag = "2")] + pub missing_ok: bool, + #[prost(message, repeated, tag = "3")] + pub options: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct AlterSystemStmt { + #[prost(message, optional, tag = "1")] + pub setstmt: ::core::option::Option, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct ClusterStmt { + #[prost(message, optional, tag = "1")] + pub relation: ::core::option::Option, + #[prost(string, tag = "2")] + pub indexname: ::prost::alloc::string::String, + #[prost(message, repeated, tag = "3")] + pub params: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct VacuumStmt { + 
#[prost(message, repeated, tag = "1")] + pub options: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "2")] + pub rels: ::prost::alloc::vec::Vec, + #[prost(bool, tag = "3")] + pub is_vacuumcmd: bool, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct VacuumRelation { + #[prost(message, optional, tag = "1")] + pub relation: ::core::option::Option, + #[prost(uint32, tag = "2")] + pub oid: u32, + #[prost(message, repeated, tag = "3")] + pub va_cols: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct ExplainStmt { + #[prost(message, optional, boxed, tag = "1")] + pub query: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, repeated, tag = "2")] + pub options: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct CreateTableAsStmt { + #[prost(message, optional, boxed, tag = "1")] + pub query: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, optional, boxed, tag = "2")] + pub into: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(enumeration = "ObjectType", tag = "3")] + pub objtype: i32, + #[prost(bool, tag = "4")] + pub is_select_into: bool, + #[prost(bool, tag = "5")] + pub if_not_exists: bool, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct RefreshMatViewStmt { + #[prost(bool, tag = "1")] + pub concurrent: bool, + #[prost(bool, tag = "2")] + pub skip_data: bool, + #[prost(message, optional, tag = "3")] + pub relation: ::core::option::Option, +} +#[derive(Clone, Copy, PartialEq, ::prost::Message)] +pub struct CheckPointStmt {} +#[derive(Clone, Copy, PartialEq, ::prost::Message)] +pub struct DiscardStmt { + #[prost(enumeration = "DiscardMode", tag = "1")] + pub target: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct LockStmt { + #[prost(message, repeated, tag = "1")] + pub relations: ::prost::alloc::vec::Vec, + #[prost(int32, tag = "2")] + pub mode: i32, + #[prost(bool, tag = "3")] + pub 
nowait: bool, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct ConstraintsSetStmt { + #[prost(message, repeated, tag = "1")] + pub constraints: ::prost::alloc::vec::Vec, + #[prost(bool, tag = "2")] + pub deferred: bool, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct ReindexStmt { + #[prost(enumeration = "ReindexObjectType", tag = "1")] + pub kind: i32, + #[prost(message, optional, tag = "2")] + pub relation: ::core::option::Option, + #[prost(string, tag = "3")] + pub name: ::prost::alloc::string::String, + #[prost(message, repeated, tag = "4")] + pub params: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct CreateConversionStmt { + #[prost(message, repeated, tag = "1")] + pub conversion_name: ::prost::alloc::vec::Vec, + #[prost(string, tag = "2")] + pub for_encoding_name: ::prost::alloc::string::String, + #[prost(string, tag = "3")] + pub to_encoding_name: ::prost::alloc::string::String, + #[prost(message, repeated, tag = "4")] + pub func_name: ::prost::alloc::vec::Vec, + #[prost(bool, tag = "5")] + pub def: bool, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct CreateCastStmt { + #[prost(message, optional, tag = "1")] + pub sourcetype: ::core::option::Option, + #[prost(message, optional, tag = "2")] + pub targettype: ::core::option::Option, + #[prost(message, optional, tag = "3")] + pub func: ::core::option::Option, + #[prost(enumeration = "CoercionContext", tag = "4")] + pub context: i32, + #[prost(bool, tag = "5")] + pub inout: bool, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct CreateTransformStmt { + #[prost(bool, tag = "1")] + pub replace: bool, + #[prost(message, optional, tag = "2")] + pub type_name: ::core::option::Option, + #[prost(string, tag = "3")] + pub lang: ::prost::alloc::string::String, + #[prost(message, optional, tag = "4")] + pub fromsql: ::core::option::Option, + #[prost(message, optional, tag = "5")] + pub tosql: ::core::option::Option, +} 
+#[derive(Clone, PartialEq, ::prost::Message)] +pub struct PrepareStmt { + #[prost(string, tag = "1")] + pub name: ::prost::alloc::string::String, + #[prost(message, repeated, tag = "2")] + pub argtypes: ::prost::alloc::vec::Vec, + #[prost(message, optional, boxed, tag = "3")] + pub query: ::core::option::Option<::prost::alloc::boxed::Box>, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct ExecuteStmt { + #[prost(string, tag = "1")] + pub name: ::prost::alloc::string::String, + #[prost(message, repeated, tag = "2")] + pub params: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct DeallocateStmt { + #[prost(string, tag = "1")] + pub name: ::prost::alloc::string::String, + #[prost(bool, tag = "2")] + pub isall: bool, + #[prost(int32, tag = "3")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct DropOwnedStmt { + #[prost(message, repeated, tag = "1")] + pub roles: ::prost::alloc::vec::Vec, + #[prost(enumeration = "DropBehavior", tag = "2")] + pub behavior: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct ReassignOwnedStmt { + #[prost(message, repeated, tag = "1")] + pub roles: ::prost::alloc::vec::Vec, + #[prost(message, optional, tag = "2")] + pub newrole: ::core::option::Option, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct AlterTsDictionaryStmt { + #[prost(message, repeated, tag = "1")] + pub dictname: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "2")] + pub options: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct AlterTsConfigurationStmt { + #[prost(enumeration = "AlterTsConfigType", tag = "1")] + pub kind: i32, + #[prost(message, repeated, tag = "2")] + pub cfgname: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "3")] + pub tokentype: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "4")] + pub dicts: ::prost::alloc::vec::Vec, + #[prost(bool, tag = "5")] + pub 
r#override: bool, + #[prost(bool, tag = "6")] + pub replace: bool, + #[prost(bool, tag = "7")] + pub missing_ok: bool, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct PublicationTable { + #[prost(message, optional, tag = "1")] + pub relation: ::core::option::Option, + #[prost(message, optional, boxed, tag = "2")] + pub where_clause: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(message, repeated, tag = "3")] + pub columns: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct PublicationObjSpec { + #[prost(enumeration = "PublicationObjSpecType", tag = "1")] + pub pubobjtype: i32, + #[prost(string, tag = "2")] + pub name: ::prost::alloc::string::String, + #[prost(message, optional, boxed, tag = "3")] + pub pubtable: ::core::option::Option<::prost::alloc::boxed::Box>, + #[prost(int32, tag = "4")] + pub location: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct CreatePublicationStmt { + #[prost(string, tag = "1")] + pub pubname: ::prost::alloc::string::String, + #[prost(message, repeated, tag = "2")] + pub options: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "3")] + pub pubobjects: ::prost::alloc::vec::Vec, + #[prost(bool, tag = "4")] + pub for_all_tables: bool, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct AlterPublicationStmt { + #[prost(string, tag = "1")] + pub pubname: ::prost::alloc::string::String, + #[prost(message, repeated, tag = "2")] + pub options: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "3")] + pub pubobjects: ::prost::alloc::vec::Vec, + #[prost(bool, tag = "4")] + pub for_all_tables: bool, + #[prost(enumeration = "AlterPublicationAction", tag = "5")] + pub action: i32, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct CreateSubscriptionStmt { + #[prost(string, tag = "1")] + pub subname: ::prost::alloc::string::String, + #[prost(string, tag = "2")] + pub conninfo: ::prost::alloc::string::String, + 
#[prost(message, repeated, tag = "3")] + pub publication: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "4")] + pub options: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct AlterSubscriptionStmt { + #[prost(enumeration = "AlterSubscriptionType", tag = "1")] + pub kind: i32, + #[prost(string, tag = "2")] + pub subname: ::prost::alloc::string::String, + #[prost(string, tag = "3")] + pub conninfo: ::prost::alloc::string::String, + #[prost(message, repeated, tag = "4")] + pub publication: ::prost::alloc::vec::Vec, + #[prost(message, repeated, tag = "5")] + pub options: ::prost::alloc::vec::Vec, +} +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct DropSubscriptionStmt { + #[prost(string, tag = "1")] + pub subname: ::prost::alloc::string::String, + #[prost(bool, tag = "2")] + pub missing_ok: bool, + #[prost(enumeration = "DropBehavior", tag = "3")] + pub behavior: i32, +} +#[derive(Clone, Copy, PartialEq, ::prost::Message)] +pub struct ScanToken { + #[prost(int32, tag = "1")] + pub start: i32, + #[prost(int32, tag = "2")] + pub end: i32, + #[prost(enumeration = "Token", tag = "4")] + pub token: i32, + #[prost(enumeration = "KeywordKind", tag = "5")] + pub keyword_kind: i32, +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum QuerySource { + Undefined = 0, + QsrcOriginal = 1, + QsrcParser = 2, + QsrcInsteadRule = 3, + QsrcQualInsteadRule = 4, + QsrcNonInsteadRule = 5, +} +impl QuerySource { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. 
+ pub fn as_str_name(&self) -> &'static str { + match self { + Self::Undefined => "QUERY_SOURCE_UNDEFINED", + Self::QsrcOriginal => "QSRC_ORIGINAL", + Self::QsrcParser => "QSRC_PARSER", + Self::QsrcInsteadRule => "QSRC_INSTEAD_RULE", + Self::QsrcQualInsteadRule => "QSRC_QUAL_INSTEAD_RULE", + Self::QsrcNonInsteadRule => "QSRC_NON_INSTEAD_RULE", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "QUERY_SOURCE_UNDEFINED" => Some(Self::Undefined), + "QSRC_ORIGINAL" => Some(Self::QsrcOriginal), + "QSRC_PARSER" => Some(Self::QsrcParser), + "QSRC_INSTEAD_RULE" => Some(Self::QsrcInsteadRule), + "QSRC_QUAL_INSTEAD_RULE" => Some(Self::QsrcQualInsteadRule), + "QSRC_NON_INSTEAD_RULE" => Some(Self::QsrcNonInsteadRule), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum SortByDir { + Undefined = 0, + SortbyDefault = 1, + SortbyAsc = 2, + SortbyDesc = 3, + SortbyUsing = 4, +} +impl SortByDir { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Self::Undefined => "SORT_BY_DIR_UNDEFINED", + Self::SortbyDefault => "SORTBY_DEFAULT", + Self::SortbyAsc => "SORTBY_ASC", + Self::SortbyDesc => "SORTBY_DESC", + Self::SortbyUsing => "SORTBY_USING", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. 
+ pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "SORT_BY_DIR_UNDEFINED" => Some(Self::Undefined), + "SORTBY_DEFAULT" => Some(Self::SortbyDefault), + "SORTBY_ASC" => Some(Self::SortbyAsc), + "SORTBY_DESC" => Some(Self::SortbyDesc), + "SORTBY_USING" => Some(Self::SortbyUsing), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum SortByNulls { + Undefined = 0, + SortbyNullsDefault = 1, + SortbyNullsFirst = 2, + SortbyNullsLast = 3, +} +impl SortByNulls { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Self::Undefined => "SORT_BY_NULLS_UNDEFINED", + Self::SortbyNullsDefault => "SORTBY_NULLS_DEFAULT", + Self::SortbyNullsFirst => "SORTBY_NULLS_FIRST", + Self::SortbyNullsLast => "SORTBY_NULLS_LAST", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "SORT_BY_NULLS_UNDEFINED" => Some(Self::Undefined), + "SORTBY_NULLS_DEFAULT" => Some(Self::SortbyNullsDefault), + "SORTBY_NULLS_FIRST" => Some(Self::SortbyNullsFirst), + "SORTBY_NULLS_LAST" => Some(Self::SortbyNullsLast), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum SetQuantifier { + Undefined = 0, + Default = 1, + All = 2, + Distinct = 3, +} +impl SetQuantifier { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. 
+ pub fn as_str_name(&self) -> &'static str { + match self { + Self::Undefined => "SET_QUANTIFIER_UNDEFINED", + Self::Default => "SET_QUANTIFIER_DEFAULT", + Self::All => "SET_QUANTIFIER_ALL", + Self::Distinct => "SET_QUANTIFIER_DISTINCT", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "SET_QUANTIFIER_UNDEFINED" => Some(Self::Undefined), + "SET_QUANTIFIER_DEFAULT" => Some(Self::Default), + "SET_QUANTIFIER_ALL" => Some(Self::All), + "SET_QUANTIFIER_DISTINCT" => Some(Self::Distinct), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum AExprKind { + Undefined = 0, + AexprOp = 1, + AexprOpAny = 2, + AexprOpAll = 3, + AexprDistinct = 4, + AexprNotDistinct = 5, + AexprNullif = 6, + AexprIn = 7, + AexprLike = 8, + AexprIlike = 9, + AexprSimilar = 10, + AexprBetween = 11, + AexprNotBetween = 12, + AexprBetweenSym = 13, + AexprNotBetweenSym = 14, +} +impl AExprKind { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. 
+ pub fn as_str_name(&self) -> &'static str { + match self { + Self::Undefined => "A_EXPR_KIND_UNDEFINED", + Self::AexprOp => "AEXPR_OP", + Self::AexprOpAny => "AEXPR_OP_ANY", + Self::AexprOpAll => "AEXPR_OP_ALL", + Self::AexprDistinct => "AEXPR_DISTINCT", + Self::AexprNotDistinct => "AEXPR_NOT_DISTINCT", + Self::AexprNullif => "AEXPR_NULLIF", + Self::AexprIn => "AEXPR_IN", + Self::AexprLike => "AEXPR_LIKE", + Self::AexprIlike => "AEXPR_ILIKE", + Self::AexprSimilar => "AEXPR_SIMILAR", + Self::AexprBetween => "AEXPR_BETWEEN", + Self::AexprNotBetween => "AEXPR_NOT_BETWEEN", + Self::AexprBetweenSym => "AEXPR_BETWEEN_SYM", + Self::AexprNotBetweenSym => "AEXPR_NOT_BETWEEN_SYM", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "A_EXPR_KIND_UNDEFINED" => Some(Self::Undefined), + "AEXPR_OP" => Some(Self::AexprOp), + "AEXPR_OP_ANY" => Some(Self::AexprOpAny), + "AEXPR_OP_ALL" => Some(Self::AexprOpAll), + "AEXPR_DISTINCT" => Some(Self::AexprDistinct), + "AEXPR_NOT_DISTINCT" => Some(Self::AexprNotDistinct), + "AEXPR_NULLIF" => Some(Self::AexprNullif), + "AEXPR_IN" => Some(Self::AexprIn), + "AEXPR_LIKE" => Some(Self::AexprLike), + "AEXPR_ILIKE" => Some(Self::AexprIlike), + "AEXPR_SIMILAR" => Some(Self::AexprSimilar), + "AEXPR_BETWEEN" => Some(Self::AexprBetween), + "AEXPR_NOT_BETWEEN" => Some(Self::AexprNotBetween), + "AEXPR_BETWEEN_SYM" => Some(Self::AexprBetweenSym), + "AEXPR_NOT_BETWEEN_SYM" => Some(Self::AexprNotBetweenSym), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum RoleSpecType { + Undefined = 0, + RolespecCstring = 1, + RolespecCurrentRole = 2, + RolespecCurrentUser = 3, + RolespecSessionUser = 4, + RolespecPublic = 5, +} +impl RoleSpecType { + /// String value of the enum field names used in the ProtoBuf definition. 
+ /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Self::Undefined => "ROLE_SPEC_TYPE_UNDEFINED", + Self::RolespecCstring => "ROLESPEC_CSTRING", + Self::RolespecCurrentRole => "ROLESPEC_CURRENT_ROLE", + Self::RolespecCurrentUser => "ROLESPEC_CURRENT_USER", + Self::RolespecSessionUser => "ROLESPEC_SESSION_USER", + Self::RolespecPublic => "ROLESPEC_PUBLIC", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "ROLE_SPEC_TYPE_UNDEFINED" => Some(Self::Undefined), + "ROLESPEC_CSTRING" => Some(Self::RolespecCstring), + "ROLESPEC_CURRENT_ROLE" => Some(Self::RolespecCurrentRole), + "ROLESPEC_CURRENT_USER" => Some(Self::RolespecCurrentUser), + "ROLESPEC_SESSION_USER" => Some(Self::RolespecSessionUser), + "ROLESPEC_PUBLIC" => Some(Self::RolespecPublic), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum TableLikeOption { + Undefined = 0, + CreateTableLikeComments = 1, + CreateTableLikeCompression = 2, + CreateTableLikeConstraints = 3, + CreateTableLikeDefaults = 4, + CreateTableLikeGenerated = 5, + CreateTableLikeIdentity = 6, + CreateTableLikeIndexes = 7, + CreateTableLikeStatistics = 8, + CreateTableLikeStorage = 9, + CreateTableLikeAll = 10, +} +impl TableLikeOption { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. 
+ pub fn as_str_name(&self) -> &'static str { + match self { + Self::Undefined => "TABLE_LIKE_OPTION_UNDEFINED", + Self::CreateTableLikeComments => "CREATE_TABLE_LIKE_COMMENTS", + Self::CreateTableLikeCompression => "CREATE_TABLE_LIKE_COMPRESSION", + Self::CreateTableLikeConstraints => "CREATE_TABLE_LIKE_CONSTRAINTS", + Self::CreateTableLikeDefaults => "CREATE_TABLE_LIKE_DEFAULTS", + Self::CreateTableLikeGenerated => "CREATE_TABLE_LIKE_GENERATED", + Self::CreateTableLikeIdentity => "CREATE_TABLE_LIKE_IDENTITY", + Self::CreateTableLikeIndexes => "CREATE_TABLE_LIKE_INDEXES", + Self::CreateTableLikeStatistics => "CREATE_TABLE_LIKE_STATISTICS", + Self::CreateTableLikeStorage => "CREATE_TABLE_LIKE_STORAGE", + Self::CreateTableLikeAll => "CREATE_TABLE_LIKE_ALL", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "TABLE_LIKE_OPTION_UNDEFINED" => Some(Self::Undefined), + "CREATE_TABLE_LIKE_COMMENTS" => Some(Self::CreateTableLikeComments), + "CREATE_TABLE_LIKE_COMPRESSION" => Some(Self::CreateTableLikeCompression), + "CREATE_TABLE_LIKE_CONSTRAINTS" => Some(Self::CreateTableLikeConstraints), + "CREATE_TABLE_LIKE_DEFAULTS" => Some(Self::CreateTableLikeDefaults), + "CREATE_TABLE_LIKE_GENERATED" => Some(Self::CreateTableLikeGenerated), + "CREATE_TABLE_LIKE_IDENTITY" => Some(Self::CreateTableLikeIdentity), + "CREATE_TABLE_LIKE_INDEXES" => Some(Self::CreateTableLikeIndexes), + "CREATE_TABLE_LIKE_STATISTICS" => Some(Self::CreateTableLikeStatistics), + "CREATE_TABLE_LIKE_STORAGE" => Some(Self::CreateTableLikeStorage), + "CREATE_TABLE_LIKE_ALL" => Some(Self::CreateTableLikeAll), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum DefElemAction { + Undefined = 0, + DefelemUnspec = 1, + DefelemSet = 2, + DefelemAdd = 3, + DefelemDrop = 4, +} +impl DefElemAction { + /// String value of the 
enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Self::Undefined => "DEF_ELEM_ACTION_UNDEFINED", + Self::DefelemUnspec => "DEFELEM_UNSPEC", + Self::DefelemSet => "DEFELEM_SET", + Self::DefelemAdd => "DEFELEM_ADD", + Self::DefelemDrop => "DEFELEM_DROP", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "DEF_ELEM_ACTION_UNDEFINED" => Some(Self::Undefined), + "DEFELEM_UNSPEC" => Some(Self::DefelemUnspec), + "DEFELEM_SET" => Some(Self::DefelemSet), + "DEFELEM_ADD" => Some(Self::DefelemAdd), + "DEFELEM_DROP" => Some(Self::DefelemDrop), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum PartitionStrategy { + Undefined = 0, + List = 1, + Range = 2, + Hash = 3, +} +impl PartitionStrategy { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Self::Undefined => "PARTITION_STRATEGY_UNDEFINED", + Self::List => "PARTITION_STRATEGY_LIST", + Self::Range => "PARTITION_STRATEGY_RANGE", + Self::Hash => "PARTITION_STRATEGY_HASH", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. 
+ pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "PARTITION_STRATEGY_UNDEFINED" => Some(Self::Undefined), + "PARTITION_STRATEGY_LIST" => Some(Self::List), + "PARTITION_STRATEGY_RANGE" => Some(Self::Range), + "PARTITION_STRATEGY_HASH" => Some(Self::Hash), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum PartitionRangeDatumKind { + Undefined = 0, + PartitionRangeDatumMinvalue = 1, + PartitionRangeDatumValue = 2, + PartitionRangeDatumMaxvalue = 3, +} +impl PartitionRangeDatumKind { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Self::Undefined => "PARTITION_RANGE_DATUM_KIND_UNDEFINED", + Self::PartitionRangeDatumMinvalue => "PARTITION_RANGE_DATUM_MINVALUE", + Self::PartitionRangeDatumValue => "PARTITION_RANGE_DATUM_VALUE", + Self::PartitionRangeDatumMaxvalue => "PARTITION_RANGE_DATUM_MAXVALUE", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. 
+ pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "PARTITION_RANGE_DATUM_KIND_UNDEFINED" => Some(Self::Undefined), + "PARTITION_RANGE_DATUM_MINVALUE" => Some(Self::PartitionRangeDatumMinvalue), + "PARTITION_RANGE_DATUM_VALUE" => Some(Self::PartitionRangeDatumValue), + "PARTITION_RANGE_DATUM_MAXVALUE" => Some(Self::PartitionRangeDatumMaxvalue), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum RteKind { + RtekindUndefined = 0, + RteRelation = 1, + RteSubquery = 2, + RteJoin = 3, + RteFunction = 4, + RteTablefunc = 5, + RteValues = 6, + RteCte = 7, + RteNamedtuplestore = 8, + RteResult = 9, +} +impl RteKind { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Self::RtekindUndefined => "RTEKIND_UNDEFINED", + Self::RteRelation => "RTE_RELATION", + Self::RteSubquery => "RTE_SUBQUERY", + Self::RteJoin => "RTE_JOIN", + Self::RteFunction => "RTE_FUNCTION", + Self::RteTablefunc => "RTE_TABLEFUNC", + Self::RteValues => "RTE_VALUES", + Self::RteCte => "RTE_CTE", + Self::RteNamedtuplestore => "RTE_NAMEDTUPLESTORE", + Self::RteResult => "RTE_RESULT", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. 
+ pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "RTEKIND_UNDEFINED" => Some(Self::RtekindUndefined), + "RTE_RELATION" => Some(Self::RteRelation), + "RTE_SUBQUERY" => Some(Self::RteSubquery), + "RTE_JOIN" => Some(Self::RteJoin), + "RTE_FUNCTION" => Some(Self::RteFunction), + "RTE_TABLEFUNC" => Some(Self::RteTablefunc), + "RTE_VALUES" => Some(Self::RteValues), + "RTE_CTE" => Some(Self::RteCte), + "RTE_NAMEDTUPLESTORE" => Some(Self::RteNamedtuplestore), + "RTE_RESULT" => Some(Self::RteResult), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum WcoKind { + WcokindUndefined = 0, + WcoViewCheck = 1, + WcoRlsInsertCheck = 2, + WcoRlsUpdateCheck = 3, + WcoRlsConflictCheck = 4, + WcoRlsMergeUpdateCheck = 5, + WcoRlsMergeDeleteCheck = 6, +} +impl WcoKind { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Self::WcokindUndefined => "WCOKIND_UNDEFINED", + Self::WcoViewCheck => "WCO_VIEW_CHECK", + Self::WcoRlsInsertCheck => "WCO_RLS_INSERT_CHECK", + Self::WcoRlsUpdateCheck => "WCO_RLS_UPDATE_CHECK", + Self::WcoRlsConflictCheck => "WCO_RLS_CONFLICT_CHECK", + Self::WcoRlsMergeUpdateCheck => "WCO_RLS_MERGE_UPDATE_CHECK", + Self::WcoRlsMergeDeleteCheck => "WCO_RLS_MERGE_DELETE_CHECK", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. 
+ pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "WCOKIND_UNDEFINED" => Some(Self::WcokindUndefined), + "WCO_VIEW_CHECK" => Some(Self::WcoViewCheck), + "WCO_RLS_INSERT_CHECK" => Some(Self::WcoRlsInsertCheck), + "WCO_RLS_UPDATE_CHECK" => Some(Self::WcoRlsUpdateCheck), + "WCO_RLS_CONFLICT_CHECK" => Some(Self::WcoRlsConflictCheck), + "WCO_RLS_MERGE_UPDATE_CHECK" => Some(Self::WcoRlsMergeUpdateCheck), + "WCO_RLS_MERGE_DELETE_CHECK" => Some(Self::WcoRlsMergeDeleteCheck), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum GroupingSetKind { + Undefined = 0, + GroupingSetEmpty = 1, + GroupingSetSimple = 2, + GroupingSetRollup = 3, + GroupingSetCube = 4, + GroupingSetSets = 5, +} +impl GroupingSetKind { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Self::Undefined => "GROUPING_SET_KIND_UNDEFINED", + Self::GroupingSetEmpty => "GROUPING_SET_EMPTY", + Self::GroupingSetSimple => "GROUPING_SET_SIMPLE", + Self::GroupingSetRollup => "GROUPING_SET_ROLLUP", + Self::GroupingSetCube => "GROUPING_SET_CUBE", + Self::GroupingSetSets => "GROUPING_SET_SETS", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. 
+ pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "GROUPING_SET_KIND_UNDEFINED" => Some(Self::Undefined), + "GROUPING_SET_EMPTY" => Some(Self::GroupingSetEmpty), + "GROUPING_SET_SIMPLE" => Some(Self::GroupingSetSimple), + "GROUPING_SET_ROLLUP" => Some(Self::GroupingSetRollup), + "GROUPING_SET_CUBE" => Some(Self::GroupingSetCube), + "GROUPING_SET_SETS" => Some(Self::GroupingSetSets), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum CteMaterialize { + CtematerializeUndefined = 0, + Default = 1, + Always = 2, + Never = 3, +} +impl CteMaterialize { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Self::CtematerializeUndefined => "CTEMATERIALIZE_UNDEFINED", + Self::Default => "CTEMaterializeDefault", + Self::Always => "CTEMaterializeAlways", + Self::Never => "CTEMaterializeNever", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "CTEMATERIALIZE_UNDEFINED" => Some(Self::CtematerializeUndefined), + "CTEMaterializeDefault" => Some(Self::Default), + "CTEMaterializeAlways" => Some(Self::Always), + "CTEMaterializeNever" => Some(Self::Never), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum JsonQuotes { + Undefined = 0, + JsQuotesUnspec = 1, + JsQuotesKeep = 2, + JsQuotesOmit = 3, +} +impl JsonQuotes { + /// String value of the enum field names used in the ProtoBuf definition. 
+ /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Self::Undefined => "JSON_QUOTES_UNDEFINED", + Self::JsQuotesUnspec => "JS_QUOTES_UNSPEC", + Self::JsQuotesKeep => "JS_QUOTES_KEEP", + Self::JsQuotesOmit => "JS_QUOTES_OMIT", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "JSON_QUOTES_UNDEFINED" => Some(Self::Undefined), + "JS_QUOTES_UNSPEC" => Some(Self::JsQuotesUnspec), + "JS_QUOTES_KEEP" => Some(Self::JsQuotesKeep), + "JS_QUOTES_OMIT" => Some(Self::JsQuotesOmit), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum JsonTableColumnType { + Undefined = 0, + JtcForOrdinality = 1, + JtcRegular = 2, + JtcExists = 3, + JtcFormatted = 4, + JtcNested = 5, +} +impl JsonTableColumnType { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Self::Undefined => "JSON_TABLE_COLUMN_TYPE_UNDEFINED", + Self::JtcForOrdinality => "JTC_FOR_ORDINALITY", + Self::JtcRegular => "JTC_REGULAR", + Self::JtcExists => "JTC_EXISTS", + Self::JtcFormatted => "JTC_FORMATTED", + Self::JtcNested => "JTC_NESTED", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. 
+ pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "JSON_TABLE_COLUMN_TYPE_UNDEFINED" => Some(Self::Undefined), + "JTC_FOR_ORDINALITY" => Some(Self::JtcForOrdinality), + "JTC_REGULAR" => Some(Self::JtcRegular), + "JTC_EXISTS" => Some(Self::JtcExists), + "JTC_FORMATTED" => Some(Self::JtcFormatted), + "JTC_NESTED" => Some(Self::JtcNested), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum SetOperation { + Undefined = 0, + SetopNone = 1, + SetopUnion = 2, + SetopIntersect = 3, + SetopExcept = 4, +} +impl SetOperation { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Self::Undefined => "SET_OPERATION_UNDEFINED", + Self::SetopNone => "SETOP_NONE", + Self::SetopUnion => "SETOP_UNION", + Self::SetopIntersect => "SETOP_INTERSECT", + Self::SetopExcept => "SETOP_EXCEPT", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. 
+ pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "SET_OPERATION_UNDEFINED" => Some(Self::Undefined), + "SETOP_NONE" => Some(Self::SetopNone), + "SETOP_UNION" => Some(Self::SetopUnion), + "SETOP_INTERSECT" => Some(Self::SetopIntersect), + "SETOP_EXCEPT" => Some(Self::SetopExcept), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum ObjectType { + Undefined = 0, + ObjectAccessMethod = 1, + ObjectAggregate = 2, + ObjectAmop = 3, + ObjectAmproc = 4, + ObjectAttribute = 5, + ObjectCast = 6, + ObjectColumn = 7, + ObjectCollation = 8, + ObjectConversion = 9, + ObjectDatabase = 10, + ObjectDefault = 11, + ObjectDefacl = 12, + ObjectDomain = 13, + ObjectDomconstraint = 14, + ObjectEventTrigger = 15, + ObjectExtension = 16, + ObjectFdw = 17, + ObjectForeignServer = 18, + ObjectForeignTable = 19, + ObjectFunction = 20, + ObjectIndex = 21, + ObjectLanguage = 22, + ObjectLargeobject = 23, + ObjectMatview = 24, + ObjectOpclass = 25, + ObjectOperator = 26, + ObjectOpfamily = 27, + ObjectParameterAcl = 28, + ObjectPolicy = 29, + ObjectProcedure = 30, + ObjectPublication = 31, + ObjectPublicationNamespace = 32, + ObjectPublicationRel = 33, + ObjectRole = 34, + ObjectRoutine = 35, + ObjectRule = 36, + ObjectSchema = 37, + ObjectSequence = 38, + ObjectSubscription = 39, + ObjectStatisticExt = 40, + ObjectTabconstraint = 41, + ObjectTable = 42, + ObjectTablespace = 43, + ObjectTransform = 44, + ObjectTrigger = 45, + ObjectTsconfiguration = 46, + ObjectTsdictionary = 47, + ObjectTsparser = 48, + ObjectTstemplate = 49, + ObjectType = 50, + ObjectUserMapping = 51, + ObjectView = 52, +} +impl ObjectType { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. 
+ pub fn as_str_name(&self) -> &'static str { + match self { + Self::Undefined => "OBJECT_TYPE_UNDEFINED", + Self::ObjectAccessMethod => "OBJECT_ACCESS_METHOD", + Self::ObjectAggregate => "OBJECT_AGGREGATE", + Self::ObjectAmop => "OBJECT_AMOP", + Self::ObjectAmproc => "OBJECT_AMPROC", + Self::ObjectAttribute => "OBJECT_ATTRIBUTE", + Self::ObjectCast => "OBJECT_CAST", + Self::ObjectColumn => "OBJECT_COLUMN", + Self::ObjectCollation => "OBJECT_COLLATION", + Self::ObjectConversion => "OBJECT_CONVERSION", + Self::ObjectDatabase => "OBJECT_DATABASE", + Self::ObjectDefault => "OBJECT_DEFAULT", + Self::ObjectDefacl => "OBJECT_DEFACL", + Self::ObjectDomain => "OBJECT_DOMAIN", + Self::ObjectDomconstraint => "OBJECT_DOMCONSTRAINT", + Self::ObjectEventTrigger => "OBJECT_EVENT_TRIGGER", + Self::ObjectExtension => "OBJECT_EXTENSION", + Self::ObjectFdw => "OBJECT_FDW", + Self::ObjectForeignServer => "OBJECT_FOREIGN_SERVER", + Self::ObjectForeignTable => "OBJECT_FOREIGN_TABLE", + Self::ObjectFunction => "OBJECT_FUNCTION", + Self::ObjectIndex => "OBJECT_INDEX", + Self::ObjectLanguage => "OBJECT_LANGUAGE", + Self::ObjectLargeobject => "OBJECT_LARGEOBJECT", + Self::ObjectMatview => "OBJECT_MATVIEW", + Self::ObjectOpclass => "OBJECT_OPCLASS", + Self::ObjectOperator => "OBJECT_OPERATOR", + Self::ObjectOpfamily => "OBJECT_OPFAMILY", + Self::ObjectParameterAcl => "OBJECT_PARAMETER_ACL", + Self::ObjectPolicy => "OBJECT_POLICY", + Self::ObjectProcedure => "OBJECT_PROCEDURE", + Self::ObjectPublication => "OBJECT_PUBLICATION", + Self::ObjectPublicationNamespace => "OBJECT_PUBLICATION_NAMESPACE", + Self::ObjectPublicationRel => "OBJECT_PUBLICATION_REL", + Self::ObjectRole => "OBJECT_ROLE", + Self::ObjectRoutine => "OBJECT_ROUTINE", + Self::ObjectRule => "OBJECT_RULE", + Self::ObjectSchema => "OBJECT_SCHEMA", + Self::ObjectSequence => "OBJECT_SEQUENCE", + Self::ObjectSubscription => "OBJECT_SUBSCRIPTION", + Self::ObjectStatisticExt => "OBJECT_STATISTIC_EXT", + Self::ObjectTabconstraint => 
"OBJECT_TABCONSTRAINT", + Self::ObjectTable => "OBJECT_TABLE", + Self::ObjectTablespace => "OBJECT_TABLESPACE", + Self::ObjectTransform => "OBJECT_TRANSFORM", + Self::ObjectTrigger => "OBJECT_TRIGGER", + Self::ObjectTsconfiguration => "OBJECT_TSCONFIGURATION", + Self::ObjectTsdictionary => "OBJECT_TSDICTIONARY", + Self::ObjectTsparser => "OBJECT_TSPARSER", + Self::ObjectTstemplate => "OBJECT_TSTEMPLATE", + Self::ObjectType => "OBJECT_TYPE", + Self::ObjectUserMapping => "OBJECT_USER_MAPPING", + Self::ObjectView => "OBJECT_VIEW", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "OBJECT_TYPE_UNDEFINED" => Some(Self::Undefined), + "OBJECT_ACCESS_METHOD" => Some(Self::ObjectAccessMethod), + "OBJECT_AGGREGATE" => Some(Self::ObjectAggregate), + "OBJECT_AMOP" => Some(Self::ObjectAmop), + "OBJECT_AMPROC" => Some(Self::ObjectAmproc), + "OBJECT_ATTRIBUTE" => Some(Self::ObjectAttribute), + "OBJECT_CAST" => Some(Self::ObjectCast), + "OBJECT_COLUMN" => Some(Self::ObjectColumn), + "OBJECT_COLLATION" => Some(Self::ObjectCollation), + "OBJECT_CONVERSION" => Some(Self::ObjectConversion), + "OBJECT_DATABASE" => Some(Self::ObjectDatabase), + "OBJECT_DEFAULT" => Some(Self::ObjectDefault), + "OBJECT_DEFACL" => Some(Self::ObjectDefacl), + "OBJECT_DOMAIN" => Some(Self::ObjectDomain), + "OBJECT_DOMCONSTRAINT" => Some(Self::ObjectDomconstraint), + "OBJECT_EVENT_TRIGGER" => Some(Self::ObjectEventTrigger), + "OBJECT_EXTENSION" => Some(Self::ObjectExtension), + "OBJECT_FDW" => Some(Self::ObjectFdw), + "OBJECT_FOREIGN_SERVER" => Some(Self::ObjectForeignServer), + "OBJECT_FOREIGN_TABLE" => Some(Self::ObjectForeignTable), + "OBJECT_FUNCTION" => Some(Self::ObjectFunction), + "OBJECT_INDEX" => Some(Self::ObjectIndex), + "OBJECT_LANGUAGE" => Some(Self::ObjectLanguage), + "OBJECT_LARGEOBJECT" => Some(Self::ObjectLargeobject), + "OBJECT_MATVIEW" => Some(Self::ObjectMatview), + 
"OBJECT_OPCLASS" => Some(Self::ObjectOpclass), + "OBJECT_OPERATOR" => Some(Self::ObjectOperator), + "OBJECT_OPFAMILY" => Some(Self::ObjectOpfamily), + "OBJECT_PARAMETER_ACL" => Some(Self::ObjectParameterAcl), + "OBJECT_POLICY" => Some(Self::ObjectPolicy), + "OBJECT_PROCEDURE" => Some(Self::ObjectProcedure), + "OBJECT_PUBLICATION" => Some(Self::ObjectPublication), + "OBJECT_PUBLICATION_NAMESPACE" => Some(Self::ObjectPublicationNamespace), + "OBJECT_PUBLICATION_REL" => Some(Self::ObjectPublicationRel), + "OBJECT_ROLE" => Some(Self::ObjectRole), + "OBJECT_ROUTINE" => Some(Self::ObjectRoutine), + "OBJECT_RULE" => Some(Self::ObjectRule), + "OBJECT_SCHEMA" => Some(Self::ObjectSchema), + "OBJECT_SEQUENCE" => Some(Self::ObjectSequence), + "OBJECT_SUBSCRIPTION" => Some(Self::ObjectSubscription), + "OBJECT_STATISTIC_EXT" => Some(Self::ObjectStatisticExt), + "OBJECT_TABCONSTRAINT" => Some(Self::ObjectTabconstraint), + "OBJECT_TABLE" => Some(Self::ObjectTable), + "OBJECT_TABLESPACE" => Some(Self::ObjectTablespace), + "OBJECT_TRANSFORM" => Some(Self::ObjectTransform), + "OBJECT_TRIGGER" => Some(Self::ObjectTrigger), + "OBJECT_TSCONFIGURATION" => Some(Self::ObjectTsconfiguration), + "OBJECT_TSDICTIONARY" => Some(Self::ObjectTsdictionary), + "OBJECT_TSPARSER" => Some(Self::ObjectTsparser), + "OBJECT_TSTEMPLATE" => Some(Self::ObjectTstemplate), + "OBJECT_TYPE" => Some(Self::ObjectType), + "OBJECT_USER_MAPPING" => Some(Self::ObjectUserMapping), + "OBJECT_VIEW" => Some(Self::ObjectView), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum DropBehavior { + Undefined = 0, + DropRestrict = 1, + DropCascade = 2, +} +impl DropBehavior { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. 
+ pub fn as_str_name(&self) -> &'static str { + match self { + Self::Undefined => "DROP_BEHAVIOR_UNDEFINED", + Self::DropRestrict => "DROP_RESTRICT", + Self::DropCascade => "DROP_CASCADE", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "DROP_BEHAVIOR_UNDEFINED" => Some(Self::Undefined), + "DROP_RESTRICT" => Some(Self::DropRestrict), + "DROP_CASCADE" => Some(Self::DropCascade), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum AlterTableType { + Undefined = 0, + AtAddColumn = 1, + AtAddColumnToView = 2, + AtColumnDefault = 3, + AtCookedColumnDefault = 4, + AtDropNotNull = 5, + AtSetNotNull = 6, + AtSetExpression = 7, + AtDropExpression = 8, + AtCheckNotNull = 9, + AtSetStatistics = 10, + AtSetOptions = 11, + AtResetOptions = 12, + AtSetStorage = 13, + AtSetCompression = 14, + AtDropColumn = 15, + AtAddIndex = 16, + AtReAddIndex = 17, + AtAddConstraint = 18, + AtReAddConstraint = 19, + AtReAddDomainConstraint = 20, + AtAlterConstraint = 21, + AtValidateConstraint = 22, + AtAddIndexConstraint = 23, + AtDropConstraint = 24, + AtReAddComment = 25, + AtAlterColumnType = 26, + AtAlterColumnGenericOptions = 27, + AtChangeOwner = 28, + AtClusterOn = 29, + AtDropCluster = 30, + AtSetLogged = 31, + AtSetUnLogged = 32, + AtDropOids = 33, + AtSetAccessMethod = 34, + AtSetTableSpace = 35, + AtSetRelOptions = 36, + AtResetRelOptions = 37, + AtReplaceRelOptions = 38, + AtEnableTrig = 39, + AtEnableAlwaysTrig = 40, + AtEnableReplicaTrig = 41, + AtDisableTrig = 42, + AtEnableTrigAll = 43, + AtDisableTrigAll = 44, + AtEnableTrigUser = 45, + AtDisableTrigUser = 46, + AtEnableRule = 47, + AtEnableAlwaysRule = 48, + AtEnableReplicaRule = 49, + AtDisableRule = 50, + AtAddInherit = 51, + AtDropInherit = 52, + AtAddOf = 53, + AtDropOf = 54, + AtReplicaIdentity = 55, + AtEnableRowSecurity = 
56, + AtDisableRowSecurity = 57, + AtForceRowSecurity = 58, + AtNoForceRowSecurity = 59, + AtGenericOptions = 60, + AtAttachPartition = 61, + AtDetachPartition = 62, + AtDetachPartitionFinalize = 63, + AtAddIdentity = 64, + AtSetIdentity = 65, + AtDropIdentity = 66, + AtReAddStatistics = 67, +} +impl AlterTableType { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Self::Undefined => "ALTER_TABLE_TYPE_UNDEFINED", + Self::AtAddColumn => "AT_AddColumn", + Self::AtAddColumnToView => "AT_AddColumnToView", + Self::AtColumnDefault => "AT_ColumnDefault", + Self::AtCookedColumnDefault => "AT_CookedColumnDefault", + Self::AtDropNotNull => "AT_DropNotNull", + Self::AtSetNotNull => "AT_SetNotNull", + Self::AtSetExpression => "AT_SetExpression", + Self::AtDropExpression => "AT_DropExpression", + Self::AtCheckNotNull => "AT_CheckNotNull", + Self::AtSetStatistics => "AT_SetStatistics", + Self::AtSetOptions => "AT_SetOptions", + Self::AtResetOptions => "AT_ResetOptions", + Self::AtSetStorage => "AT_SetStorage", + Self::AtSetCompression => "AT_SetCompression", + Self::AtDropColumn => "AT_DropColumn", + Self::AtAddIndex => "AT_AddIndex", + Self::AtReAddIndex => "AT_ReAddIndex", + Self::AtAddConstraint => "AT_AddConstraint", + Self::AtReAddConstraint => "AT_ReAddConstraint", + Self::AtReAddDomainConstraint => "AT_ReAddDomainConstraint", + Self::AtAlterConstraint => "AT_AlterConstraint", + Self::AtValidateConstraint => "AT_ValidateConstraint", + Self::AtAddIndexConstraint => "AT_AddIndexConstraint", + Self::AtDropConstraint => "AT_DropConstraint", + Self::AtReAddComment => "AT_ReAddComment", + Self::AtAlterColumnType => "AT_AlterColumnType", + Self::AtAlterColumnGenericOptions => "AT_AlterColumnGenericOptions", + 
Self::AtChangeOwner => "AT_ChangeOwner", + Self::AtClusterOn => "AT_ClusterOn", + Self::AtDropCluster => "AT_DropCluster", + Self::AtSetLogged => "AT_SetLogged", + Self::AtSetUnLogged => "AT_SetUnLogged", + Self::AtDropOids => "AT_DropOids", + Self::AtSetAccessMethod => "AT_SetAccessMethod", + Self::AtSetTableSpace => "AT_SetTableSpace", + Self::AtSetRelOptions => "AT_SetRelOptions", + Self::AtResetRelOptions => "AT_ResetRelOptions", + Self::AtReplaceRelOptions => "AT_ReplaceRelOptions", + Self::AtEnableTrig => "AT_EnableTrig", + Self::AtEnableAlwaysTrig => "AT_EnableAlwaysTrig", + Self::AtEnableReplicaTrig => "AT_EnableReplicaTrig", + Self::AtDisableTrig => "AT_DisableTrig", + Self::AtEnableTrigAll => "AT_EnableTrigAll", + Self::AtDisableTrigAll => "AT_DisableTrigAll", + Self::AtEnableTrigUser => "AT_EnableTrigUser", + Self::AtDisableTrigUser => "AT_DisableTrigUser", + Self::AtEnableRule => "AT_EnableRule", + Self::AtEnableAlwaysRule => "AT_EnableAlwaysRule", + Self::AtEnableReplicaRule => "AT_EnableReplicaRule", + Self::AtDisableRule => "AT_DisableRule", + Self::AtAddInherit => "AT_AddInherit", + Self::AtDropInherit => "AT_DropInherit", + Self::AtAddOf => "AT_AddOf", + Self::AtDropOf => "AT_DropOf", + Self::AtReplicaIdentity => "AT_ReplicaIdentity", + Self::AtEnableRowSecurity => "AT_EnableRowSecurity", + Self::AtDisableRowSecurity => "AT_DisableRowSecurity", + Self::AtForceRowSecurity => "AT_ForceRowSecurity", + Self::AtNoForceRowSecurity => "AT_NoForceRowSecurity", + Self::AtGenericOptions => "AT_GenericOptions", + Self::AtAttachPartition => "AT_AttachPartition", + Self::AtDetachPartition => "AT_DetachPartition", + Self::AtDetachPartitionFinalize => "AT_DetachPartitionFinalize", + Self::AtAddIdentity => "AT_AddIdentity", + Self::AtSetIdentity => "AT_SetIdentity", + Self::AtDropIdentity => "AT_DropIdentity", + Self::AtReAddStatistics => "AT_ReAddStatistics", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. 
+ pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "ALTER_TABLE_TYPE_UNDEFINED" => Some(Self::Undefined), + "AT_AddColumn" => Some(Self::AtAddColumn), + "AT_AddColumnToView" => Some(Self::AtAddColumnToView), + "AT_ColumnDefault" => Some(Self::AtColumnDefault), + "AT_CookedColumnDefault" => Some(Self::AtCookedColumnDefault), + "AT_DropNotNull" => Some(Self::AtDropNotNull), + "AT_SetNotNull" => Some(Self::AtSetNotNull), + "AT_SetExpression" => Some(Self::AtSetExpression), + "AT_DropExpression" => Some(Self::AtDropExpression), + "AT_CheckNotNull" => Some(Self::AtCheckNotNull), + "AT_SetStatistics" => Some(Self::AtSetStatistics), + "AT_SetOptions" => Some(Self::AtSetOptions), + "AT_ResetOptions" => Some(Self::AtResetOptions), + "AT_SetStorage" => Some(Self::AtSetStorage), + "AT_SetCompression" => Some(Self::AtSetCompression), + "AT_DropColumn" => Some(Self::AtDropColumn), + "AT_AddIndex" => Some(Self::AtAddIndex), + "AT_ReAddIndex" => Some(Self::AtReAddIndex), + "AT_AddConstraint" => Some(Self::AtAddConstraint), + "AT_ReAddConstraint" => Some(Self::AtReAddConstraint), + "AT_ReAddDomainConstraint" => Some(Self::AtReAddDomainConstraint), + "AT_AlterConstraint" => Some(Self::AtAlterConstraint), + "AT_ValidateConstraint" => Some(Self::AtValidateConstraint), + "AT_AddIndexConstraint" => Some(Self::AtAddIndexConstraint), + "AT_DropConstraint" => Some(Self::AtDropConstraint), + "AT_ReAddComment" => Some(Self::AtReAddComment), + "AT_AlterColumnType" => Some(Self::AtAlterColumnType), + "AT_AlterColumnGenericOptions" => Some(Self::AtAlterColumnGenericOptions), + "AT_ChangeOwner" => Some(Self::AtChangeOwner), + "AT_ClusterOn" => Some(Self::AtClusterOn), + "AT_DropCluster" => Some(Self::AtDropCluster), + "AT_SetLogged" => Some(Self::AtSetLogged), + "AT_SetUnLogged" => Some(Self::AtSetUnLogged), + "AT_DropOids" => Some(Self::AtDropOids), + "AT_SetAccessMethod" => Some(Self::AtSetAccessMethod), + "AT_SetTableSpace" => Some(Self::AtSetTableSpace), + 
"AT_SetRelOptions" => Some(Self::AtSetRelOptions), + "AT_ResetRelOptions" => Some(Self::AtResetRelOptions), + "AT_ReplaceRelOptions" => Some(Self::AtReplaceRelOptions), + "AT_EnableTrig" => Some(Self::AtEnableTrig), + "AT_EnableAlwaysTrig" => Some(Self::AtEnableAlwaysTrig), + "AT_EnableReplicaTrig" => Some(Self::AtEnableReplicaTrig), + "AT_DisableTrig" => Some(Self::AtDisableTrig), + "AT_EnableTrigAll" => Some(Self::AtEnableTrigAll), + "AT_DisableTrigAll" => Some(Self::AtDisableTrigAll), + "AT_EnableTrigUser" => Some(Self::AtEnableTrigUser), + "AT_DisableTrigUser" => Some(Self::AtDisableTrigUser), + "AT_EnableRule" => Some(Self::AtEnableRule), + "AT_EnableAlwaysRule" => Some(Self::AtEnableAlwaysRule), + "AT_EnableReplicaRule" => Some(Self::AtEnableReplicaRule), + "AT_DisableRule" => Some(Self::AtDisableRule), + "AT_AddInherit" => Some(Self::AtAddInherit), + "AT_DropInherit" => Some(Self::AtDropInherit), + "AT_AddOf" => Some(Self::AtAddOf), + "AT_DropOf" => Some(Self::AtDropOf), + "AT_ReplicaIdentity" => Some(Self::AtReplicaIdentity), + "AT_EnableRowSecurity" => Some(Self::AtEnableRowSecurity), + "AT_DisableRowSecurity" => Some(Self::AtDisableRowSecurity), + "AT_ForceRowSecurity" => Some(Self::AtForceRowSecurity), + "AT_NoForceRowSecurity" => Some(Self::AtNoForceRowSecurity), + "AT_GenericOptions" => Some(Self::AtGenericOptions), + "AT_AttachPartition" => Some(Self::AtAttachPartition), + "AT_DetachPartition" => Some(Self::AtDetachPartition), + "AT_DetachPartitionFinalize" => Some(Self::AtDetachPartitionFinalize), + "AT_AddIdentity" => Some(Self::AtAddIdentity), + "AT_SetIdentity" => Some(Self::AtSetIdentity), + "AT_DropIdentity" => Some(Self::AtDropIdentity), + "AT_ReAddStatistics" => Some(Self::AtReAddStatistics), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum GrantTargetType { + Undefined = 0, + AclTargetObject = 1, + AclTargetAllInSchema = 2, + AclTargetDefaults = 3, +} 
+impl GrantTargetType { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Self::Undefined => "GRANT_TARGET_TYPE_UNDEFINED", + Self::AclTargetObject => "ACL_TARGET_OBJECT", + Self::AclTargetAllInSchema => "ACL_TARGET_ALL_IN_SCHEMA", + Self::AclTargetDefaults => "ACL_TARGET_DEFAULTS", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "GRANT_TARGET_TYPE_UNDEFINED" => Some(Self::Undefined), + "ACL_TARGET_OBJECT" => Some(Self::AclTargetObject), + "ACL_TARGET_ALL_IN_SCHEMA" => Some(Self::AclTargetAllInSchema), + "ACL_TARGET_DEFAULTS" => Some(Self::AclTargetDefaults), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum VariableSetKind { + Undefined = 0, + VarSetValue = 1, + VarSetDefault = 2, + VarSetCurrent = 3, + VarSetMulti = 4, + VarReset = 5, + VarResetAll = 6, +} +impl VariableSetKind { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Self::Undefined => "VARIABLE_SET_KIND_UNDEFINED", + Self::VarSetValue => "VAR_SET_VALUE", + Self::VarSetDefault => "VAR_SET_DEFAULT", + Self::VarSetCurrent => "VAR_SET_CURRENT", + Self::VarSetMulti => "VAR_SET_MULTI", + Self::VarReset => "VAR_RESET", + Self::VarResetAll => "VAR_RESET_ALL", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. 
+ pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "VARIABLE_SET_KIND_UNDEFINED" => Some(Self::Undefined), + "VAR_SET_VALUE" => Some(Self::VarSetValue), + "VAR_SET_DEFAULT" => Some(Self::VarSetDefault), + "VAR_SET_CURRENT" => Some(Self::VarSetCurrent), + "VAR_SET_MULTI" => Some(Self::VarSetMulti), + "VAR_RESET" => Some(Self::VarReset), + "VAR_RESET_ALL" => Some(Self::VarResetAll), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum ConstrType { + Undefined = 0, + ConstrNull = 1, + ConstrNotnull = 2, + ConstrDefault = 3, + ConstrIdentity = 4, + ConstrGenerated = 5, + ConstrCheck = 6, + ConstrPrimary = 7, + ConstrUnique = 8, + ConstrExclusion = 9, + ConstrForeign = 10, + ConstrAttrDeferrable = 11, + ConstrAttrNotDeferrable = 12, + ConstrAttrDeferred = 13, + ConstrAttrImmediate = 14, +} +impl ConstrType { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. 
+ pub fn as_str_name(&self) -> &'static str { + match self { + Self::Undefined => "CONSTR_TYPE_UNDEFINED", + Self::ConstrNull => "CONSTR_NULL", + Self::ConstrNotnull => "CONSTR_NOTNULL", + Self::ConstrDefault => "CONSTR_DEFAULT", + Self::ConstrIdentity => "CONSTR_IDENTITY", + Self::ConstrGenerated => "CONSTR_GENERATED", + Self::ConstrCheck => "CONSTR_CHECK", + Self::ConstrPrimary => "CONSTR_PRIMARY", + Self::ConstrUnique => "CONSTR_UNIQUE", + Self::ConstrExclusion => "CONSTR_EXCLUSION", + Self::ConstrForeign => "CONSTR_FOREIGN", + Self::ConstrAttrDeferrable => "CONSTR_ATTR_DEFERRABLE", + Self::ConstrAttrNotDeferrable => "CONSTR_ATTR_NOT_DEFERRABLE", + Self::ConstrAttrDeferred => "CONSTR_ATTR_DEFERRED", + Self::ConstrAttrImmediate => "CONSTR_ATTR_IMMEDIATE", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "CONSTR_TYPE_UNDEFINED" => Some(Self::Undefined), + "CONSTR_NULL" => Some(Self::ConstrNull), + "CONSTR_NOTNULL" => Some(Self::ConstrNotnull), + "CONSTR_DEFAULT" => Some(Self::ConstrDefault), + "CONSTR_IDENTITY" => Some(Self::ConstrIdentity), + "CONSTR_GENERATED" => Some(Self::ConstrGenerated), + "CONSTR_CHECK" => Some(Self::ConstrCheck), + "CONSTR_PRIMARY" => Some(Self::ConstrPrimary), + "CONSTR_UNIQUE" => Some(Self::ConstrUnique), + "CONSTR_EXCLUSION" => Some(Self::ConstrExclusion), + "CONSTR_FOREIGN" => Some(Self::ConstrForeign), + "CONSTR_ATTR_DEFERRABLE" => Some(Self::ConstrAttrDeferrable), + "CONSTR_ATTR_NOT_DEFERRABLE" => Some(Self::ConstrAttrNotDeferrable), + "CONSTR_ATTR_DEFERRED" => Some(Self::ConstrAttrDeferred), + "CONSTR_ATTR_IMMEDIATE" => Some(Self::ConstrAttrImmediate), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum ImportForeignSchemaType { + Undefined = 0, + FdwImportSchemaAll = 1, + FdwImportSchemaLimitTo = 2, + FdwImportSchemaExcept = 3, +} 
+impl ImportForeignSchemaType { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Self::Undefined => "IMPORT_FOREIGN_SCHEMA_TYPE_UNDEFINED", + Self::FdwImportSchemaAll => "FDW_IMPORT_SCHEMA_ALL", + Self::FdwImportSchemaLimitTo => "FDW_IMPORT_SCHEMA_LIMIT_TO", + Self::FdwImportSchemaExcept => "FDW_IMPORT_SCHEMA_EXCEPT", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "IMPORT_FOREIGN_SCHEMA_TYPE_UNDEFINED" => Some(Self::Undefined), + "FDW_IMPORT_SCHEMA_ALL" => Some(Self::FdwImportSchemaAll), + "FDW_IMPORT_SCHEMA_LIMIT_TO" => Some(Self::FdwImportSchemaLimitTo), + "FDW_IMPORT_SCHEMA_EXCEPT" => Some(Self::FdwImportSchemaExcept), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum RoleStmtType { + Undefined = 0, + RolestmtRole = 1, + RolestmtUser = 2, + RolestmtGroup = 3, +} +impl RoleStmtType { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Self::Undefined => "ROLE_STMT_TYPE_UNDEFINED", + Self::RolestmtRole => "ROLESTMT_ROLE", + Self::RolestmtUser => "ROLESTMT_USER", + Self::RolestmtGroup => "ROLESTMT_GROUP", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. 
+ pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "ROLE_STMT_TYPE_UNDEFINED" => Some(Self::Undefined), + "ROLESTMT_ROLE" => Some(Self::RolestmtRole), + "ROLESTMT_USER" => Some(Self::RolestmtUser), + "ROLESTMT_GROUP" => Some(Self::RolestmtGroup), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum FetchDirection { + Undefined = 0, + FetchForward = 1, + FetchBackward = 2, + FetchAbsolute = 3, + FetchRelative = 4, +} +impl FetchDirection { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Self::Undefined => "FETCH_DIRECTION_UNDEFINED", + Self::FetchForward => "FETCH_FORWARD", + Self::FetchBackward => "FETCH_BACKWARD", + Self::FetchAbsolute => "FETCH_ABSOLUTE", + Self::FetchRelative => "FETCH_RELATIVE", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "FETCH_DIRECTION_UNDEFINED" => Some(Self::Undefined), + "FETCH_FORWARD" => Some(Self::FetchForward), + "FETCH_BACKWARD" => Some(Self::FetchBackward), + "FETCH_ABSOLUTE" => Some(Self::FetchAbsolute), + "FETCH_RELATIVE" => Some(Self::FetchRelative), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum FunctionParameterMode { + Undefined = 0, + FuncParamIn = 1, + FuncParamOut = 2, + FuncParamInout = 3, + FuncParamVariadic = 4, + FuncParamTable = 5, + FuncParamDefault = 6, +} +impl FunctionParameterMode { + /// String value of the enum field names used in the ProtoBuf definition. 
+ /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Self::Undefined => "FUNCTION_PARAMETER_MODE_UNDEFINED", + Self::FuncParamIn => "FUNC_PARAM_IN", + Self::FuncParamOut => "FUNC_PARAM_OUT", + Self::FuncParamInout => "FUNC_PARAM_INOUT", + Self::FuncParamVariadic => "FUNC_PARAM_VARIADIC", + Self::FuncParamTable => "FUNC_PARAM_TABLE", + Self::FuncParamDefault => "FUNC_PARAM_DEFAULT", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "FUNCTION_PARAMETER_MODE_UNDEFINED" => Some(Self::Undefined), + "FUNC_PARAM_IN" => Some(Self::FuncParamIn), + "FUNC_PARAM_OUT" => Some(Self::FuncParamOut), + "FUNC_PARAM_INOUT" => Some(Self::FuncParamInout), + "FUNC_PARAM_VARIADIC" => Some(Self::FuncParamVariadic), + "FUNC_PARAM_TABLE" => Some(Self::FuncParamTable), + "FUNC_PARAM_DEFAULT" => Some(Self::FuncParamDefault), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum TransactionStmtKind { + Undefined = 0, + TransStmtBegin = 1, + TransStmtStart = 2, + TransStmtCommit = 3, + TransStmtRollback = 4, + TransStmtSavepoint = 5, + TransStmtRelease = 6, + TransStmtRollbackTo = 7, + TransStmtPrepare = 8, + TransStmtCommitPrepared = 9, + TransStmtRollbackPrepared = 10, +} +impl TransactionStmtKind { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. 
+ pub fn as_str_name(&self) -> &'static str { + match self { + Self::Undefined => "TRANSACTION_STMT_KIND_UNDEFINED", + Self::TransStmtBegin => "TRANS_STMT_BEGIN", + Self::TransStmtStart => "TRANS_STMT_START", + Self::TransStmtCommit => "TRANS_STMT_COMMIT", + Self::TransStmtRollback => "TRANS_STMT_ROLLBACK", + Self::TransStmtSavepoint => "TRANS_STMT_SAVEPOINT", + Self::TransStmtRelease => "TRANS_STMT_RELEASE", + Self::TransStmtRollbackTo => "TRANS_STMT_ROLLBACK_TO", + Self::TransStmtPrepare => "TRANS_STMT_PREPARE", + Self::TransStmtCommitPrepared => "TRANS_STMT_COMMIT_PREPARED", + Self::TransStmtRollbackPrepared => "TRANS_STMT_ROLLBACK_PREPARED", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "TRANSACTION_STMT_KIND_UNDEFINED" => Some(Self::Undefined), + "TRANS_STMT_BEGIN" => Some(Self::TransStmtBegin), + "TRANS_STMT_START" => Some(Self::TransStmtStart), + "TRANS_STMT_COMMIT" => Some(Self::TransStmtCommit), + "TRANS_STMT_ROLLBACK" => Some(Self::TransStmtRollback), + "TRANS_STMT_SAVEPOINT" => Some(Self::TransStmtSavepoint), + "TRANS_STMT_RELEASE" => Some(Self::TransStmtRelease), + "TRANS_STMT_ROLLBACK_TO" => Some(Self::TransStmtRollbackTo), + "TRANS_STMT_PREPARE" => Some(Self::TransStmtPrepare), + "TRANS_STMT_COMMIT_PREPARED" => Some(Self::TransStmtCommitPrepared), + "TRANS_STMT_ROLLBACK_PREPARED" => Some(Self::TransStmtRollbackPrepared), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum ViewCheckOption { + Undefined = 0, + NoCheckOption = 1, + LocalCheckOption = 2, + CascadedCheckOption = 3, +} +impl ViewCheckOption { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. 
+ pub fn as_str_name(&self) -> &'static str { + match self { + Self::Undefined => "VIEW_CHECK_OPTION_UNDEFINED", + Self::NoCheckOption => "NO_CHECK_OPTION", + Self::LocalCheckOption => "LOCAL_CHECK_OPTION", + Self::CascadedCheckOption => "CASCADED_CHECK_OPTION", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "VIEW_CHECK_OPTION_UNDEFINED" => Some(Self::Undefined), + "NO_CHECK_OPTION" => Some(Self::NoCheckOption), + "LOCAL_CHECK_OPTION" => Some(Self::LocalCheckOption), + "CASCADED_CHECK_OPTION" => Some(Self::CascadedCheckOption), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum DiscardMode { + Undefined = 0, + DiscardAll = 1, + DiscardPlans = 2, + DiscardSequences = 3, + DiscardTemp = 4, +} +impl DiscardMode { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Self::Undefined => "DISCARD_MODE_UNDEFINED", + Self::DiscardAll => "DISCARD_ALL", + Self::DiscardPlans => "DISCARD_PLANS", + Self::DiscardSequences => "DISCARD_SEQUENCES", + Self::DiscardTemp => "DISCARD_TEMP", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. 
+ pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "DISCARD_MODE_UNDEFINED" => Some(Self::Undefined), + "DISCARD_ALL" => Some(Self::DiscardAll), + "DISCARD_PLANS" => Some(Self::DiscardPlans), + "DISCARD_SEQUENCES" => Some(Self::DiscardSequences), + "DISCARD_TEMP" => Some(Self::DiscardTemp), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum ReindexObjectType { + Undefined = 0, + ReindexObjectIndex = 1, + ReindexObjectTable = 2, + ReindexObjectSchema = 3, + ReindexObjectSystem = 4, + ReindexObjectDatabase = 5, +} +impl ReindexObjectType { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Self::Undefined => "REINDEX_OBJECT_TYPE_UNDEFINED", + Self::ReindexObjectIndex => "REINDEX_OBJECT_INDEX", + Self::ReindexObjectTable => "REINDEX_OBJECT_TABLE", + Self::ReindexObjectSchema => "REINDEX_OBJECT_SCHEMA", + Self::ReindexObjectSystem => "REINDEX_OBJECT_SYSTEM", + Self::ReindexObjectDatabase => "REINDEX_OBJECT_DATABASE", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. 
+ pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "REINDEX_OBJECT_TYPE_UNDEFINED" => Some(Self::Undefined), + "REINDEX_OBJECT_INDEX" => Some(Self::ReindexObjectIndex), + "REINDEX_OBJECT_TABLE" => Some(Self::ReindexObjectTable), + "REINDEX_OBJECT_SCHEMA" => Some(Self::ReindexObjectSchema), + "REINDEX_OBJECT_SYSTEM" => Some(Self::ReindexObjectSystem), + "REINDEX_OBJECT_DATABASE" => Some(Self::ReindexObjectDatabase), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum AlterTsConfigType { + AlterTsconfigTypeUndefined = 0, + AlterTsconfigAddMapping = 1, + AlterTsconfigAlterMappingForToken = 2, + AlterTsconfigReplaceDict = 3, + AlterTsconfigReplaceDictForToken = 4, + AlterTsconfigDropMapping = 5, +} +impl AlterTsConfigType { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Self::AlterTsconfigTypeUndefined => "ALTER_TSCONFIG_TYPE_UNDEFINED", + Self::AlterTsconfigAddMapping => "ALTER_TSCONFIG_ADD_MAPPING", + Self::AlterTsconfigAlterMappingForToken => { + "ALTER_TSCONFIG_ALTER_MAPPING_FOR_TOKEN" + } + Self::AlterTsconfigReplaceDict => "ALTER_TSCONFIG_REPLACE_DICT", + Self::AlterTsconfigReplaceDictForToken => { + "ALTER_TSCONFIG_REPLACE_DICT_FOR_TOKEN" + } + Self::AlterTsconfigDropMapping => "ALTER_TSCONFIG_DROP_MAPPING", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. 
+ pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "ALTER_TSCONFIG_TYPE_UNDEFINED" => Some(Self::AlterTsconfigTypeUndefined), + "ALTER_TSCONFIG_ADD_MAPPING" => Some(Self::AlterTsconfigAddMapping), + "ALTER_TSCONFIG_ALTER_MAPPING_FOR_TOKEN" => { + Some(Self::AlterTsconfigAlterMappingForToken) + } + "ALTER_TSCONFIG_REPLACE_DICT" => Some(Self::AlterTsconfigReplaceDict), + "ALTER_TSCONFIG_REPLACE_DICT_FOR_TOKEN" => { + Some(Self::AlterTsconfigReplaceDictForToken) + } + "ALTER_TSCONFIG_DROP_MAPPING" => Some(Self::AlterTsconfigDropMapping), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum PublicationObjSpecType { + Undefined = 0, + PublicationobjTable = 1, + PublicationobjTablesInSchema = 2, + PublicationobjTablesInCurSchema = 3, + PublicationobjContinuation = 4, +} +impl PublicationObjSpecType { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Self::Undefined => "PUBLICATION_OBJ_SPEC_TYPE_UNDEFINED", + Self::PublicationobjTable => "PUBLICATIONOBJ_TABLE", + Self::PublicationobjTablesInSchema => "PUBLICATIONOBJ_TABLES_IN_SCHEMA", + Self::PublicationobjTablesInCurSchema => { + "PUBLICATIONOBJ_TABLES_IN_CUR_SCHEMA" + } + Self::PublicationobjContinuation => "PUBLICATIONOBJ_CONTINUATION", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. 
+ pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "PUBLICATION_OBJ_SPEC_TYPE_UNDEFINED" => Some(Self::Undefined), + "PUBLICATIONOBJ_TABLE" => Some(Self::PublicationobjTable), + "PUBLICATIONOBJ_TABLES_IN_SCHEMA" => Some(Self::PublicationobjTablesInSchema), + "PUBLICATIONOBJ_TABLES_IN_CUR_SCHEMA" => { + Some(Self::PublicationobjTablesInCurSchema) + } + "PUBLICATIONOBJ_CONTINUATION" => Some(Self::PublicationobjContinuation), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum AlterPublicationAction { + Undefined = 0, + ApAddObjects = 1, + ApDropObjects = 2, + ApSetObjects = 3, +} +impl AlterPublicationAction { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Self::Undefined => "ALTER_PUBLICATION_ACTION_UNDEFINED", + Self::ApAddObjects => "AP_AddObjects", + Self::ApDropObjects => "AP_DropObjects", + Self::ApSetObjects => "AP_SetObjects", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. 
+ pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "ALTER_PUBLICATION_ACTION_UNDEFINED" => Some(Self::Undefined), + "AP_AddObjects" => Some(Self::ApAddObjects), + "AP_DropObjects" => Some(Self::ApDropObjects), + "AP_SetObjects" => Some(Self::ApSetObjects), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum AlterSubscriptionType { + Undefined = 0, + AlterSubscriptionOptions = 1, + AlterSubscriptionConnection = 2, + AlterSubscriptionSetPublication = 3, + AlterSubscriptionAddPublication = 4, + AlterSubscriptionDropPublication = 5, + AlterSubscriptionRefresh = 6, + AlterSubscriptionEnabled = 7, + AlterSubscriptionSkip = 8, +} +impl AlterSubscriptionType { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Self::Undefined => "ALTER_SUBSCRIPTION_TYPE_UNDEFINED", + Self::AlterSubscriptionOptions => "ALTER_SUBSCRIPTION_OPTIONS", + Self::AlterSubscriptionConnection => "ALTER_SUBSCRIPTION_CONNECTION", + Self::AlterSubscriptionSetPublication => "ALTER_SUBSCRIPTION_SET_PUBLICATION", + Self::AlterSubscriptionAddPublication => "ALTER_SUBSCRIPTION_ADD_PUBLICATION", + Self::AlterSubscriptionDropPublication => { + "ALTER_SUBSCRIPTION_DROP_PUBLICATION" + } + Self::AlterSubscriptionRefresh => "ALTER_SUBSCRIPTION_REFRESH", + Self::AlterSubscriptionEnabled => "ALTER_SUBSCRIPTION_ENABLED", + Self::AlterSubscriptionSkip => "ALTER_SUBSCRIPTION_SKIP", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. 
+ pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "ALTER_SUBSCRIPTION_TYPE_UNDEFINED" => Some(Self::Undefined), + "ALTER_SUBSCRIPTION_OPTIONS" => Some(Self::AlterSubscriptionOptions), + "ALTER_SUBSCRIPTION_CONNECTION" => Some(Self::AlterSubscriptionConnection), + "ALTER_SUBSCRIPTION_SET_PUBLICATION" => { + Some(Self::AlterSubscriptionSetPublication) + } + "ALTER_SUBSCRIPTION_ADD_PUBLICATION" => { + Some(Self::AlterSubscriptionAddPublication) + } + "ALTER_SUBSCRIPTION_DROP_PUBLICATION" => { + Some(Self::AlterSubscriptionDropPublication) + } + "ALTER_SUBSCRIPTION_REFRESH" => Some(Self::AlterSubscriptionRefresh), + "ALTER_SUBSCRIPTION_ENABLED" => Some(Self::AlterSubscriptionEnabled), + "ALTER_SUBSCRIPTION_SKIP" => Some(Self::AlterSubscriptionSkip), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum OverridingKind { + Undefined = 0, + OverridingNotSet = 1, + OverridingUserValue = 2, + OverridingSystemValue = 3, +} +impl OverridingKind { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Self::Undefined => "OVERRIDING_KIND_UNDEFINED", + Self::OverridingNotSet => "OVERRIDING_NOT_SET", + Self::OverridingUserValue => "OVERRIDING_USER_VALUE", + Self::OverridingSystemValue => "OVERRIDING_SYSTEM_VALUE", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. 
+ pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "OVERRIDING_KIND_UNDEFINED" => Some(Self::Undefined), + "OVERRIDING_NOT_SET" => Some(Self::OverridingNotSet), + "OVERRIDING_USER_VALUE" => Some(Self::OverridingUserValue), + "OVERRIDING_SYSTEM_VALUE" => Some(Self::OverridingSystemValue), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum OnCommitAction { + Undefined = 0, + OncommitNoop = 1, + OncommitPreserveRows = 2, + OncommitDeleteRows = 3, + OncommitDrop = 4, +} +impl OnCommitAction { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Self::Undefined => "ON_COMMIT_ACTION_UNDEFINED", + Self::OncommitNoop => "ONCOMMIT_NOOP", + Self::OncommitPreserveRows => "ONCOMMIT_PRESERVE_ROWS", + Self::OncommitDeleteRows => "ONCOMMIT_DELETE_ROWS", + Self::OncommitDrop => "ONCOMMIT_DROP", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "ON_COMMIT_ACTION_UNDEFINED" => Some(Self::Undefined), + "ONCOMMIT_NOOP" => Some(Self::OncommitNoop), + "ONCOMMIT_PRESERVE_ROWS" => Some(Self::OncommitPreserveRows), + "ONCOMMIT_DELETE_ROWS" => Some(Self::OncommitDeleteRows), + "ONCOMMIT_DROP" => Some(Self::OncommitDrop), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum TableFuncType { + Undefined = 0, + TftXmltable = 1, + TftJsonTable = 2, +} +impl TableFuncType { + /// String value of the enum field names used in the ProtoBuf definition. 
+ /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Self::Undefined => "TABLE_FUNC_TYPE_UNDEFINED", + Self::TftXmltable => "TFT_XMLTABLE", + Self::TftJsonTable => "TFT_JSON_TABLE", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "TABLE_FUNC_TYPE_UNDEFINED" => Some(Self::Undefined), + "TFT_XMLTABLE" => Some(Self::TftXmltable), + "TFT_JSON_TABLE" => Some(Self::TftJsonTable), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum ParamKind { + Undefined = 0, + ParamExtern = 1, + ParamExec = 2, + ParamSublink = 3, + ParamMultiexpr = 4, +} +impl ParamKind { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Self::Undefined => "PARAM_KIND_UNDEFINED", + Self::ParamExtern => "PARAM_EXTERN", + Self::ParamExec => "PARAM_EXEC", + Self::ParamSublink => "PARAM_SUBLINK", + Self::ParamMultiexpr => "PARAM_MULTIEXPR", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. 
+ pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "PARAM_KIND_UNDEFINED" => Some(Self::Undefined), + "PARAM_EXTERN" => Some(Self::ParamExtern), + "PARAM_EXEC" => Some(Self::ParamExec), + "PARAM_SUBLINK" => Some(Self::ParamSublink), + "PARAM_MULTIEXPR" => Some(Self::ParamMultiexpr), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum CoercionContext { + Undefined = 0, + CoercionImplicit = 1, + CoercionAssignment = 2, + CoercionPlpgsql = 3, + CoercionExplicit = 4, +} +impl CoercionContext { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Self::Undefined => "COERCION_CONTEXT_UNDEFINED", + Self::CoercionImplicit => "COERCION_IMPLICIT", + Self::CoercionAssignment => "COERCION_ASSIGNMENT", + Self::CoercionPlpgsql => "COERCION_PLPGSQL", + Self::CoercionExplicit => "COERCION_EXPLICIT", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "COERCION_CONTEXT_UNDEFINED" => Some(Self::Undefined), + "COERCION_IMPLICIT" => Some(Self::CoercionImplicit), + "COERCION_ASSIGNMENT" => Some(Self::CoercionAssignment), + "COERCION_PLPGSQL" => Some(Self::CoercionPlpgsql), + "COERCION_EXPLICIT" => Some(Self::CoercionExplicit), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum CoercionForm { + Undefined = 0, + CoerceExplicitCall = 1, + CoerceExplicitCast = 2, + CoerceImplicitCast = 3, + CoerceSqlSyntax = 4, +} +impl CoercionForm { + /// String value of the enum field names used in the ProtoBuf definition. 
+ /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Self::Undefined => "COERCION_FORM_UNDEFINED", + Self::CoerceExplicitCall => "COERCE_EXPLICIT_CALL", + Self::CoerceExplicitCast => "COERCE_EXPLICIT_CAST", + Self::CoerceImplicitCast => "COERCE_IMPLICIT_CAST", + Self::CoerceSqlSyntax => "COERCE_SQL_SYNTAX", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "COERCION_FORM_UNDEFINED" => Some(Self::Undefined), + "COERCE_EXPLICIT_CALL" => Some(Self::CoerceExplicitCall), + "COERCE_EXPLICIT_CAST" => Some(Self::CoerceExplicitCast), + "COERCE_IMPLICIT_CAST" => Some(Self::CoerceImplicitCast), + "COERCE_SQL_SYNTAX" => Some(Self::CoerceSqlSyntax), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum BoolExprType { + Undefined = 0, + AndExpr = 1, + OrExpr = 2, + NotExpr = 3, +} +impl BoolExprType { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Self::Undefined => "BOOL_EXPR_TYPE_UNDEFINED", + Self::AndExpr => "AND_EXPR", + Self::OrExpr => "OR_EXPR", + Self::NotExpr => "NOT_EXPR", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. 
+ pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "BOOL_EXPR_TYPE_UNDEFINED" => Some(Self::Undefined), + "AND_EXPR" => Some(Self::AndExpr), + "OR_EXPR" => Some(Self::OrExpr), + "NOT_EXPR" => Some(Self::NotExpr), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum SubLinkType { + Undefined = 0, + ExistsSublink = 1, + AllSublink = 2, + AnySublink = 3, + RowcompareSublink = 4, + ExprSublink = 5, + MultiexprSublink = 6, + ArraySublink = 7, + CteSublink = 8, +} +impl SubLinkType { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Self::Undefined => "SUB_LINK_TYPE_UNDEFINED", + Self::ExistsSublink => "EXISTS_SUBLINK", + Self::AllSublink => "ALL_SUBLINK", + Self::AnySublink => "ANY_SUBLINK", + Self::RowcompareSublink => "ROWCOMPARE_SUBLINK", + Self::ExprSublink => "EXPR_SUBLINK", + Self::MultiexprSublink => "MULTIEXPR_SUBLINK", + Self::ArraySublink => "ARRAY_SUBLINK", + Self::CteSublink => "CTE_SUBLINK", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. 
+ pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "SUB_LINK_TYPE_UNDEFINED" => Some(Self::Undefined), + "EXISTS_SUBLINK" => Some(Self::ExistsSublink), + "ALL_SUBLINK" => Some(Self::AllSublink), + "ANY_SUBLINK" => Some(Self::AnySublink), + "ROWCOMPARE_SUBLINK" => Some(Self::RowcompareSublink), + "EXPR_SUBLINK" => Some(Self::ExprSublink), + "MULTIEXPR_SUBLINK" => Some(Self::MultiexprSublink), + "ARRAY_SUBLINK" => Some(Self::ArraySublink), + "CTE_SUBLINK" => Some(Self::CteSublink), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum RowCompareType { + Undefined = 0, + RowcompareLt = 1, + RowcompareLe = 2, + RowcompareEq = 3, + RowcompareGe = 4, + RowcompareGt = 5, + RowcompareNe = 6, +} +impl RowCompareType { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Self::Undefined => "ROW_COMPARE_TYPE_UNDEFINED", + Self::RowcompareLt => "ROWCOMPARE_LT", + Self::RowcompareLe => "ROWCOMPARE_LE", + Self::RowcompareEq => "ROWCOMPARE_EQ", + Self::RowcompareGe => "ROWCOMPARE_GE", + Self::RowcompareGt => "ROWCOMPARE_GT", + Self::RowcompareNe => "ROWCOMPARE_NE", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. 
+ pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "ROW_COMPARE_TYPE_UNDEFINED" => Some(Self::Undefined), + "ROWCOMPARE_LT" => Some(Self::RowcompareLt), + "ROWCOMPARE_LE" => Some(Self::RowcompareLe), + "ROWCOMPARE_EQ" => Some(Self::RowcompareEq), + "ROWCOMPARE_GE" => Some(Self::RowcompareGe), + "ROWCOMPARE_GT" => Some(Self::RowcompareGt), + "ROWCOMPARE_NE" => Some(Self::RowcompareNe), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum MinMaxOp { + Undefined = 0, + IsGreatest = 1, + IsLeast = 2, +} +impl MinMaxOp { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Self::Undefined => "MIN_MAX_OP_UNDEFINED", + Self::IsGreatest => "IS_GREATEST", + Self::IsLeast => "IS_LEAST", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. 
+ pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "MIN_MAX_OP_UNDEFINED" => Some(Self::Undefined), + "IS_GREATEST" => Some(Self::IsGreatest), + "IS_LEAST" => Some(Self::IsLeast), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum SqlValueFunctionOp { + SqlvalueFunctionOpUndefined = 0, + SvfopCurrentDate = 1, + SvfopCurrentTime = 2, + SvfopCurrentTimeN = 3, + SvfopCurrentTimestamp = 4, + SvfopCurrentTimestampN = 5, + SvfopLocaltime = 6, + SvfopLocaltimeN = 7, + SvfopLocaltimestamp = 8, + SvfopLocaltimestampN = 9, + SvfopCurrentRole = 10, + SvfopCurrentUser = 11, + SvfopUser = 12, + SvfopSessionUser = 13, + SvfopCurrentCatalog = 14, + SvfopCurrentSchema = 15, +} +impl SqlValueFunctionOp { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. 
+ pub fn as_str_name(&self) -> &'static str { + match self { + Self::SqlvalueFunctionOpUndefined => "SQLVALUE_FUNCTION_OP_UNDEFINED", + Self::SvfopCurrentDate => "SVFOP_CURRENT_DATE", + Self::SvfopCurrentTime => "SVFOP_CURRENT_TIME", + Self::SvfopCurrentTimeN => "SVFOP_CURRENT_TIME_N", + Self::SvfopCurrentTimestamp => "SVFOP_CURRENT_TIMESTAMP", + Self::SvfopCurrentTimestampN => "SVFOP_CURRENT_TIMESTAMP_N", + Self::SvfopLocaltime => "SVFOP_LOCALTIME", + Self::SvfopLocaltimeN => "SVFOP_LOCALTIME_N", + Self::SvfopLocaltimestamp => "SVFOP_LOCALTIMESTAMP", + Self::SvfopLocaltimestampN => "SVFOP_LOCALTIMESTAMP_N", + Self::SvfopCurrentRole => "SVFOP_CURRENT_ROLE", + Self::SvfopCurrentUser => "SVFOP_CURRENT_USER", + Self::SvfopUser => "SVFOP_USER", + Self::SvfopSessionUser => "SVFOP_SESSION_USER", + Self::SvfopCurrentCatalog => "SVFOP_CURRENT_CATALOG", + Self::SvfopCurrentSchema => "SVFOP_CURRENT_SCHEMA", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "SQLVALUE_FUNCTION_OP_UNDEFINED" => Some(Self::SqlvalueFunctionOpUndefined), + "SVFOP_CURRENT_DATE" => Some(Self::SvfopCurrentDate), + "SVFOP_CURRENT_TIME" => Some(Self::SvfopCurrentTime), + "SVFOP_CURRENT_TIME_N" => Some(Self::SvfopCurrentTimeN), + "SVFOP_CURRENT_TIMESTAMP" => Some(Self::SvfopCurrentTimestamp), + "SVFOP_CURRENT_TIMESTAMP_N" => Some(Self::SvfopCurrentTimestampN), + "SVFOP_LOCALTIME" => Some(Self::SvfopLocaltime), + "SVFOP_LOCALTIME_N" => Some(Self::SvfopLocaltimeN), + "SVFOP_LOCALTIMESTAMP" => Some(Self::SvfopLocaltimestamp), + "SVFOP_LOCALTIMESTAMP_N" => Some(Self::SvfopLocaltimestampN), + "SVFOP_CURRENT_ROLE" => Some(Self::SvfopCurrentRole), + "SVFOP_CURRENT_USER" => Some(Self::SvfopCurrentUser), + "SVFOP_USER" => Some(Self::SvfopUser), + "SVFOP_SESSION_USER" => Some(Self::SvfopSessionUser), + "SVFOP_CURRENT_CATALOG" => Some(Self::SvfopCurrentCatalog), + "SVFOP_CURRENT_SCHEMA" => 
Some(Self::SvfopCurrentSchema), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum XmlExprOp { + Undefined = 0, + IsXmlconcat = 1, + IsXmlelement = 2, + IsXmlforest = 3, + IsXmlparse = 4, + IsXmlpi = 5, + IsXmlroot = 6, + IsXmlserialize = 7, + IsDocument = 8, +} +impl XmlExprOp { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Self::Undefined => "XML_EXPR_OP_UNDEFINED", + Self::IsXmlconcat => "IS_XMLCONCAT", + Self::IsXmlelement => "IS_XMLELEMENT", + Self::IsXmlforest => "IS_XMLFOREST", + Self::IsXmlparse => "IS_XMLPARSE", + Self::IsXmlpi => "IS_XMLPI", + Self::IsXmlroot => "IS_XMLROOT", + Self::IsXmlserialize => "IS_XMLSERIALIZE", + Self::IsDocument => "IS_DOCUMENT", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "XML_EXPR_OP_UNDEFINED" => Some(Self::Undefined), + "IS_XMLCONCAT" => Some(Self::IsXmlconcat), + "IS_XMLELEMENT" => Some(Self::IsXmlelement), + "IS_XMLFOREST" => Some(Self::IsXmlforest), + "IS_XMLPARSE" => Some(Self::IsXmlparse), + "IS_XMLPI" => Some(Self::IsXmlpi), + "IS_XMLROOT" => Some(Self::IsXmlroot), + "IS_XMLSERIALIZE" => Some(Self::IsXmlserialize), + "IS_DOCUMENT" => Some(Self::IsDocument), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum XmlOptionType { + Undefined = 0, + XmloptionDocument = 1, + XmloptionContent = 2, +} +impl XmlOptionType { + /// String value of the enum field names used in the ProtoBuf definition. 
+ /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Self::Undefined => "XML_OPTION_TYPE_UNDEFINED", + Self::XmloptionDocument => "XMLOPTION_DOCUMENT", + Self::XmloptionContent => "XMLOPTION_CONTENT", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "XML_OPTION_TYPE_UNDEFINED" => Some(Self::Undefined), + "XMLOPTION_DOCUMENT" => Some(Self::XmloptionDocument), + "XMLOPTION_CONTENT" => Some(Self::XmloptionContent), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum JsonEncoding { + Undefined = 0, + JsEncDefault = 1, + JsEncUtf8 = 2, + JsEncUtf16 = 3, + JsEncUtf32 = 4, +} +impl JsonEncoding { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Self::Undefined => "JSON_ENCODING_UNDEFINED", + Self::JsEncDefault => "JS_ENC_DEFAULT", + Self::JsEncUtf8 => "JS_ENC_UTF8", + Self::JsEncUtf16 => "JS_ENC_UTF16", + Self::JsEncUtf32 => "JS_ENC_UTF32", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. 
+ pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "JSON_ENCODING_UNDEFINED" => Some(Self::Undefined), + "JS_ENC_DEFAULT" => Some(Self::JsEncDefault), + "JS_ENC_UTF8" => Some(Self::JsEncUtf8), + "JS_ENC_UTF16" => Some(Self::JsEncUtf16), + "JS_ENC_UTF32" => Some(Self::JsEncUtf32), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum JsonFormatType { + Undefined = 0, + JsFormatDefault = 1, + JsFormatJson = 2, + JsFormatJsonb = 3, +} +impl JsonFormatType { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Self::Undefined => "JSON_FORMAT_TYPE_UNDEFINED", + Self::JsFormatDefault => "JS_FORMAT_DEFAULT", + Self::JsFormatJson => "JS_FORMAT_JSON", + Self::JsFormatJsonb => "JS_FORMAT_JSONB", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "JSON_FORMAT_TYPE_UNDEFINED" => Some(Self::Undefined), + "JS_FORMAT_DEFAULT" => Some(Self::JsFormatDefault), + "JS_FORMAT_JSON" => Some(Self::JsFormatJson), + "JS_FORMAT_JSONB" => Some(Self::JsFormatJsonb), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum JsonConstructorType { + Undefined = 0, + JsctorJsonObject = 1, + JsctorJsonArray = 2, + JsctorJsonObjectagg = 3, + JsctorJsonArrayagg = 4, + JsctorJsonParse = 5, + JsctorJsonScalar = 6, + JsctorJsonSerialize = 7, +} +impl JsonConstructorType { + /// String value of the enum field names used in the ProtoBuf definition. 
+ /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Self::Undefined => "JSON_CONSTRUCTOR_TYPE_UNDEFINED", + Self::JsctorJsonObject => "JSCTOR_JSON_OBJECT", + Self::JsctorJsonArray => "JSCTOR_JSON_ARRAY", + Self::JsctorJsonObjectagg => "JSCTOR_JSON_OBJECTAGG", + Self::JsctorJsonArrayagg => "JSCTOR_JSON_ARRAYAGG", + Self::JsctorJsonParse => "JSCTOR_JSON_PARSE", + Self::JsctorJsonScalar => "JSCTOR_JSON_SCALAR", + Self::JsctorJsonSerialize => "JSCTOR_JSON_SERIALIZE", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "JSON_CONSTRUCTOR_TYPE_UNDEFINED" => Some(Self::Undefined), + "JSCTOR_JSON_OBJECT" => Some(Self::JsctorJsonObject), + "JSCTOR_JSON_ARRAY" => Some(Self::JsctorJsonArray), + "JSCTOR_JSON_OBJECTAGG" => Some(Self::JsctorJsonObjectagg), + "JSCTOR_JSON_ARRAYAGG" => Some(Self::JsctorJsonArrayagg), + "JSCTOR_JSON_PARSE" => Some(Self::JsctorJsonParse), + "JSCTOR_JSON_SCALAR" => Some(Self::JsctorJsonScalar), + "JSCTOR_JSON_SERIALIZE" => Some(Self::JsctorJsonSerialize), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum JsonValueType { + Undefined = 0, + JsTypeAny = 1, + JsTypeObject = 2, + JsTypeArray = 3, + JsTypeScalar = 4, +} +impl JsonValueType { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. 
+ pub fn as_str_name(&self) -> &'static str { + match self { + Self::Undefined => "JSON_VALUE_TYPE_UNDEFINED", + Self::JsTypeAny => "JS_TYPE_ANY", + Self::JsTypeObject => "JS_TYPE_OBJECT", + Self::JsTypeArray => "JS_TYPE_ARRAY", + Self::JsTypeScalar => "JS_TYPE_SCALAR", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "JSON_VALUE_TYPE_UNDEFINED" => Some(Self::Undefined), + "JS_TYPE_ANY" => Some(Self::JsTypeAny), + "JS_TYPE_OBJECT" => Some(Self::JsTypeObject), + "JS_TYPE_ARRAY" => Some(Self::JsTypeArray), + "JS_TYPE_SCALAR" => Some(Self::JsTypeScalar), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum JsonWrapper { + Undefined = 0, + JswUnspec = 1, + JswNone = 2, + JswConditional = 3, + JswUnconditional = 4, +} +impl JsonWrapper { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Self::Undefined => "JSON_WRAPPER_UNDEFINED", + Self::JswUnspec => "JSW_UNSPEC", + Self::JswNone => "JSW_NONE", + Self::JswConditional => "JSW_CONDITIONAL", + Self::JswUnconditional => "JSW_UNCONDITIONAL", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. 
+ pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "JSON_WRAPPER_UNDEFINED" => Some(Self::Undefined), + "JSW_UNSPEC" => Some(Self::JswUnspec), + "JSW_NONE" => Some(Self::JswNone), + "JSW_CONDITIONAL" => Some(Self::JswConditional), + "JSW_UNCONDITIONAL" => Some(Self::JswUnconditional), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum JsonBehaviorType { + Undefined = 0, + JsonBehaviorNull = 1, + JsonBehaviorError = 2, + JsonBehaviorEmpty = 3, + JsonBehaviorTrue = 4, + JsonBehaviorFalse = 5, + JsonBehaviorUnknown = 6, + JsonBehaviorEmptyArray = 7, + JsonBehaviorEmptyObject = 8, + JsonBehaviorDefault = 9, +} +impl JsonBehaviorType { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Self::Undefined => "JSON_BEHAVIOR_TYPE_UNDEFINED", + Self::JsonBehaviorNull => "JSON_BEHAVIOR_NULL", + Self::JsonBehaviorError => "JSON_BEHAVIOR_ERROR", + Self::JsonBehaviorEmpty => "JSON_BEHAVIOR_EMPTY", + Self::JsonBehaviorTrue => "JSON_BEHAVIOR_TRUE", + Self::JsonBehaviorFalse => "JSON_BEHAVIOR_FALSE", + Self::JsonBehaviorUnknown => "JSON_BEHAVIOR_UNKNOWN", + Self::JsonBehaviorEmptyArray => "JSON_BEHAVIOR_EMPTY_ARRAY", + Self::JsonBehaviorEmptyObject => "JSON_BEHAVIOR_EMPTY_OBJECT", + Self::JsonBehaviorDefault => "JSON_BEHAVIOR_DEFAULT", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. 
+ pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "JSON_BEHAVIOR_TYPE_UNDEFINED" => Some(Self::Undefined), + "JSON_BEHAVIOR_NULL" => Some(Self::JsonBehaviorNull), + "JSON_BEHAVIOR_ERROR" => Some(Self::JsonBehaviorError), + "JSON_BEHAVIOR_EMPTY" => Some(Self::JsonBehaviorEmpty), + "JSON_BEHAVIOR_TRUE" => Some(Self::JsonBehaviorTrue), + "JSON_BEHAVIOR_FALSE" => Some(Self::JsonBehaviorFalse), + "JSON_BEHAVIOR_UNKNOWN" => Some(Self::JsonBehaviorUnknown), + "JSON_BEHAVIOR_EMPTY_ARRAY" => Some(Self::JsonBehaviorEmptyArray), + "JSON_BEHAVIOR_EMPTY_OBJECT" => Some(Self::JsonBehaviorEmptyObject), + "JSON_BEHAVIOR_DEFAULT" => Some(Self::JsonBehaviorDefault), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum JsonExprOp { + Undefined = 0, + JsonExistsOp = 1, + JsonQueryOp = 2, + JsonValueOp = 3, + JsonTableOp = 4, +} +impl JsonExprOp { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Self::Undefined => "JSON_EXPR_OP_UNDEFINED", + Self::JsonExistsOp => "JSON_EXISTS_OP", + Self::JsonQueryOp => "JSON_QUERY_OP", + Self::JsonValueOp => "JSON_VALUE_OP", + Self::JsonTableOp => "JSON_TABLE_OP", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. 
+ pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "JSON_EXPR_OP_UNDEFINED" => Some(Self::Undefined), + "JSON_EXISTS_OP" => Some(Self::JsonExistsOp), + "JSON_QUERY_OP" => Some(Self::JsonQueryOp), + "JSON_VALUE_OP" => Some(Self::JsonValueOp), + "JSON_TABLE_OP" => Some(Self::JsonTableOp), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum NullTestType { + Undefined = 0, + IsNull = 1, + IsNotNull = 2, +} +impl NullTestType { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Self::Undefined => "NULL_TEST_TYPE_UNDEFINED", + Self::IsNull => "IS_NULL", + Self::IsNotNull => "IS_NOT_NULL", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "NULL_TEST_TYPE_UNDEFINED" => Some(Self::Undefined), + "IS_NULL" => Some(Self::IsNull), + "IS_NOT_NULL" => Some(Self::IsNotNull), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum BoolTestType { + Undefined = 0, + IsTrue = 1, + IsNotTrue = 2, + IsFalse = 3, + IsNotFalse = 4, + IsUnknown = 5, + IsNotUnknown = 6, +} +impl BoolTestType { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. 
+ pub fn as_str_name(&self) -> &'static str { + match self { + Self::Undefined => "BOOL_TEST_TYPE_UNDEFINED", + Self::IsTrue => "IS_TRUE", + Self::IsNotTrue => "IS_NOT_TRUE", + Self::IsFalse => "IS_FALSE", + Self::IsNotFalse => "IS_NOT_FALSE", + Self::IsUnknown => "IS_UNKNOWN", + Self::IsNotUnknown => "IS_NOT_UNKNOWN", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "BOOL_TEST_TYPE_UNDEFINED" => Some(Self::Undefined), + "IS_TRUE" => Some(Self::IsTrue), + "IS_NOT_TRUE" => Some(Self::IsNotTrue), + "IS_FALSE" => Some(Self::IsFalse), + "IS_NOT_FALSE" => Some(Self::IsNotFalse), + "IS_UNKNOWN" => Some(Self::IsUnknown), + "IS_NOT_UNKNOWN" => Some(Self::IsNotUnknown), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum MergeMatchKind { + Undefined = 0, + MergeWhenMatched = 1, + MergeWhenNotMatchedBySource = 2, + MergeWhenNotMatchedByTarget = 3, +} +impl MergeMatchKind { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Self::Undefined => "MERGE_MATCH_KIND_UNDEFINED", + Self::MergeWhenMatched => "MERGE_WHEN_MATCHED", + Self::MergeWhenNotMatchedBySource => "MERGE_WHEN_NOT_MATCHED_BY_SOURCE", + Self::MergeWhenNotMatchedByTarget => "MERGE_WHEN_NOT_MATCHED_BY_TARGET", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. 
+ pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "MERGE_MATCH_KIND_UNDEFINED" => Some(Self::Undefined), + "MERGE_WHEN_MATCHED" => Some(Self::MergeWhenMatched), + "MERGE_WHEN_NOT_MATCHED_BY_SOURCE" => Some(Self::MergeWhenNotMatchedBySource), + "MERGE_WHEN_NOT_MATCHED_BY_TARGET" => Some(Self::MergeWhenNotMatchedByTarget), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum CmdType { + Undefined = 0, + CmdUnknown = 1, + CmdSelect = 2, + CmdUpdate = 3, + CmdInsert = 4, + CmdDelete = 5, + CmdMerge = 6, + CmdUtility = 7, + CmdNothing = 8, +} +impl CmdType { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Self::Undefined => "CMD_TYPE_UNDEFINED", + Self::CmdUnknown => "CMD_UNKNOWN", + Self::CmdSelect => "CMD_SELECT", + Self::CmdUpdate => "CMD_UPDATE", + Self::CmdInsert => "CMD_INSERT", + Self::CmdDelete => "CMD_DELETE", + Self::CmdMerge => "CMD_MERGE", + Self::CmdUtility => "CMD_UTILITY", + Self::CmdNothing => "CMD_NOTHING", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. 
+ pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "CMD_TYPE_UNDEFINED" => Some(Self::Undefined), + "CMD_UNKNOWN" => Some(Self::CmdUnknown), + "CMD_SELECT" => Some(Self::CmdSelect), + "CMD_UPDATE" => Some(Self::CmdUpdate), + "CMD_INSERT" => Some(Self::CmdInsert), + "CMD_DELETE" => Some(Self::CmdDelete), + "CMD_MERGE" => Some(Self::CmdMerge), + "CMD_UTILITY" => Some(Self::CmdUtility), + "CMD_NOTHING" => Some(Self::CmdNothing), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum JoinType { + Undefined = 0, + JoinInner = 1, + JoinLeft = 2, + JoinFull = 3, + JoinRight = 4, + JoinSemi = 5, + JoinAnti = 6, + JoinRightAnti = 7, + JoinUniqueOuter = 8, + JoinUniqueInner = 9, +} +impl JoinType { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Self::Undefined => "JOIN_TYPE_UNDEFINED", + Self::JoinInner => "JOIN_INNER", + Self::JoinLeft => "JOIN_LEFT", + Self::JoinFull => "JOIN_FULL", + Self::JoinRight => "JOIN_RIGHT", + Self::JoinSemi => "JOIN_SEMI", + Self::JoinAnti => "JOIN_ANTI", + Self::JoinRightAnti => "JOIN_RIGHT_ANTI", + Self::JoinUniqueOuter => "JOIN_UNIQUE_OUTER", + Self::JoinUniqueInner => "JOIN_UNIQUE_INNER", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. 
+ pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "JOIN_TYPE_UNDEFINED" => Some(Self::Undefined), + "JOIN_INNER" => Some(Self::JoinInner), + "JOIN_LEFT" => Some(Self::JoinLeft), + "JOIN_FULL" => Some(Self::JoinFull), + "JOIN_RIGHT" => Some(Self::JoinRight), + "JOIN_SEMI" => Some(Self::JoinSemi), + "JOIN_ANTI" => Some(Self::JoinAnti), + "JOIN_RIGHT_ANTI" => Some(Self::JoinRightAnti), + "JOIN_UNIQUE_OUTER" => Some(Self::JoinUniqueOuter), + "JOIN_UNIQUE_INNER" => Some(Self::JoinUniqueInner), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum AggStrategy { + Undefined = 0, + AggPlain = 1, + AggSorted = 2, + AggHashed = 3, + AggMixed = 4, +} +impl AggStrategy { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Self::Undefined => "AGG_STRATEGY_UNDEFINED", + Self::AggPlain => "AGG_PLAIN", + Self::AggSorted => "AGG_SORTED", + Self::AggHashed => "AGG_HASHED", + Self::AggMixed => "AGG_MIXED", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "AGG_STRATEGY_UNDEFINED" => Some(Self::Undefined), + "AGG_PLAIN" => Some(Self::AggPlain), + "AGG_SORTED" => Some(Self::AggSorted), + "AGG_HASHED" => Some(Self::AggHashed), + "AGG_MIXED" => Some(Self::AggMixed), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum AggSplit { + Undefined = 0, + AggsplitSimple = 1, + AggsplitInitialSerial = 2, + AggsplitFinalDeserial = 3, +} +impl AggSplit { + /// String value of the enum field names used in the ProtoBuf definition. 
+ /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Self::Undefined => "AGG_SPLIT_UNDEFINED", + Self::AggsplitSimple => "AGGSPLIT_SIMPLE", + Self::AggsplitInitialSerial => "AGGSPLIT_INITIAL_SERIAL", + Self::AggsplitFinalDeserial => "AGGSPLIT_FINAL_DESERIAL", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "AGG_SPLIT_UNDEFINED" => Some(Self::Undefined), + "AGGSPLIT_SIMPLE" => Some(Self::AggsplitSimple), + "AGGSPLIT_INITIAL_SERIAL" => Some(Self::AggsplitInitialSerial), + "AGGSPLIT_FINAL_DESERIAL" => Some(Self::AggsplitFinalDeserial), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum SetOpCmd { + Undefined = 0, + SetopcmdIntersect = 1, + SetopcmdIntersectAll = 2, + SetopcmdExcept = 3, + SetopcmdExceptAll = 4, +} +impl SetOpCmd { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Self::Undefined => "SET_OP_CMD_UNDEFINED", + Self::SetopcmdIntersect => "SETOPCMD_INTERSECT", + Self::SetopcmdIntersectAll => "SETOPCMD_INTERSECT_ALL", + Self::SetopcmdExcept => "SETOPCMD_EXCEPT", + Self::SetopcmdExceptAll => "SETOPCMD_EXCEPT_ALL", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. 
+ pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "SET_OP_CMD_UNDEFINED" => Some(Self::Undefined), + "SETOPCMD_INTERSECT" => Some(Self::SetopcmdIntersect), + "SETOPCMD_INTERSECT_ALL" => Some(Self::SetopcmdIntersectAll), + "SETOPCMD_EXCEPT" => Some(Self::SetopcmdExcept), + "SETOPCMD_EXCEPT_ALL" => Some(Self::SetopcmdExceptAll), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum SetOpStrategy { + Undefined = 0, + SetopSorted = 1, + SetopHashed = 2, +} +impl SetOpStrategy { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Self::Undefined => "SET_OP_STRATEGY_UNDEFINED", + Self::SetopSorted => "SETOP_SORTED", + Self::SetopHashed => "SETOP_HASHED", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "SET_OP_STRATEGY_UNDEFINED" => Some(Self::Undefined), + "SETOP_SORTED" => Some(Self::SetopSorted), + "SETOP_HASHED" => Some(Self::SetopHashed), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum OnConflictAction { + Undefined = 0, + OnconflictNone = 1, + OnconflictNothing = 2, + OnconflictUpdate = 3, +} +impl OnConflictAction { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. 
+ pub fn as_str_name(&self) -> &'static str { + match self { + Self::Undefined => "ON_CONFLICT_ACTION_UNDEFINED", + Self::OnconflictNone => "ONCONFLICT_NONE", + Self::OnconflictNothing => "ONCONFLICT_NOTHING", + Self::OnconflictUpdate => "ONCONFLICT_UPDATE", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "ON_CONFLICT_ACTION_UNDEFINED" => Some(Self::Undefined), + "ONCONFLICT_NONE" => Some(Self::OnconflictNone), + "ONCONFLICT_NOTHING" => Some(Self::OnconflictNothing), + "ONCONFLICT_UPDATE" => Some(Self::OnconflictUpdate), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum LimitOption { + Undefined = 0, + Default = 1, + Count = 2, + WithTies = 3, +} +impl LimitOption { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Self::Undefined => "LIMIT_OPTION_UNDEFINED", + Self::Default => "LIMIT_OPTION_DEFAULT", + Self::Count => "LIMIT_OPTION_COUNT", + Self::WithTies => "LIMIT_OPTION_WITH_TIES", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. 
+ pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "LIMIT_OPTION_UNDEFINED" => Some(Self::Undefined), + "LIMIT_OPTION_DEFAULT" => Some(Self::Default), + "LIMIT_OPTION_COUNT" => Some(Self::Count), + "LIMIT_OPTION_WITH_TIES" => Some(Self::WithTies), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum LockClauseStrength { + Undefined = 0, + LcsNone = 1, + LcsForkeyshare = 2, + LcsForshare = 3, + LcsFornokeyupdate = 4, + LcsForupdate = 5, +} +impl LockClauseStrength { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Self::Undefined => "LOCK_CLAUSE_STRENGTH_UNDEFINED", + Self::LcsNone => "LCS_NONE", + Self::LcsForkeyshare => "LCS_FORKEYSHARE", + Self::LcsForshare => "LCS_FORSHARE", + Self::LcsFornokeyupdate => "LCS_FORNOKEYUPDATE", + Self::LcsForupdate => "LCS_FORUPDATE", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "LOCK_CLAUSE_STRENGTH_UNDEFINED" => Some(Self::Undefined), + "LCS_NONE" => Some(Self::LcsNone), + "LCS_FORKEYSHARE" => Some(Self::LcsForkeyshare), + "LCS_FORSHARE" => Some(Self::LcsForshare), + "LCS_FORNOKEYUPDATE" => Some(Self::LcsFornokeyupdate), + "LCS_FORUPDATE" => Some(Self::LcsForupdate), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum LockWaitPolicy { + Undefined = 0, + LockWaitBlock = 1, + LockWaitSkip = 2, + LockWaitError = 3, +} +impl LockWaitPolicy { + /// String value of the enum field names used in the ProtoBuf definition. 
+ /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Self::Undefined => "LOCK_WAIT_POLICY_UNDEFINED", + Self::LockWaitBlock => "LockWaitBlock", + Self::LockWaitSkip => "LockWaitSkip", + Self::LockWaitError => "LockWaitError", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "LOCK_WAIT_POLICY_UNDEFINED" => Some(Self::Undefined), + "LockWaitBlock" => Some(Self::LockWaitBlock), + "LockWaitSkip" => Some(Self::LockWaitSkip), + "LockWaitError" => Some(Self::LockWaitError), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum LockTupleMode { + Undefined = 0, + LockTupleKeyShare = 1, + LockTupleShare = 2, + LockTupleNoKeyExclusive = 3, + LockTupleExclusive = 4, +} +impl LockTupleMode { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Self::Undefined => "LOCK_TUPLE_MODE_UNDEFINED", + Self::LockTupleKeyShare => "LockTupleKeyShare", + Self::LockTupleShare => "LockTupleShare", + Self::LockTupleNoKeyExclusive => "LockTupleNoKeyExclusive", + Self::LockTupleExclusive => "LockTupleExclusive", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. 
+ pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "LOCK_TUPLE_MODE_UNDEFINED" => Some(Self::Undefined), + "LockTupleKeyShare" => Some(Self::LockTupleKeyShare), + "LockTupleShare" => Some(Self::LockTupleShare), + "LockTupleNoKeyExclusive" => Some(Self::LockTupleNoKeyExclusive), + "LockTupleExclusive" => Some(Self::LockTupleExclusive), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum KeywordKind { + NoKeyword = 0, + UnreservedKeyword = 1, + ColNameKeyword = 2, + TypeFuncNameKeyword = 3, + ReservedKeyword = 4, +} +impl KeywordKind { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. + pub fn as_str_name(&self) -> &'static str { + match self { + Self::NoKeyword => "NO_KEYWORD", + Self::UnreservedKeyword => "UNRESERVED_KEYWORD", + Self::ColNameKeyword => "COL_NAME_KEYWORD", + Self::TypeFuncNameKeyword => "TYPE_FUNC_NAME_KEYWORD", + Self::ReservedKeyword => "RESERVED_KEYWORD", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. 
+ pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "NO_KEYWORD" => Some(Self::NoKeyword), + "UNRESERVED_KEYWORD" => Some(Self::UnreservedKeyword), + "COL_NAME_KEYWORD" => Some(Self::ColNameKeyword), + "TYPE_FUNC_NAME_KEYWORD" => Some(Self::TypeFuncNameKeyword), + "RESERVED_KEYWORD" => Some(Self::ReservedKeyword), + _ => None, + } + } +} +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] +#[repr(i32)] +pub enum Token { + Nul = 0, + /// Single-character tokens that are returned 1:1 (identical with "self" list in scan.l) + /// Either supporting syntax, or single-character operators (some can be both) + /// Also see + /// + /// "$" + Ascii36 = 36, + /// "%" + Ascii37 = 37, + /// "(" + Ascii40 = 40, + /// ")" + Ascii41 = 41, + /// "*" + Ascii42 = 42, + /// "+" + Ascii43 = 43, + /// "," + Ascii44 = 44, + /// "-" + Ascii45 = 45, + /// "." + Ascii46 = 46, + /// "/" + Ascii47 = 47, + /// ":" + Ascii58 = 58, + /// ";" + Ascii59 = 59, + /// "<" + Ascii60 = 60, + /// "=" + Ascii61 = 61, + /// ">" + Ascii62 = 62, + /// "?" 
+ Ascii63 = 63, + /// "[" + Ascii91 = 91, + /// "\" + Ascii92 = 92, + /// "]" + Ascii93 = 93, + /// "^" + Ascii94 = 94, + /// Named tokens in scan.l + Ident = 258, + Uident = 259, + Fconst = 260, + Sconst = 261, + Usconst = 262, + Bconst = 263, + Xconst = 264, + Op = 265, + Iconst = 266, + Param = 267, + Typecast = 268, + DotDot = 269, + ColonEquals = 270, + EqualsGreater = 271, + LessEquals = 272, + GreaterEquals = 273, + NotEquals = 274, + SqlComment = 275, + CComment = 276, + AbortP = 277, + Absent = 278, + AbsoluteP = 279, + Access = 280, + Action = 281, + AddP = 282, + Admin = 283, + After = 284, + Aggregate = 285, + All = 286, + Also = 287, + Alter = 288, + Always = 289, + Analyse = 290, + Analyze = 291, + And = 292, + Any = 293, + Array = 294, + As = 295, + Asc = 296, + Asensitive = 297, + Assertion = 298, + Assignment = 299, + Asymmetric = 300, + Atomic = 301, + At = 302, + Attach = 303, + Attribute = 304, + Authorization = 305, + Backward = 306, + Before = 307, + BeginP = 308, + Between = 309, + Bigint = 310, + Binary = 311, + Bit = 312, + BooleanP = 313, + Both = 314, + Breadth = 315, + By = 316, + Cache = 317, + Call = 318, + Called = 319, + Cascade = 320, + Cascaded = 321, + Case = 322, + Cast = 323, + CatalogP = 324, + Chain = 325, + CharP = 326, + Character = 327, + Characteristics = 328, + Check = 329, + Checkpoint = 330, + Class = 331, + Close = 332, + Cluster = 333, + Coalesce = 334, + Collate = 335, + Collation = 336, + Column = 337, + Columns = 338, + Comment = 339, + Comments = 340, + Commit = 341, + Committed = 342, + Compression = 343, + Concurrently = 344, + Conditional = 345, + Configuration = 346, + Conflict = 347, + Connection = 348, + Constraint = 349, + Constraints = 350, + ContentP = 351, + ContinueP = 352, + ConversionP = 353, + Copy = 354, + Cost = 355, + Create = 356, + Cross = 357, + Csv = 358, + Cube = 359, + CurrentP = 360, + CurrentCatalog = 361, + CurrentDate = 362, + CurrentRole = 363, + CurrentSchema = 364, + CurrentTime = 
365, + CurrentTimestamp = 366, + CurrentUser = 367, + Cursor = 368, + Cycle = 369, + DataP = 370, + Database = 371, + DayP = 372, + Deallocate = 373, + Dec = 374, + DecimalP = 375, + Declare = 376, + Default = 377, + Defaults = 378, + Deferrable = 379, + Deferred = 380, + Definer = 381, + DeleteP = 382, + Delimiter = 383, + Delimiters = 384, + Depends = 385, + Depth = 386, + Desc = 387, + Detach = 388, + Dictionary = 389, + DisableP = 390, + Discard = 391, + Distinct = 392, + Do = 393, + DocumentP = 394, + DomainP = 395, + DoubleP = 396, + Drop = 397, + Each = 398, + Else = 399, + EmptyP = 400, + EnableP = 401, + Encoding = 402, + Encrypted = 403, + EndP = 404, + EnumP = 405, + ErrorP = 406, + Escape = 407, + Event = 408, + Except = 409, + Exclude = 410, + Excluding = 411, + Exclusive = 412, + Execute = 413, + Exists = 414, + Explain = 415, + Expression = 416, + Extension = 417, + External = 418, + Extract = 419, + FalseP = 420, + Family = 421, + Fetch = 422, + Filter = 423, + Finalize = 424, + FirstP = 425, + FloatP = 426, + Following = 427, + For = 428, + Force = 429, + Foreign = 430, + Format = 431, + Forward = 432, + Freeze = 433, + From = 434, + Full = 435, + Function = 436, + Functions = 437, + Generated = 438, + Global = 439, + Grant = 440, + Granted = 441, + Greatest = 442, + GroupP = 443, + Grouping = 444, + Groups = 445, + Handler = 446, + Having = 447, + HeaderP = 448, + Hold = 449, + HourP = 450, + IdentityP = 451, + IfP = 452, + Ilike = 453, + Immediate = 454, + Immutable = 455, + ImplicitP = 456, + ImportP = 457, + InP = 458, + Include = 459, + Including = 460, + Increment = 461, + Indent = 462, + Index = 463, + Indexes = 464, + Inherit = 465, + Inherits = 466, + Initially = 467, + InlineP = 468, + InnerP = 469, + Inout = 470, + InputP = 471, + Insensitive = 472, + Insert = 473, + Instead = 474, + IntP = 475, + Integer = 476, + Intersect = 477, + Interval = 478, + Into = 479, + Invoker = 480, + Is = 481, + Isnull = 482, + Isolation = 483, + Join = 
484, + Json = 485, + JsonArray = 486, + JsonArrayagg = 487, + JsonExists = 488, + JsonObject = 489, + JsonObjectagg = 490, + JsonQuery = 491, + JsonScalar = 492, + JsonSerialize = 493, + JsonTable = 494, + JsonValue = 495, + Keep = 496, + Key = 497, + Keys = 498, + Label = 499, + Language = 500, + LargeP = 501, + LastP = 502, + LateralP = 503, + Leading = 504, + Leakproof = 505, + Least = 506, + Left = 507, + Level = 508, + Like = 509, + Limit = 510, + Listen = 511, + Load = 512, + Local = 513, + Localtime = 514, + Localtimestamp = 515, + Location = 516, + LockP = 517, + Locked = 518, + Logged = 519, + Mapping = 520, + Match = 521, + Matched = 522, + Materialized = 523, + Maxvalue = 524, + Merge = 525, + MergeAction = 526, + Method = 527, + MinuteP = 528, + Minvalue = 529, + Mode = 530, + MonthP = 531, + Move = 532, + NameP = 533, + Names = 534, + National = 535, + Natural = 536, + Nchar = 537, + Nested = 538, + New = 539, + Next = 540, + Nfc = 541, + Nfd = 542, + Nfkc = 543, + Nfkd = 544, + No = 545, + None = 546, + Normalize = 547, + Normalized = 548, + Not = 549, + Nothing = 550, + Notify = 551, + Notnull = 552, + Nowait = 553, + NullP = 554, + Nullif = 555, + NullsP = 556, + Numeric = 557, + ObjectP = 558, + Of = 559, + Off = 560, + Offset = 561, + Oids = 562, + Old = 563, + Omit = 564, + On = 565, + Only = 566, + Operator = 567, + Option = 568, + Options = 569, + Or = 570, + Order = 571, + Ordinality = 572, + Others = 573, + OutP = 574, + OuterP = 575, + Over = 576, + Overlaps = 577, + Overlay = 578, + Overriding = 579, + Owned = 580, + Owner = 581, + Parallel = 582, + Parameter = 583, + Parser = 584, + Partial = 585, + Partition = 586, + Passing = 587, + Password = 588, + Path = 589, + Placing = 590, + Plan = 591, + Plans = 592, + Policy = 593, + Position = 594, + Preceding = 595, + Precision = 596, + Preserve = 597, + Prepare = 598, + Prepared = 599, + Primary = 600, + Prior = 601, + Privileges = 602, + Procedural = 603, + Procedure = 604, + Procedures = 
605, + Program = 606, + Publication = 607, + Quote = 608, + Quotes = 609, + Range = 610, + Read = 611, + Real = 612, + Reassign = 613, + Recheck = 614, + Recursive = 615, + RefP = 616, + References = 617, + Referencing = 618, + Refresh = 619, + Reindex = 620, + RelativeP = 621, + Release = 622, + Rename = 623, + Repeatable = 624, + Replace = 625, + Replica = 626, + Reset = 627, + Restart = 628, + Restrict = 629, + Return = 630, + Returning = 631, + Returns = 632, + Revoke = 633, + Right = 634, + Role = 635, + Rollback = 636, + Rollup = 637, + Routine = 638, + Routines = 639, + Row = 640, + Rows = 641, + Rule = 642, + Savepoint = 643, + Scalar = 644, + Schema = 645, + Schemas = 646, + Scroll = 647, + Search = 648, + SecondP = 649, + Security = 650, + Select = 651, + Sequence = 652, + Sequences = 653, + Serializable = 654, + Server = 655, + Session = 656, + SessionUser = 657, + Set = 658, + Sets = 659, + Setof = 660, + Share = 661, + Show = 662, + Similar = 663, + Simple = 664, + Skip = 665, + Smallint = 666, + Snapshot = 667, + Some = 668, + Source = 669, + SqlP = 670, + Stable = 671, + StandaloneP = 672, + Start = 673, + Statement = 674, + Statistics = 675, + Stdin = 676, + Stdout = 677, + Storage = 678, + Stored = 679, + StrictP = 680, + StringP = 681, + StripP = 682, + Subscription = 683, + Substring = 684, + Support = 685, + Symmetric = 686, + Sysid = 687, + SystemP = 688, + SystemUser = 689, + Table = 690, + Tables = 691, + Tablesample = 692, + Tablespace = 693, + Target = 694, + Temp = 695, + Template = 696, + Temporary = 697, + TextP = 698, + Then = 699, + Ties = 700, + Time = 701, + Timestamp = 702, + To = 703, + Trailing = 704, + Transaction = 705, + Transform = 706, + Treat = 707, + Trigger = 708, + Trim = 709, + TrueP = 710, + Truncate = 711, + Trusted = 712, + TypeP = 713, + TypesP = 714, + Uescape = 715, + Unbounded = 716, + Unconditional = 717, + Uncommitted = 718, + Unencrypted = 719, + Union = 720, + Unique = 721, + Unknown = 722, + Unlisten = 723, + 
Unlogged = 724, + Until = 725, + Update = 726, + User = 727, + Using = 728, + Vacuum = 729, + Valid = 730, + Validate = 731, + Validator = 732, + ValueP = 733, + Values = 734, + Varchar = 735, + Variadic = 736, + Varying = 737, + Verbose = 738, + VersionP = 739, + View = 740, + Views = 741, + Volatile = 742, + When = 743, + Where = 744, + WhitespaceP = 745, + Window = 746, + With = 747, + Within = 748, + Without = 749, + Work = 750, + Wrapper = 751, + Write = 752, + XmlP = 753, + Xmlattributes = 754, + Xmlconcat = 755, + Xmlelement = 756, + Xmlexists = 757, + Xmlforest = 758, + Xmlnamespaces = 759, + Xmlparse = 760, + Xmlpi = 761, + Xmlroot = 762, + Xmlserialize = 763, + Xmltable = 764, + YearP = 765, + YesP = 766, + Zone = 767, + FormatLa = 768, + NotLa = 769, + NullsLa = 770, + WithLa = 771, + WithoutLa = 772, + ModeTypeName = 773, + ModePlpgsqlExpr = 774, + ModePlpgsqlAssign1 = 775, + ModePlpgsqlAssign2 = 776, + ModePlpgsqlAssign3 = 777, + Uminus = 778, +} +impl Token { + /// String value of the enum field names used in the ProtoBuf definition. + /// + /// The values are not transformed in any way and thus are considered stable + /// (if the ProtoBuf definition does not change) and safe for programmatic use. 
+ pub fn as_str_name(&self) -> &'static str { + match self { + Self::Nul => "NUL", + Self::Ascii36 => "ASCII_36", + Self::Ascii37 => "ASCII_37", + Self::Ascii40 => "ASCII_40", + Self::Ascii41 => "ASCII_41", + Self::Ascii42 => "ASCII_42", + Self::Ascii43 => "ASCII_43", + Self::Ascii44 => "ASCII_44", + Self::Ascii45 => "ASCII_45", + Self::Ascii46 => "ASCII_46", + Self::Ascii47 => "ASCII_47", + Self::Ascii58 => "ASCII_58", + Self::Ascii59 => "ASCII_59", + Self::Ascii60 => "ASCII_60", + Self::Ascii61 => "ASCII_61", + Self::Ascii62 => "ASCII_62", + Self::Ascii63 => "ASCII_63", + Self::Ascii91 => "ASCII_91", + Self::Ascii92 => "ASCII_92", + Self::Ascii93 => "ASCII_93", + Self::Ascii94 => "ASCII_94", + Self::Ident => "IDENT", + Self::Uident => "UIDENT", + Self::Fconst => "FCONST", + Self::Sconst => "SCONST", + Self::Usconst => "USCONST", + Self::Bconst => "BCONST", + Self::Xconst => "XCONST", + Self::Op => "Op", + Self::Iconst => "ICONST", + Self::Param => "PARAM", + Self::Typecast => "TYPECAST", + Self::DotDot => "DOT_DOT", + Self::ColonEquals => "COLON_EQUALS", + Self::EqualsGreater => "EQUALS_GREATER", + Self::LessEquals => "LESS_EQUALS", + Self::GreaterEquals => "GREATER_EQUALS", + Self::NotEquals => "NOT_EQUALS", + Self::SqlComment => "SQL_COMMENT", + Self::CComment => "C_COMMENT", + Self::AbortP => "ABORT_P", + Self::Absent => "ABSENT", + Self::AbsoluteP => "ABSOLUTE_P", + Self::Access => "ACCESS", + Self::Action => "ACTION", + Self::AddP => "ADD_P", + Self::Admin => "ADMIN", + Self::After => "AFTER", + Self::Aggregate => "AGGREGATE", + Self::All => "ALL", + Self::Also => "ALSO", + Self::Alter => "ALTER", + Self::Always => "ALWAYS", + Self::Analyse => "ANALYSE", + Self::Analyze => "ANALYZE", + Self::And => "AND", + Self::Any => "ANY", + Self::Array => "ARRAY", + Self::As => "AS", + Self::Asc => "ASC", + Self::Asensitive => "ASENSITIVE", + Self::Assertion => "ASSERTION", + Self::Assignment => "ASSIGNMENT", + Self::Asymmetric => "ASYMMETRIC", + Self::Atomic => 
"ATOMIC", + Self::At => "AT", + Self::Attach => "ATTACH", + Self::Attribute => "ATTRIBUTE", + Self::Authorization => "AUTHORIZATION", + Self::Backward => "BACKWARD", + Self::Before => "BEFORE", + Self::BeginP => "BEGIN_P", + Self::Between => "BETWEEN", + Self::Bigint => "BIGINT", + Self::Binary => "BINARY", + Self::Bit => "BIT", + Self::BooleanP => "BOOLEAN_P", + Self::Both => "BOTH", + Self::Breadth => "BREADTH", + Self::By => "BY", + Self::Cache => "CACHE", + Self::Call => "CALL", + Self::Called => "CALLED", + Self::Cascade => "CASCADE", + Self::Cascaded => "CASCADED", + Self::Case => "CASE", + Self::Cast => "CAST", + Self::CatalogP => "CATALOG_P", + Self::Chain => "CHAIN", + Self::CharP => "CHAR_P", + Self::Character => "CHARACTER", + Self::Characteristics => "CHARACTERISTICS", + Self::Check => "CHECK", + Self::Checkpoint => "CHECKPOINT", + Self::Class => "CLASS", + Self::Close => "CLOSE", + Self::Cluster => "CLUSTER", + Self::Coalesce => "COALESCE", + Self::Collate => "COLLATE", + Self::Collation => "COLLATION", + Self::Column => "COLUMN", + Self::Columns => "COLUMNS", + Self::Comment => "COMMENT", + Self::Comments => "COMMENTS", + Self::Commit => "COMMIT", + Self::Committed => "COMMITTED", + Self::Compression => "COMPRESSION", + Self::Concurrently => "CONCURRENTLY", + Self::Conditional => "CONDITIONAL", + Self::Configuration => "CONFIGURATION", + Self::Conflict => "CONFLICT", + Self::Connection => "CONNECTION", + Self::Constraint => "CONSTRAINT", + Self::Constraints => "CONSTRAINTS", + Self::ContentP => "CONTENT_P", + Self::ContinueP => "CONTINUE_P", + Self::ConversionP => "CONVERSION_P", + Self::Copy => "COPY", + Self::Cost => "COST", + Self::Create => "CREATE", + Self::Cross => "CROSS", + Self::Csv => "CSV", + Self::Cube => "CUBE", + Self::CurrentP => "CURRENT_P", + Self::CurrentCatalog => "CURRENT_CATALOG", + Self::CurrentDate => "CURRENT_DATE", + Self::CurrentRole => "CURRENT_ROLE", + Self::CurrentSchema => "CURRENT_SCHEMA", + Self::CurrentTime => 
"CURRENT_TIME", + Self::CurrentTimestamp => "CURRENT_TIMESTAMP", + Self::CurrentUser => "CURRENT_USER", + Self::Cursor => "CURSOR", + Self::Cycle => "CYCLE", + Self::DataP => "DATA_P", + Self::Database => "DATABASE", + Self::DayP => "DAY_P", + Self::Deallocate => "DEALLOCATE", + Self::Dec => "DEC", + Self::DecimalP => "DECIMAL_P", + Self::Declare => "DECLARE", + Self::Default => "DEFAULT", + Self::Defaults => "DEFAULTS", + Self::Deferrable => "DEFERRABLE", + Self::Deferred => "DEFERRED", + Self::Definer => "DEFINER", + Self::DeleteP => "DELETE_P", + Self::Delimiter => "DELIMITER", + Self::Delimiters => "DELIMITERS", + Self::Depends => "DEPENDS", + Self::Depth => "DEPTH", + Self::Desc => "DESC", + Self::Detach => "DETACH", + Self::Dictionary => "DICTIONARY", + Self::DisableP => "DISABLE_P", + Self::Discard => "DISCARD", + Self::Distinct => "DISTINCT", + Self::Do => "DO", + Self::DocumentP => "DOCUMENT_P", + Self::DomainP => "DOMAIN_P", + Self::DoubleP => "DOUBLE_P", + Self::Drop => "DROP", + Self::Each => "EACH", + Self::Else => "ELSE", + Self::EmptyP => "EMPTY_P", + Self::EnableP => "ENABLE_P", + Self::Encoding => "ENCODING", + Self::Encrypted => "ENCRYPTED", + Self::EndP => "END_P", + Self::EnumP => "ENUM_P", + Self::ErrorP => "ERROR_P", + Self::Escape => "ESCAPE", + Self::Event => "EVENT", + Self::Except => "EXCEPT", + Self::Exclude => "EXCLUDE", + Self::Excluding => "EXCLUDING", + Self::Exclusive => "EXCLUSIVE", + Self::Execute => "EXECUTE", + Self::Exists => "EXISTS", + Self::Explain => "EXPLAIN", + Self::Expression => "EXPRESSION", + Self::Extension => "EXTENSION", + Self::External => "EXTERNAL", + Self::Extract => "EXTRACT", + Self::FalseP => "FALSE_P", + Self::Family => "FAMILY", + Self::Fetch => "FETCH", + Self::Filter => "FILTER", + Self::Finalize => "FINALIZE", + Self::FirstP => "FIRST_P", + Self::FloatP => "FLOAT_P", + Self::Following => "FOLLOWING", + Self::For => "FOR", + Self::Force => "FORCE", + Self::Foreign => "FOREIGN", + Self::Format => "FORMAT", 
+ Self::Forward => "FORWARD", + Self::Freeze => "FREEZE", + Self::From => "FROM", + Self::Full => "FULL", + Self::Function => "FUNCTION", + Self::Functions => "FUNCTIONS", + Self::Generated => "GENERATED", + Self::Global => "GLOBAL", + Self::Grant => "GRANT", + Self::Granted => "GRANTED", + Self::Greatest => "GREATEST", + Self::GroupP => "GROUP_P", + Self::Grouping => "GROUPING", + Self::Groups => "GROUPS", + Self::Handler => "HANDLER", + Self::Having => "HAVING", + Self::HeaderP => "HEADER_P", + Self::Hold => "HOLD", + Self::HourP => "HOUR_P", + Self::IdentityP => "IDENTITY_P", + Self::IfP => "IF_P", + Self::Ilike => "ILIKE", + Self::Immediate => "IMMEDIATE", + Self::Immutable => "IMMUTABLE", + Self::ImplicitP => "IMPLICIT_P", + Self::ImportP => "IMPORT_P", + Self::InP => "IN_P", + Self::Include => "INCLUDE", + Self::Including => "INCLUDING", + Self::Increment => "INCREMENT", + Self::Indent => "INDENT", + Self::Index => "INDEX", + Self::Indexes => "INDEXES", + Self::Inherit => "INHERIT", + Self::Inherits => "INHERITS", + Self::Initially => "INITIALLY", + Self::InlineP => "INLINE_P", + Self::InnerP => "INNER_P", + Self::Inout => "INOUT", + Self::InputP => "INPUT_P", + Self::Insensitive => "INSENSITIVE", + Self::Insert => "INSERT", + Self::Instead => "INSTEAD", + Self::IntP => "INT_P", + Self::Integer => "INTEGER", + Self::Intersect => "INTERSECT", + Self::Interval => "INTERVAL", + Self::Into => "INTO", + Self::Invoker => "INVOKER", + Self::Is => "IS", + Self::Isnull => "ISNULL", + Self::Isolation => "ISOLATION", + Self::Join => "JOIN", + Self::Json => "JSON", + Self::JsonArray => "JSON_ARRAY", + Self::JsonArrayagg => "JSON_ARRAYAGG", + Self::JsonExists => "JSON_EXISTS", + Self::JsonObject => "JSON_OBJECT", + Self::JsonObjectagg => "JSON_OBJECTAGG", + Self::JsonQuery => "JSON_QUERY", + Self::JsonScalar => "JSON_SCALAR", + Self::JsonSerialize => "JSON_SERIALIZE", + Self::JsonTable => "JSON_TABLE", + Self::JsonValue => "JSON_VALUE", + Self::Keep => "KEEP", + Self::Key 
=> "KEY", + Self::Keys => "KEYS", + Self::Label => "LABEL", + Self::Language => "LANGUAGE", + Self::LargeP => "LARGE_P", + Self::LastP => "LAST_P", + Self::LateralP => "LATERAL_P", + Self::Leading => "LEADING", + Self::Leakproof => "LEAKPROOF", + Self::Least => "LEAST", + Self::Left => "LEFT", + Self::Level => "LEVEL", + Self::Like => "LIKE", + Self::Limit => "LIMIT", + Self::Listen => "LISTEN", + Self::Load => "LOAD", + Self::Local => "LOCAL", + Self::Localtime => "LOCALTIME", + Self::Localtimestamp => "LOCALTIMESTAMP", + Self::Location => "LOCATION", + Self::LockP => "LOCK_P", + Self::Locked => "LOCKED", + Self::Logged => "LOGGED", + Self::Mapping => "MAPPING", + Self::Match => "MATCH", + Self::Matched => "MATCHED", + Self::Materialized => "MATERIALIZED", + Self::Maxvalue => "MAXVALUE", + Self::Merge => "MERGE", + Self::MergeAction => "MERGE_ACTION", + Self::Method => "METHOD", + Self::MinuteP => "MINUTE_P", + Self::Minvalue => "MINVALUE", + Self::Mode => "MODE", + Self::MonthP => "MONTH_P", + Self::Move => "MOVE", + Self::NameP => "NAME_P", + Self::Names => "NAMES", + Self::National => "NATIONAL", + Self::Natural => "NATURAL", + Self::Nchar => "NCHAR", + Self::Nested => "NESTED", + Self::New => "NEW", + Self::Next => "NEXT", + Self::Nfc => "NFC", + Self::Nfd => "NFD", + Self::Nfkc => "NFKC", + Self::Nfkd => "NFKD", + Self::No => "NO", + Self::None => "NONE", + Self::Normalize => "NORMALIZE", + Self::Normalized => "NORMALIZED", + Self::Not => "NOT", + Self::Nothing => "NOTHING", + Self::Notify => "NOTIFY", + Self::Notnull => "NOTNULL", + Self::Nowait => "NOWAIT", + Self::NullP => "NULL_P", + Self::Nullif => "NULLIF", + Self::NullsP => "NULLS_P", + Self::Numeric => "NUMERIC", + Self::ObjectP => "OBJECT_P", + Self::Of => "OF", + Self::Off => "OFF", + Self::Offset => "OFFSET", + Self::Oids => "OIDS", + Self::Old => "OLD", + Self::Omit => "OMIT", + Self::On => "ON", + Self::Only => "ONLY", + Self::Operator => "OPERATOR", + Self::Option => "OPTION", + Self::Options => 
"OPTIONS", + Self::Or => "OR", + Self::Order => "ORDER", + Self::Ordinality => "ORDINALITY", + Self::Others => "OTHERS", + Self::OutP => "OUT_P", + Self::OuterP => "OUTER_P", + Self::Over => "OVER", + Self::Overlaps => "OVERLAPS", + Self::Overlay => "OVERLAY", + Self::Overriding => "OVERRIDING", + Self::Owned => "OWNED", + Self::Owner => "OWNER", + Self::Parallel => "PARALLEL", + Self::Parameter => "PARAMETER", + Self::Parser => "PARSER", + Self::Partial => "PARTIAL", + Self::Partition => "PARTITION", + Self::Passing => "PASSING", + Self::Password => "PASSWORD", + Self::Path => "PATH", + Self::Placing => "PLACING", + Self::Plan => "PLAN", + Self::Plans => "PLANS", + Self::Policy => "POLICY", + Self::Position => "POSITION", + Self::Preceding => "PRECEDING", + Self::Precision => "PRECISION", + Self::Preserve => "PRESERVE", + Self::Prepare => "PREPARE", + Self::Prepared => "PREPARED", + Self::Primary => "PRIMARY", + Self::Prior => "PRIOR", + Self::Privileges => "PRIVILEGES", + Self::Procedural => "PROCEDURAL", + Self::Procedure => "PROCEDURE", + Self::Procedures => "PROCEDURES", + Self::Program => "PROGRAM", + Self::Publication => "PUBLICATION", + Self::Quote => "QUOTE", + Self::Quotes => "QUOTES", + Self::Range => "RANGE", + Self::Read => "READ", + Self::Real => "REAL", + Self::Reassign => "REASSIGN", + Self::Recheck => "RECHECK", + Self::Recursive => "RECURSIVE", + Self::RefP => "REF_P", + Self::References => "REFERENCES", + Self::Referencing => "REFERENCING", + Self::Refresh => "REFRESH", + Self::Reindex => "REINDEX", + Self::RelativeP => "RELATIVE_P", + Self::Release => "RELEASE", + Self::Rename => "RENAME", + Self::Repeatable => "REPEATABLE", + Self::Replace => "REPLACE", + Self::Replica => "REPLICA", + Self::Reset => "RESET", + Self::Restart => "RESTART", + Self::Restrict => "RESTRICT", + Self::Return => "RETURN", + Self::Returning => "RETURNING", + Self::Returns => "RETURNS", + Self::Revoke => "REVOKE", + Self::Right => "RIGHT", + Self::Role => "ROLE", + 
Self::Rollback => "ROLLBACK", + Self::Rollup => "ROLLUP", + Self::Routine => "ROUTINE", + Self::Routines => "ROUTINES", + Self::Row => "ROW", + Self::Rows => "ROWS", + Self::Rule => "RULE", + Self::Savepoint => "SAVEPOINT", + Self::Scalar => "SCALAR", + Self::Schema => "SCHEMA", + Self::Schemas => "SCHEMAS", + Self::Scroll => "SCROLL", + Self::Search => "SEARCH", + Self::SecondP => "SECOND_P", + Self::Security => "SECURITY", + Self::Select => "SELECT", + Self::Sequence => "SEQUENCE", + Self::Sequences => "SEQUENCES", + Self::Serializable => "SERIALIZABLE", + Self::Server => "SERVER", + Self::Session => "SESSION", + Self::SessionUser => "SESSION_USER", + Self::Set => "SET", + Self::Sets => "SETS", + Self::Setof => "SETOF", + Self::Share => "SHARE", + Self::Show => "SHOW", + Self::Similar => "SIMILAR", + Self::Simple => "SIMPLE", + Self::Skip => "SKIP", + Self::Smallint => "SMALLINT", + Self::Snapshot => "SNAPSHOT", + Self::Some => "SOME", + Self::Source => "SOURCE", + Self::SqlP => "SQL_P", + Self::Stable => "STABLE", + Self::StandaloneP => "STANDALONE_P", + Self::Start => "START", + Self::Statement => "STATEMENT", + Self::Statistics => "STATISTICS", + Self::Stdin => "STDIN", + Self::Stdout => "STDOUT", + Self::Storage => "STORAGE", + Self::Stored => "STORED", + Self::StrictP => "STRICT_P", + Self::StringP => "STRING_P", + Self::StripP => "STRIP_P", + Self::Subscription => "SUBSCRIPTION", + Self::Substring => "SUBSTRING", + Self::Support => "SUPPORT", + Self::Symmetric => "SYMMETRIC", + Self::Sysid => "SYSID", + Self::SystemP => "SYSTEM_P", + Self::SystemUser => "SYSTEM_USER", + Self::Table => "TABLE", + Self::Tables => "TABLES", + Self::Tablesample => "TABLESAMPLE", + Self::Tablespace => "TABLESPACE", + Self::Target => "TARGET", + Self::Temp => "TEMP", + Self::Template => "TEMPLATE", + Self::Temporary => "TEMPORARY", + Self::TextP => "TEXT_P", + Self::Then => "THEN", + Self::Ties => "TIES", + Self::Time => "TIME", + Self::Timestamp => "TIMESTAMP", + Self::To => 
"TO", + Self::Trailing => "TRAILING", + Self::Transaction => "TRANSACTION", + Self::Transform => "TRANSFORM", + Self::Treat => "TREAT", + Self::Trigger => "TRIGGER", + Self::Trim => "TRIM", + Self::TrueP => "TRUE_P", + Self::Truncate => "TRUNCATE", + Self::Trusted => "TRUSTED", + Self::TypeP => "TYPE_P", + Self::TypesP => "TYPES_P", + Self::Uescape => "UESCAPE", + Self::Unbounded => "UNBOUNDED", + Self::Unconditional => "UNCONDITIONAL", + Self::Uncommitted => "UNCOMMITTED", + Self::Unencrypted => "UNENCRYPTED", + Self::Union => "UNION", + Self::Unique => "UNIQUE", + Self::Unknown => "UNKNOWN", + Self::Unlisten => "UNLISTEN", + Self::Unlogged => "UNLOGGED", + Self::Until => "UNTIL", + Self::Update => "UPDATE", + Self::User => "USER", + Self::Using => "USING", + Self::Vacuum => "VACUUM", + Self::Valid => "VALID", + Self::Validate => "VALIDATE", + Self::Validator => "VALIDATOR", + Self::ValueP => "VALUE_P", + Self::Values => "VALUES", + Self::Varchar => "VARCHAR", + Self::Variadic => "VARIADIC", + Self::Varying => "VARYING", + Self::Verbose => "VERBOSE", + Self::VersionP => "VERSION_P", + Self::View => "VIEW", + Self::Views => "VIEWS", + Self::Volatile => "VOLATILE", + Self::When => "WHEN", + Self::Where => "WHERE", + Self::WhitespaceP => "WHITESPACE_P", + Self::Window => "WINDOW", + Self::With => "WITH", + Self::Within => "WITHIN", + Self::Without => "WITHOUT", + Self::Work => "WORK", + Self::Wrapper => "WRAPPER", + Self::Write => "WRITE", + Self::XmlP => "XML_P", + Self::Xmlattributes => "XMLATTRIBUTES", + Self::Xmlconcat => "XMLCONCAT", + Self::Xmlelement => "XMLELEMENT", + Self::Xmlexists => "XMLEXISTS", + Self::Xmlforest => "XMLFOREST", + Self::Xmlnamespaces => "XMLNAMESPACES", + Self::Xmlparse => "XMLPARSE", + Self::Xmlpi => "XMLPI", + Self::Xmlroot => "XMLROOT", + Self::Xmlserialize => "XMLSERIALIZE", + Self::Xmltable => "XMLTABLE", + Self::YearP => "YEAR_P", + Self::YesP => "YES_P", + Self::Zone => "ZONE", + Self::FormatLa => "FORMAT_LA", + Self::NotLa => 
"NOT_LA", + Self::NullsLa => "NULLS_LA", + Self::WithLa => "WITH_LA", + Self::WithoutLa => "WITHOUT_LA", + Self::ModeTypeName => "MODE_TYPE_NAME", + Self::ModePlpgsqlExpr => "MODE_PLPGSQL_EXPR", + Self::ModePlpgsqlAssign1 => "MODE_PLPGSQL_ASSIGN1", + Self::ModePlpgsqlAssign2 => "MODE_PLPGSQL_ASSIGN2", + Self::ModePlpgsqlAssign3 => "MODE_PLPGSQL_ASSIGN3", + Self::Uminus => "UMINUS", + } + } + /// Creates an enum from field names used in the ProtoBuf definition. + pub fn from_str_name(value: &str) -> ::core::option::Option { + match value { + "NUL" => Some(Self::Nul), + "ASCII_36" => Some(Self::Ascii36), + "ASCII_37" => Some(Self::Ascii37), + "ASCII_40" => Some(Self::Ascii40), + "ASCII_41" => Some(Self::Ascii41), + "ASCII_42" => Some(Self::Ascii42), + "ASCII_43" => Some(Self::Ascii43), + "ASCII_44" => Some(Self::Ascii44), + "ASCII_45" => Some(Self::Ascii45), + "ASCII_46" => Some(Self::Ascii46), + "ASCII_47" => Some(Self::Ascii47), + "ASCII_58" => Some(Self::Ascii58), + "ASCII_59" => Some(Self::Ascii59), + "ASCII_60" => Some(Self::Ascii60), + "ASCII_61" => Some(Self::Ascii61), + "ASCII_62" => Some(Self::Ascii62), + "ASCII_63" => Some(Self::Ascii63), + "ASCII_91" => Some(Self::Ascii91), + "ASCII_92" => Some(Self::Ascii92), + "ASCII_93" => Some(Self::Ascii93), + "ASCII_94" => Some(Self::Ascii94), + "IDENT" => Some(Self::Ident), + "UIDENT" => Some(Self::Uident), + "FCONST" => Some(Self::Fconst), + "SCONST" => Some(Self::Sconst), + "USCONST" => Some(Self::Usconst), + "BCONST" => Some(Self::Bconst), + "XCONST" => Some(Self::Xconst), + "Op" => Some(Self::Op), + "ICONST" => Some(Self::Iconst), + "PARAM" => Some(Self::Param), + "TYPECAST" => Some(Self::Typecast), + "DOT_DOT" => Some(Self::DotDot), + "COLON_EQUALS" => Some(Self::ColonEquals), + "EQUALS_GREATER" => Some(Self::EqualsGreater), + "LESS_EQUALS" => Some(Self::LessEquals), + "GREATER_EQUALS" => Some(Self::GreaterEquals), + "NOT_EQUALS" => Some(Self::NotEquals), + "SQL_COMMENT" => Some(Self::SqlComment), + "C_COMMENT" 
=> Some(Self::CComment), + "ABORT_P" => Some(Self::AbortP), + "ABSENT" => Some(Self::Absent), + "ABSOLUTE_P" => Some(Self::AbsoluteP), + "ACCESS" => Some(Self::Access), + "ACTION" => Some(Self::Action), + "ADD_P" => Some(Self::AddP), + "ADMIN" => Some(Self::Admin), + "AFTER" => Some(Self::After), + "AGGREGATE" => Some(Self::Aggregate), + "ALL" => Some(Self::All), + "ALSO" => Some(Self::Also), + "ALTER" => Some(Self::Alter), + "ALWAYS" => Some(Self::Always), + "ANALYSE" => Some(Self::Analyse), + "ANALYZE" => Some(Self::Analyze), + "AND" => Some(Self::And), + "ANY" => Some(Self::Any), + "ARRAY" => Some(Self::Array), + "AS" => Some(Self::As), + "ASC" => Some(Self::Asc), + "ASENSITIVE" => Some(Self::Asensitive), + "ASSERTION" => Some(Self::Assertion), + "ASSIGNMENT" => Some(Self::Assignment), + "ASYMMETRIC" => Some(Self::Asymmetric), + "ATOMIC" => Some(Self::Atomic), + "AT" => Some(Self::At), + "ATTACH" => Some(Self::Attach), + "ATTRIBUTE" => Some(Self::Attribute), + "AUTHORIZATION" => Some(Self::Authorization), + "BACKWARD" => Some(Self::Backward), + "BEFORE" => Some(Self::Before), + "BEGIN_P" => Some(Self::BeginP), + "BETWEEN" => Some(Self::Between), + "BIGINT" => Some(Self::Bigint), + "BINARY" => Some(Self::Binary), + "BIT" => Some(Self::Bit), + "BOOLEAN_P" => Some(Self::BooleanP), + "BOTH" => Some(Self::Both), + "BREADTH" => Some(Self::Breadth), + "BY" => Some(Self::By), + "CACHE" => Some(Self::Cache), + "CALL" => Some(Self::Call), + "CALLED" => Some(Self::Called), + "CASCADE" => Some(Self::Cascade), + "CASCADED" => Some(Self::Cascaded), + "CASE" => Some(Self::Case), + "CAST" => Some(Self::Cast), + "CATALOG_P" => Some(Self::CatalogP), + "CHAIN" => Some(Self::Chain), + "CHAR_P" => Some(Self::CharP), + "CHARACTER" => Some(Self::Character), + "CHARACTERISTICS" => Some(Self::Characteristics), + "CHECK" => Some(Self::Check), + "CHECKPOINT" => Some(Self::Checkpoint), + "CLASS" => Some(Self::Class), + "CLOSE" => Some(Self::Close), + "CLUSTER" => Some(Self::Cluster), + 
"COALESCE" => Some(Self::Coalesce), + "COLLATE" => Some(Self::Collate), + "COLLATION" => Some(Self::Collation), + "COLUMN" => Some(Self::Column), + "COLUMNS" => Some(Self::Columns), + "COMMENT" => Some(Self::Comment), + "COMMENTS" => Some(Self::Comments), + "COMMIT" => Some(Self::Commit), + "COMMITTED" => Some(Self::Committed), + "COMPRESSION" => Some(Self::Compression), + "CONCURRENTLY" => Some(Self::Concurrently), + "CONDITIONAL" => Some(Self::Conditional), + "CONFIGURATION" => Some(Self::Configuration), + "CONFLICT" => Some(Self::Conflict), + "CONNECTION" => Some(Self::Connection), + "CONSTRAINT" => Some(Self::Constraint), + "CONSTRAINTS" => Some(Self::Constraints), + "CONTENT_P" => Some(Self::ContentP), + "CONTINUE_P" => Some(Self::ContinueP), + "CONVERSION_P" => Some(Self::ConversionP), + "COPY" => Some(Self::Copy), + "COST" => Some(Self::Cost), + "CREATE" => Some(Self::Create), + "CROSS" => Some(Self::Cross), + "CSV" => Some(Self::Csv), + "CUBE" => Some(Self::Cube), + "CURRENT_P" => Some(Self::CurrentP), + "CURRENT_CATALOG" => Some(Self::CurrentCatalog), + "CURRENT_DATE" => Some(Self::CurrentDate), + "CURRENT_ROLE" => Some(Self::CurrentRole), + "CURRENT_SCHEMA" => Some(Self::CurrentSchema), + "CURRENT_TIME" => Some(Self::CurrentTime), + "CURRENT_TIMESTAMP" => Some(Self::CurrentTimestamp), + "CURRENT_USER" => Some(Self::CurrentUser), + "CURSOR" => Some(Self::Cursor), + "CYCLE" => Some(Self::Cycle), + "DATA_P" => Some(Self::DataP), + "DATABASE" => Some(Self::Database), + "DAY_P" => Some(Self::DayP), + "DEALLOCATE" => Some(Self::Deallocate), + "DEC" => Some(Self::Dec), + "DECIMAL_P" => Some(Self::DecimalP), + "DECLARE" => Some(Self::Declare), + "DEFAULT" => Some(Self::Default), + "DEFAULTS" => Some(Self::Defaults), + "DEFERRABLE" => Some(Self::Deferrable), + "DEFERRED" => Some(Self::Deferred), + "DEFINER" => Some(Self::Definer), + "DELETE_P" => Some(Self::DeleteP), + "DELIMITER" => Some(Self::Delimiter), + "DELIMITERS" => Some(Self::Delimiters), + "DEPENDS" => 
Some(Self::Depends), + "DEPTH" => Some(Self::Depth), + "DESC" => Some(Self::Desc), + "DETACH" => Some(Self::Detach), + "DICTIONARY" => Some(Self::Dictionary), + "DISABLE_P" => Some(Self::DisableP), + "DISCARD" => Some(Self::Discard), + "DISTINCT" => Some(Self::Distinct), + "DO" => Some(Self::Do), + "DOCUMENT_P" => Some(Self::DocumentP), + "DOMAIN_P" => Some(Self::DomainP), + "DOUBLE_P" => Some(Self::DoubleP), + "DROP" => Some(Self::Drop), + "EACH" => Some(Self::Each), + "ELSE" => Some(Self::Else), + "EMPTY_P" => Some(Self::EmptyP), + "ENABLE_P" => Some(Self::EnableP), + "ENCODING" => Some(Self::Encoding), + "ENCRYPTED" => Some(Self::Encrypted), + "END_P" => Some(Self::EndP), + "ENUM_P" => Some(Self::EnumP), + "ERROR_P" => Some(Self::ErrorP), + "ESCAPE" => Some(Self::Escape), + "EVENT" => Some(Self::Event), + "EXCEPT" => Some(Self::Except), + "EXCLUDE" => Some(Self::Exclude), + "EXCLUDING" => Some(Self::Excluding), + "EXCLUSIVE" => Some(Self::Exclusive), + "EXECUTE" => Some(Self::Execute), + "EXISTS" => Some(Self::Exists), + "EXPLAIN" => Some(Self::Explain), + "EXPRESSION" => Some(Self::Expression), + "EXTENSION" => Some(Self::Extension), + "EXTERNAL" => Some(Self::External), + "EXTRACT" => Some(Self::Extract), + "FALSE_P" => Some(Self::FalseP), + "FAMILY" => Some(Self::Family), + "FETCH" => Some(Self::Fetch), + "FILTER" => Some(Self::Filter), + "FINALIZE" => Some(Self::Finalize), + "FIRST_P" => Some(Self::FirstP), + "FLOAT_P" => Some(Self::FloatP), + "FOLLOWING" => Some(Self::Following), + "FOR" => Some(Self::For), + "FORCE" => Some(Self::Force), + "FOREIGN" => Some(Self::Foreign), + "FORMAT" => Some(Self::Format), + "FORWARD" => Some(Self::Forward), + "FREEZE" => Some(Self::Freeze), + "FROM" => Some(Self::From), + "FULL" => Some(Self::Full), + "FUNCTION" => Some(Self::Function), + "FUNCTIONS" => Some(Self::Functions), + "GENERATED" => Some(Self::Generated), + "GLOBAL" => Some(Self::Global), + "GRANT" => Some(Self::Grant), + "GRANTED" => Some(Self::Granted), + 
"GREATEST" => Some(Self::Greatest), + "GROUP_P" => Some(Self::GroupP), + "GROUPING" => Some(Self::Grouping), + "GROUPS" => Some(Self::Groups), + "HANDLER" => Some(Self::Handler), + "HAVING" => Some(Self::Having), + "HEADER_P" => Some(Self::HeaderP), + "HOLD" => Some(Self::Hold), + "HOUR_P" => Some(Self::HourP), + "IDENTITY_P" => Some(Self::IdentityP), + "IF_P" => Some(Self::IfP), + "ILIKE" => Some(Self::Ilike), + "IMMEDIATE" => Some(Self::Immediate), + "IMMUTABLE" => Some(Self::Immutable), + "IMPLICIT_P" => Some(Self::ImplicitP), + "IMPORT_P" => Some(Self::ImportP), + "IN_P" => Some(Self::InP), + "INCLUDE" => Some(Self::Include), + "INCLUDING" => Some(Self::Including), + "INCREMENT" => Some(Self::Increment), + "INDENT" => Some(Self::Indent), + "INDEX" => Some(Self::Index), + "INDEXES" => Some(Self::Indexes), + "INHERIT" => Some(Self::Inherit), + "INHERITS" => Some(Self::Inherits), + "INITIALLY" => Some(Self::Initially), + "INLINE_P" => Some(Self::InlineP), + "INNER_P" => Some(Self::InnerP), + "INOUT" => Some(Self::Inout), + "INPUT_P" => Some(Self::InputP), + "INSENSITIVE" => Some(Self::Insensitive), + "INSERT" => Some(Self::Insert), + "INSTEAD" => Some(Self::Instead), + "INT_P" => Some(Self::IntP), + "INTEGER" => Some(Self::Integer), + "INTERSECT" => Some(Self::Intersect), + "INTERVAL" => Some(Self::Interval), + "INTO" => Some(Self::Into), + "INVOKER" => Some(Self::Invoker), + "IS" => Some(Self::Is), + "ISNULL" => Some(Self::Isnull), + "ISOLATION" => Some(Self::Isolation), + "JOIN" => Some(Self::Join), + "JSON" => Some(Self::Json), + "JSON_ARRAY" => Some(Self::JsonArray), + "JSON_ARRAYAGG" => Some(Self::JsonArrayagg), + "JSON_EXISTS" => Some(Self::JsonExists), + "JSON_OBJECT" => Some(Self::JsonObject), + "JSON_OBJECTAGG" => Some(Self::JsonObjectagg), + "JSON_QUERY" => Some(Self::JsonQuery), + "JSON_SCALAR" => Some(Self::JsonScalar), + "JSON_SERIALIZE" => Some(Self::JsonSerialize), + "JSON_TABLE" => Some(Self::JsonTable), + "JSON_VALUE" => Some(Self::JsonValue), + 
"KEEP" => Some(Self::Keep), + "KEY" => Some(Self::Key), + "KEYS" => Some(Self::Keys), + "LABEL" => Some(Self::Label), + "LANGUAGE" => Some(Self::Language), + "LARGE_P" => Some(Self::LargeP), + "LAST_P" => Some(Self::LastP), + "LATERAL_P" => Some(Self::LateralP), + "LEADING" => Some(Self::Leading), + "LEAKPROOF" => Some(Self::Leakproof), + "LEAST" => Some(Self::Least), + "LEFT" => Some(Self::Left), + "LEVEL" => Some(Self::Level), + "LIKE" => Some(Self::Like), + "LIMIT" => Some(Self::Limit), + "LISTEN" => Some(Self::Listen), + "LOAD" => Some(Self::Load), + "LOCAL" => Some(Self::Local), + "LOCALTIME" => Some(Self::Localtime), + "LOCALTIMESTAMP" => Some(Self::Localtimestamp), + "LOCATION" => Some(Self::Location), + "LOCK_P" => Some(Self::LockP), + "LOCKED" => Some(Self::Locked), + "LOGGED" => Some(Self::Logged), + "MAPPING" => Some(Self::Mapping), + "MATCH" => Some(Self::Match), + "MATCHED" => Some(Self::Matched), + "MATERIALIZED" => Some(Self::Materialized), + "MAXVALUE" => Some(Self::Maxvalue), + "MERGE" => Some(Self::Merge), + "MERGE_ACTION" => Some(Self::MergeAction), + "METHOD" => Some(Self::Method), + "MINUTE_P" => Some(Self::MinuteP), + "MINVALUE" => Some(Self::Minvalue), + "MODE" => Some(Self::Mode), + "MONTH_P" => Some(Self::MonthP), + "MOVE" => Some(Self::Move), + "NAME_P" => Some(Self::NameP), + "NAMES" => Some(Self::Names), + "NATIONAL" => Some(Self::National), + "NATURAL" => Some(Self::Natural), + "NCHAR" => Some(Self::Nchar), + "NESTED" => Some(Self::Nested), + "NEW" => Some(Self::New), + "NEXT" => Some(Self::Next), + "NFC" => Some(Self::Nfc), + "NFD" => Some(Self::Nfd), + "NFKC" => Some(Self::Nfkc), + "NFKD" => Some(Self::Nfkd), + "NO" => Some(Self::No), + "NONE" => Some(Self::None), + "NORMALIZE" => Some(Self::Normalize), + "NORMALIZED" => Some(Self::Normalized), + "NOT" => Some(Self::Not), + "NOTHING" => Some(Self::Nothing), + "NOTIFY" => Some(Self::Notify), + "NOTNULL" => Some(Self::Notnull), + "NOWAIT" => Some(Self::Nowait), + "NULL_P" => 
Some(Self::NullP), + "NULLIF" => Some(Self::Nullif), + "NULLS_P" => Some(Self::NullsP), + "NUMERIC" => Some(Self::Numeric), + "OBJECT_P" => Some(Self::ObjectP), + "OF" => Some(Self::Of), + "OFF" => Some(Self::Off), + "OFFSET" => Some(Self::Offset), + "OIDS" => Some(Self::Oids), + "OLD" => Some(Self::Old), + "OMIT" => Some(Self::Omit), + "ON" => Some(Self::On), + "ONLY" => Some(Self::Only), + "OPERATOR" => Some(Self::Operator), + "OPTION" => Some(Self::Option), + "OPTIONS" => Some(Self::Options), + "OR" => Some(Self::Or), + "ORDER" => Some(Self::Order), + "ORDINALITY" => Some(Self::Ordinality), + "OTHERS" => Some(Self::Others), + "OUT_P" => Some(Self::OutP), + "OUTER_P" => Some(Self::OuterP), + "OVER" => Some(Self::Over), + "OVERLAPS" => Some(Self::Overlaps), + "OVERLAY" => Some(Self::Overlay), + "OVERRIDING" => Some(Self::Overriding), + "OWNED" => Some(Self::Owned), + "OWNER" => Some(Self::Owner), + "PARALLEL" => Some(Self::Parallel), + "PARAMETER" => Some(Self::Parameter), + "PARSER" => Some(Self::Parser), + "PARTIAL" => Some(Self::Partial), + "PARTITION" => Some(Self::Partition), + "PASSING" => Some(Self::Passing), + "PASSWORD" => Some(Self::Password), + "PATH" => Some(Self::Path), + "PLACING" => Some(Self::Placing), + "PLAN" => Some(Self::Plan), + "PLANS" => Some(Self::Plans), + "POLICY" => Some(Self::Policy), + "POSITION" => Some(Self::Position), + "PRECEDING" => Some(Self::Preceding), + "PRECISION" => Some(Self::Precision), + "PRESERVE" => Some(Self::Preserve), + "PREPARE" => Some(Self::Prepare), + "PREPARED" => Some(Self::Prepared), + "PRIMARY" => Some(Self::Primary), + "PRIOR" => Some(Self::Prior), + "PRIVILEGES" => Some(Self::Privileges), + "PROCEDURAL" => Some(Self::Procedural), + "PROCEDURE" => Some(Self::Procedure), + "PROCEDURES" => Some(Self::Procedures), + "PROGRAM" => Some(Self::Program), + "PUBLICATION" => Some(Self::Publication), + "QUOTE" => Some(Self::Quote), + "QUOTES" => Some(Self::Quotes), + "RANGE" => Some(Self::Range), + "READ" => 
Some(Self::Read), + "REAL" => Some(Self::Real), + "REASSIGN" => Some(Self::Reassign), + "RECHECK" => Some(Self::Recheck), + "RECURSIVE" => Some(Self::Recursive), + "REF_P" => Some(Self::RefP), + "REFERENCES" => Some(Self::References), + "REFERENCING" => Some(Self::Referencing), + "REFRESH" => Some(Self::Refresh), + "REINDEX" => Some(Self::Reindex), + "RELATIVE_P" => Some(Self::RelativeP), + "RELEASE" => Some(Self::Release), + "RENAME" => Some(Self::Rename), + "REPEATABLE" => Some(Self::Repeatable), + "REPLACE" => Some(Self::Replace), + "REPLICA" => Some(Self::Replica), + "RESET" => Some(Self::Reset), + "RESTART" => Some(Self::Restart), + "RESTRICT" => Some(Self::Restrict), + "RETURN" => Some(Self::Return), + "RETURNING" => Some(Self::Returning), + "RETURNS" => Some(Self::Returns), + "REVOKE" => Some(Self::Revoke), + "RIGHT" => Some(Self::Right), + "ROLE" => Some(Self::Role), + "ROLLBACK" => Some(Self::Rollback), + "ROLLUP" => Some(Self::Rollup), + "ROUTINE" => Some(Self::Routine), + "ROUTINES" => Some(Self::Routines), + "ROW" => Some(Self::Row), + "ROWS" => Some(Self::Rows), + "RULE" => Some(Self::Rule), + "SAVEPOINT" => Some(Self::Savepoint), + "SCALAR" => Some(Self::Scalar), + "SCHEMA" => Some(Self::Schema), + "SCHEMAS" => Some(Self::Schemas), + "SCROLL" => Some(Self::Scroll), + "SEARCH" => Some(Self::Search), + "SECOND_P" => Some(Self::SecondP), + "SECURITY" => Some(Self::Security), + "SELECT" => Some(Self::Select), + "SEQUENCE" => Some(Self::Sequence), + "SEQUENCES" => Some(Self::Sequences), + "SERIALIZABLE" => Some(Self::Serializable), + "SERVER" => Some(Self::Server), + "SESSION" => Some(Self::Session), + "SESSION_USER" => Some(Self::SessionUser), + "SET" => Some(Self::Set), + "SETS" => Some(Self::Sets), + "SETOF" => Some(Self::Setof), + "SHARE" => Some(Self::Share), + "SHOW" => Some(Self::Show), + "SIMILAR" => Some(Self::Similar), + "SIMPLE" => Some(Self::Simple), + "SKIP" => Some(Self::Skip), + "SMALLINT" => Some(Self::Smallint), + "SNAPSHOT" => 
Some(Self::Snapshot), + "SOME" => Some(Self::Some), + "SOURCE" => Some(Self::Source), + "SQL_P" => Some(Self::SqlP), + "STABLE" => Some(Self::Stable), + "STANDALONE_P" => Some(Self::StandaloneP), + "START" => Some(Self::Start), + "STATEMENT" => Some(Self::Statement), + "STATISTICS" => Some(Self::Statistics), + "STDIN" => Some(Self::Stdin), + "STDOUT" => Some(Self::Stdout), + "STORAGE" => Some(Self::Storage), + "STORED" => Some(Self::Stored), + "STRICT_P" => Some(Self::StrictP), + "STRING_P" => Some(Self::StringP), + "STRIP_P" => Some(Self::StripP), + "SUBSCRIPTION" => Some(Self::Subscription), + "SUBSTRING" => Some(Self::Substring), + "SUPPORT" => Some(Self::Support), + "SYMMETRIC" => Some(Self::Symmetric), + "SYSID" => Some(Self::Sysid), + "SYSTEM_P" => Some(Self::SystemP), + "SYSTEM_USER" => Some(Self::SystemUser), + "TABLE" => Some(Self::Table), + "TABLES" => Some(Self::Tables), + "TABLESAMPLE" => Some(Self::Tablesample), + "TABLESPACE" => Some(Self::Tablespace), + "TARGET" => Some(Self::Target), + "TEMP" => Some(Self::Temp), + "TEMPLATE" => Some(Self::Template), + "TEMPORARY" => Some(Self::Temporary), + "TEXT_P" => Some(Self::TextP), + "THEN" => Some(Self::Then), + "TIES" => Some(Self::Ties), + "TIME" => Some(Self::Time), + "TIMESTAMP" => Some(Self::Timestamp), + "TO" => Some(Self::To), + "TRAILING" => Some(Self::Trailing), + "TRANSACTION" => Some(Self::Transaction), + "TRANSFORM" => Some(Self::Transform), + "TREAT" => Some(Self::Treat), + "TRIGGER" => Some(Self::Trigger), + "TRIM" => Some(Self::Trim), + "TRUE_P" => Some(Self::TrueP), + "TRUNCATE" => Some(Self::Truncate), + "TRUSTED" => Some(Self::Trusted), + "TYPE_P" => Some(Self::TypeP), + "TYPES_P" => Some(Self::TypesP), + "UESCAPE" => Some(Self::Uescape), + "UNBOUNDED" => Some(Self::Unbounded), + "UNCONDITIONAL" => Some(Self::Unconditional), + "UNCOMMITTED" => Some(Self::Uncommitted), + "UNENCRYPTED" => Some(Self::Unencrypted), + "UNION" => Some(Self::Union), + "UNIQUE" => Some(Self::Unique), + "UNKNOWN" => 
Some(Self::Unknown), + "UNLISTEN" => Some(Self::Unlisten), + "UNLOGGED" => Some(Self::Unlogged), + "UNTIL" => Some(Self::Until), + "UPDATE" => Some(Self::Update), + "USER" => Some(Self::User), + "USING" => Some(Self::Using), + "VACUUM" => Some(Self::Vacuum), + "VALID" => Some(Self::Valid), + "VALIDATE" => Some(Self::Validate), + "VALIDATOR" => Some(Self::Validator), + "VALUE_P" => Some(Self::ValueP), + "VALUES" => Some(Self::Values), + "VARCHAR" => Some(Self::Varchar), + "VARIADIC" => Some(Self::Variadic), + "VARYING" => Some(Self::Varying), + "VERBOSE" => Some(Self::Verbose), + "VERSION_P" => Some(Self::VersionP), + "VIEW" => Some(Self::View), + "VIEWS" => Some(Self::Views), + "VOLATILE" => Some(Self::Volatile), + "WHEN" => Some(Self::When), + "WHERE" => Some(Self::Where), + "WHITESPACE_P" => Some(Self::WhitespaceP), + "WINDOW" => Some(Self::Window), + "WITH" => Some(Self::With), + "WITHIN" => Some(Self::Within), + "WITHOUT" => Some(Self::Without), + "WORK" => Some(Self::Work), + "WRAPPER" => Some(Self::Wrapper), + "WRITE" => Some(Self::Write), + "XML_P" => Some(Self::XmlP), + "XMLATTRIBUTES" => Some(Self::Xmlattributes), + "XMLCONCAT" => Some(Self::Xmlconcat), + "XMLELEMENT" => Some(Self::Xmlelement), + "XMLEXISTS" => Some(Self::Xmlexists), + "XMLFOREST" => Some(Self::Xmlforest), + "XMLNAMESPACES" => Some(Self::Xmlnamespaces), + "XMLPARSE" => Some(Self::Xmlparse), + "XMLPI" => Some(Self::Xmlpi), + "XMLROOT" => Some(Self::Xmlroot), + "XMLSERIALIZE" => Some(Self::Xmlserialize), + "XMLTABLE" => Some(Self::Xmltable), + "YEAR_P" => Some(Self::YearP), + "YES_P" => Some(Self::YesP), + "ZONE" => Some(Self::Zone), + "FORMAT_LA" => Some(Self::FormatLa), + "NOT_LA" => Some(Self::NotLa), + "NULLS_LA" => Some(Self::NullsLa), + "WITH_LA" => Some(Self::WithLa), + "WITHOUT_LA" => Some(Self::WithoutLa), + "MODE_TYPE_NAME" => Some(Self::ModeTypeName), + "MODE_PLPGSQL_EXPR" => Some(Self::ModePlpgsqlExpr), + "MODE_PLPGSQL_ASSIGN1" => Some(Self::ModePlpgsqlAssign1), + 
"MODE_PLPGSQL_ASSIGN2" => Some(Self::ModePlpgsqlAssign2), + "MODE_PLPGSQL_ASSIGN3" => Some(Self::ModePlpgsqlAssign3), + "UMINUS" => Some(Self::Uminus), + _ => None, + } + } +} diff --git a/crates/pgt_query/src/scan.rs b/crates/pgt_query/src/scan.rs new file mode 100644 index 000000000..b12061e7f --- /dev/null +++ b/crates/pgt_query/src/scan.rs @@ -0,0 +1,33 @@ +use std::ffi::{CStr, CString}; + +use crate::bindings::*; +use crate::error::*; +use crate::protobuf; + +use prost::Message; + +/// Scans (lexes) the given SQL statement into tokens. +/// +/// # Example +/// +/// ```rust +/// let result = pgt_query::scan("SELECT * FROM contacts"); +/// assert!(result.is_ok()); +/// ``` +pub fn scan(sql: &str) -> Result { + let input = CString::new(sql)?; + let result = unsafe { pg_query_scan(input.as_ptr()) }; + let scan_result = if !result.error.is_null() { + let message = unsafe { CStr::from_ptr((*result.error).message) } + .to_string_lossy() + .to_string(); + Err(Error::Scan(message)) + } else { + let data = unsafe { + std::slice::from_raw_parts(result.pbuf.data as *const u8, result.pbuf.len as usize) + }; + protobuf::ScanResult::decode(data).map_err(Error::Decode) + }; + unsafe { pg_query_free_scan_result(result) }; + scan_result +} diff --git a/crates/pgt_query/src/split.rs b/crates/pgt_query/src/split.rs new file mode 100644 index 000000000..abb95eb87 --- /dev/null +++ b/crates/pgt_query/src/split.rs @@ -0,0 +1,86 @@ +use std::ffi::{CStr, CString}; + +use crate::bindings::*; +use crate::error::*; + +/// Split a well-formed query into separate statements. 
+/// +/// # Example +/// +/// ```rust +/// let query = r#"select /*;*/ 1; select "2;", (select 3);"#; +/// let statements = pgt_query::split_with_parser(query).unwrap(); +/// assert_eq!(statements, vec!["select /*;*/ 1", r#" select "2;", (select 3)"#]); +/// ``` +/// +/// However, `split_with_parser` will fail on malformed statements +/// +/// ```rust +/// let query = "select 1; this statement is not sql; select 2;"; +/// let result = pgt_query::split_with_parser(query); +/// let err = r#"syntax error at or near "this""#; +/// assert_eq!(result, Err(pgt_query::Error::Split(err.to_string()))); +/// ``` +pub fn split_with_parser(query: &str) -> Result> { + let input = CString::new(query)?; + let result = unsafe { pg_query_split_with_parser(input.as_ptr()) }; + let split_result = if !result.error.is_null() { + let message = unsafe { CStr::from_ptr((*result.error).message) } + .to_string_lossy() + .to_string(); + Err(Error::Split(message)) + } else { + let n_stmts = result.n_stmts as usize; + let mut statements = Vec::with_capacity(n_stmts); + for offset in 0..n_stmts { + let split_stmt = unsafe { *result.stmts.add(offset).read() }; + let start = split_stmt.stmt_location as usize; + let end = start + split_stmt.stmt_len as usize; + statements.push(&query[start..end]); + // not sure the start..end slice'll hold up for non-utf8 charsets + } + Ok(statements) + }; + unsafe { pg_query_free_split_result(result) }; + split_result +} + +/// Split a potentially-malformed query into separate statements. 
Note that +/// invalid tokens will be skipped +/// ```rust +/// let query = r#"select /*;*/ 1; asdf; select "2;", (select 3); asdf"#; +/// let statements = pgt_query::split_with_scanner(query).unwrap(); +/// assert_eq!(statements, vec![ +/// "select /*;*/ 1", +/// // skipped " asdf" since it was an invalid token +/// r#" select "2;", (select 3)"#, +/// ]); +/// ``` +pub fn split_with_scanner(query: &str) -> Result> { + let input = CString::new(query)?; + let result = unsafe { pg_query_split_with_scanner(input.as_ptr()) }; + let split_result = if !result.error.is_null() { + let message = unsafe { CStr::from_ptr((*result.error).message) } + .to_string_lossy() + .to_string(); + Err(Error::Split(message)) + } else { + // don't use result.stderr_buffer since it appears unused unless + // libpg_query is compiled with DEBUG defined. + let n_stmts = result.n_stmts as usize; + let mut start: usize; + let mut end: usize; + let mut statements = Vec::with_capacity(n_stmts); + for offset in 0..n_stmts { + let split_stmt = unsafe { *result.stmts.add(offset).read() }; + start = split_stmt.stmt_location as usize; + // TODO: consider comparing the new value of start to the old value + // of end to see if any region larger than a statement-separator got skipped + end = start + split_stmt.stmt_len as usize; + statements.push(&query[start..end]); + } + Ok(statements) + }; + unsafe { pg_query_free_split_result(result) }; + split_result +} diff --git a/crates/pgt_query_ext/Cargo.toml b/crates/pgt_query_ext/Cargo.toml index c6754b670..9b4bfa1d3 100644 --- a/crates/pgt_query_ext/Cargo.toml +++ b/crates/pgt_query_ext/Cargo.toml @@ -12,13 +12,9 @@ version = "0.0.0" [dependencies] -petgraph = "0.6.4" - -pg_query.workspace = true -pgt_diagnostics.workspace = true -pgt_lexer.workspace = true -pgt_query_ext_codegen.workspace = true -pgt_text_size.workspace = true +pgt_diagnostics.workspace = true +pgt_query.workspace = true +pgt_text_size.workspace = true [lib] doctest = false diff --git 
a/crates/pgt_query_ext/src/codegen.rs b/crates/pgt_query_ext/src/codegen.rs deleted file mode 100644 index 8278383b6..000000000 --- a/crates/pgt_query_ext/src/codegen.rs +++ /dev/null @@ -1 +0,0 @@ -pgt_query_ext_codegen::codegen!(); diff --git a/crates/pgt_query_ext/src/diagnostics.rs b/crates/pgt_query_ext/src/diagnostics.rs index aa16db813..4b5d92e9e 100644 --- a/crates/pgt_query_ext/src/diagnostics.rs +++ b/crates/pgt_query_ext/src/diagnostics.rs @@ -9,14 +9,29 @@ use pgt_text_size::TextRange; pub struct SyntaxDiagnostic { /// The location where the error is occurred #[location(span)] - span: Option, + pub span: Option, #[message] #[description] pub message: MessageAndDescription, } -impl From for SyntaxDiagnostic { - fn from(err: pg_query::Error) -> Self { +impl SyntaxDiagnostic { + /// Create a new syntax diagnostic with the given message and optional span. + pub fn new(message: impl Into, span: Option) -> Self { + SyntaxDiagnostic { + span, + message: MessageAndDescription::from(message.into()), + } + } + + pub fn span(mut self, span: TextRange) -> Self { + self.span = Some(span); + self + } +} + +impl From for SyntaxDiagnostic { + fn from(err: pgt_query::Error) -> Self { SyntaxDiagnostic { span: None, message: MessageAndDescription::from(err.to_string()), diff --git a/crates/pgt_query_ext/src/lib.rs b/crates/pgt_query_ext/src/lib.rs index c1f5fb49a..b0288da8d 100644 --- a/crates/pgt_query_ext/src/lib.rs +++ b/crates/pgt_query_ext/src/lib.rs @@ -1,32 +1,2 @@ -//! Postgres Statement Parser -//! -//! Simple wrapper crate for `pg_query` to expose types and a function to get the root node for an -//! SQL statement. -//! -//! It also host any "extensions" to the `pg_query` crate that are not yet contributed upstream. -//! Extensions include -//! - `get_location` to get the location of a node -//! - `get_node_properties` to get the properties of a node -//! - `get_nodes` to get all the nodes in the AST as a petgraph tree -//! 
- `ChildrenIterator` to iterate over the children of a node -mod codegen; pub mod diagnostics; - -pub use pg_query::protobuf; -pub use pg_query::{Error, NodeEnum, Result}; - -pub use codegen::{ - ChildrenIterator, Node, TokenProperty, get_location, get_node_properties, get_nodes, -}; - -pub fn parse(sql: &str) -> Result { - pg_query::parse(sql).map(|parsed| { - parsed - .protobuf - .nodes() - .iter() - .find(|n| n.1 == 1) - .map(|n| n.0.to_enum()) - .ok_or_else(|| Error::Parse("Unable to find root node".to_string())) - })? -} +pub mod utils; diff --git a/crates/pgt_query_ext/src/utils.rs b/crates/pgt_query_ext/src/utils.rs new file mode 100644 index 000000000..6dedebea1 --- /dev/null +++ b/crates/pgt_query_ext/src/utils.rs @@ -0,0 +1,100 @@ +/// Helper function to find a specific option value from function options +pub fn find_option_value( + create_fn: &pgt_query::protobuf::CreateFunctionStmt, + option_name: &str, +) -> Option { + create_fn + .options + .iter() + .filter_map(|opt_wrapper| opt_wrapper.node.as_ref()) + .find_map(|opt| { + if let pgt_query::NodeEnum::DefElem(def_elem) = opt { + if def_elem.defname == option_name { + def_elem + .arg + .iter() + .filter_map(|arg_wrapper| arg_wrapper.node.as_ref()) + .find_map(|arg| { + if let pgt_query::NodeEnum::String(s) = arg { + Some(s.sval.clone()) + } else if let pgt_query::NodeEnum::List(l) = arg { + l.items.iter().find_map(|item_wrapper| { + if let Some(pgt_query::NodeEnum::String(s)) = + item_wrapper.node.as_ref() + { + Some(s.sval.clone()) + } else { + None + } + }) + } else { + None + } + }) + } else { + None + } + } else { + None + } + }) +} + +pub fn parse_name(nodes: &[pgt_query::protobuf::Node]) -> Option<(Option, String)> { + let names = nodes + .iter() + .map(|n| match &n.node { + Some(pgt_query::NodeEnum::String(s)) => Some(s.sval.clone()), + _ => None, + }) + .collect::>(); + + match names.as_slice() { + [Some(schema), Some(name)] => Some((Some(schema.clone()), name.clone())), + [Some(name)] => 
Some((None, name.clone())), + _ => None, + } +} + +#[cfg(test)] +mod tests { + use crate::utils::{find_option_value, parse_name}; + + #[test] + fn test_find_option_value() { + let input = " + CREATE OR REPLACE FUNCTION public.f1() + RETURNS boolean + LANGUAGE plpgsql + AS $function$ + declare r t1 := (select t1 from t1 where a = 1); + BEGIN + if r.c is null or + true is false + then -- there is bug - table t1 missing \"c\" column + RAISE NOTICE 'c is null'; + end if; + END; + $function$; +" + .trim(); + + let ast = pgt_query::parse(input).unwrap().into_root().unwrap(); + let create_fn = match &ast { + pgt_query::NodeEnum::CreateFunctionStmt(stmt) => stmt, + _ => panic!("Expected CreateFunctionStmt"), + }; + + assert_eq!( + find_option_value(create_fn, "language"), + Some("plpgsql".to_string()) + ); + + assert!(find_option_value(create_fn, "as").is_some(),); + + assert_eq!( + parse_name(&create_fn.return_type.as_ref().unwrap().names), + Some((Some("pg_catalog".to_string()), "bool".to_string())) + ); + } +} diff --git a/crates/pgt_query_ext_codegen/src/get_location.rs b/crates/pgt_query_ext_codegen/src/get_location.rs deleted file mode 100644 index fa6fa8b26..000000000 --- a/crates/pgt_query_ext_codegen/src/get_location.rs +++ /dev/null @@ -1,122 +0,0 @@ -use pgt_query_proto_parser::{FieldType, Node, ProtoFile}; -use proc_macro2::{Ident, TokenStream}; -use quote::{format_ident, quote}; - -pub fn get_location_mod(proto_file: &ProtoFile) -> proc_macro2::TokenStream { - let manual_node_names = manual_node_names(); - - let node_identifiers = node_identifiers(&proto_file.nodes, &manual_node_names); - let location_idents = location_idents(&proto_file.nodes, &manual_node_names); - - quote! 
{ - /// Returns the location of a node - pub fn get_location(node: &NodeEnum) -> Option { - let loc = get_location_internal(node); - if loc.is_some() { - usize::try_from(loc.unwrap()).ok() - } else { - None - } - } - - fn get_location_internal(node: &NodeEnum) -> Option { - let location = match node { - // for some nodes, the location of the node itself is after their children location. - // we implement the logic for those nodes manually. - // if you add one, make sure to add its name to `manual_node_names()`. - NodeEnum::BoolExpr(n) => { - let a = n.args.iter().min_by(|a, b| { - let loc_a = get_location_internal(&a.node.as_ref().unwrap()); - let loc_b = get_location_internal(&b.node.as_ref().unwrap()); - loc_a.cmp(&loc_b) - }); - get_location_internal(&a.unwrap().node.as_ref().unwrap()) - }, - NodeEnum::AExpr(n) => get_location_internal(&n.lexpr.as_ref().unwrap().node.as_ref().unwrap()), - NodeEnum::WindowDef(n) => { - if n.partition_clause.len() > 0 || n.order_clause.len() > 0 { - // the location is not correct if its the definition clause, e.g. 
for - // window w as (partition by a order by b) - // the location is the start of the `partition` token - None - } else { - Some(n.location) - } - }, - NodeEnum::CollateClause(n) => get_location_internal(&n.arg.as_ref().unwrap().node.as_ref().unwrap()), - NodeEnum::TypeCast(n) => get_location_internal(&n.arg.as_ref().unwrap().node.as_ref().unwrap()), - NodeEnum::ColumnDef(n) => if n.colname.len() > 0 { - Some(n.location) - } else { - None - }, - NodeEnum::NullTest(n) => if n.arg.is_some() { - get_location_internal(&n.arg.as_ref().unwrap().node.as_ref().unwrap()) - } else { - Some(n.location) - }, - NodeEnum::PublicationObjSpec(n) => { - match &n.pubtable { - Some(pubtable) => match &pubtable.relation { - Some(range_var) => Some(range_var.location), - None => Some(n.location), - }, - None => Some(n.location), - } - }, - NodeEnum::BooleanTest(n) => { - if n.arg.is_some() { - get_location_internal(&n.arg.as_ref().unwrap().node.as_ref().unwrap()) - } else { - Some(n.location) - } - }, - #(NodeEnum::#node_identifiers(n) => #location_idents),* - }; - if location.is_some() && location.unwrap() < 0 { - None - } else { - location - } - } - } -} - -fn manual_node_names() -> Vec<&'static str> { - vec![ - "BoolExpr", - "AExpr", - "WindowDef", - "CollateClause", - "TypeCast", - "ColumnDef", - "NullTest", - "PublicationObjSpec", - ] -} - -fn location_idents(nodes: &[Node], exclude_nodes: &[&str]) -> Vec { - nodes - .iter() - .filter(|n| !exclude_nodes.contains(&n.name.as_str())) - .map(|node| { - if node - .fields - .iter() - .any(|n| n.name == "location" && n.field_type == FieldType::Int32) - { - quote! { Some(n.location) } - } else { - quote! 
{ None } - } - }) - .collect() -} - -fn node_identifiers(nodes: &[Node], exclude_nodes: &[&str]) -> Vec { - nodes - .iter() - .filter(|n| !exclude_nodes.contains(&n.name.as_str())) - .map(|node| format_ident!("{}", &node.name)) - .collect() -} diff --git a/crates/pgt_query_ext_codegen/src/get_node_properties.rs b/crates/pgt_query_ext_codegen/src/get_node_properties.rs deleted file mode 100644 index 9581304bd..000000000 --- a/crates/pgt_query_ext_codegen/src/get_node_properties.rs +++ /dev/null @@ -1,1006 +0,0 @@ -use pgt_query_proto_parser::{FieldType, Node, ProtoFile}; -use proc_macro2::{Ident, TokenStream}; -use quote::{format_ident, quote}; - -pub fn get_node_properties_mod(proto_file: &ProtoFile) -> proc_macro2::TokenStream { - let node_identifiers = node_identifiers(&proto_file.nodes); - let node_handlers = node_handlers(&proto_file.nodes); - - quote! { - #[derive(Debug, Clone, PartialEq)] - pub struct TokenProperty { - pub value: Option, - pub kind: Option, - } - - impl TokenProperty { - pub fn new(value: Option, kind: Option) -> TokenProperty { - if value.is_none() && kind.is_none() { - panic!("TokenProperty must have either value or kind"); - } - TokenProperty { value, kind } - } - } - - impl From for TokenProperty { - fn from(value: i32) -> TokenProperty { - TokenProperty { - value: Some(value.to_string()), - kind: None, - } - } - } - - impl From for TokenProperty { - fn from(value: u32) -> TokenProperty { - TokenProperty { - value: Some(value.to_string()), - kind: None, - } - } - } - - - impl From for TokenProperty { - fn from(value: i64) -> TokenProperty { - TokenProperty { - value: Some(value.to_string()), - kind: None, - } - } - } - - impl From for TokenProperty { - fn from(value: u64) -> TokenProperty { - TokenProperty { - value: Some(value.to_string()), - kind: None, - } - } - } - - impl From for TokenProperty { - fn from(value: f64) -> TokenProperty { - TokenProperty { - value: Some(value.to_string()), - kind: None, - } - } - } - - impl From for 
TokenProperty { - fn from(value: bool) -> TokenProperty { - TokenProperty { - value: Some(value.to_string()), - kind: None, - } - } - } - - impl From for TokenProperty { - fn from(value: String) -> TokenProperty { - assert!(value.len() > 0, "String property value has length 0"); - TokenProperty { - value: Some(value.to_lowercase()), - kind: None, - } - } - } - - - impl From<&pg_query::protobuf::Integer> for TokenProperty { - fn from(node: &pg_query::protobuf::Integer) -> TokenProperty { - TokenProperty { - value: Some(node.ival.to_string()), - kind: Some(SyntaxKind::Iconst) - } - } - } - - impl From<&pg_query::protobuf::Boolean> for TokenProperty { - fn from(node: &pg_query::protobuf::Boolean) -> TokenProperty { - TokenProperty { - value: Some(node.boolval.to_string()), - kind: match node.boolval { - true => Some(SyntaxKind::TrueP), - false => Some(SyntaxKind::FalseP), - } - } - } - } - - impl From for TokenProperty { - fn from(kind: SyntaxKind) -> TokenProperty { - TokenProperty { - value: None, - kind: Some(kind), - } - } - } - - impl From for TokenProperty { - fn from(token: Token) -> TokenProperty { - TokenProperty { - value: None, - kind: Some(SyntaxKind::from(token)), - } - } - } - - pub fn get_node_properties(node: &NodeEnum, parent: Option<&NodeEnum>) -> Vec { - let mut tokens: Vec = Vec::new(); - - match node { - #(NodeEnum::#node_identifiers(n) => {#node_handlers}),*, - }; - - tokens - } - - } -} - -fn node_identifiers(nodes: &[Node]) -> Vec { - nodes - .iter() - .map(|node| format_ident!("{}", &node.name)) - .collect() -} - -fn node_handlers(nodes: &[Node]) -> Vec { - nodes - .iter() - .map(|node| { - let string_property_handlers = string_property_handlers(node); - let custom_handlers = custom_handlers(node); - quote! { - #custom_handlers - #(#string_property_handlers)* - } - }) - .collect() -} - -fn custom_handlers(node: &Node) -> TokenStream { - match node.name.as_str() { - "SelectStmt" => quote! 
{ - tokens.push(TokenProperty::from(Token::Select)); - if n.distinct_clause.len() > 0 { - tokens.push(TokenProperty::from(Token::Distinct)); - } - if n.values_lists.len() > 0 { - tokens.push(TokenProperty::from(Token::Values)); - } - if n.from_clause.len() > 0 { - tokens.push(TokenProperty::from(Token::From)); - } - if n.where_clause.is_some() { - tokens.push(TokenProperty::from(Token::Where)); - } - if n.group_clause.len() > 0 { - tokens.push(TokenProperty::from(Token::GroupP)); - tokens.push(TokenProperty::from(Token::By)); - } - match n.op() { - protobuf::SetOperation::Undefined => {}, - protobuf::SetOperation::SetopNone => {}, - protobuf::SetOperation::SetopUnion => tokens.push(TokenProperty::from(Token::Union)), - protobuf::SetOperation::SetopIntersect => tokens.push(TokenProperty::from(Token::Intersect)), - protobuf::SetOperation::SetopExcept => tokens.push(TokenProperty::from(Token::Except)), - _ => panic!("Unknown SelectStmt op {:#?}", n.op()), - } - if n.all { - tokens.push(TokenProperty::from(Token::All)); - } - }, - "BoolExpr" => quote! { - match n.boolop() { - protobuf::BoolExprType::AndExpr => tokens.push(TokenProperty::from(Token::And)), - protobuf::BoolExprType::OrExpr => tokens.push(TokenProperty::from(Token::Or)), - protobuf::BoolExprType::NotExpr => tokens.push(TokenProperty::from(Token::Not)), - _ => panic!("Unknown BoolExpr {:#?}", n.boolop()), - } - }, - "JoinExpr" => quote! { - tokens.push(TokenProperty::from(Token::Join)); - tokens.push(TokenProperty::from(Token::On)); - match n.jointype() { - protobuf::JoinType::JoinInner => tokens.push(TokenProperty::from(Token::InnerP)), - protobuf::JoinType::JoinLeft => tokens.push(TokenProperty::from(Token::Left)), - protobuf::JoinType::JoinFull => tokens.push(TokenProperty::from(Token::Full)), - protobuf::JoinType::JoinRight => tokens.push(TokenProperty::from(Token::Right)), - _ => panic!("Unknown JoinExpr jointype {:#?}", n.jointype()), - } - - }, - "ResTarget" => quote! 
{ - if n.name.len() > 0 { - tokens.push(TokenProperty::from(Token::As)); - } - }, - "Integer" => quote! { - tokens.push(TokenProperty::from(n)); - }, - "DefElem" => quote! { - match n.defname.as_str() { - "location" => { - tokens.push(TokenProperty::from(Token::Default)); - }, - "connection_limit" => { - tokens.push(TokenProperty::from(Token::Limit)); - tokens.push(TokenProperty::from(Token::Iconst)); - }, - "owner" => { - tokens.push(TokenProperty::from(Token::Owner)); - } - _ => {} - } - match n.defaction() { - protobuf::DefElemAction::DefelemUnspec => tokens.push(TokenProperty::from(Token::Ascii61)), - _ => panic!("Unknown DefElem {:#?}", n.defaction()), - } - }, - "Alias" => quote! { - tokens.push(TokenProperty::from(Token::As)); - }, - "CollateClause" => quote! { - tokens.push(TokenProperty::from(Token::Collate)); - }, - "AExpr" => quote! { - match n.kind() { - protobuf::AExprKind::AexprOp => {}, // do nothing - protobuf::AExprKind::AexprOpAny => tokens.push(TokenProperty::from(Token::Any)), - protobuf::AExprKind::AexprIn => tokens.push(TokenProperty::from(Token::InP)), - _ => panic!("Unknown AExpr kind {:#?}", n.kind()), - } - }, - "WindowDef" => quote! { - if n.partition_clause.len() > 0 || n.order_clause.len() > 0 { - tokens.push(TokenProperty::from(Token::Window)); - tokens.push(TokenProperty::from(Token::As)); - } - if n.partition_clause.len() > 0 { - tokens.push(TokenProperty::from(Token::Partition)); - tokens.push(TokenProperty::from(Token::By)); - } - }, - "Boolean" => quote! { - tokens.push(TokenProperty::from(n)); - }, - "AStar" => quote! { - tokens.push(TokenProperty::from(Token::Ascii42)); - }, - "FuncCall" => quote! 
{ - if n.funcname.len() == 1 && n.args.len() == 0 { - // check if count(*) - if let Some(node) = &n.funcname[0].node { - if let NodeEnum::String(n) = node { - if n.sval == "count" { - tokens.push(TokenProperty::from(Token::Ascii42)); - } - } - } - } - if n.agg_filter.is_some() { - tokens.push(TokenProperty::from(Token::Filter)); - tokens.push(TokenProperty::from(Token::Where)); - } - if n.over.is_some() { - tokens.push(TokenProperty::from(Token::Over)); - } - }, - "SqlvalueFunction" => quote! { - match n.op() { - protobuf::SqlValueFunctionOp::SvfopCurrentRole => tokens.push(TokenProperty::from(Token::CurrentRole)), - protobuf::SqlValueFunctionOp::SvfopCurrentUser => tokens.push(TokenProperty::from(Token::CurrentUser)), - _ => panic!("Unknown SqlvalueFunction {:#?}", n.op()), - } - }, - "SortBy" => quote! { - tokens.push(TokenProperty::from(Token::Order)); - tokens.push(TokenProperty::from(Token::By)); - match n.sortby_dir() { - protobuf::SortByDir::SortbyAsc => tokens.push(TokenProperty::from(Token::Asc)), - protobuf::SortByDir::SortbyDesc => tokens.push(TokenProperty::from(Token::Desc)), - _ => {} - } - }, - "AConst" => quote! { - if n.isnull { - tokens.push(TokenProperty::from(Token::NullP)); - } - }, - "AlterTableStmt" => quote! { - tokens.push(TokenProperty::from(Token::Alter)); - tokens.push(TokenProperty::from(Token::Table)); - }, - "AlterTableCmd" => quote! 
{ - match n.subtype() { - protobuf::AlterTableType::AtColumnDefault => { - tokens.push(TokenProperty::from(Token::Alter)); - tokens.push(TokenProperty::from(Token::Column)); - tokens.push(TokenProperty::from(Token::Set)); - tokens.push(TokenProperty::from(Token::Default)); - }, - protobuf::AlterTableType::AtAddConstraint => tokens.push(TokenProperty::from(Token::AddP)), - protobuf::AlterTableType::AtAlterColumnType => { - tokens.push(TokenProperty::from(Token::Alter)); - tokens.push(TokenProperty::from(Token::Column)); - tokens.push(TokenProperty::from(Token::TypeP)); - }, - protobuf::AlterTableType::AtDropColumn => { - tokens.push(TokenProperty::from(Token::Drop)); - tokens.push(TokenProperty::from(Token::Column)); - }, - _ => panic!("Unknown AlterTableCmd {:#?}", n.subtype()), - } - }, - "VariableSetStmt" => quote! { - tokens.push(TokenProperty::from(Token::Set)); - match n.kind() { - protobuf::VariableSetKind::VarSetValue => tokens.push(TokenProperty::from(Token::To)), - _ => panic!("Unknown VariableSetStmt {:#?}", n.kind()), - } - }, - "CreatePolicyStmt" => quote! { - tokens.push(TokenProperty::from(Token::Create)); - tokens.push(TokenProperty::from(Token::Policy)); - tokens.push(TokenProperty::from(Token::On)); - if n.roles.len() > 0 { - tokens.push(TokenProperty::from(Token::To)); - } - if n.qual.is_some() { - tokens.push(TokenProperty::from(Token::Using)); - } - if n.with_check.is_some() { - tokens.push(TokenProperty::from(Token::With)); - tokens.push(TokenProperty::from(Token::Check)); - } - }, - "CopyStmt" => quote! { - tokens.push(TokenProperty::from(Token::Copy)); - tokens.push(TokenProperty::from(Token::From)); - }, - "RenameStmt" => quote! { - tokens.push(TokenProperty::from(Token::Alter)); - tokens.push(TokenProperty::from(Token::Table)); - tokens.push(TokenProperty::from(Token::Rename)); - tokens.push(TokenProperty::from(Token::To)); - }, - "Constraint" => quote! 
{ - match n.contype() { - protobuf::ConstrType::ConstrNotnull => { - tokens.push(TokenProperty::from(Token::Not)); - tokens.push(TokenProperty::from(Token::NullP)); - }, - protobuf::ConstrType::ConstrDefault => tokens.push(TokenProperty::from(Token::Default)), - protobuf::ConstrType::ConstrCheck => tokens.push(TokenProperty::from(Token::Check)), - protobuf::ConstrType::ConstrPrimary => { - tokens.push(TokenProperty::from(Token::Primary)); - tokens.push(TokenProperty::from(Token::Key)); - }, - protobuf::ConstrType::ConstrForeign => tokens.push(TokenProperty::from(Token::References)), - protobuf::ConstrType::ConstrUnique => tokens.push(TokenProperty::from(Token::Unique)), - _ => panic!("Unknown Constraint {:#?}", n.contype()), - }; - if n.options.len() > 0 { - tokens.push(TokenProperty::from(Token::With)); - } - }, - "PartitionSpec" => quote! { - tokens.push(TokenProperty::from(Token::Partition)); - tokens.push(TokenProperty::from(Token::By)); - }, - "InsertStmt" => quote! { - tokens.push(TokenProperty::from(Token::Insert)); - tokens.push(TokenProperty::from(Token::Into)); - }, - "DeleteStmt" => quote! { - tokens.push(TokenProperty::from(Token::DeleteP)); - tokens.push(TokenProperty::from(Token::From)); - if n.where_clause.is_some() { - tokens.push(TokenProperty::from(Token::Where)); - } - if n.using_clause.len() > 0 { - tokens.push(TokenProperty::from(Token::Using)); - } - }, - "ViewStmt" => quote! 
{ - tokens.push(TokenProperty::from(Token::Create)); - tokens.push(TokenProperty::from(Token::View)); - if n.query.is_some() { - tokens.push(TokenProperty::from(Token::As)); - // check if SelectStmt with WithClause with recursive set to true - if let Some(NodeEnum::SelectStmt(select_stmt)) = n.query.as_ref().and_then(|query| query.node.as_ref()) { - if select_stmt.with_clause.is_some() && select_stmt.with_clause.as_ref().unwrap().recursive { - tokens.push(TokenProperty::from(Token::Recursive)); - } - } - } - if n.replace { - tokens.push(TokenProperty::from(Token::Or)); - tokens.push(TokenProperty::from(Token::Replace)); - } - if let Some(n) = &n.view { - match n.relpersistence.as_str() { - // Temporary - "t" => tokens.push(TokenProperty::from(Token::Temporary)), - _ => {}, - } - } - match n.with_check_option() { - protobuf::ViewCheckOption::LocalCheckOption => { - tokens.push(TokenProperty::from(Token::With)); - tokens.push(TokenProperty::from(Token::Local)); - tokens.push(TokenProperty::from(Token::Check)); - tokens.push(TokenProperty::from(Token::Option)); - }, - protobuf::ViewCheckOption::CascadedCheckOption => { - tokens.push(TokenProperty::from(Token::With)); - tokens.push(TokenProperty::from(Token::Cascaded)); - tokens.push(TokenProperty::from(Token::Check)); - tokens.push(TokenProperty::from(Token::Option)); - }, - _ => {} - } - }, - "CreateStmt" => quote! 
{ - tokens.push(TokenProperty::from(Token::Create)); - tokens.push(TokenProperty::from(Token::Table)); - if n.tablespacename.len() > 0 { - tokens.push(TokenProperty::from(Token::Tablespace)); - } - if n.options.len() > 0 { - tokens.push(TokenProperty::from(Token::With)); - } - if n.if_not_exists { - tokens.push(TokenProperty::from(Token::IfP)); - tokens.push(TokenProperty::from(Token::Not)); - tokens.push(TokenProperty::from(Token::Exists)); - } - if n.partbound.is_some() { - tokens.push(TokenProperty::from(Token::Partition)); - tokens.push(TokenProperty::from(Token::Of)); - tokens.push(TokenProperty::from(Token::For)); - tokens.push(TokenProperty::from(Token::Values)); - } - if let Some(n) = &n.relation { - match n.relpersistence.as_str() { - // Unlogged - "u" => tokens.push(TokenProperty::from(Token::Unlogged)), - // Temporary - "t" => tokens.push(TokenProperty::from(Token::Temporary)), - _ => {}, - } - if n.inh { - tokens.push(TokenProperty::from(Token::Inherits)); - } - } - }, - "TableLikeClause" => quote! { - tokens.push(TokenProperty::from(Token::Like)); - // CREATE_TABLE_LIKE_ALL - if n.options == 0x7FFFFFFF { - tokens.push(TokenProperty::from(Token::Including)); - tokens.push(TokenProperty::from(Token::All)); - } else { - tokens.push(TokenProperty::from(Token::Excluding)); - tokens.push(TokenProperty::from(Token::All)); - } - }, - "TransactionStmt" => quote! { - match n.kind() { - protobuf::TransactionStmtKind::TransStmtBegin => tokens.push(TokenProperty::from(Token::BeginP)), - protobuf::TransactionStmtKind::TransStmtCommit => tokens.push(TokenProperty::from(Token::Commit)), - _ => panic!("Unknown TransactionStmt {:#?}", n.kind()) - } - }, - "PartitionBoundSpec" => quote! { - tokens.push(TokenProperty::from(Token::From)); - tokens.push(TokenProperty::from(Token::To)); - }, - "CaseExpr" => quote! 
{ - tokens.push(TokenProperty::from(Token::Case)); - tokens.push(TokenProperty::from(Token::EndP)); - if n.defresult.is_some() { - tokens.push(TokenProperty::from(Token::Else)); - } - }, - "NullTest" => quote! { - match n.nulltesttype() { - protobuf::NullTestType::IsNull => tokens.push(TokenProperty::from(Token::Is)), - protobuf::NullTestType::IsNotNull => { - tokens.push(TokenProperty::from(Token::Is)); - tokens.push(TokenProperty::from(Token::Not)); - }, - _ => panic!("Unknown NullTest {:#?}", n.nulltesttype()), - } - tokens.push(TokenProperty::from(Token::NullP)); - }, - "CreateFunctionStmt" => quote! { - tokens.push(TokenProperty::from(Token::Create)); - if n.is_procedure { - tokens.push(TokenProperty::from(Token::Procedure)); - } else { - tokens.push(TokenProperty::from(Token::Function)); - } - if n.replace { - tokens.push(TokenProperty::from(Token::Or)); - tokens.push(TokenProperty::from(Token::Replace)); - } - if let Some(return_type) = &n.return_type { - tokens.push(TokenProperty::from(Token::Returns)); - if return_type.setof { - tokens.push(TokenProperty::from(Token::Setof)); - } - } - for option in &n.options { - if let Some(NodeEnum::DefElem(node)) = &option.node { - if node.defname == "strict" { - if let Some(NodeEnum::Boolean(node)) = - node.arg.as_ref().and_then(|arg| arg.node.as_ref()) - { - if node.boolval { - tokens.push(TokenProperty::from(Token::NullP)); - tokens.push(TokenProperty::from(Token::On)); - tokens.push(TokenProperty::from(Token::NullP)); - tokens.push(TokenProperty::from(Token::InputP)); - } else { - tokens.push(TokenProperty::from(Token::On)); - tokens.push(TokenProperty::from(Token::NullP)); - tokens.push(TokenProperty::from(Token::InputP)); - } - } - } - } - } - }, - "FunctionParameter" => quote! 
{ - match n.mode() { - protobuf::FunctionParameterMode::FuncParamIn => tokens.push(TokenProperty::from(Token::InP)), - protobuf::FunctionParameterMode::FuncParamOut => tokens.push(TokenProperty::from(Token::OutP)), - protobuf::FunctionParameterMode::FuncParamInout => tokens.push(TokenProperty::from(Token::Inout)), - protobuf::FunctionParameterMode::FuncParamVariadic => tokens.push(TokenProperty::from(Token::Variadic)), - // protobuf::FunctionParameterMode::FuncParamTable => tokens.push(TokenProperty::from(Token::Table)), - protobuf::FunctionParameterMode::FuncParamDefault => {}, // do nothing - _ => panic!("Unknown FunctionParameter {:#?}", n.mode()), - }; - if n.defexpr.is_some() { - tokens.push(TokenProperty::from(Token::Default)); - } - }, - "NamedArgExpr" => quote! { - // => - tokens.push(TokenProperty::from(Token::EqualsGreater)); - }, - "CaseWhen" => quote! { - tokens.push(TokenProperty::from(Token::When)); - tokens.push(TokenProperty::from(Token::Then)); - }, - "TypeCast" => quote! { - tokens.push(TokenProperty::from(Token::Typecast)); - }, - "CreateDomainStmt" => quote! { - tokens.push(TokenProperty::from(Token::Create)); - tokens.push(TokenProperty::from(Token::DomainP)); - if n.type_name.is_some() { - tokens.push(TokenProperty::from(Token::As)); - } - }, - "List" => quote! 
{ - if parent.is_some() { - // if parent is `DefineStmt`, we need to check whether an ORDER BY needs to be added - if let NodeEnum::DefineStmt(define_stmt) = parent.unwrap() { - // there *seems* to be an integer node in the last position of the DefineStmt args that - // defines whether the list contains an order by statement - let integer = define_stmt.args.last() - .and_then(|node| node.node.as_ref()) - .and_then(|node| if let NodeEnum::Integer(n) = node { Some(n.ival) } else { None }); - if integer.is_none() { - panic!("DefineStmt of type ObjectAggregate has no integer node in last position of args"); - } - // if the integer is 1, then there is an order by statement - // we add it to the `List` node because that seems to make most sense based off the grammar definition - // ref: https://github.com/postgres/postgres/blob/REL_15_STABLE/src/backend/parser/gram.y#L8355 - // ``` - // aggr_args: - // | '(' aggr_args_list ORDER BY aggr_args_list ')' - // ``` - if integer.unwrap() == 1 { - tokens.push(TokenProperty::from(Token::Order)); - tokens.push(TokenProperty::from(Token::By)); - } - } - } - }, - "DefineStmt" => quote! 
{ - tokens.push(TokenProperty::from(Token::Create)); - if n.replace { - tokens.push(TokenProperty::from(Token::Or)); - tokens.push(TokenProperty::from(Token::Replace)); - } - match n.kind() { - protobuf::ObjectType::ObjectAggregate => { - tokens.push(TokenProperty::from(Token::Aggregate)); - - // n.args is always an array with two nodes - assert_eq!(n.args.len(), 2, "DefineStmt of type ObjectAggregate does not have exactly 2 args"); - // the first is either a List or a Node { node: None } - - if let Some(node) = &n.args.first() { - if node.node.is_none() { - // if first element is a Node { node: None }, then it's "*" - tokens.push(TokenProperty::from(Token::Ascii42)); - } } - // if its a list, we handle it in the handler for `List` - }, - protobuf::ObjectType::ObjectType => { - tokens.push(TokenProperty::from(Token::TypeP)); - }, - _ => panic!("Unknown DefineStmt {:#?}", n.kind()), - } - }, - "CreateSchemaStmt" => quote! { - tokens.push(TokenProperty::from(Token::Create)); - tokens.push(TokenProperty::from(Token::Schema)); - if n.if_not_exists { - tokens.push(TokenProperty::from(Token::IfP)); - tokens.push(TokenProperty::from(Token::Not)); - tokens.push(TokenProperty::from(Token::Exists)); - } - if n.authrole.is_some() { - tokens.push(TokenProperty::from(Token::Authorization)); - } - }, - "CreateEnumStmt" => quote! { - tokens.push(TokenProperty::from(Token::Create)); - tokens.push(TokenProperty::from(Token::TypeP)); - tokens.push(TokenProperty::from(Token::As)); - tokens.push(TokenProperty::from(Token::EnumP)); - }, - "CreateCastStmt" => quote! 
{ - tokens.push(TokenProperty::from(Token::Create)); - tokens.push(TokenProperty::from(Token::Cast)); - tokens.push(TokenProperty::from(Token::As)); - if n.inout { - tokens.push(TokenProperty::from(Token::With)); - tokens.push(TokenProperty::from(Token::Inout)); - } else if n.func.is_some() { - tokens.push(TokenProperty::from(Token::With)); - tokens.push(TokenProperty::from(Token::Function)); - } else { - tokens.push(TokenProperty::from(Token::Without)); - tokens.push(TokenProperty::from(Token::Function)); - } - match n.context() { - protobuf::CoercionContext::CoercionImplicit => { - tokens.push(TokenProperty::from(Token::As)); - tokens.push(TokenProperty::from(Token::ImplicitP)); - }, - protobuf::CoercionContext::CoercionAssignment => { - tokens.push(TokenProperty::from(Token::As)); - tokens.push(TokenProperty::from(Token::Assignment)); - }, - protobuf::CoercionContext::CoercionPlpgsql => {}, - protobuf::CoercionContext::CoercionExplicit => {}, - _ => panic!("Unknown CreateCastStmt {:#?}", n.context()) - } - }, - "CreateRangeStmt" => quote! { - tokens.push(TokenProperty::from(Token::Create)); - tokens.push(TokenProperty::from(Token::TypeP)); - tokens.push(TokenProperty::from(Token::As)); - tokens.push(TokenProperty::from(Token::Range)); - }, - "IndexStmt" => quote! 
{ - tokens.push(TokenProperty::from(Token::Create)); - if n.unique { - tokens.push(TokenProperty::from(Token::Unique)); - } - tokens.push(TokenProperty::from(Token::Index)); - if n.concurrent { - tokens.push(TokenProperty::from(Token::Concurrently)); - } - if n.if_not_exists { - tokens.push(TokenProperty::from(Token::IfP)); - tokens.push(TokenProperty::from(Token::Not)); - tokens.push(TokenProperty::from(Token::Exists)); - } - tokens.push(TokenProperty::from(Token::On)); - // access_method is btree by default - if n.access_method.len() > 0 { - tokens.push(TokenProperty::from(Token::Using)); - } - if n.index_including_params.len() > 0 { - tokens.push(TokenProperty::from(Token::Include)); - } - if n.options.len() > 0 { - tokens.push(TokenProperty::from(Token::With)); - } - // table_space is an empty string by default - if n.table_space.len() > 0 { - tokens.push(TokenProperty::from(Token::Tablespace)); - } - }, - "IndexElem" => quote! { - if n.collation.len() > 0 { - tokens.push(TokenProperty::from(Token::Collate)); - } - match n.nulls_ordering() { - protobuf::SortByNulls::SortbyNullsDefault => {}, - protobuf::SortByNulls::SortbyNullsFirst => { - tokens.push(TokenProperty::from(Token::NullsP)); - tokens.push(TokenProperty::from(Token::FirstP)); - }, - protobuf::SortByNulls::SortbyNullsLast => { - tokens.push(TokenProperty::from(Token::NullsP)); - tokens.push(TokenProperty::from(Token::LastP)); - }, - _ => panic!("Unknown IndexElem {:#?}", n.nulls_ordering()), - } - }, - "CreateTableSpaceStmt" => quote! { - tokens.push(TokenProperty::from(Token::Create)); - tokens.push(TokenProperty::from(Token::Tablespace)); - tokens.push(TokenProperty::from(Token::Location)); - if n.owner.is_some() { - tokens.push(TokenProperty::from(Token::Owner)); - } - if n.options.len() > 0 { - tokens.push(TokenProperty::from(Token::With)); - } - }, - "CreatePublicationStmt" => quote! 
{ - tokens.push(TokenProperty::from(Token::Create)); - tokens.push(TokenProperty::from(Token::Publication)); - if n.for_all_tables { - tokens.push(TokenProperty::from(Token::For)); - tokens.push(TokenProperty::from(Token::All)); - tokens.push(TokenProperty::from(Token::Tables)); - } - if let Some(n) = n.options.first() { - tokens.push(TokenProperty::from(Token::With)); - } - if let Some(n) = n.pubobjects.first() { - tokens.push(TokenProperty::from(Token::For)); - if let Some(NodeEnum::PublicationObjSpec(n)) = &n.node { - match n.pubobjtype() { - protobuf::PublicationObjSpecType::PublicationobjTable => { - tokens.push(TokenProperty::from(Token::Table)); - }, - protobuf::PublicationObjSpecType::PublicationobjTablesInSchema => { - tokens.push(TokenProperty::from(Token::Tables)); - tokens.push(TokenProperty::from(Token::InP)); - tokens.push(TokenProperty::from(Token::Schema)); - }, - _ => panic!("Unknown CreatePublicationStmt {:#?}", n.pubobjtype()) - } - } - } - if let Some(n) = n.pubobjects.last() { - if let Some(NodeEnum::PublicationObjSpec(n)) = &n.node { - match n.pubobjtype() { - protobuf::PublicationObjSpecType::PublicationobjTablesInSchema => { - tokens.push(TokenProperty::from(Token::Tables)); - tokens.push(TokenProperty::from(Token::InP)); - tokens.push(TokenProperty::from(Token::Schema)); - }, - _ => {} - } - } - } - }, - "PublicationTable" => quote! { - if n.where_clause.is_some() { - tokens.push(TokenProperty::from(Token::Where)); - } - }, - "BooleanTest" => quote! 
{ - match n.booltesttype() { - protobuf::BoolTestType::IsTrue => { - tokens.push(TokenProperty::from(Token::Is)); - tokens.push(TokenProperty::from(Token::TrueP)); - }, - protobuf::BoolTestType::IsNotTrue => { - tokens.push(TokenProperty::from(Token::Is)); - tokens.push(TokenProperty::from(Token::Not)); - tokens.push(TokenProperty::from(Token::TrueP)); - }, - protobuf::BoolTestType::IsFalse => { - tokens.push(TokenProperty::from(Token::Is)); - tokens.push(TokenProperty::from(Token::FalseP)); - }, - protobuf::BoolTestType::IsNotFalse => { - tokens.push(TokenProperty::from(Token::Is)); - tokens.push(TokenProperty::from(Token::Not)); - tokens.push(TokenProperty::from(Token::FalseP)); - }, - _ => panic!("Unknown BooleanTest {:#?}", n.booltesttype()), - } - }, - "CompositeTypeStmt" => quote! { - tokens.push(TokenProperty::from(Token::Create)); - tokens.push(TokenProperty::from(Token::TypeP)); - tokens.push(TokenProperty::from(Token::As)); - }, - "CreatedbStmt" => quote! { - tokens.push(TokenProperty::from(Token::Create)); - tokens.push(TokenProperty::from(Token::Database)); - }, - "CreateExtensionStmt" => quote! { - tokens.push(TokenProperty::from(Token::Create)); - tokens.push(TokenProperty::from(Token::Extension)); - if n.if_not_exists { - tokens.push(TokenProperty::from(Token::IfP)); - tokens.push(TokenProperty::from(Token::Not)); - tokens.push(TokenProperty::from(Token::Exists)); - } - }, - "CreateConversionStmt" => quote! 
{ - tokens.push(TokenProperty::from(Token::Create)); - if n.def { - tokens.push(TokenProperty::from(Token::Default)); - } - tokens.push(TokenProperty::from(Token::ConversionP)); - if n.for_encoding_name.len() > 0 { - tokens.push(TokenProperty::from(Token::For)); - } - if n.to_encoding_name.len() > 0 { - tokens.push(TokenProperty::from(Token::To)); - } - if n.func_name.len() == 1 { - tokens.push(TokenProperty::from(Token::From)); - } else if n.func_name.len() > 1 { - panic!("Encountered multiple defined func_name elements in CreateConversionStmt"); - } - }, - "CreateTransformStmt" => quote! { - tokens.push(TokenProperty::from(Token::Create)); - if n.replace { - tokens.push(TokenProperty::from(Token::Or)); - tokens.push(TokenProperty::from(Token::Replace)); - } - tokens.push(TokenProperty::from(Token::Transform)); - if n.type_name.is_some() { - tokens.push(TokenProperty::from(Token::For)); - } - tokens.push(TokenProperty::from(Token::Language)); - if n.fromsql.is_some() { - tokens.push(TokenProperty::from(Token::From)); - tokens.push(TokenProperty::from(Token::SqlP)); - tokens.push(TokenProperty::from(Token::With)); - tokens.push(TokenProperty::from(Token::Function)); - } - if n.tosql.is_some() { - tokens.push(TokenProperty::from(Token::To)); - tokens.push(TokenProperty::from(Token::SqlP)); - tokens.push(TokenProperty::from(Token::With)); - tokens.push(TokenProperty::from(Token::Function)); - } - }, - "TypeName" => quote! 
{ - let names = n.names - .iter() - .filter_map(|n| if let Some(NodeEnum::String(s)) = &n.node { Some(s.sval.clone()) } else { None }) - .collect::>(); - - if names.len() == 2 && names[0] == "pg_catalog" { - match names[1].as_str() { - "float8" => { - tokens.push(TokenProperty::from(Token::DoubleP)); - tokens.push(TokenProperty::from(Token::Precision)); - }, - "interval" => { - // Adapted from https://github.com/postgres/postgres/blob/REL_15_STABLE/src/backend/utils/adt/timestamp.c#L1103 - const MONTH: i32 = 1; - const YEAR: i32 = 2; - const DAY: i32 = 3; - const HOUR: i32 = 10; - const MINUTE: i32 = 11; - const SECOND: i32 = 12; - - let fields = &n.typmods.first() - .and_then(|node| node.node.as_ref()) - .and_then(|node| if let NodeEnum::AConst(n) = node { n.val.clone() } else { None }) - .and_then(|node| if let protobuf::a_const::Val::Ival(n) = node { Some(n.ival) } else { None }); - - if let Some(fields) = fields { - match fields.clone() { - // YEAR TO MONTH - i if i == 1 << YEAR | 1 << MONTH => { - tokens.push(TokenProperty::from(Token::To)); - tokens.push(TokenProperty::from(Token::MonthP)); - }, - // DAY TO HOUR - i if i == 1 << DAY | 1 << HOUR => { - tokens.push(TokenProperty::from(Token::To)); - tokens.push(TokenProperty::from(Token::HourP)); - }, - // DAY TO MINUTE - i if i == 1 << DAY | 1 << HOUR | 1 << MINUTE => { - tokens.push(TokenProperty::from(Token::To)); - tokens.push(TokenProperty::from(Token::MinuteP)); - }, - // DAY TO SECOND - i if i == 1 << DAY | 1 << HOUR | 1 << MINUTE | 1 << SECOND => { - tokens.push(TokenProperty::from(Token::To)); - tokens.push(TokenProperty::from(Token::SecondP)); - }, - // HOUR TO MINUTE - i if i == 1 << HOUR | 1 << MINUTE => { - tokens.push(TokenProperty::from(Token::To)); - tokens.push(TokenProperty::from(Token::MinuteP)); - }, - // HOUR TO SECOND - i if i == 1 << HOUR | 1 << MINUTE | 1 << SECOND => { - tokens.push(TokenProperty::from(Token::To)); - tokens.push(TokenProperty::from(Token::SecondP)); - }, - // MINUTE TO 
SECOND - i if i == 1 << MINUTE | 1 << SECOND => { - tokens.push(TokenProperty::from(Token::To)); - tokens.push(TokenProperty::from(Token::SecondP)); - }, - _ => panic!("Unknown Interval fields {:#?}", fields), - } - } - }, - "timestamptz" => { - tokens.push(TokenProperty::from(Token::Timestamp)); - tokens.push(TokenProperty::from(Token::With)); - tokens.push(TokenProperty::from(Token::Time)); - tokens.push(TokenProperty::from(Token::Zone)); - } - "timetz" => { - tokens.push(TokenProperty::from(Token::Time)); - tokens.push(TokenProperty::from(Token::With)); - tokens.push(TokenProperty::from(Token::Time)); - tokens.push(TokenProperty::from(Token::Zone)); - } - _ => {} - } - } - }, - "TruncateStmt" => quote! { - tokens.push(TokenProperty::from(Token::Truncate)); - tokens.push(TokenProperty::from(Token::Table)); - if n.restart_seqs { - tokens.push(TokenProperty::from(Token::Restart)); - tokens.push(TokenProperty::from(Token::IdentityP)); - } else { - tokens.push(TokenProperty::from(Token::ContinueP)); - tokens.push(TokenProperty::from(Token::IdentityP)); - } - match n.behavior { - // DropRestrict - 1 => tokens.push(TokenProperty::from(Token::Restrict)), - // DropCascade - 2 => tokens.push(TokenProperty::from(Token::Cascade)), - _ => {} - } - }, - _ => quote! {}, - } -} - -fn string_property_handlers(node: &Node) -> Vec { - node.fields - .iter() - .filter_map(|field| { - if field.repeated { - return None; - } - let field_name = format_ident!("{}", field.name.as_str()); - match field.field_type { - // just handle string values for now - FieldType::String => Some(quote! 
{ - // most string values are never None, but an empty string - if n.#field_name.len() > 0 { - tokens.push(TokenProperty::from(n.#field_name.to_owned())); - } - }), - _ => None, - } - }) - .collect() -} diff --git a/crates/pgt_query_ext_codegen/src/get_nodes.rs b/crates/pgt_query_ext_codegen/src/get_nodes.rs deleted file mode 100644 index e03813311..000000000 --- a/crates/pgt_query_ext_codegen/src/get_nodes.rs +++ /dev/null @@ -1,141 +0,0 @@ -use pgt_query_proto_parser::{FieldType, Node, ProtoFile}; -use proc_macro2::{Ident, TokenStream}; -use quote::{format_ident, quote}; - -pub fn get_nodes_mod(proto_file: &ProtoFile) -> proc_macro2::TokenStream { - let manual_node_names = manual_node_names(); - - let node_identifiers = node_identifiers(&proto_file.nodes, &manual_node_names); - let node_handlers = node_handlers(&proto_file.nodes, &manual_node_names); - - quote! { - #[derive(Debug, Clone)] - pub struct Node { - pub inner: NodeEnum, - pub depth: usize, - pub properties: Vec, - pub location: Option, - } - - /// Returns all children of the node, recursively - /// location is resolved manually - pub fn get_nodes(node: &NodeEnum) -> StableGraph { - let mut g = StableGraph::::new(); - - let root_node_idx = g.add_node(Node { - inner: node.to_owned(), - depth: 0, - properties: get_node_properties(node, None), - location: get_location(node), - }); - - // Parent node idx, Node, depth - let mut stack: VecDeque<(NodeIndex, NodeEnum, usize)> = - VecDeque::from(vec![(root_node_idx, node.to_owned(), 0)]); - while !stack.is_empty() { - let (parent_idx, node, depth) = stack.pop_front().unwrap(); - let current_depth = depth + 1; - let mut handle_child = |c: NodeEnum| { - if match &c { - // all "simple nodes" are not handled individually but merged with their parent - NodeEnum::String(n) => true, - NodeEnum::Integer(n) => true, - NodeEnum::Float(n) => true, - NodeEnum::Boolean(n) => true, - NodeEnum::BitString(n) => true, - _ => false - } { - 
g[parent_idx].properties.extend(get_node_properties(&c, Some(&node))); - } else { - let node_idx = g.add_node(Node { - depth: current_depth, - properties: get_node_properties(&c, Some(&node)), - location: get_location(&c), - inner: c.to_owned(), - }); - g.add_edge(parent_idx, node_idx, ()); - stack.push_back((node_idx, c.to_owned(), current_depth)); - } - }; - match &node { - // `AConst` is the only node with a `one of` property, so we handle it manually - // if you need to handle other nodes manually, add them to the `manual_node_names` function below - NodeEnum::AConst(n) => { - if n.val.is_some() { - handle_child(match n.val.to_owned().unwrap() { - pg_query::protobuf::a_const::Val::Ival(v) => NodeEnum::Integer(v), - pg_query::protobuf::a_const::Val::Fval(v) => NodeEnum::Float(v), - pg_query::protobuf::a_const::Val::Boolval(v) => NodeEnum::Boolean(v), - pg_query::protobuf::a_const::Val::Sval(v) => NodeEnum::String(v), - pg_query::protobuf::a_const::Val::Bsval(v) => NodeEnum::BitString(v), - }); - } - } - #(NodeEnum::#node_identifiers(n) => {#node_handlers}),*, - }; - } - g - } - } -} - -fn manual_node_names() -> Vec<&'static str> { - vec!["AConst"] -} - -fn node_identifiers(nodes: &[Node], exclude_nodes: &[&str]) -> Vec { - nodes - .iter() - .filter(|node| !exclude_nodes.contains(&node.name.as_str())) - .map(|node| format_ident!("{}", &node.name)) - .collect() -} - -fn node_handlers(nodes: &[Node], exclude_nodes: &[&str]) -> Vec { - nodes - .iter() - .filter(|node| !exclude_nodes.contains(&node.name.as_str())) - .map(|node| { - let property_handlers = property_handlers(node); - quote! { - #(#property_handlers)* - } - }) - .collect() -} - -fn property_handlers(node: &Node) -> Vec { - node.fields - .iter() - .filter_map(|field| { - let field_name = format_ident!("{}", field.name.as_str()); - if field.field_type == FieldType::Node && field.repeated { - Some(quote! 
{ - n.#field_name - .iter() - .for_each(|x| if x.node.is_some() { - handle_child(x.node.as_ref().unwrap().to_owned()); - }); - }) - } else if field.field_type == FieldType::Node && !field.is_one_of { - if field.node_name == Some("Node".to_owned()) { - Some(quote! { - if n.#field_name.is_some() { - handle_child(n.#field_name.to_owned().unwrap().node.unwrap()); - } - }) - } else { - let enum_variant_name = - format_ident!("{}", field.enum_variant_name.as_ref().unwrap().as_str()); - Some(quote! { - if n.#field_name.is_some() { - handle_child(NodeEnum::#enum_variant_name(n.#field_name.to_owned().unwrap())); - } - }) - } - } else { - None - } - }) - .collect() -} diff --git a/crates/pgt_query_ext_codegen/src/lib.rs b/crates/pgt_query_ext_codegen/src/lib.rs deleted file mode 100644 index c4f39c0e9..000000000 --- a/crates/pgt_query_ext_codegen/src/lib.rs +++ /dev/null @@ -1,48 +0,0 @@ -mod get_location; -mod get_node_properties; -mod get_nodes; -mod node_iterator; - -use get_location::get_location_mod; -use get_node_properties::get_node_properties_mod; -use get_nodes::get_nodes_mod; -use node_iterator::node_iterator_mod; -use pgt_query_proto_parser::ProtoParser; -use quote::quote; -use std::{env, path, path::Path}; - -#[proc_macro] -pub fn codegen(_input: proc_macro::TokenStream) -> proc_macro::TokenStream { - let parser = ProtoParser::new(&proto_file_path()); - let proto_file = parser.parse(); - - let get_location = get_location_mod(&proto_file); - let get_node_properties = get_node_properties_mod(&proto_file); - let get_nodes = get_nodes_mod(&proto_file); - let iterator = node_iterator_mod(&proto_file); - - quote! 
{ - use pgt_lexer::SyntaxKind; - use std::collections::VecDeque; - use pg_query::{protobuf, protobuf::ScanToken, protobuf::Token, NodeEnum, NodeRef}; - use std::cmp::{min, Ordering}; - use std::fmt::{Display, Formatter}; - use petgraph::stable_graph::{StableGraph}; - use petgraph::graph::{NodeIndex}; - - #get_location - #get_node_properties - #get_nodes - #iterator - } - .into() -} - -fn proto_file_path() -> path::PathBuf { - Path::new(env!("CARGO_MANIFEST_DIR")) - .ancestors() - .nth(2) - .unwrap() - .join("libpg_query/protobuf/pg_query.proto") - .to_path_buf() -} diff --git a/crates/pgt_query_ext_codegen/src/node_iterator.rs b/crates/pgt_query_ext_codegen/src/node_iterator.rs deleted file mode 100644 index 526966df8..000000000 --- a/crates/pgt_query_ext_codegen/src/node_iterator.rs +++ /dev/null @@ -1,123 +0,0 @@ -use pgt_query_proto_parser::{FieldType, Node, ProtoFile}; -use proc_macro2::{Ident, TokenStream}; -use quote::{format_ident, quote}; - -pub fn node_iterator_mod(proto_file: &ProtoFile) -> proc_macro2::TokenStream { - let manual_node_names = manual_node_names(); - - let node_identifiers = node_identifiers(&proto_file.nodes, &manual_node_names); - let node_handlers = node_handlers(&proto_file.nodes, &manual_node_names); - - quote! 
{ - #[derive(Debug, Clone)] - pub struct ChildrenIterator { - stack: VecDeque<(NodeEnum, usize)>, - nodes: Vec, - } - - impl ChildrenIterator { - pub fn new(root: NodeEnum) -> Self { - Self { - stack: VecDeque::from(vec![(root, 0)]), - nodes: Vec::new(), - } - } - } - - impl Iterator for ChildrenIterator { - type Item = NodeEnum; - - fn next(&mut self) -> Option { - if self.stack.is_empty() { - return None; - } - - let (node, depth) = self.stack.pop_front().unwrap(); - - let current_depth = depth + 1; - - match &node { - // `AConst` is the only node with a `one of` property, so we handle it manually - // if you need to handle other nodes manually, add them to the `manual_node_names` function below - NodeEnum::AConst(n) => { - // if n.val.is_some() { - // let new_node = match n.val.as_ref().unwrap() { - // pg_query::protobuf::a_const::Val::Ival(v) => Box::new(NodeEnum::Integer(v.clone())), - // pg_query::protobuf::a_const::Val::Fval(v) => Box::new(NodeEnum::Float(v.clone())), - // pg_query::protobuf::a_const::Val::Boolval(v) => Box::new(NodeEnum::Boolean(v.clone())), - // pg_query::protobuf::a_const::Val::Sval(v) => Box::new(NodeEnum::String(v.clone())), - // pg_query::protobuf::a_const::Val::Bsval(v) => Box::new(NodeEnum::BitString(v.clone())), - // }; - // self.stack.push_back((&new_node, current_depth)); - // self.boxed_nodes.push(new_node); - // } - } - #(NodeEnum::#node_identifiers(n) => {#node_handlers}),*, - }; - - Some(node) - } - } - } -} - -fn manual_node_names() -> Vec<&'static str> { - vec!["AConst"] -} - -fn node_identifiers(nodes: &[Node], exclude_nodes: &[&str]) -> Vec { - nodes - .iter() - .filter(|node| !exclude_nodes.contains(&node.name.as_str())) - .map(|node| format_ident!("{}", &node.name)) - .collect() -} - -fn node_handlers(nodes: &[Node], exclude_nodes: &[&str]) -> Vec { - nodes - .iter() - .filter(|node| !exclude_nodes.contains(&node.name.as_str())) - .map(|node| { - let property_handlers = property_handlers(node); - quote! 
{ - #(#property_handlers)* - } - }) - .collect() -} - -fn property_handlers(node: &Node) -> Vec { - node.fields - .iter() - .filter_map(|field| { - let field_name = format_ident!("{}", field.name.as_str()); - if field.field_type == FieldType::Node && field.repeated { - Some(quote! { - n.#field_name - .iter() - .for_each(|x| if x.node.is_some() { - self.stack.push_back((x.node.as_ref().unwrap().to_owned(), current_depth)); - }); - }) - } else if field.field_type == FieldType::Node && !field.is_one_of { - if field.node_name == Some("Node".to_owned()) { - Some(quote! { - if n.#field_name.is_some() { - self.stack.push_back((n.#field_name.to_owned().unwrap().node.unwrap(), current_depth)); - } - }) - } else { - let enum_variant_name = - format_ident!("{}", field.enum_variant_name.as_ref().unwrap().as_str()); - Some(quote! { - if n.#field_name.is_some() { - self.stack.push_back((NodeEnum::#enum_variant_name(n.#field_name.to_owned().unwrap()), current_depth)); - } - }) - } - } else { - None - } - }) - .collect() -} diff --git a/crates/pgt_query_macros/Cargo.toml b/crates/pgt_query_macros/Cargo.toml new file mode 100644 index 000000000..0fcc52cf6 --- /dev/null +++ b/crates/pgt_query_macros/Cargo.toml @@ -0,0 +1,24 @@ +[package] +authors.workspace = true +categories.workspace = true +description = "" +edition.workspace = true +homepage.workspace = true +keywords.workspace = true +license.workspace = true +name = "pgt_query_macros" +repository.workspace = true +version = "0.0.0" + +[dependencies] +convert_case = { workspace = true } +proc-macro2.workspace = true +prost-reflect = { workspace = true } +protox = { workspace = true } +quote.workspace = true + +[lib] +proc-macro = true + +[build-dependencies] +ureq = "2.9" diff --git a/crates/pgt_query_macros/build.rs b/crates/pgt_query_macros/build.rs new file mode 100644 index 000000000..db83ce86e --- /dev/null +++ b/crates/pgt_query_macros/build.rs @@ -0,0 +1,59 @@ +use std::env; +use std::fs; +use std::io::Write; +use 
std::path::PathBuf; + +// This should match the version used by pgt_query crate +// You can configure this via environment variable PG_QUERY_VERSION if needed +static LIBPG_QUERY_TAG: &str = "17-6.1.0"; + +fn main() -> Result<(), Box> { + // Allow version override via environment variable + let version = env::var("PG_QUERY_VERSION").unwrap_or_else(|_| LIBPG_QUERY_TAG.to_string()); + + // Get the manifest directory (source directory) + let manifest_dir = PathBuf::from(env::var("CARGO_MANIFEST_DIR")?); + let postgres_dir = manifest_dir.join("postgres"); + let proto_filename = format!("{}.proto", version); + let proto_path = postgres_dir.join(&proto_filename); + + // Download proto file if not already present in source directory + if !proto_path.exists() { + println!( + "cargo:warning=Downloading pg_query.proto for libpg_query {} to source directory", + version + ); + + // Create postgres directory if it doesn't exist + fs::create_dir_all(&postgres_dir)?; + + // Download the proto file + let proto_url = format!( + "https://raw.githubusercontent.com/pganalyze/libpg_query/{}/protobuf/pg_query.proto", + version + ); + + let response = ureq::get(&proto_url).call()?; + let proto_content = response.into_string()?; + + // Write proto file to source directory + let mut file = fs::File::create(&proto_path)?; + file.write_all(proto_content.as_bytes())?; + + println!( + "cargo:warning=Successfully downloaded pg_query.proto to {}", + proto_path.display() + ); + } + + // Set environment variable for the proc macro + println!( + "cargo:rustc-env=PG_QUERY_PROTO_PATH={}", + proto_path.display() + ); + + // Tell cargo to rerun if the proto file changes + println!("cargo:rerun-if-changed={}", proto_path.display()); + + Ok(()) +} diff --git a/crates/pgt_query_macros/postgres/17-6.1.0.proto b/crates/pgt_query_macros/postgres/17-6.1.0.proto new file mode 100644 index 000000000..24a8f14cd --- /dev/null +++ b/crates/pgt_query_macros/postgres/17-6.1.0.proto @@ -0,0 +1,4110 @@ +// This file 
is autogenerated by ./scripts/generate_protobuf_and_funcs.rb + +syntax = "proto3"; + +package pg_query; + +message ParseResult { + int32 version = 1; + repeated RawStmt stmts = 2; +} + +message ScanResult { + int32 version = 1; + repeated ScanToken tokens = 2; +} + +message Node { + oneof node { + Alias alias = 1 [json_name="Alias"]; + RangeVar range_var = 2 [json_name="RangeVar"]; + TableFunc table_func = 3 [json_name="TableFunc"]; + IntoClause into_clause = 4 [json_name="IntoClause"]; + Var var = 5 [json_name="Var"]; + Param param = 6 [json_name="Param"]; + Aggref aggref = 7 [json_name="Aggref"]; + GroupingFunc grouping_func = 8 [json_name="GroupingFunc"]; + WindowFunc window_func = 9 [json_name="WindowFunc"]; + WindowFuncRunCondition window_func_run_condition = 10 [json_name="WindowFuncRunCondition"]; + MergeSupportFunc merge_support_func = 11 [json_name="MergeSupportFunc"]; + SubscriptingRef subscripting_ref = 12 [json_name="SubscriptingRef"]; + FuncExpr func_expr = 13 [json_name="FuncExpr"]; + NamedArgExpr named_arg_expr = 14 [json_name="NamedArgExpr"]; + OpExpr op_expr = 15 [json_name="OpExpr"]; + DistinctExpr distinct_expr = 16 [json_name="DistinctExpr"]; + NullIfExpr null_if_expr = 17 [json_name="NullIfExpr"]; + ScalarArrayOpExpr scalar_array_op_expr = 18 [json_name="ScalarArrayOpExpr"]; + BoolExpr bool_expr = 19 [json_name="BoolExpr"]; + SubLink sub_link = 20 [json_name="SubLink"]; + SubPlan sub_plan = 21 [json_name="SubPlan"]; + AlternativeSubPlan alternative_sub_plan = 22 [json_name="AlternativeSubPlan"]; + FieldSelect field_select = 23 [json_name="FieldSelect"]; + FieldStore field_store = 24 [json_name="FieldStore"]; + RelabelType relabel_type = 25 [json_name="RelabelType"]; + CoerceViaIO coerce_via_io = 26 [json_name="CoerceViaIO"]; + ArrayCoerceExpr array_coerce_expr = 27 [json_name="ArrayCoerceExpr"]; + ConvertRowtypeExpr convert_rowtype_expr = 28 [json_name="ConvertRowtypeExpr"]; + CollateExpr collate_expr = 29 [json_name="CollateExpr"]; + CaseExpr 
case_expr = 30 [json_name="CaseExpr"]; + CaseWhen case_when = 31 [json_name="CaseWhen"]; + CaseTestExpr case_test_expr = 32 [json_name="CaseTestExpr"]; + ArrayExpr array_expr = 33 [json_name="ArrayExpr"]; + RowExpr row_expr = 34 [json_name="RowExpr"]; + RowCompareExpr row_compare_expr = 35 [json_name="RowCompareExpr"]; + CoalesceExpr coalesce_expr = 36 [json_name="CoalesceExpr"]; + MinMaxExpr min_max_expr = 37 [json_name="MinMaxExpr"]; + SQLValueFunction sqlvalue_function = 38 [json_name="SQLValueFunction"]; + XmlExpr xml_expr = 39 [json_name="XmlExpr"]; + JsonFormat json_format = 40 [json_name="JsonFormat"]; + JsonReturning json_returning = 41 [json_name="JsonReturning"]; + JsonValueExpr json_value_expr = 42 [json_name="JsonValueExpr"]; + JsonConstructorExpr json_constructor_expr = 43 [json_name="JsonConstructorExpr"]; + JsonIsPredicate json_is_predicate = 44 [json_name="JsonIsPredicate"]; + JsonBehavior json_behavior = 45 [json_name="JsonBehavior"]; + JsonExpr json_expr = 46 [json_name="JsonExpr"]; + JsonTablePath json_table_path = 47 [json_name="JsonTablePath"]; + JsonTablePathScan json_table_path_scan = 48 [json_name="JsonTablePathScan"]; + JsonTableSiblingJoin json_table_sibling_join = 49 [json_name="JsonTableSiblingJoin"]; + NullTest null_test = 50 [json_name="NullTest"]; + BooleanTest boolean_test = 51 [json_name="BooleanTest"]; + MergeAction merge_action = 52 [json_name="MergeAction"]; + CoerceToDomain coerce_to_domain = 53 [json_name="CoerceToDomain"]; + CoerceToDomainValue coerce_to_domain_value = 54 [json_name="CoerceToDomainValue"]; + SetToDefault set_to_default = 55 [json_name="SetToDefault"]; + CurrentOfExpr current_of_expr = 56 [json_name="CurrentOfExpr"]; + NextValueExpr next_value_expr = 57 [json_name="NextValueExpr"]; + InferenceElem inference_elem = 58 [json_name="InferenceElem"]; + TargetEntry target_entry = 59 [json_name="TargetEntry"]; + RangeTblRef range_tbl_ref = 60 [json_name="RangeTblRef"]; + JoinExpr join_expr = 61 [json_name="JoinExpr"]; 
+ FromExpr from_expr = 62 [json_name="FromExpr"]; + OnConflictExpr on_conflict_expr = 63 [json_name="OnConflictExpr"]; + Query query = 64 [json_name="Query"]; + TypeName type_name = 65 [json_name="TypeName"]; + ColumnRef column_ref = 66 [json_name="ColumnRef"]; + ParamRef param_ref = 67 [json_name="ParamRef"]; + A_Expr a_expr = 68 [json_name="A_Expr"]; + TypeCast type_cast = 69 [json_name="TypeCast"]; + CollateClause collate_clause = 70 [json_name="CollateClause"]; + RoleSpec role_spec = 71 [json_name="RoleSpec"]; + FuncCall func_call = 72 [json_name="FuncCall"]; + A_Star a_star = 73 [json_name="A_Star"]; + A_Indices a_indices = 74 [json_name="A_Indices"]; + A_Indirection a_indirection = 75 [json_name="A_Indirection"]; + A_ArrayExpr a_array_expr = 76 [json_name="A_ArrayExpr"]; + ResTarget res_target = 77 [json_name="ResTarget"]; + MultiAssignRef multi_assign_ref = 78 [json_name="MultiAssignRef"]; + SortBy sort_by = 79 [json_name="SortBy"]; + WindowDef window_def = 80 [json_name="WindowDef"]; + RangeSubselect range_subselect = 81 [json_name="RangeSubselect"]; + RangeFunction range_function = 82 [json_name="RangeFunction"]; + RangeTableFunc range_table_func = 83 [json_name="RangeTableFunc"]; + RangeTableFuncCol range_table_func_col = 84 [json_name="RangeTableFuncCol"]; + RangeTableSample range_table_sample = 85 [json_name="RangeTableSample"]; + ColumnDef column_def = 86 [json_name="ColumnDef"]; + TableLikeClause table_like_clause = 87 [json_name="TableLikeClause"]; + IndexElem index_elem = 88 [json_name="IndexElem"]; + DefElem def_elem = 89 [json_name="DefElem"]; + LockingClause locking_clause = 90 [json_name="LockingClause"]; + XmlSerialize xml_serialize = 91 [json_name="XmlSerialize"]; + PartitionElem partition_elem = 92 [json_name="PartitionElem"]; + PartitionSpec partition_spec = 93 [json_name="PartitionSpec"]; + PartitionBoundSpec partition_bound_spec = 94 [json_name="PartitionBoundSpec"]; + PartitionRangeDatum partition_range_datum = 95 
[json_name="PartitionRangeDatum"]; + SinglePartitionSpec single_partition_spec = 96 [json_name="SinglePartitionSpec"]; + PartitionCmd partition_cmd = 97 [json_name="PartitionCmd"]; + RangeTblEntry range_tbl_entry = 98 [json_name="RangeTblEntry"]; + RTEPermissionInfo rtepermission_info = 99 [json_name="RTEPermissionInfo"]; + RangeTblFunction range_tbl_function = 100 [json_name="RangeTblFunction"]; + TableSampleClause table_sample_clause = 101 [json_name="TableSampleClause"]; + WithCheckOption with_check_option = 102 [json_name="WithCheckOption"]; + SortGroupClause sort_group_clause = 103 [json_name="SortGroupClause"]; + GroupingSet grouping_set = 104 [json_name="GroupingSet"]; + WindowClause window_clause = 105 [json_name="WindowClause"]; + RowMarkClause row_mark_clause = 106 [json_name="RowMarkClause"]; + WithClause with_clause = 107 [json_name="WithClause"]; + InferClause infer_clause = 108 [json_name="InferClause"]; + OnConflictClause on_conflict_clause = 109 [json_name="OnConflictClause"]; + CTESearchClause ctesearch_clause = 110 [json_name="CTESearchClause"]; + CTECycleClause ctecycle_clause = 111 [json_name="CTECycleClause"]; + CommonTableExpr common_table_expr = 112 [json_name="CommonTableExpr"]; + MergeWhenClause merge_when_clause = 113 [json_name="MergeWhenClause"]; + TriggerTransition trigger_transition = 114 [json_name="TriggerTransition"]; + JsonOutput json_output = 115 [json_name="JsonOutput"]; + JsonArgument json_argument = 116 [json_name="JsonArgument"]; + JsonFuncExpr json_func_expr = 117 [json_name="JsonFuncExpr"]; + JsonTablePathSpec json_table_path_spec = 118 [json_name="JsonTablePathSpec"]; + JsonTable json_table = 119 [json_name="JsonTable"]; + JsonTableColumn json_table_column = 120 [json_name="JsonTableColumn"]; + JsonKeyValue json_key_value = 121 [json_name="JsonKeyValue"]; + JsonParseExpr json_parse_expr = 122 [json_name="JsonParseExpr"]; + JsonScalarExpr json_scalar_expr = 123 [json_name="JsonScalarExpr"]; + JsonSerializeExpr 
json_serialize_expr = 124 [json_name="JsonSerializeExpr"]; + JsonObjectConstructor json_object_constructor = 125 [json_name="JsonObjectConstructor"]; + JsonArrayConstructor json_array_constructor = 126 [json_name="JsonArrayConstructor"]; + JsonArrayQueryConstructor json_array_query_constructor = 127 [json_name="JsonArrayQueryConstructor"]; + JsonAggConstructor json_agg_constructor = 128 [json_name="JsonAggConstructor"]; + JsonObjectAgg json_object_agg = 129 [json_name="JsonObjectAgg"]; + JsonArrayAgg json_array_agg = 130 [json_name="JsonArrayAgg"]; + RawStmt raw_stmt = 131 [json_name="RawStmt"]; + InsertStmt insert_stmt = 132 [json_name="InsertStmt"]; + DeleteStmt delete_stmt = 133 [json_name="DeleteStmt"]; + UpdateStmt update_stmt = 134 [json_name="UpdateStmt"]; + MergeStmt merge_stmt = 135 [json_name="MergeStmt"]; + SelectStmt select_stmt = 136 [json_name="SelectStmt"]; + SetOperationStmt set_operation_stmt = 137 [json_name="SetOperationStmt"]; + ReturnStmt return_stmt = 138 [json_name="ReturnStmt"]; + PLAssignStmt plassign_stmt = 139 [json_name="PLAssignStmt"]; + CreateSchemaStmt create_schema_stmt = 140 [json_name="CreateSchemaStmt"]; + AlterTableStmt alter_table_stmt = 141 [json_name="AlterTableStmt"]; + ReplicaIdentityStmt replica_identity_stmt = 142 [json_name="ReplicaIdentityStmt"]; + AlterTableCmd alter_table_cmd = 143 [json_name="AlterTableCmd"]; + AlterCollationStmt alter_collation_stmt = 144 [json_name="AlterCollationStmt"]; + AlterDomainStmt alter_domain_stmt = 145 [json_name="AlterDomainStmt"]; + GrantStmt grant_stmt = 146 [json_name="GrantStmt"]; + ObjectWithArgs object_with_args = 147 [json_name="ObjectWithArgs"]; + AccessPriv access_priv = 148 [json_name="AccessPriv"]; + GrantRoleStmt grant_role_stmt = 149 [json_name="GrantRoleStmt"]; + AlterDefaultPrivilegesStmt alter_default_privileges_stmt = 150 [json_name="AlterDefaultPrivilegesStmt"]; + CopyStmt copy_stmt = 151 [json_name="CopyStmt"]; + VariableSetStmt variable_set_stmt = 152 
[json_name="VariableSetStmt"]; + VariableShowStmt variable_show_stmt = 153 [json_name="VariableShowStmt"]; + CreateStmt create_stmt = 154 [json_name="CreateStmt"]; + Constraint constraint = 155 [json_name="Constraint"]; + CreateTableSpaceStmt create_table_space_stmt = 156 [json_name="CreateTableSpaceStmt"]; + DropTableSpaceStmt drop_table_space_stmt = 157 [json_name="DropTableSpaceStmt"]; + AlterTableSpaceOptionsStmt alter_table_space_options_stmt = 158 [json_name="AlterTableSpaceOptionsStmt"]; + AlterTableMoveAllStmt alter_table_move_all_stmt = 159 [json_name="AlterTableMoveAllStmt"]; + CreateExtensionStmt create_extension_stmt = 160 [json_name="CreateExtensionStmt"]; + AlterExtensionStmt alter_extension_stmt = 161 [json_name="AlterExtensionStmt"]; + AlterExtensionContentsStmt alter_extension_contents_stmt = 162 [json_name="AlterExtensionContentsStmt"]; + CreateFdwStmt create_fdw_stmt = 163 [json_name="CreateFdwStmt"]; + AlterFdwStmt alter_fdw_stmt = 164 [json_name="AlterFdwStmt"]; + CreateForeignServerStmt create_foreign_server_stmt = 165 [json_name="CreateForeignServerStmt"]; + AlterForeignServerStmt alter_foreign_server_stmt = 166 [json_name="AlterForeignServerStmt"]; + CreateForeignTableStmt create_foreign_table_stmt = 167 [json_name="CreateForeignTableStmt"]; + CreateUserMappingStmt create_user_mapping_stmt = 168 [json_name="CreateUserMappingStmt"]; + AlterUserMappingStmt alter_user_mapping_stmt = 169 [json_name="AlterUserMappingStmt"]; + DropUserMappingStmt drop_user_mapping_stmt = 170 [json_name="DropUserMappingStmt"]; + ImportForeignSchemaStmt import_foreign_schema_stmt = 171 [json_name="ImportForeignSchemaStmt"]; + CreatePolicyStmt create_policy_stmt = 172 [json_name="CreatePolicyStmt"]; + AlterPolicyStmt alter_policy_stmt = 173 [json_name="AlterPolicyStmt"]; + CreateAmStmt create_am_stmt = 174 [json_name="CreateAmStmt"]; + CreateTrigStmt create_trig_stmt = 175 [json_name="CreateTrigStmt"]; + CreateEventTrigStmt create_event_trig_stmt = 176 
[json_name="CreateEventTrigStmt"]; + AlterEventTrigStmt alter_event_trig_stmt = 177 [json_name="AlterEventTrigStmt"]; + CreatePLangStmt create_plang_stmt = 178 [json_name="CreatePLangStmt"]; + CreateRoleStmt create_role_stmt = 179 [json_name="CreateRoleStmt"]; + AlterRoleStmt alter_role_stmt = 180 [json_name="AlterRoleStmt"]; + AlterRoleSetStmt alter_role_set_stmt = 181 [json_name="AlterRoleSetStmt"]; + DropRoleStmt drop_role_stmt = 182 [json_name="DropRoleStmt"]; + CreateSeqStmt create_seq_stmt = 183 [json_name="CreateSeqStmt"]; + AlterSeqStmt alter_seq_stmt = 184 [json_name="AlterSeqStmt"]; + DefineStmt define_stmt = 185 [json_name="DefineStmt"]; + CreateDomainStmt create_domain_stmt = 186 [json_name="CreateDomainStmt"]; + CreateOpClassStmt create_op_class_stmt = 187 [json_name="CreateOpClassStmt"]; + CreateOpClassItem create_op_class_item = 188 [json_name="CreateOpClassItem"]; + CreateOpFamilyStmt create_op_family_stmt = 189 [json_name="CreateOpFamilyStmt"]; + AlterOpFamilyStmt alter_op_family_stmt = 190 [json_name="AlterOpFamilyStmt"]; + DropStmt drop_stmt = 191 [json_name="DropStmt"]; + TruncateStmt truncate_stmt = 192 [json_name="TruncateStmt"]; + CommentStmt comment_stmt = 193 [json_name="CommentStmt"]; + SecLabelStmt sec_label_stmt = 194 [json_name="SecLabelStmt"]; + DeclareCursorStmt declare_cursor_stmt = 195 [json_name="DeclareCursorStmt"]; + ClosePortalStmt close_portal_stmt = 196 [json_name="ClosePortalStmt"]; + FetchStmt fetch_stmt = 197 [json_name="FetchStmt"]; + IndexStmt index_stmt = 198 [json_name="IndexStmt"]; + CreateStatsStmt create_stats_stmt = 199 [json_name="CreateStatsStmt"]; + StatsElem stats_elem = 200 [json_name="StatsElem"]; + AlterStatsStmt alter_stats_stmt = 201 [json_name="AlterStatsStmt"]; + CreateFunctionStmt create_function_stmt = 202 [json_name="CreateFunctionStmt"]; + FunctionParameter function_parameter = 203 [json_name="FunctionParameter"]; + AlterFunctionStmt alter_function_stmt = 204 [json_name="AlterFunctionStmt"]; + DoStmt 
do_stmt = 205 [json_name="DoStmt"]; + InlineCodeBlock inline_code_block = 206 [json_name="InlineCodeBlock"]; + CallStmt call_stmt = 207 [json_name="CallStmt"]; + CallContext call_context = 208 [json_name="CallContext"]; + RenameStmt rename_stmt = 209 [json_name="RenameStmt"]; + AlterObjectDependsStmt alter_object_depends_stmt = 210 [json_name="AlterObjectDependsStmt"]; + AlterObjectSchemaStmt alter_object_schema_stmt = 211 [json_name="AlterObjectSchemaStmt"]; + AlterOwnerStmt alter_owner_stmt = 212 [json_name="AlterOwnerStmt"]; + AlterOperatorStmt alter_operator_stmt = 213 [json_name="AlterOperatorStmt"]; + AlterTypeStmt alter_type_stmt = 214 [json_name="AlterTypeStmt"]; + RuleStmt rule_stmt = 215 [json_name="RuleStmt"]; + NotifyStmt notify_stmt = 216 [json_name="NotifyStmt"]; + ListenStmt listen_stmt = 217 [json_name="ListenStmt"]; + UnlistenStmt unlisten_stmt = 218 [json_name="UnlistenStmt"]; + TransactionStmt transaction_stmt = 219 [json_name="TransactionStmt"]; + CompositeTypeStmt composite_type_stmt = 220 [json_name="CompositeTypeStmt"]; + CreateEnumStmt create_enum_stmt = 221 [json_name="CreateEnumStmt"]; + CreateRangeStmt create_range_stmt = 222 [json_name="CreateRangeStmt"]; + AlterEnumStmt alter_enum_stmt = 223 [json_name="AlterEnumStmt"]; + ViewStmt view_stmt = 224 [json_name="ViewStmt"]; + LoadStmt load_stmt = 225 [json_name="LoadStmt"]; + CreatedbStmt createdb_stmt = 226 [json_name="CreatedbStmt"]; + AlterDatabaseStmt alter_database_stmt = 227 [json_name="AlterDatabaseStmt"]; + AlterDatabaseRefreshCollStmt alter_database_refresh_coll_stmt = 228 [json_name="AlterDatabaseRefreshCollStmt"]; + AlterDatabaseSetStmt alter_database_set_stmt = 229 [json_name="AlterDatabaseSetStmt"]; + DropdbStmt dropdb_stmt = 230 [json_name="DropdbStmt"]; + AlterSystemStmt alter_system_stmt = 231 [json_name="AlterSystemStmt"]; + ClusterStmt cluster_stmt = 232 [json_name="ClusterStmt"]; + VacuumStmt vacuum_stmt = 233 [json_name="VacuumStmt"]; + VacuumRelation vacuum_relation = 
234 [json_name="VacuumRelation"]; + ExplainStmt explain_stmt = 235 [json_name="ExplainStmt"]; + CreateTableAsStmt create_table_as_stmt = 236 [json_name="CreateTableAsStmt"]; + RefreshMatViewStmt refresh_mat_view_stmt = 237 [json_name="RefreshMatViewStmt"]; + CheckPointStmt check_point_stmt = 238 [json_name="CheckPointStmt"]; + DiscardStmt discard_stmt = 239 [json_name="DiscardStmt"]; + LockStmt lock_stmt = 240 [json_name="LockStmt"]; + ConstraintsSetStmt constraints_set_stmt = 241 [json_name="ConstraintsSetStmt"]; + ReindexStmt reindex_stmt = 242 [json_name="ReindexStmt"]; + CreateConversionStmt create_conversion_stmt = 243 [json_name="CreateConversionStmt"]; + CreateCastStmt create_cast_stmt = 244 [json_name="CreateCastStmt"]; + CreateTransformStmt create_transform_stmt = 245 [json_name="CreateTransformStmt"]; + PrepareStmt prepare_stmt = 246 [json_name="PrepareStmt"]; + ExecuteStmt execute_stmt = 247 [json_name="ExecuteStmt"]; + DeallocateStmt deallocate_stmt = 248 [json_name="DeallocateStmt"]; + DropOwnedStmt drop_owned_stmt = 249 [json_name="DropOwnedStmt"]; + ReassignOwnedStmt reassign_owned_stmt = 250 [json_name="ReassignOwnedStmt"]; + AlterTSDictionaryStmt alter_tsdictionary_stmt = 251 [json_name="AlterTSDictionaryStmt"]; + AlterTSConfigurationStmt alter_tsconfiguration_stmt = 252 [json_name="AlterTSConfigurationStmt"]; + PublicationTable publication_table = 253 [json_name="PublicationTable"]; + PublicationObjSpec publication_obj_spec = 254 [json_name="PublicationObjSpec"]; + CreatePublicationStmt create_publication_stmt = 255 [json_name="CreatePublicationStmt"]; + AlterPublicationStmt alter_publication_stmt = 256 [json_name="AlterPublicationStmt"]; + CreateSubscriptionStmt create_subscription_stmt = 257 [json_name="CreateSubscriptionStmt"]; + AlterSubscriptionStmt alter_subscription_stmt = 258 [json_name="AlterSubscriptionStmt"]; + DropSubscriptionStmt drop_subscription_stmt = 259 [json_name="DropSubscriptionStmt"]; + Integer integer = 260 
[json_name="Integer"]; + Float float = 261 [json_name="Float"]; + Boolean boolean = 262 [json_name="Boolean"]; + String string = 263 [json_name="String"]; + BitString bit_string = 264 [json_name="BitString"]; + List list = 265 [json_name="List"]; + IntList int_list = 266 [json_name="IntList"]; + OidList oid_list = 267 [json_name="OidList"]; + A_Const a_const = 268 [json_name="A_Const"]; + } +} + +message Integer +{ + int32 ival = 1; /* machine integer */ +} + +message Float +{ + string fval = 1; /* string */ +} + +message Boolean +{ + bool boolval = 1; +} + +message String +{ + string sval = 1; /* string */ +} + +message BitString +{ + string bsval = 1; /* string */ +} + +message List +{ + repeated Node items = 1; +} + +message OidList +{ + repeated Node items = 1; +} + +message IntList +{ + repeated Node items = 1; +} + +message A_Const +{ + oneof val { + Integer ival = 1; + Float fval = 2; + Boolean boolval = 3; + String sval = 4; + BitString bsval = 5; + } + bool isnull = 10; + int32 location = 11; +} + +message Alias +{ + string aliasname = 1 [json_name="aliasname"]; + repeated Node colnames = 2 [json_name="colnames"]; +} + +message RangeVar +{ + string catalogname = 1 [json_name="catalogname"]; + string schemaname = 2 [json_name="schemaname"]; + string relname = 3 [json_name="relname"]; + bool inh = 4 [json_name="inh"]; + string relpersistence = 5 [json_name="relpersistence"]; + Alias alias = 6 [json_name="alias"]; + int32 location = 7 [json_name="location"]; +} + +message TableFunc +{ + TableFuncType functype = 1 [json_name="functype"]; + repeated Node ns_uris = 2 [json_name="ns_uris"]; + repeated Node ns_names = 3 [json_name="ns_names"]; + Node docexpr = 4 [json_name="docexpr"]; + Node rowexpr = 5 [json_name="rowexpr"]; + repeated Node colnames = 6 [json_name="colnames"]; + repeated Node coltypes = 7 [json_name="coltypes"]; + repeated Node coltypmods = 8 [json_name="coltypmods"]; + repeated Node colcollations = 9 [json_name="colcollations"]; + repeated Node 
colexprs = 10 [json_name="colexprs"]; + repeated Node coldefexprs = 11 [json_name="coldefexprs"]; + repeated Node colvalexprs = 12 [json_name="colvalexprs"]; + repeated Node passingvalexprs = 13 [json_name="passingvalexprs"]; + repeated uint64 notnulls = 14 [json_name="notnulls"]; + Node plan = 15 [json_name="plan"]; + int32 ordinalitycol = 16 [json_name="ordinalitycol"]; + int32 location = 17 [json_name="location"]; +} + +message IntoClause +{ + RangeVar rel = 1 [json_name="rel"]; + repeated Node col_names = 2 [json_name="colNames"]; + string access_method = 3 [json_name="accessMethod"]; + repeated Node options = 4 [json_name="options"]; + OnCommitAction on_commit = 5 [json_name="onCommit"]; + string table_space_name = 6 [json_name="tableSpaceName"]; + Node view_query = 7 [json_name="viewQuery"]; + bool skip_data = 8 [json_name="skipData"]; +} + +message Var +{ + Node xpr = 1 [json_name="xpr"]; + int32 varno = 2 [json_name="varno"]; + int32 varattno = 3 [json_name="varattno"]; + uint32 vartype = 4 [json_name="vartype"]; + int32 vartypmod = 5 [json_name="vartypmod"]; + uint32 varcollid = 6 [json_name="varcollid"]; + repeated uint64 varnullingrels = 7 [json_name="varnullingrels"]; + uint32 varlevelsup = 8 [json_name="varlevelsup"]; + int32 location = 9 [json_name="location"]; +} + +message Param +{ + Node xpr = 1 [json_name="xpr"]; + ParamKind paramkind = 2 [json_name="paramkind"]; + int32 paramid = 3 [json_name="paramid"]; + uint32 paramtype = 4 [json_name="paramtype"]; + int32 paramtypmod = 5 [json_name="paramtypmod"]; + uint32 paramcollid = 6 [json_name="paramcollid"]; + int32 location = 7 [json_name="location"]; +} + +message Aggref +{ + Node xpr = 1 [json_name="xpr"]; + uint32 aggfnoid = 2 [json_name="aggfnoid"]; + uint32 aggtype = 3 [json_name="aggtype"]; + uint32 aggcollid = 4 [json_name="aggcollid"]; + uint32 inputcollid = 5 [json_name="inputcollid"]; + repeated Node aggargtypes = 6 [json_name="aggargtypes"]; + repeated Node aggdirectargs = 7 
[json_name="aggdirectargs"]; + repeated Node args = 8 [json_name="args"]; + repeated Node aggorder = 9 [json_name="aggorder"]; + repeated Node aggdistinct = 10 [json_name="aggdistinct"]; + Node aggfilter = 11 [json_name="aggfilter"]; + bool aggstar = 12 [json_name="aggstar"]; + bool aggvariadic = 13 [json_name="aggvariadic"]; + string aggkind = 14 [json_name="aggkind"]; + uint32 agglevelsup = 15 [json_name="agglevelsup"]; + AggSplit aggsplit = 16 [json_name="aggsplit"]; + int32 aggno = 17 [json_name="aggno"]; + int32 aggtransno = 18 [json_name="aggtransno"]; + int32 location = 19 [json_name="location"]; +} + +message GroupingFunc +{ + Node xpr = 1 [json_name="xpr"]; + repeated Node args = 2 [json_name="args"]; + repeated Node refs = 3 [json_name="refs"]; + uint32 agglevelsup = 4 [json_name="agglevelsup"]; + int32 location = 5 [json_name="location"]; +} + +message WindowFunc +{ + Node xpr = 1 [json_name="xpr"]; + uint32 winfnoid = 2 [json_name="winfnoid"]; + uint32 wintype = 3 [json_name="wintype"]; + uint32 wincollid = 4 [json_name="wincollid"]; + uint32 inputcollid = 5 [json_name="inputcollid"]; + repeated Node args = 6 [json_name="args"]; + Node aggfilter = 7 [json_name="aggfilter"]; + repeated Node run_condition = 8 [json_name="runCondition"]; + uint32 winref = 9 [json_name="winref"]; + bool winstar = 10 [json_name="winstar"]; + bool winagg = 11 [json_name="winagg"]; + int32 location = 12 [json_name="location"]; +} + +message WindowFuncRunCondition +{ + Node xpr = 1 [json_name="xpr"]; + uint32 opno = 2 [json_name="opno"]; + uint32 inputcollid = 3 [json_name="inputcollid"]; + bool wfunc_left = 4 [json_name="wfunc_left"]; + Node arg = 5 [json_name="arg"]; +} + +message MergeSupportFunc +{ + Node xpr = 1 [json_name="xpr"]; + uint32 msftype = 2 [json_name="msftype"]; + uint32 msfcollid = 3 [json_name="msfcollid"]; + int32 location = 4 [json_name="location"]; +} + +message SubscriptingRef +{ + Node xpr = 1 [json_name="xpr"]; + uint32 refcontainertype = 2 
[json_name="refcontainertype"]; + uint32 refelemtype = 3 [json_name="refelemtype"]; + uint32 refrestype = 4 [json_name="refrestype"]; + int32 reftypmod = 5 [json_name="reftypmod"]; + uint32 refcollid = 6 [json_name="refcollid"]; + repeated Node refupperindexpr = 7 [json_name="refupperindexpr"]; + repeated Node reflowerindexpr = 8 [json_name="reflowerindexpr"]; + Node refexpr = 9 [json_name="refexpr"]; + Node refassgnexpr = 10 [json_name="refassgnexpr"]; +} + +message FuncExpr +{ + Node xpr = 1 [json_name="xpr"]; + uint32 funcid = 2 [json_name="funcid"]; + uint32 funcresulttype = 3 [json_name="funcresulttype"]; + bool funcretset = 4 [json_name="funcretset"]; + bool funcvariadic = 5 [json_name="funcvariadic"]; + CoercionForm funcformat = 6 [json_name="funcformat"]; + uint32 funccollid = 7 [json_name="funccollid"]; + uint32 inputcollid = 8 [json_name="inputcollid"]; + repeated Node args = 9 [json_name="args"]; + int32 location = 10 [json_name="location"]; +} + +message NamedArgExpr +{ + Node xpr = 1 [json_name="xpr"]; + Node arg = 2 [json_name="arg"]; + string name = 3 [json_name="name"]; + int32 argnumber = 4 [json_name="argnumber"]; + int32 location = 5 [json_name="location"]; +} + +message OpExpr +{ + Node xpr = 1 [json_name="xpr"]; + uint32 opno = 2 [json_name="opno"]; + uint32 opresulttype = 3 [json_name="opresulttype"]; + bool opretset = 4 [json_name="opretset"]; + uint32 opcollid = 5 [json_name="opcollid"]; + uint32 inputcollid = 6 [json_name="inputcollid"]; + repeated Node args = 7 [json_name="args"]; + int32 location = 8 [json_name="location"]; +} + +message DistinctExpr +{ + Node xpr = 1 [json_name="xpr"]; + uint32 opno = 2 [json_name="opno"]; + uint32 opresulttype = 3 [json_name="opresulttype"]; + bool opretset = 4 [json_name="opretset"]; + uint32 opcollid = 5 [json_name="opcollid"]; + uint32 inputcollid = 6 [json_name="inputcollid"]; + repeated Node args = 7 [json_name="args"]; + int32 location = 8 [json_name="location"]; +} + +message NullIfExpr +{ + Node 
xpr = 1 [json_name="xpr"]; + uint32 opno = 2 [json_name="opno"]; + uint32 opresulttype = 3 [json_name="opresulttype"]; + bool opretset = 4 [json_name="opretset"]; + uint32 opcollid = 5 [json_name="opcollid"]; + uint32 inputcollid = 6 [json_name="inputcollid"]; + repeated Node args = 7 [json_name="args"]; + int32 location = 8 [json_name="location"]; +} + +message ScalarArrayOpExpr +{ + Node xpr = 1 [json_name="xpr"]; + uint32 opno = 2 [json_name="opno"]; + bool use_or = 3 [json_name="useOr"]; + uint32 inputcollid = 4 [json_name="inputcollid"]; + repeated Node args = 5 [json_name="args"]; + int32 location = 6 [json_name="location"]; +} + +message BoolExpr +{ + Node xpr = 1 [json_name="xpr"]; + BoolExprType boolop = 2 [json_name="boolop"]; + repeated Node args = 3 [json_name="args"]; + int32 location = 4 [json_name="location"]; +} + +message SubLink +{ + Node xpr = 1 [json_name="xpr"]; + SubLinkType sub_link_type = 2 [json_name="subLinkType"]; + int32 sub_link_id = 3 [json_name="subLinkId"]; + Node testexpr = 4 [json_name="testexpr"]; + repeated Node oper_name = 5 [json_name="operName"]; + Node subselect = 6 [json_name="subselect"]; + int32 location = 7 [json_name="location"]; +} + +message SubPlan +{ + Node xpr = 1 [json_name="xpr"]; + SubLinkType sub_link_type = 2 [json_name="subLinkType"]; + Node testexpr = 3 [json_name="testexpr"]; + repeated Node param_ids = 4 [json_name="paramIds"]; + int32 plan_id = 5 [json_name="plan_id"]; + string plan_name = 6 [json_name="plan_name"]; + uint32 first_col_type = 7 [json_name="firstColType"]; + int32 first_col_typmod = 8 [json_name="firstColTypmod"]; + uint32 first_col_collation = 9 [json_name="firstColCollation"]; + bool use_hash_table = 10 [json_name="useHashTable"]; + bool unknown_eq_false = 11 [json_name="unknownEqFalse"]; + bool parallel_safe = 12 [json_name="parallel_safe"]; + repeated Node set_param = 13 [json_name="setParam"]; + repeated Node par_param = 14 [json_name="parParam"]; + repeated Node args = 15 
[json_name="args"]; + double startup_cost = 16 [json_name="startup_cost"]; + double per_call_cost = 17 [json_name="per_call_cost"]; +} + +message AlternativeSubPlan +{ + Node xpr = 1 [json_name="xpr"]; + repeated Node subplans = 2 [json_name="subplans"]; +} + +message FieldSelect +{ + Node xpr = 1 [json_name="xpr"]; + Node arg = 2 [json_name="arg"]; + int32 fieldnum = 3 [json_name="fieldnum"]; + uint32 resulttype = 4 [json_name="resulttype"]; + int32 resulttypmod = 5 [json_name="resulttypmod"]; + uint32 resultcollid = 6 [json_name="resultcollid"]; +} + +message FieldStore +{ + Node xpr = 1 [json_name="xpr"]; + Node arg = 2 [json_name="arg"]; + repeated Node newvals = 3 [json_name="newvals"]; + repeated Node fieldnums = 4 [json_name="fieldnums"]; + uint32 resulttype = 5 [json_name="resulttype"]; +} + +message RelabelType +{ + Node xpr = 1 [json_name="xpr"]; + Node arg = 2 [json_name="arg"]; + uint32 resulttype = 3 [json_name="resulttype"]; + int32 resulttypmod = 4 [json_name="resulttypmod"]; + uint32 resultcollid = 5 [json_name="resultcollid"]; + CoercionForm relabelformat = 6 [json_name="relabelformat"]; + int32 location = 7 [json_name="location"]; +} + +message CoerceViaIO +{ + Node xpr = 1 [json_name="xpr"]; + Node arg = 2 [json_name="arg"]; + uint32 resulttype = 3 [json_name="resulttype"]; + uint32 resultcollid = 4 [json_name="resultcollid"]; + CoercionForm coerceformat = 5 [json_name="coerceformat"]; + int32 location = 6 [json_name="location"]; +} + +message ArrayCoerceExpr +{ + Node xpr = 1 [json_name="xpr"]; + Node arg = 2 [json_name="arg"]; + Node elemexpr = 3 [json_name="elemexpr"]; + uint32 resulttype = 4 [json_name="resulttype"]; + int32 resulttypmod = 5 [json_name="resulttypmod"]; + uint32 resultcollid = 6 [json_name="resultcollid"]; + CoercionForm coerceformat = 7 [json_name="coerceformat"]; + int32 location = 8 [json_name="location"]; +} + +message ConvertRowtypeExpr +{ + Node xpr = 1 [json_name="xpr"]; + Node arg = 2 [json_name="arg"]; + uint32 
resulttype = 3 [json_name="resulttype"]; + CoercionForm convertformat = 4 [json_name="convertformat"]; + int32 location = 5 [json_name="location"]; +} + +message CollateExpr +{ + Node xpr = 1 [json_name="xpr"]; + Node arg = 2 [json_name="arg"]; + uint32 coll_oid = 3 [json_name="collOid"]; + int32 location = 4 [json_name="location"]; +} + +message CaseExpr +{ + Node xpr = 1 [json_name="xpr"]; + uint32 casetype = 2 [json_name="casetype"]; + uint32 casecollid = 3 [json_name="casecollid"]; + Node arg = 4 [json_name="arg"]; + repeated Node args = 5 [json_name="args"]; + Node defresult = 6 [json_name="defresult"]; + int32 location = 7 [json_name="location"]; +} + +message CaseWhen +{ + Node xpr = 1 [json_name="xpr"]; + Node expr = 2 [json_name="expr"]; + Node result = 3 [json_name="result"]; + int32 location = 4 [json_name="location"]; +} + +message CaseTestExpr +{ + Node xpr = 1 [json_name="xpr"]; + uint32 type_id = 2 [json_name="typeId"]; + int32 type_mod = 3 [json_name="typeMod"]; + uint32 collation = 4 [json_name="collation"]; +} + +message ArrayExpr +{ + Node xpr = 1 [json_name="xpr"]; + uint32 array_typeid = 2 [json_name="array_typeid"]; + uint32 array_collid = 3 [json_name="array_collid"]; + uint32 element_typeid = 4 [json_name="element_typeid"]; + repeated Node elements = 5 [json_name="elements"]; + bool multidims = 6 [json_name="multidims"]; + int32 location = 7 [json_name="location"]; +} + +message RowExpr +{ + Node xpr = 1 [json_name="xpr"]; + repeated Node args = 2 [json_name="args"]; + uint32 row_typeid = 3 [json_name="row_typeid"]; + CoercionForm row_format = 4 [json_name="row_format"]; + repeated Node colnames = 5 [json_name="colnames"]; + int32 location = 6 [json_name="location"]; +} + +message RowCompareExpr +{ + Node xpr = 1 [json_name="xpr"]; + RowCompareType rctype = 2 [json_name="rctype"]; + repeated Node opnos = 3 [json_name="opnos"]; + repeated Node opfamilies = 4 [json_name="opfamilies"]; + repeated Node inputcollids = 5 
[json_name="inputcollids"]; + repeated Node largs = 6 [json_name="largs"]; + repeated Node rargs = 7 [json_name="rargs"]; +} + +message CoalesceExpr +{ + Node xpr = 1 [json_name="xpr"]; + uint32 coalescetype = 2 [json_name="coalescetype"]; + uint32 coalescecollid = 3 [json_name="coalescecollid"]; + repeated Node args = 4 [json_name="args"]; + int32 location = 5 [json_name="location"]; +} + +message MinMaxExpr +{ + Node xpr = 1 [json_name="xpr"]; + uint32 minmaxtype = 2 [json_name="minmaxtype"]; + uint32 minmaxcollid = 3 [json_name="minmaxcollid"]; + uint32 inputcollid = 4 [json_name="inputcollid"]; + MinMaxOp op = 5 [json_name="op"]; + repeated Node args = 6 [json_name="args"]; + int32 location = 7 [json_name="location"]; +} + +message SQLValueFunction +{ + Node xpr = 1 [json_name="xpr"]; + SQLValueFunctionOp op = 2 [json_name="op"]; + uint32 type = 3 [json_name="type"]; + int32 typmod = 4 [json_name="typmod"]; + int32 location = 5 [json_name="location"]; +} + +message XmlExpr +{ + Node xpr = 1 [json_name="xpr"]; + XmlExprOp op = 2 [json_name="op"]; + string name = 3 [json_name="name"]; + repeated Node named_args = 4 [json_name="named_args"]; + repeated Node arg_names = 5 [json_name="arg_names"]; + repeated Node args = 6 [json_name="args"]; + XmlOptionType xmloption = 7 [json_name="xmloption"]; + bool indent = 8 [json_name="indent"]; + uint32 type = 9 [json_name="type"]; + int32 typmod = 10 [json_name="typmod"]; + int32 location = 11 [json_name="location"]; +} + +message JsonFormat +{ + JsonFormatType format_type = 1 [json_name="format_type"]; + JsonEncoding encoding = 2 [json_name="encoding"]; + int32 location = 3 [json_name="location"]; +} + +message JsonReturning +{ + JsonFormat format = 1 [json_name="format"]; + uint32 typid = 2 [json_name="typid"]; + int32 typmod = 3 [json_name="typmod"]; +} + +message JsonValueExpr +{ + Node raw_expr = 1 [json_name="raw_expr"]; + Node formatted_expr = 2 [json_name="formatted_expr"]; + JsonFormat format = 3 
[json_name="format"]; +} + +message JsonConstructorExpr +{ + Node xpr = 1 [json_name="xpr"]; + JsonConstructorType type = 2 [json_name="type"]; + repeated Node args = 3 [json_name="args"]; + Node func = 4 [json_name="func"]; + Node coercion = 5 [json_name="coercion"]; + JsonReturning returning = 6 [json_name="returning"]; + bool absent_on_null = 7 [json_name="absent_on_null"]; + bool unique = 8 [json_name="unique"]; + int32 location = 9 [json_name="location"]; +} + +message JsonIsPredicate +{ + Node expr = 1 [json_name="expr"]; + JsonFormat format = 2 [json_name="format"]; + JsonValueType item_type = 3 [json_name="item_type"]; + bool unique_keys = 4 [json_name="unique_keys"]; + int32 location = 5 [json_name="location"]; +} + +message JsonBehavior +{ + JsonBehaviorType btype = 1 [json_name="btype"]; + Node expr = 2 [json_name="expr"]; + bool coerce = 3 [json_name="coerce"]; + int32 location = 4 [json_name="location"]; +} + +message JsonExpr +{ + Node xpr = 1 [json_name="xpr"]; + JsonExprOp op = 2 [json_name="op"]; + string column_name = 3 [json_name="column_name"]; + Node formatted_expr = 4 [json_name="formatted_expr"]; + JsonFormat format = 5 [json_name="format"]; + Node path_spec = 6 [json_name="path_spec"]; + JsonReturning returning = 7 [json_name="returning"]; + repeated Node passing_names = 8 [json_name="passing_names"]; + repeated Node passing_values = 9 [json_name="passing_values"]; + JsonBehavior on_empty = 10 [json_name="on_empty"]; + JsonBehavior on_error = 11 [json_name="on_error"]; + bool use_io_coercion = 12 [json_name="use_io_coercion"]; + bool use_json_coercion = 13 [json_name="use_json_coercion"]; + JsonWrapper wrapper = 14 [json_name="wrapper"]; + bool omit_quotes = 15 [json_name="omit_quotes"]; + uint32 collation = 16 [json_name="collation"]; + int32 location = 17 [json_name="location"]; +} + +message JsonTablePath +{ + string name = 1 [json_name="name"]; +} + +message JsonTablePathScan +{ + Node plan = 1 [json_name="plan"]; + JsonTablePath path = 
2 [json_name="path"]; + bool error_on_error = 3 [json_name="errorOnError"]; + Node child = 4 [json_name="child"]; + int32 col_min = 5 [json_name="colMin"]; + int32 col_max = 6 [json_name="colMax"]; +} + +message JsonTableSiblingJoin +{ + Node plan = 1 [json_name="plan"]; + Node lplan = 2 [json_name="lplan"]; + Node rplan = 3 [json_name="rplan"]; +} + +message NullTest +{ + Node xpr = 1 [json_name="xpr"]; + Node arg = 2 [json_name="arg"]; + NullTestType nulltesttype = 3 [json_name="nulltesttype"]; + bool argisrow = 4 [json_name="argisrow"]; + int32 location = 5 [json_name="location"]; +} + +message BooleanTest +{ + Node xpr = 1 [json_name="xpr"]; + Node arg = 2 [json_name="arg"]; + BoolTestType booltesttype = 3 [json_name="booltesttype"]; + int32 location = 4 [json_name="location"]; +} + +message MergeAction +{ + MergeMatchKind match_kind = 1 [json_name="matchKind"]; + CmdType command_type = 2 [json_name="commandType"]; + OverridingKind override = 3 [json_name="override"]; + Node qual = 4 [json_name="qual"]; + repeated Node target_list = 5 [json_name="targetList"]; + repeated Node update_colnos = 6 [json_name="updateColnos"]; +} + +message CoerceToDomain +{ + Node xpr = 1 [json_name="xpr"]; + Node arg = 2 [json_name="arg"]; + uint32 resulttype = 3 [json_name="resulttype"]; + int32 resulttypmod = 4 [json_name="resulttypmod"]; + uint32 resultcollid = 5 [json_name="resultcollid"]; + CoercionForm coercionformat = 6 [json_name="coercionformat"]; + int32 location = 7 [json_name="location"]; +} + +message CoerceToDomainValue +{ + Node xpr = 1 [json_name="xpr"]; + uint32 type_id = 2 [json_name="typeId"]; + int32 type_mod = 3 [json_name="typeMod"]; + uint32 collation = 4 [json_name="collation"]; + int32 location = 5 [json_name="location"]; +} + +message SetToDefault +{ + Node xpr = 1 [json_name="xpr"]; + uint32 type_id = 2 [json_name="typeId"]; + int32 type_mod = 3 [json_name="typeMod"]; + uint32 collation = 4 [json_name="collation"]; + int32 location = 5 
[json_name="location"]; +} + +message CurrentOfExpr +{ + Node xpr = 1 [json_name="xpr"]; + uint32 cvarno = 2 [json_name="cvarno"]; + string cursor_name = 3 [json_name="cursor_name"]; + int32 cursor_param = 4 [json_name="cursor_param"]; +} + +message NextValueExpr +{ + Node xpr = 1 [json_name="xpr"]; + uint32 seqid = 2 [json_name="seqid"]; + uint32 type_id = 3 [json_name="typeId"]; +} + +message InferenceElem +{ + Node xpr = 1 [json_name="xpr"]; + Node expr = 2 [json_name="expr"]; + uint32 infercollid = 3 [json_name="infercollid"]; + uint32 inferopclass = 4 [json_name="inferopclass"]; +} + +message TargetEntry +{ + Node xpr = 1 [json_name="xpr"]; + Node expr = 2 [json_name="expr"]; + int32 resno = 3 [json_name="resno"]; + string resname = 4 [json_name="resname"]; + uint32 ressortgroupref = 5 [json_name="ressortgroupref"]; + uint32 resorigtbl = 6 [json_name="resorigtbl"]; + int32 resorigcol = 7 [json_name="resorigcol"]; + bool resjunk = 8 [json_name="resjunk"]; +} + +message RangeTblRef +{ + int32 rtindex = 1 [json_name="rtindex"]; +} + +message JoinExpr +{ + JoinType jointype = 1 [json_name="jointype"]; + bool is_natural = 2 [json_name="isNatural"]; + Node larg = 3 [json_name="larg"]; + Node rarg = 4 [json_name="rarg"]; + repeated Node using_clause = 5 [json_name="usingClause"]; + Alias join_using_alias = 6 [json_name="join_using_alias"]; + Node quals = 7 [json_name="quals"]; + Alias alias = 8 [json_name="alias"]; + int32 rtindex = 9 [json_name="rtindex"]; +} + +message FromExpr +{ + repeated Node fromlist = 1 [json_name="fromlist"]; + Node quals = 2 [json_name="quals"]; +} + +message OnConflictExpr +{ + OnConflictAction action = 1 [json_name="action"]; + repeated Node arbiter_elems = 2 [json_name="arbiterElems"]; + Node arbiter_where = 3 [json_name="arbiterWhere"]; + uint32 constraint = 4 [json_name="constraint"]; + repeated Node on_conflict_set = 5 [json_name="onConflictSet"]; + Node on_conflict_where = 6 [json_name="onConflictWhere"]; + int32 excl_rel_index = 7 
[json_name="exclRelIndex"]; + repeated Node excl_rel_tlist = 8 [json_name="exclRelTlist"]; +} + +message Query +{ + CmdType command_type = 1 [json_name="commandType"]; + QuerySource query_source = 2 [json_name="querySource"]; + bool can_set_tag = 3 [json_name="canSetTag"]; + Node utility_stmt = 4 [json_name="utilityStmt"]; + int32 result_relation = 5 [json_name="resultRelation"]; + bool has_aggs = 6 [json_name="hasAggs"]; + bool has_window_funcs = 7 [json_name="hasWindowFuncs"]; + bool has_target_srfs = 8 [json_name="hasTargetSRFs"]; + bool has_sub_links = 9 [json_name="hasSubLinks"]; + bool has_distinct_on = 10 [json_name="hasDistinctOn"]; + bool has_recursive = 11 [json_name="hasRecursive"]; + bool has_modifying_cte = 12 [json_name="hasModifyingCTE"]; + bool has_for_update = 13 [json_name="hasForUpdate"]; + bool has_row_security = 14 [json_name="hasRowSecurity"]; + bool is_return = 15 [json_name="isReturn"]; + repeated Node cte_list = 16 [json_name="cteList"]; + repeated Node rtable = 17 [json_name="rtable"]; + repeated Node rteperminfos = 18 [json_name="rteperminfos"]; + FromExpr jointree = 19 [json_name="jointree"]; + repeated Node merge_action_list = 20 [json_name="mergeActionList"]; + int32 merge_target_relation = 21 [json_name="mergeTargetRelation"]; + Node merge_join_condition = 22 [json_name="mergeJoinCondition"]; + repeated Node target_list = 23 [json_name="targetList"]; + OverridingKind override = 24 [json_name="override"]; + OnConflictExpr on_conflict = 25 [json_name="onConflict"]; + repeated Node returning_list = 26 [json_name="returningList"]; + repeated Node group_clause = 27 [json_name="groupClause"]; + bool group_distinct = 28 [json_name="groupDistinct"]; + repeated Node grouping_sets = 29 [json_name="groupingSets"]; + Node having_qual = 30 [json_name="havingQual"]; + repeated Node window_clause = 31 [json_name="windowClause"]; + repeated Node distinct_clause = 32 [json_name="distinctClause"]; + repeated Node sort_clause = 33 
[json_name="sortClause"]; + Node limit_offset = 34 [json_name="limitOffset"]; + Node limit_count = 35 [json_name="limitCount"]; + LimitOption limit_option = 36 [json_name="limitOption"]; + repeated Node row_marks = 37 [json_name="rowMarks"]; + Node set_operations = 38 [json_name="setOperations"]; + repeated Node constraint_deps = 39 [json_name="constraintDeps"]; + repeated Node with_check_options = 40 [json_name="withCheckOptions"]; + int32 stmt_location = 41 [json_name="stmt_location"]; + int32 stmt_len = 42 [json_name="stmt_len"]; +} + +message TypeName +{ + repeated Node names = 1 [json_name="names"]; + uint32 type_oid = 2 [json_name="typeOid"]; + bool setof = 3 [json_name="setof"]; + bool pct_type = 4 [json_name="pct_type"]; + repeated Node typmods = 5 [json_name="typmods"]; + int32 typemod = 6 [json_name="typemod"]; + repeated Node array_bounds = 7 [json_name="arrayBounds"]; + int32 location = 8 [json_name="location"]; +} + +message ColumnRef +{ + repeated Node fields = 1 [json_name="fields"]; + int32 location = 2 [json_name="location"]; +} + +message ParamRef +{ + int32 number = 1 [json_name="number"]; + int32 location = 2 [json_name="location"]; +} + +message A_Expr +{ + A_Expr_Kind kind = 1 [json_name="kind"]; + repeated Node name = 2 [json_name="name"]; + Node lexpr = 3 [json_name="lexpr"]; + Node rexpr = 4 [json_name="rexpr"]; + int32 location = 5 [json_name="location"]; +} + +message TypeCast +{ + Node arg = 1 [json_name="arg"]; + TypeName type_name = 2 [json_name="typeName"]; + int32 location = 3 [json_name="location"]; +} + +message CollateClause +{ + Node arg = 1 [json_name="arg"]; + repeated Node collname = 2 [json_name="collname"]; + int32 location = 3 [json_name="location"]; +} + +message RoleSpec +{ + RoleSpecType roletype = 1 [json_name="roletype"]; + string rolename = 2 [json_name="rolename"]; + int32 location = 3 [json_name="location"]; +} + +message FuncCall +{ + repeated Node funcname = 1 [json_name="funcname"]; + repeated Node args = 2 
[json_name="args"]; + repeated Node agg_order = 3 [json_name="agg_order"]; + Node agg_filter = 4 [json_name="agg_filter"]; + WindowDef over = 5 [json_name="over"]; + bool agg_within_group = 6 [json_name="agg_within_group"]; + bool agg_star = 7 [json_name="agg_star"]; + bool agg_distinct = 8 [json_name="agg_distinct"]; + bool func_variadic = 9 [json_name="func_variadic"]; + CoercionForm funcformat = 10 [json_name="funcformat"]; + int32 location = 11 [json_name="location"]; +} + +message A_Star +{ +} + +message A_Indices +{ + bool is_slice = 1 [json_name="is_slice"]; + Node lidx = 2 [json_name="lidx"]; + Node uidx = 3 [json_name="uidx"]; +} + +message A_Indirection +{ + Node arg = 1 [json_name="arg"]; + repeated Node indirection = 2 [json_name="indirection"]; +} + +message A_ArrayExpr +{ + repeated Node elements = 1 [json_name="elements"]; + int32 location = 2 [json_name="location"]; +} + +message ResTarget +{ + string name = 1 [json_name="name"]; + repeated Node indirection = 2 [json_name="indirection"]; + Node val = 3 [json_name="val"]; + int32 location = 4 [json_name="location"]; +} + +message MultiAssignRef +{ + Node source = 1 [json_name="source"]; + int32 colno = 2 [json_name="colno"]; + int32 ncolumns = 3 [json_name="ncolumns"]; +} + +message SortBy +{ + Node node = 1 [json_name="node"]; + SortByDir sortby_dir = 2 [json_name="sortby_dir"]; + SortByNulls sortby_nulls = 3 [json_name="sortby_nulls"]; + repeated Node use_op = 4 [json_name="useOp"]; + int32 location = 5 [json_name="location"]; +} + +message WindowDef +{ + string name = 1 [json_name="name"]; + string refname = 2 [json_name="refname"]; + repeated Node partition_clause = 3 [json_name="partitionClause"]; + repeated Node order_clause = 4 [json_name="orderClause"]; + int32 frame_options = 5 [json_name="frameOptions"]; + Node start_offset = 6 [json_name="startOffset"]; + Node end_offset = 7 [json_name="endOffset"]; + int32 location = 8 [json_name="location"]; +} + +message RangeSubselect +{ + bool lateral 
= 1 [json_name="lateral"]; + Node subquery = 2 [json_name="subquery"]; + Alias alias = 3 [json_name="alias"]; +} + +message RangeFunction +{ + bool lateral = 1 [json_name="lateral"]; + bool ordinality = 2 [json_name="ordinality"]; + bool is_rowsfrom = 3 [json_name="is_rowsfrom"]; + repeated Node functions = 4 [json_name="functions"]; + Alias alias = 5 [json_name="alias"]; + repeated Node coldeflist = 6 [json_name="coldeflist"]; +} + +message RangeTableFunc +{ + bool lateral = 1 [json_name="lateral"]; + Node docexpr = 2 [json_name="docexpr"]; + Node rowexpr = 3 [json_name="rowexpr"]; + repeated Node namespaces = 4 [json_name="namespaces"]; + repeated Node columns = 5 [json_name="columns"]; + Alias alias = 6 [json_name="alias"]; + int32 location = 7 [json_name="location"]; +} + +message RangeTableFuncCol +{ + string colname = 1 [json_name="colname"]; + TypeName type_name = 2 [json_name="typeName"]; + bool for_ordinality = 3 [json_name="for_ordinality"]; + bool is_not_null = 4 [json_name="is_not_null"]; + Node colexpr = 5 [json_name="colexpr"]; + Node coldefexpr = 6 [json_name="coldefexpr"]; + int32 location = 7 [json_name="location"]; +} + +message RangeTableSample +{ + Node relation = 1 [json_name="relation"]; + repeated Node method = 2 [json_name="method"]; + repeated Node args = 3 [json_name="args"]; + Node repeatable = 4 [json_name="repeatable"]; + int32 location = 5 [json_name="location"]; +} + +message ColumnDef +{ + string colname = 1 [json_name="colname"]; + TypeName type_name = 2 [json_name="typeName"]; + string compression = 3 [json_name="compression"]; + int32 inhcount = 4 [json_name="inhcount"]; + bool is_local = 5 [json_name="is_local"]; + bool is_not_null = 6 [json_name="is_not_null"]; + bool is_from_type = 7 [json_name="is_from_type"]; + string storage = 8 [json_name="storage"]; + string storage_name = 9 [json_name="storage_name"]; + Node raw_default = 10 [json_name="raw_default"]; + Node cooked_default = 11 [json_name="cooked_default"]; + string 
identity = 12 [json_name="identity"]; + RangeVar identity_sequence = 13 [json_name="identitySequence"]; + string generated = 14 [json_name="generated"]; + CollateClause coll_clause = 15 [json_name="collClause"]; + uint32 coll_oid = 16 [json_name="collOid"]; + repeated Node constraints = 17 [json_name="constraints"]; + repeated Node fdwoptions = 18 [json_name="fdwoptions"]; + int32 location = 19 [json_name="location"]; +} + +message TableLikeClause +{ + RangeVar relation = 1 [json_name="relation"]; + uint32 options = 2 [json_name="options"]; + uint32 relation_oid = 3 [json_name="relationOid"]; +} + +message IndexElem +{ + string name = 1 [json_name="name"]; + Node expr = 2 [json_name="expr"]; + string indexcolname = 3 [json_name="indexcolname"]; + repeated Node collation = 4 [json_name="collation"]; + repeated Node opclass = 5 [json_name="opclass"]; + repeated Node opclassopts = 6 [json_name="opclassopts"]; + SortByDir ordering = 7 [json_name="ordering"]; + SortByNulls nulls_ordering = 8 [json_name="nulls_ordering"]; +} + +message DefElem +{ + string defnamespace = 1 [json_name="defnamespace"]; + string defname = 2 [json_name="defname"]; + Node arg = 3 [json_name="arg"]; + DefElemAction defaction = 4 [json_name="defaction"]; + int32 location = 5 [json_name="location"]; +} + +message LockingClause +{ + repeated Node locked_rels = 1 [json_name="lockedRels"]; + LockClauseStrength strength = 2 [json_name="strength"]; + LockWaitPolicy wait_policy = 3 [json_name="waitPolicy"]; +} + +message XmlSerialize +{ + XmlOptionType xmloption = 1 [json_name="xmloption"]; + Node expr = 2 [json_name="expr"]; + TypeName type_name = 3 [json_name="typeName"]; + bool indent = 4 [json_name="indent"]; + int32 location = 5 [json_name="location"]; +} + +message PartitionElem +{ + string name = 1 [json_name="name"]; + Node expr = 2 [json_name="expr"]; + repeated Node collation = 3 [json_name="collation"]; + repeated Node opclass = 4 [json_name="opclass"]; + int32 location = 5 
[json_name="location"]; +} + +message PartitionSpec +{ + PartitionStrategy strategy = 1 [json_name="strategy"]; + repeated Node part_params = 2 [json_name="partParams"]; + int32 location = 3 [json_name="location"]; +} + +message PartitionBoundSpec +{ + string strategy = 1 [json_name="strategy"]; + bool is_default = 2 [json_name="is_default"]; + int32 modulus = 3 [json_name="modulus"]; + int32 remainder = 4 [json_name="remainder"]; + repeated Node listdatums = 5 [json_name="listdatums"]; + repeated Node lowerdatums = 6 [json_name="lowerdatums"]; + repeated Node upperdatums = 7 [json_name="upperdatums"]; + int32 location = 8 [json_name="location"]; +} + +message PartitionRangeDatum +{ + PartitionRangeDatumKind kind = 1 [json_name="kind"]; + Node value = 2 [json_name="value"]; + int32 location = 3 [json_name="location"]; +} + +message SinglePartitionSpec +{ +} + +message PartitionCmd +{ + RangeVar name = 1 [json_name="name"]; + PartitionBoundSpec bound = 2 [json_name="bound"]; + bool concurrent = 3 [json_name="concurrent"]; +} + +message RangeTblEntry +{ + Alias alias = 1 [json_name="alias"]; + Alias eref = 2 [json_name="eref"]; + RTEKind rtekind = 3 [json_name="rtekind"]; + uint32 relid = 4 [json_name="relid"]; + bool inh = 5 [json_name="inh"]; + string relkind = 6 [json_name="relkind"]; + int32 rellockmode = 7 [json_name="rellockmode"]; + uint32 perminfoindex = 8 [json_name="perminfoindex"]; + TableSampleClause tablesample = 9 [json_name="tablesample"]; + Query subquery = 10 [json_name="subquery"]; + bool security_barrier = 11 [json_name="security_barrier"]; + JoinType jointype = 12 [json_name="jointype"]; + int32 joinmergedcols = 13 [json_name="joinmergedcols"]; + repeated Node joinaliasvars = 14 [json_name="joinaliasvars"]; + repeated Node joinleftcols = 15 [json_name="joinleftcols"]; + repeated Node joinrightcols = 16 [json_name="joinrightcols"]; + Alias join_using_alias = 17 [json_name="join_using_alias"]; + repeated Node functions = 18 [json_name="functions"]; 
+ bool funcordinality = 19 [json_name="funcordinality"]; + TableFunc tablefunc = 20 [json_name="tablefunc"]; + repeated Node values_lists = 21 [json_name="values_lists"]; + string ctename = 22 [json_name="ctename"]; + uint32 ctelevelsup = 23 [json_name="ctelevelsup"]; + bool self_reference = 24 [json_name="self_reference"]; + repeated Node coltypes = 25 [json_name="coltypes"]; + repeated Node coltypmods = 26 [json_name="coltypmods"]; + repeated Node colcollations = 27 [json_name="colcollations"]; + string enrname = 28 [json_name="enrname"]; + double enrtuples = 29 [json_name="enrtuples"]; + bool lateral = 30 [json_name="lateral"]; + bool in_from_cl = 31 [json_name="inFromCl"]; + repeated Node security_quals = 32 [json_name="securityQuals"]; +} + +message RTEPermissionInfo +{ + uint32 relid = 1 [json_name="relid"]; + bool inh = 2 [json_name="inh"]; + uint64 required_perms = 3 [json_name="requiredPerms"]; + uint32 check_as_user = 4 [json_name="checkAsUser"]; + repeated uint64 selected_cols = 5 [json_name="selectedCols"]; + repeated uint64 inserted_cols = 6 [json_name="insertedCols"]; + repeated uint64 updated_cols = 7 [json_name="updatedCols"]; +} + +message RangeTblFunction +{ + Node funcexpr = 1 [json_name="funcexpr"]; + int32 funccolcount = 2 [json_name="funccolcount"]; + repeated Node funccolnames = 3 [json_name="funccolnames"]; + repeated Node funccoltypes = 4 [json_name="funccoltypes"]; + repeated Node funccoltypmods = 5 [json_name="funccoltypmods"]; + repeated Node funccolcollations = 6 [json_name="funccolcollations"]; + repeated uint64 funcparams = 7 [json_name="funcparams"]; +} + +message TableSampleClause +{ + uint32 tsmhandler = 1 [json_name="tsmhandler"]; + repeated Node args = 2 [json_name="args"]; + Node repeatable = 3 [json_name="repeatable"]; +} + +message WithCheckOption +{ + WCOKind kind = 1 [json_name="kind"]; + string relname = 2 [json_name="relname"]; + string polname = 3 [json_name="polname"]; + Node qual = 4 [json_name="qual"]; + bool cascaded 
= 5 [json_name="cascaded"]; +} + +message SortGroupClause +{ + uint32 tle_sort_group_ref = 1 [json_name="tleSortGroupRef"]; + uint32 eqop = 2 [json_name="eqop"]; + uint32 sortop = 3 [json_name="sortop"]; + bool nulls_first = 4 [json_name="nulls_first"]; + bool hashable = 5 [json_name="hashable"]; +} + +message GroupingSet +{ + GroupingSetKind kind = 1 [json_name="kind"]; + repeated Node content = 2 [json_name="content"]; + int32 location = 3 [json_name="location"]; +} + +message WindowClause +{ + string name = 1 [json_name="name"]; + string refname = 2 [json_name="refname"]; + repeated Node partition_clause = 3 [json_name="partitionClause"]; + repeated Node order_clause = 4 [json_name="orderClause"]; + int32 frame_options = 5 [json_name="frameOptions"]; + Node start_offset = 6 [json_name="startOffset"]; + Node end_offset = 7 [json_name="endOffset"]; + uint32 start_in_range_func = 8 [json_name="startInRangeFunc"]; + uint32 end_in_range_func = 9 [json_name="endInRangeFunc"]; + uint32 in_range_coll = 10 [json_name="inRangeColl"]; + bool in_range_asc = 11 [json_name="inRangeAsc"]; + bool in_range_nulls_first = 12 [json_name="inRangeNullsFirst"]; + uint32 winref = 13 [json_name="winref"]; + bool copied_order = 14 [json_name="copiedOrder"]; +} + +message RowMarkClause +{ + uint32 rti = 1 [json_name="rti"]; + LockClauseStrength strength = 2 [json_name="strength"]; + LockWaitPolicy wait_policy = 3 [json_name="waitPolicy"]; + bool pushed_down = 4 [json_name="pushedDown"]; +} + +message WithClause +{ + repeated Node ctes = 1 [json_name="ctes"]; + bool recursive = 2 [json_name="recursive"]; + int32 location = 3 [json_name="location"]; +} + +message InferClause +{ + repeated Node index_elems = 1 [json_name="indexElems"]; + Node where_clause = 2 [json_name="whereClause"]; + string conname = 3 [json_name="conname"]; + int32 location = 4 [json_name="location"]; +} + +message OnConflictClause +{ + OnConflictAction action = 1 [json_name="action"]; + InferClause infer = 2 
[json_name="infer"]; + repeated Node target_list = 3 [json_name="targetList"]; + Node where_clause = 4 [json_name="whereClause"]; + int32 location = 5 [json_name="location"]; +} + +message CTESearchClause +{ + repeated Node search_col_list = 1 [json_name="search_col_list"]; + bool search_breadth_first = 2 [json_name="search_breadth_first"]; + string search_seq_column = 3 [json_name="search_seq_column"]; + int32 location = 4 [json_name="location"]; +} + +message CTECycleClause +{ + repeated Node cycle_col_list = 1 [json_name="cycle_col_list"]; + string cycle_mark_column = 2 [json_name="cycle_mark_column"]; + Node cycle_mark_value = 3 [json_name="cycle_mark_value"]; + Node cycle_mark_default = 4 [json_name="cycle_mark_default"]; + string cycle_path_column = 5 [json_name="cycle_path_column"]; + int32 location = 6 [json_name="location"]; + uint32 cycle_mark_type = 7 [json_name="cycle_mark_type"]; + int32 cycle_mark_typmod = 8 [json_name="cycle_mark_typmod"]; + uint32 cycle_mark_collation = 9 [json_name="cycle_mark_collation"]; + uint32 cycle_mark_neop = 10 [json_name="cycle_mark_neop"]; +} + +message CommonTableExpr +{ + string ctename = 1 [json_name="ctename"]; + repeated Node aliascolnames = 2 [json_name="aliascolnames"]; + CTEMaterialize ctematerialized = 3 [json_name="ctematerialized"]; + Node ctequery = 4 [json_name="ctequery"]; + CTESearchClause search_clause = 5 [json_name="search_clause"]; + CTECycleClause cycle_clause = 6 [json_name="cycle_clause"]; + int32 location = 7 [json_name="location"]; + bool cterecursive = 8 [json_name="cterecursive"]; + int32 cterefcount = 9 [json_name="cterefcount"]; + repeated Node ctecolnames = 10 [json_name="ctecolnames"]; + repeated Node ctecoltypes = 11 [json_name="ctecoltypes"]; + repeated Node ctecoltypmods = 12 [json_name="ctecoltypmods"]; + repeated Node ctecolcollations = 13 [json_name="ctecolcollations"]; +} + +message MergeWhenClause +{ + MergeMatchKind match_kind = 1 [json_name="matchKind"]; + CmdType command_type = 2 
[json_name="commandType"]; + OverridingKind override = 3 [json_name="override"]; + Node condition = 4 [json_name="condition"]; + repeated Node target_list = 5 [json_name="targetList"]; + repeated Node values = 6 [json_name="values"]; +} + +message TriggerTransition +{ + string name = 1 [json_name="name"]; + bool is_new = 2 [json_name="isNew"]; + bool is_table = 3 [json_name="isTable"]; +} + +message JsonOutput +{ + TypeName type_name = 1 [json_name="typeName"]; + JsonReturning returning = 2 [json_name="returning"]; +} + +message JsonArgument +{ + JsonValueExpr val = 1 [json_name="val"]; + string name = 2 [json_name="name"]; +} + +message JsonFuncExpr +{ + JsonExprOp op = 1 [json_name="op"]; + string column_name = 2 [json_name="column_name"]; + JsonValueExpr context_item = 3 [json_name="context_item"]; + Node pathspec = 4 [json_name="pathspec"]; + repeated Node passing = 5 [json_name="passing"]; + JsonOutput output = 6 [json_name="output"]; + JsonBehavior on_empty = 7 [json_name="on_empty"]; + JsonBehavior on_error = 8 [json_name="on_error"]; + JsonWrapper wrapper = 9 [json_name="wrapper"]; + JsonQuotes quotes = 10 [json_name="quotes"]; + int32 location = 11 [json_name="location"]; +} + +message JsonTablePathSpec +{ + Node string = 1 [json_name="string"]; + string name = 2 [json_name="name"]; + int32 name_location = 3 [json_name="name_location"]; + int32 location = 4 [json_name="location"]; +} + +message JsonTable +{ + JsonValueExpr context_item = 1 [json_name="context_item"]; + JsonTablePathSpec pathspec = 2 [json_name="pathspec"]; + repeated Node passing = 3 [json_name="passing"]; + repeated Node columns = 4 [json_name="columns"]; + JsonBehavior on_error = 5 [json_name="on_error"]; + Alias alias = 6 [json_name="alias"]; + bool lateral = 7 [json_name="lateral"]; + int32 location = 8 [json_name="location"]; +} + +message JsonTableColumn +{ + JsonTableColumnType coltype = 1 [json_name="coltype"]; + string name = 2 [json_name="name"]; + TypeName type_name = 3 
[json_name="typeName"]; + JsonTablePathSpec pathspec = 4 [json_name="pathspec"]; + JsonFormat format = 5 [json_name="format"]; + JsonWrapper wrapper = 6 [json_name="wrapper"]; + JsonQuotes quotes = 7 [json_name="quotes"]; + repeated Node columns = 8 [json_name="columns"]; + JsonBehavior on_empty = 9 [json_name="on_empty"]; + JsonBehavior on_error = 10 [json_name="on_error"]; + int32 location = 11 [json_name="location"]; +} + +message JsonKeyValue +{ + Node key = 1 [json_name="key"]; + JsonValueExpr value = 2 [json_name="value"]; +} + +message JsonParseExpr +{ + JsonValueExpr expr = 1 [json_name="expr"]; + JsonOutput output = 2 [json_name="output"]; + bool unique_keys = 3 [json_name="unique_keys"]; + int32 location = 4 [json_name="location"]; +} + +message JsonScalarExpr +{ + Node expr = 1 [json_name="expr"]; + JsonOutput output = 2 [json_name="output"]; + int32 location = 3 [json_name="location"]; +} + +message JsonSerializeExpr +{ + JsonValueExpr expr = 1 [json_name="expr"]; + JsonOutput output = 2 [json_name="output"]; + int32 location = 3 [json_name="location"]; +} + +message JsonObjectConstructor +{ + repeated Node exprs = 1 [json_name="exprs"]; + JsonOutput output = 2 [json_name="output"]; + bool absent_on_null = 3 [json_name="absent_on_null"]; + bool unique = 4 [json_name="unique"]; + int32 location = 5 [json_name="location"]; +} + +message JsonArrayConstructor +{ + repeated Node exprs = 1 [json_name="exprs"]; + JsonOutput output = 2 [json_name="output"]; + bool absent_on_null = 3 [json_name="absent_on_null"]; + int32 location = 4 [json_name="location"]; +} + +message JsonArrayQueryConstructor +{ + Node query = 1 [json_name="query"]; + JsonOutput output = 2 [json_name="output"]; + JsonFormat format = 3 [json_name="format"]; + bool absent_on_null = 4 [json_name="absent_on_null"]; + int32 location = 5 [json_name="location"]; +} + +message JsonAggConstructor +{ + JsonOutput output = 1 [json_name="output"]; + Node agg_filter = 2 [json_name="agg_filter"]; + 
repeated Node agg_order = 3 [json_name="agg_order"]; + WindowDef over = 4 [json_name="over"]; + int32 location = 5 [json_name="location"]; +} + +message JsonObjectAgg +{ + JsonAggConstructor constructor = 1 [json_name="constructor"]; + JsonKeyValue arg = 2 [json_name="arg"]; + bool absent_on_null = 3 [json_name="absent_on_null"]; + bool unique = 4 [json_name="unique"]; +} + +message JsonArrayAgg +{ + JsonAggConstructor constructor = 1 [json_name="constructor"]; + JsonValueExpr arg = 2 [json_name="arg"]; + bool absent_on_null = 3 [json_name="absent_on_null"]; +} + +message RawStmt +{ + Node stmt = 1 [json_name="stmt"]; + int32 stmt_location = 2 [json_name="stmt_location"]; + int32 stmt_len = 3 [json_name="stmt_len"]; +} + +message InsertStmt +{ + RangeVar relation = 1 [json_name="relation"]; + repeated Node cols = 2 [json_name="cols"]; + Node select_stmt = 3 [json_name="selectStmt"]; + OnConflictClause on_conflict_clause = 4 [json_name="onConflictClause"]; + repeated Node returning_list = 5 [json_name="returningList"]; + WithClause with_clause = 6 [json_name="withClause"]; + OverridingKind override = 7 [json_name="override"]; +} + +message DeleteStmt +{ + RangeVar relation = 1 [json_name="relation"]; + repeated Node using_clause = 2 [json_name="usingClause"]; + Node where_clause = 3 [json_name="whereClause"]; + repeated Node returning_list = 4 [json_name="returningList"]; + WithClause with_clause = 5 [json_name="withClause"]; +} + +message UpdateStmt +{ + RangeVar relation = 1 [json_name="relation"]; + repeated Node target_list = 2 [json_name="targetList"]; + Node where_clause = 3 [json_name="whereClause"]; + repeated Node from_clause = 4 [json_name="fromClause"]; + repeated Node returning_list = 5 [json_name="returningList"]; + WithClause with_clause = 6 [json_name="withClause"]; +} + +message MergeStmt +{ + RangeVar relation = 1 [json_name="relation"]; + Node source_relation = 2 [json_name="sourceRelation"]; + Node join_condition = 3 [json_name="joinCondition"]; + 
repeated Node merge_when_clauses = 4 [json_name="mergeWhenClauses"]; + repeated Node returning_list = 5 [json_name="returningList"]; + WithClause with_clause = 6 [json_name="withClause"]; +} + +message SelectStmt +{ + repeated Node distinct_clause = 1 [json_name="distinctClause"]; + IntoClause into_clause = 2 [json_name="intoClause"]; + repeated Node target_list = 3 [json_name="targetList"]; + repeated Node from_clause = 4 [json_name="fromClause"]; + Node where_clause = 5 [json_name="whereClause"]; + repeated Node group_clause = 6 [json_name="groupClause"]; + bool group_distinct = 7 [json_name="groupDistinct"]; + Node having_clause = 8 [json_name="havingClause"]; + repeated Node window_clause = 9 [json_name="windowClause"]; + repeated Node values_lists = 10 [json_name="valuesLists"]; + repeated Node sort_clause = 11 [json_name="sortClause"]; + Node limit_offset = 12 [json_name="limitOffset"]; + Node limit_count = 13 [json_name="limitCount"]; + LimitOption limit_option = 14 [json_name="limitOption"]; + repeated Node locking_clause = 15 [json_name="lockingClause"]; + WithClause with_clause = 16 [json_name="withClause"]; + SetOperation op = 17 [json_name="op"]; + bool all = 18 [json_name="all"]; + SelectStmt larg = 19 [json_name="larg"]; + SelectStmt rarg = 20 [json_name="rarg"]; +} + +message SetOperationStmt +{ + SetOperation op = 1 [json_name="op"]; + bool all = 2 [json_name="all"]; + Node larg = 3 [json_name="larg"]; + Node rarg = 4 [json_name="rarg"]; + repeated Node col_types = 5 [json_name="colTypes"]; + repeated Node col_typmods = 6 [json_name="colTypmods"]; + repeated Node col_collations = 7 [json_name="colCollations"]; + repeated Node group_clauses = 8 [json_name="groupClauses"]; +} + +message ReturnStmt +{ + Node returnval = 1 [json_name="returnval"]; +} + +message PLAssignStmt +{ + string name = 1 [json_name="name"]; + repeated Node indirection = 2 [json_name="indirection"]; + int32 nnames = 3 [json_name="nnames"]; + SelectStmt val = 4 [json_name="val"]; + 
int32 location = 5 [json_name="location"]; +} + +message CreateSchemaStmt +{ + string schemaname = 1 [json_name="schemaname"]; + RoleSpec authrole = 2 [json_name="authrole"]; + repeated Node schema_elts = 3 [json_name="schemaElts"]; + bool if_not_exists = 4 [json_name="if_not_exists"]; +} + +message AlterTableStmt +{ + RangeVar relation = 1 [json_name="relation"]; + repeated Node cmds = 2 [json_name="cmds"]; + ObjectType objtype = 3 [json_name="objtype"]; + bool missing_ok = 4 [json_name="missing_ok"]; +} + +message ReplicaIdentityStmt +{ + string identity_type = 1 [json_name="identity_type"]; + string name = 2 [json_name="name"]; +} + +message AlterTableCmd +{ + AlterTableType subtype = 1 [json_name="subtype"]; + string name = 2 [json_name="name"]; + int32 num = 3 [json_name="num"]; + RoleSpec newowner = 4 [json_name="newowner"]; + Node def = 5 [json_name="def"]; + DropBehavior behavior = 6 [json_name="behavior"]; + bool missing_ok = 7 [json_name="missing_ok"]; + bool recurse = 8 [json_name="recurse"]; +} + +message AlterCollationStmt +{ + repeated Node collname = 1 [json_name="collname"]; +} + +message AlterDomainStmt +{ + string subtype = 1 [json_name="subtype"]; + repeated Node type_name = 2 [json_name="typeName"]; + string name = 3 [json_name="name"]; + Node def = 4 [json_name="def"]; + DropBehavior behavior = 5 [json_name="behavior"]; + bool missing_ok = 6 [json_name="missing_ok"]; +} + +message GrantStmt +{ + bool is_grant = 1 [json_name="is_grant"]; + GrantTargetType targtype = 2 [json_name="targtype"]; + ObjectType objtype = 3 [json_name="objtype"]; + repeated Node objects = 4 [json_name="objects"]; + repeated Node privileges = 5 [json_name="privileges"]; + repeated Node grantees = 6 [json_name="grantees"]; + bool grant_option = 7 [json_name="grant_option"]; + RoleSpec grantor = 8 [json_name="grantor"]; + DropBehavior behavior = 9 [json_name="behavior"]; +} + +message ObjectWithArgs +{ + repeated Node objname = 1 [json_name="objname"]; + repeated Node 
objargs = 2 [json_name="objargs"]; + repeated Node objfuncargs = 3 [json_name="objfuncargs"]; + bool args_unspecified = 4 [json_name="args_unspecified"]; +} + +message AccessPriv +{ + string priv_name = 1 [json_name="priv_name"]; + repeated Node cols = 2 [json_name="cols"]; +} + +message GrantRoleStmt +{ + repeated Node granted_roles = 1 [json_name="granted_roles"]; + repeated Node grantee_roles = 2 [json_name="grantee_roles"]; + bool is_grant = 3 [json_name="is_grant"]; + repeated Node opt = 4 [json_name="opt"]; + RoleSpec grantor = 5 [json_name="grantor"]; + DropBehavior behavior = 6 [json_name="behavior"]; +} + +message AlterDefaultPrivilegesStmt +{ + repeated Node options = 1 [json_name="options"]; + GrantStmt action = 2 [json_name="action"]; +} + +message CopyStmt +{ + RangeVar relation = 1 [json_name="relation"]; + Node query = 2 [json_name="query"]; + repeated Node attlist = 3 [json_name="attlist"]; + bool is_from = 4 [json_name="is_from"]; + bool is_program = 5 [json_name="is_program"]; + string filename = 6 [json_name="filename"]; + repeated Node options = 7 [json_name="options"]; + Node where_clause = 8 [json_name="whereClause"]; +} + +message VariableSetStmt +{ + VariableSetKind kind = 1 [json_name="kind"]; + string name = 2 [json_name="name"]; + repeated Node args = 3 [json_name="args"]; + bool is_local = 4 [json_name="is_local"]; +} + +message VariableShowStmt +{ + string name = 1 [json_name="name"]; +} + +message CreateStmt +{ + RangeVar relation = 1 [json_name="relation"]; + repeated Node table_elts = 2 [json_name="tableElts"]; + repeated Node inh_relations = 3 [json_name="inhRelations"]; + PartitionBoundSpec partbound = 4 [json_name="partbound"]; + PartitionSpec partspec = 5 [json_name="partspec"]; + TypeName of_typename = 6 [json_name="ofTypename"]; + repeated Node constraints = 7 [json_name="constraints"]; + repeated Node options = 8 [json_name="options"]; + OnCommitAction oncommit = 9 [json_name="oncommit"]; + string tablespacename = 10 
[json_name="tablespacename"]; + string access_method = 11 [json_name="accessMethod"]; + bool if_not_exists = 12 [json_name="if_not_exists"]; +} + +message Constraint +{ + ConstrType contype = 1 [json_name="contype"]; + string conname = 2 [json_name="conname"]; + bool deferrable = 3 [json_name="deferrable"]; + bool initdeferred = 4 [json_name="initdeferred"]; + bool skip_validation = 5 [json_name="skip_validation"]; + bool initially_valid = 6 [json_name="initially_valid"]; + bool is_no_inherit = 7 [json_name="is_no_inherit"]; + Node raw_expr = 8 [json_name="raw_expr"]; + string cooked_expr = 9 [json_name="cooked_expr"]; + string generated_when = 10 [json_name="generated_when"]; + int32 inhcount = 11 [json_name="inhcount"]; + bool nulls_not_distinct = 12 [json_name="nulls_not_distinct"]; + repeated Node keys = 13 [json_name="keys"]; + repeated Node including = 14 [json_name="including"]; + repeated Node exclusions = 15 [json_name="exclusions"]; + repeated Node options = 16 [json_name="options"]; + string indexname = 17 [json_name="indexname"]; + string indexspace = 18 [json_name="indexspace"]; + bool reset_default_tblspc = 19 [json_name="reset_default_tblspc"]; + string access_method = 20 [json_name="access_method"]; + Node where_clause = 21 [json_name="where_clause"]; + RangeVar pktable = 22 [json_name="pktable"]; + repeated Node fk_attrs = 23 [json_name="fk_attrs"]; + repeated Node pk_attrs = 24 [json_name="pk_attrs"]; + string fk_matchtype = 25 [json_name="fk_matchtype"]; + string fk_upd_action = 26 [json_name="fk_upd_action"]; + string fk_del_action = 27 [json_name="fk_del_action"]; + repeated Node fk_del_set_cols = 28 [json_name="fk_del_set_cols"]; + repeated Node old_conpfeqop = 29 [json_name="old_conpfeqop"]; + uint32 old_pktable_oid = 30 [json_name="old_pktable_oid"]; + int32 location = 31 [json_name="location"]; +} + +message CreateTableSpaceStmt +{ + string tablespacename = 1 [json_name="tablespacename"]; + RoleSpec owner = 2 [json_name="owner"]; + string 
location = 3 [json_name="location"]; + repeated Node options = 4 [json_name="options"]; +} + +message DropTableSpaceStmt +{ + string tablespacename = 1 [json_name="tablespacename"]; + bool missing_ok = 2 [json_name="missing_ok"]; +} + +message AlterTableSpaceOptionsStmt +{ + string tablespacename = 1 [json_name="tablespacename"]; + repeated Node options = 2 [json_name="options"]; + bool is_reset = 3 [json_name="isReset"]; +} + +message AlterTableMoveAllStmt +{ + string orig_tablespacename = 1 [json_name="orig_tablespacename"]; + ObjectType objtype = 2 [json_name="objtype"]; + repeated Node roles = 3 [json_name="roles"]; + string new_tablespacename = 4 [json_name="new_tablespacename"]; + bool nowait = 5 [json_name="nowait"]; +} + +message CreateExtensionStmt +{ + string extname = 1 [json_name="extname"]; + bool if_not_exists = 2 [json_name="if_not_exists"]; + repeated Node options = 3 [json_name="options"]; +} + +message AlterExtensionStmt +{ + string extname = 1 [json_name="extname"]; + repeated Node options = 2 [json_name="options"]; +} + +message AlterExtensionContentsStmt +{ + string extname = 1 [json_name="extname"]; + int32 action = 2 [json_name="action"]; + ObjectType objtype = 3 [json_name="objtype"]; + Node object = 4 [json_name="object"]; +} + +message CreateFdwStmt +{ + string fdwname = 1 [json_name="fdwname"]; + repeated Node func_options = 2 [json_name="func_options"]; + repeated Node options = 3 [json_name="options"]; +} + +message AlterFdwStmt +{ + string fdwname = 1 [json_name="fdwname"]; + repeated Node func_options = 2 [json_name="func_options"]; + repeated Node options = 3 [json_name="options"]; +} + +message CreateForeignServerStmt +{ + string servername = 1 [json_name="servername"]; + string servertype = 2 [json_name="servertype"]; + string version = 3 [json_name="version"]; + string fdwname = 4 [json_name="fdwname"]; + bool if_not_exists = 5 [json_name="if_not_exists"]; + repeated Node options = 6 [json_name="options"]; +} + +message 
AlterForeignServerStmt +{ + string servername = 1 [json_name="servername"]; + string version = 2 [json_name="version"]; + repeated Node options = 3 [json_name="options"]; + bool has_version = 4 [json_name="has_version"]; +} + +message CreateForeignTableStmt +{ + CreateStmt base_stmt = 1 [json_name="base"]; + string servername = 2 [json_name="servername"]; + repeated Node options = 3 [json_name="options"]; +} + +message CreateUserMappingStmt +{ + RoleSpec user = 1 [json_name="user"]; + string servername = 2 [json_name="servername"]; + bool if_not_exists = 3 [json_name="if_not_exists"]; + repeated Node options = 4 [json_name="options"]; +} + +message AlterUserMappingStmt +{ + RoleSpec user = 1 [json_name="user"]; + string servername = 2 [json_name="servername"]; + repeated Node options = 3 [json_name="options"]; +} + +message DropUserMappingStmt +{ + RoleSpec user = 1 [json_name="user"]; + string servername = 2 [json_name="servername"]; + bool missing_ok = 3 [json_name="missing_ok"]; +} + +message ImportForeignSchemaStmt +{ + string server_name = 1 [json_name="server_name"]; + string remote_schema = 2 [json_name="remote_schema"]; + string local_schema = 3 [json_name="local_schema"]; + ImportForeignSchemaType list_type = 4 [json_name="list_type"]; + repeated Node table_list = 5 [json_name="table_list"]; + repeated Node options = 6 [json_name="options"]; +} + +message CreatePolicyStmt +{ + string policy_name = 1 [json_name="policy_name"]; + RangeVar table = 2 [json_name="table"]; + string cmd_name = 3 [json_name="cmd_name"]; + bool permissive = 4 [json_name="permissive"]; + repeated Node roles = 5 [json_name="roles"]; + Node qual = 6 [json_name="qual"]; + Node with_check = 7 [json_name="with_check"]; +} + +message AlterPolicyStmt +{ + string policy_name = 1 [json_name="policy_name"]; + RangeVar table = 2 [json_name="table"]; + repeated Node roles = 3 [json_name="roles"]; + Node qual = 4 [json_name="qual"]; + Node with_check = 5 [json_name="with_check"]; +} + +message 
CreateAmStmt +{ + string amname = 1 [json_name="amname"]; + repeated Node handler_name = 2 [json_name="handler_name"]; + string amtype = 3 [json_name="amtype"]; +} + +message CreateTrigStmt +{ + bool replace = 1 [json_name="replace"]; + bool isconstraint = 2 [json_name="isconstraint"]; + string trigname = 3 [json_name="trigname"]; + RangeVar relation = 4 [json_name="relation"]; + repeated Node funcname = 5 [json_name="funcname"]; + repeated Node args = 6 [json_name="args"]; + bool row = 7 [json_name="row"]; + int32 timing = 8 [json_name="timing"]; + int32 events = 9 [json_name="events"]; + repeated Node columns = 10 [json_name="columns"]; + Node when_clause = 11 [json_name="whenClause"]; + repeated Node transition_rels = 12 [json_name="transitionRels"]; + bool deferrable = 13 [json_name="deferrable"]; + bool initdeferred = 14 [json_name="initdeferred"]; + RangeVar constrrel = 15 [json_name="constrrel"]; +} + +message CreateEventTrigStmt +{ + string trigname = 1 [json_name="trigname"]; + string eventname = 2 [json_name="eventname"]; + repeated Node whenclause = 3 [json_name="whenclause"]; + repeated Node funcname = 4 [json_name="funcname"]; +} + +message AlterEventTrigStmt +{ + string trigname = 1 [json_name="trigname"]; + string tgenabled = 2 [json_name="tgenabled"]; +} + +message CreatePLangStmt +{ + bool replace = 1 [json_name="replace"]; + string plname = 2 [json_name="plname"]; + repeated Node plhandler = 3 [json_name="plhandler"]; + repeated Node plinline = 4 [json_name="plinline"]; + repeated Node plvalidator = 5 [json_name="plvalidator"]; + bool pltrusted = 6 [json_name="pltrusted"]; +} + +message CreateRoleStmt +{ + RoleStmtType stmt_type = 1 [json_name="stmt_type"]; + string role = 2 [json_name="role"]; + repeated Node options = 3 [json_name="options"]; +} + +message AlterRoleStmt +{ + RoleSpec role = 1 [json_name="role"]; + repeated Node options = 2 [json_name="options"]; + int32 action = 3 [json_name="action"]; +} + +message AlterRoleSetStmt +{ + 
RoleSpec role = 1 [json_name="role"]; + string database = 2 [json_name="database"]; + VariableSetStmt setstmt = 3 [json_name="setstmt"]; +} + +message DropRoleStmt +{ + repeated Node roles = 1 [json_name="roles"]; + bool missing_ok = 2 [json_name="missing_ok"]; +} + +message CreateSeqStmt +{ + RangeVar sequence = 1 [json_name="sequence"]; + repeated Node options = 2 [json_name="options"]; + uint32 owner_id = 3 [json_name="ownerId"]; + bool for_identity = 4 [json_name="for_identity"]; + bool if_not_exists = 5 [json_name="if_not_exists"]; +} + +message AlterSeqStmt +{ + RangeVar sequence = 1 [json_name="sequence"]; + repeated Node options = 2 [json_name="options"]; + bool for_identity = 3 [json_name="for_identity"]; + bool missing_ok = 4 [json_name="missing_ok"]; +} + +message DefineStmt +{ + ObjectType kind = 1 [json_name="kind"]; + bool oldstyle = 2 [json_name="oldstyle"]; + repeated Node defnames = 3 [json_name="defnames"]; + repeated Node args = 4 [json_name="args"]; + repeated Node definition = 5 [json_name="definition"]; + bool if_not_exists = 6 [json_name="if_not_exists"]; + bool replace = 7 [json_name="replace"]; +} + +message CreateDomainStmt +{ + repeated Node domainname = 1 [json_name="domainname"]; + TypeName type_name = 2 [json_name="typeName"]; + CollateClause coll_clause = 3 [json_name="collClause"]; + repeated Node constraints = 4 [json_name="constraints"]; +} + +message CreateOpClassStmt +{ + repeated Node opclassname = 1 [json_name="opclassname"]; + repeated Node opfamilyname = 2 [json_name="opfamilyname"]; + string amname = 3 [json_name="amname"]; + TypeName datatype = 4 [json_name="datatype"]; + repeated Node items = 5 [json_name="items"]; + bool is_default = 6 [json_name="isDefault"]; +} + +message CreateOpClassItem +{ + int32 itemtype = 1 [json_name="itemtype"]; + ObjectWithArgs name = 2 [json_name="name"]; + int32 number = 3 [json_name="number"]; + repeated Node order_family = 4 [json_name="order_family"]; + repeated Node class_args = 5 
[json_name="class_args"]; + TypeName storedtype = 6 [json_name="storedtype"]; +} + +message CreateOpFamilyStmt +{ + repeated Node opfamilyname = 1 [json_name="opfamilyname"]; + string amname = 2 [json_name="amname"]; +} + +message AlterOpFamilyStmt +{ + repeated Node opfamilyname = 1 [json_name="opfamilyname"]; + string amname = 2 [json_name="amname"]; + bool is_drop = 3 [json_name="isDrop"]; + repeated Node items = 4 [json_name="items"]; +} + +message DropStmt +{ + repeated Node objects = 1 [json_name="objects"]; + ObjectType remove_type = 2 [json_name="removeType"]; + DropBehavior behavior = 3 [json_name="behavior"]; + bool missing_ok = 4 [json_name="missing_ok"]; + bool concurrent = 5 [json_name="concurrent"]; +} + +message TruncateStmt +{ + repeated Node relations = 1 [json_name="relations"]; + bool restart_seqs = 2 [json_name="restart_seqs"]; + DropBehavior behavior = 3 [json_name="behavior"]; +} + +message CommentStmt +{ + ObjectType objtype = 1 [json_name="objtype"]; + Node object = 2 [json_name="object"]; + string comment = 3 [json_name="comment"]; +} + +message SecLabelStmt +{ + ObjectType objtype = 1 [json_name="objtype"]; + Node object = 2 [json_name="object"]; + string provider = 3 [json_name="provider"]; + string label = 4 [json_name="label"]; +} + +message DeclareCursorStmt +{ + string portalname = 1 [json_name="portalname"]; + int32 options = 2 [json_name="options"]; + Node query = 3 [json_name="query"]; +} + +message ClosePortalStmt +{ + string portalname = 1 [json_name="portalname"]; +} + +message FetchStmt +{ + FetchDirection direction = 1 [json_name="direction"]; + int64 how_many = 2 [json_name="howMany"]; + string portalname = 3 [json_name="portalname"]; + bool ismove = 4 [json_name="ismove"]; +} + +message IndexStmt +{ + string idxname = 1 [json_name="idxname"]; + RangeVar relation = 2 [json_name="relation"]; + string access_method = 3 [json_name="accessMethod"]; + string table_space = 4 [json_name="tableSpace"]; + repeated Node index_params = 
5 [json_name="indexParams"]; + repeated Node index_including_params = 6 [json_name="indexIncludingParams"]; + repeated Node options = 7 [json_name="options"]; + Node where_clause = 8 [json_name="whereClause"]; + repeated Node exclude_op_names = 9 [json_name="excludeOpNames"]; + string idxcomment = 10 [json_name="idxcomment"]; + uint32 index_oid = 11 [json_name="indexOid"]; + uint32 old_number = 12 [json_name="oldNumber"]; + uint32 old_create_subid = 13 [json_name="oldCreateSubid"]; + uint32 old_first_relfilelocator_subid = 14 [json_name="oldFirstRelfilelocatorSubid"]; + bool unique = 15 [json_name="unique"]; + bool nulls_not_distinct = 16 [json_name="nulls_not_distinct"]; + bool primary = 17 [json_name="primary"]; + bool isconstraint = 18 [json_name="isconstraint"]; + bool deferrable = 19 [json_name="deferrable"]; + bool initdeferred = 20 [json_name="initdeferred"]; + bool transformed = 21 [json_name="transformed"]; + bool concurrent = 22 [json_name="concurrent"]; + bool if_not_exists = 23 [json_name="if_not_exists"]; + bool reset_default_tblspc = 24 [json_name="reset_default_tblspc"]; +} + +message CreateStatsStmt +{ + repeated Node defnames = 1 [json_name="defnames"]; + repeated Node stat_types = 2 [json_name="stat_types"]; + repeated Node exprs = 3 [json_name="exprs"]; + repeated Node relations = 4 [json_name="relations"]; + string stxcomment = 5 [json_name="stxcomment"]; + bool transformed = 6 [json_name="transformed"]; + bool if_not_exists = 7 [json_name="if_not_exists"]; +} + +message StatsElem +{ + string name = 1 [json_name="name"]; + Node expr = 2 [json_name="expr"]; +} + +message AlterStatsStmt +{ + repeated Node defnames = 1 [json_name="defnames"]; + Node stxstattarget = 2 [json_name="stxstattarget"]; + bool missing_ok = 3 [json_name="missing_ok"]; +} + +message CreateFunctionStmt +{ + bool is_procedure = 1 [json_name="is_procedure"]; + bool replace = 2 [json_name="replace"]; + repeated Node funcname = 3 [json_name="funcname"]; + repeated Node parameters 
= 4 [json_name="parameters"]; + TypeName return_type = 5 [json_name="returnType"]; + repeated Node options = 6 [json_name="options"]; + Node sql_body = 7 [json_name="sql_body"]; +} + +message FunctionParameter +{ + string name = 1 [json_name="name"]; + TypeName arg_type = 2 [json_name="argType"]; + FunctionParameterMode mode = 3 [json_name="mode"]; + Node defexpr = 4 [json_name="defexpr"]; +} + +message AlterFunctionStmt +{ + ObjectType objtype = 1 [json_name="objtype"]; + ObjectWithArgs func = 2 [json_name="func"]; + repeated Node actions = 3 [json_name="actions"]; +} + +message DoStmt +{ + repeated Node args = 1 [json_name="args"]; +} + +message InlineCodeBlock +{ + string source_text = 1 [json_name="source_text"]; + uint32 lang_oid = 2 [json_name="langOid"]; + bool lang_is_trusted = 3 [json_name="langIsTrusted"]; + bool atomic = 4 [json_name="atomic"]; +} + +message CallStmt +{ + FuncCall funccall = 1 [json_name="funccall"]; + FuncExpr funcexpr = 2 [json_name="funcexpr"]; + repeated Node outargs = 3 [json_name="outargs"]; +} + +message CallContext +{ + bool atomic = 1 [json_name="atomic"]; +} + +message RenameStmt +{ + ObjectType rename_type = 1 [json_name="renameType"]; + ObjectType relation_type = 2 [json_name="relationType"]; + RangeVar relation = 3 [json_name="relation"]; + Node object = 4 [json_name="object"]; + string subname = 5 [json_name="subname"]; + string newname = 6 [json_name="newname"]; + DropBehavior behavior = 7 [json_name="behavior"]; + bool missing_ok = 8 [json_name="missing_ok"]; +} + +message AlterObjectDependsStmt +{ + ObjectType object_type = 1 [json_name="objectType"]; + RangeVar relation = 2 [json_name="relation"]; + Node object = 3 [json_name="object"]; + String extname = 4 [json_name="extname"]; + bool remove = 5 [json_name="remove"]; +} + +message AlterObjectSchemaStmt +{ + ObjectType object_type = 1 [json_name="objectType"]; + RangeVar relation = 2 [json_name="relation"]; + Node object = 3 [json_name="object"]; + string newschema = 4 
[json_name="newschema"]; + bool missing_ok = 5 [json_name="missing_ok"]; +} + +message AlterOwnerStmt +{ + ObjectType object_type = 1 [json_name="objectType"]; + RangeVar relation = 2 [json_name="relation"]; + Node object = 3 [json_name="object"]; + RoleSpec newowner = 4 [json_name="newowner"]; +} + +message AlterOperatorStmt +{ + ObjectWithArgs opername = 1 [json_name="opername"]; + repeated Node options = 2 [json_name="options"]; +} + +message AlterTypeStmt +{ + repeated Node type_name = 1 [json_name="typeName"]; + repeated Node options = 2 [json_name="options"]; +} + +message RuleStmt +{ + RangeVar relation = 1 [json_name="relation"]; + string rulename = 2 [json_name="rulename"]; + Node where_clause = 3 [json_name="whereClause"]; + CmdType event = 4 [json_name="event"]; + bool instead = 5 [json_name="instead"]; + repeated Node actions = 6 [json_name="actions"]; + bool replace = 7 [json_name="replace"]; +} + +message NotifyStmt +{ + string conditionname = 1 [json_name="conditionname"]; + string payload = 2 [json_name="payload"]; +} + +message ListenStmt +{ + string conditionname = 1 [json_name="conditionname"]; +} + +message UnlistenStmt +{ + string conditionname = 1 [json_name="conditionname"]; +} + +message TransactionStmt +{ + TransactionStmtKind kind = 1 [json_name="kind"]; + repeated Node options = 2 [json_name="options"]; + string savepoint_name = 3 [json_name="savepoint_name"]; + string gid = 4 [json_name="gid"]; + bool chain = 5 [json_name="chain"]; + int32 location = 6 [json_name="location"]; +} + +message CompositeTypeStmt +{ + RangeVar typevar = 1 [json_name="typevar"]; + repeated Node coldeflist = 2 [json_name="coldeflist"]; +} + +message CreateEnumStmt +{ + repeated Node type_name = 1 [json_name="typeName"]; + repeated Node vals = 2 [json_name="vals"]; +} + +message CreateRangeStmt +{ + repeated Node type_name = 1 [json_name="typeName"]; + repeated Node params = 2 [json_name="params"]; +} + +message AlterEnumStmt +{ + repeated Node type_name = 1 
[json_name="typeName"]; + string old_val = 2 [json_name="oldVal"]; + string new_val = 3 [json_name="newVal"]; + string new_val_neighbor = 4 [json_name="newValNeighbor"]; + bool new_val_is_after = 5 [json_name="newValIsAfter"]; + bool skip_if_new_val_exists = 6 [json_name="skipIfNewValExists"]; +} + +message ViewStmt +{ + RangeVar view = 1 [json_name="view"]; + repeated Node aliases = 2 [json_name="aliases"]; + Node query = 3 [json_name="query"]; + bool replace = 4 [json_name="replace"]; + repeated Node options = 5 [json_name="options"]; + ViewCheckOption with_check_option = 6 [json_name="withCheckOption"]; +} + +message LoadStmt +{ + string filename = 1 [json_name="filename"]; +} + +message CreatedbStmt +{ + string dbname = 1 [json_name="dbname"]; + repeated Node options = 2 [json_name="options"]; +} + +message AlterDatabaseStmt +{ + string dbname = 1 [json_name="dbname"]; + repeated Node options = 2 [json_name="options"]; +} + +message AlterDatabaseRefreshCollStmt +{ + string dbname = 1 [json_name="dbname"]; +} + +message AlterDatabaseSetStmt +{ + string dbname = 1 [json_name="dbname"]; + VariableSetStmt setstmt = 2 [json_name="setstmt"]; +} + +message DropdbStmt +{ + string dbname = 1 [json_name="dbname"]; + bool missing_ok = 2 [json_name="missing_ok"]; + repeated Node options = 3 [json_name="options"]; +} + +message AlterSystemStmt +{ + VariableSetStmt setstmt = 1 [json_name="setstmt"]; +} + +message ClusterStmt +{ + RangeVar relation = 1 [json_name="relation"]; + string indexname = 2 [json_name="indexname"]; + repeated Node params = 3 [json_name="params"]; +} + +message VacuumStmt +{ + repeated Node options = 1 [json_name="options"]; + repeated Node rels = 2 [json_name="rels"]; + bool is_vacuumcmd = 3 [json_name="is_vacuumcmd"]; +} + +message VacuumRelation +{ + RangeVar relation = 1 [json_name="relation"]; + uint32 oid = 2 [json_name="oid"]; + repeated Node va_cols = 3 [json_name="va_cols"]; +} + +message ExplainStmt +{ + Node query = 1 [json_name="query"]; + 
repeated Node options = 2 [json_name="options"]; +} + +message CreateTableAsStmt +{ + Node query = 1 [json_name="query"]; + IntoClause into = 2 [json_name="into"]; + ObjectType objtype = 3 [json_name="objtype"]; + bool is_select_into = 4 [json_name="is_select_into"]; + bool if_not_exists = 5 [json_name="if_not_exists"]; +} + +message RefreshMatViewStmt +{ + bool concurrent = 1 [json_name="concurrent"]; + bool skip_data = 2 [json_name="skipData"]; + RangeVar relation = 3 [json_name="relation"]; +} + +message CheckPointStmt +{ +} + +message DiscardStmt +{ + DiscardMode target = 1 [json_name="target"]; +} + +message LockStmt +{ + repeated Node relations = 1 [json_name="relations"]; + int32 mode = 2 [json_name="mode"]; + bool nowait = 3 [json_name="nowait"]; +} + +message ConstraintsSetStmt +{ + repeated Node constraints = 1 [json_name="constraints"]; + bool deferred = 2 [json_name="deferred"]; +} + +message ReindexStmt +{ + ReindexObjectType kind = 1 [json_name="kind"]; + RangeVar relation = 2 [json_name="relation"]; + string name = 3 [json_name="name"]; + repeated Node params = 4 [json_name="params"]; +} + +message CreateConversionStmt +{ + repeated Node conversion_name = 1 [json_name="conversion_name"]; + string for_encoding_name = 2 [json_name="for_encoding_name"]; + string to_encoding_name = 3 [json_name="to_encoding_name"]; + repeated Node func_name = 4 [json_name="func_name"]; + bool def = 5 [json_name="def"]; +} + +message CreateCastStmt +{ + TypeName sourcetype = 1 [json_name="sourcetype"]; + TypeName targettype = 2 [json_name="targettype"]; + ObjectWithArgs func = 3 [json_name="func"]; + CoercionContext context = 4 [json_name="context"]; + bool inout = 5 [json_name="inout"]; +} + +message CreateTransformStmt +{ + bool replace = 1 [json_name="replace"]; + TypeName type_name = 2 [json_name="type_name"]; + string lang = 3 [json_name="lang"]; + ObjectWithArgs fromsql = 4 [json_name="fromsql"]; + ObjectWithArgs tosql = 5 [json_name="tosql"]; +} + +message 
PrepareStmt +{ + string name = 1 [json_name="name"]; + repeated Node argtypes = 2 [json_name="argtypes"]; + Node query = 3 [json_name="query"]; +} + +message ExecuteStmt +{ + string name = 1 [json_name="name"]; + repeated Node params = 2 [json_name="params"]; +} + +message DeallocateStmt +{ + string name = 1 [json_name="name"]; + bool isall = 2 [json_name="isall"]; + int32 location = 3 [json_name="location"]; +} + +message DropOwnedStmt +{ + repeated Node roles = 1 [json_name="roles"]; + DropBehavior behavior = 2 [json_name="behavior"]; +} + +message ReassignOwnedStmt +{ + repeated Node roles = 1 [json_name="roles"]; + RoleSpec newrole = 2 [json_name="newrole"]; +} + +message AlterTSDictionaryStmt +{ + repeated Node dictname = 1 [json_name="dictname"]; + repeated Node options = 2 [json_name="options"]; +} + +message AlterTSConfigurationStmt +{ + AlterTSConfigType kind = 1 [json_name="kind"]; + repeated Node cfgname = 2 [json_name="cfgname"]; + repeated Node tokentype = 3 [json_name="tokentype"]; + repeated Node dicts = 4 [json_name="dicts"]; + bool override = 5 [json_name="override"]; + bool replace = 6 [json_name="replace"]; + bool missing_ok = 7 [json_name="missing_ok"]; +} + +message PublicationTable +{ + RangeVar relation = 1 [json_name="relation"]; + Node where_clause = 2 [json_name="whereClause"]; + repeated Node columns = 3 [json_name="columns"]; +} + +message PublicationObjSpec +{ + PublicationObjSpecType pubobjtype = 1 [json_name="pubobjtype"]; + string name = 2 [json_name="name"]; + PublicationTable pubtable = 3 [json_name="pubtable"]; + int32 location = 4 [json_name="location"]; +} + +message CreatePublicationStmt +{ + string pubname = 1 [json_name="pubname"]; + repeated Node options = 2 [json_name="options"]; + repeated Node pubobjects = 3 [json_name="pubobjects"]; + bool for_all_tables = 4 [json_name="for_all_tables"]; +} + +message AlterPublicationStmt +{ + string pubname = 1 [json_name="pubname"]; + repeated Node options = 2 [json_name="options"]; + 
repeated Node pubobjects = 3 [json_name="pubobjects"]; + bool for_all_tables = 4 [json_name="for_all_tables"]; + AlterPublicationAction action = 5 [json_name="action"]; +} + +message CreateSubscriptionStmt +{ + string subname = 1 [json_name="subname"]; + string conninfo = 2 [json_name="conninfo"]; + repeated Node publication = 3 [json_name="publication"]; + repeated Node options = 4 [json_name="options"]; +} + +message AlterSubscriptionStmt +{ + AlterSubscriptionType kind = 1 [json_name="kind"]; + string subname = 2 [json_name="subname"]; + string conninfo = 3 [json_name="conninfo"]; + repeated Node publication = 4 [json_name="publication"]; + repeated Node options = 5 [json_name="options"]; +} + +message DropSubscriptionStmt +{ + string subname = 1 [json_name="subname"]; + bool missing_ok = 2 [json_name="missing_ok"]; + DropBehavior behavior = 3 [json_name="behavior"]; +} + +enum QuerySource +{ + QUERY_SOURCE_UNDEFINED = 0; + QSRC_ORIGINAL = 1; + QSRC_PARSER = 2; + QSRC_INSTEAD_RULE = 3; + QSRC_QUAL_INSTEAD_RULE = 4; + QSRC_NON_INSTEAD_RULE = 5; +} + +enum SortByDir +{ + SORT_BY_DIR_UNDEFINED = 0; + SORTBY_DEFAULT = 1; + SORTBY_ASC = 2; + SORTBY_DESC = 3; + SORTBY_USING = 4; +} + +enum SortByNulls +{ + SORT_BY_NULLS_UNDEFINED = 0; + SORTBY_NULLS_DEFAULT = 1; + SORTBY_NULLS_FIRST = 2; + SORTBY_NULLS_LAST = 3; +} + +enum SetQuantifier +{ + SET_QUANTIFIER_UNDEFINED = 0; + SET_QUANTIFIER_DEFAULT = 1; + SET_QUANTIFIER_ALL = 2; + SET_QUANTIFIER_DISTINCT = 3; +} + +enum A_Expr_Kind +{ + A_EXPR_KIND_UNDEFINED = 0; + AEXPR_OP = 1; + AEXPR_OP_ANY = 2; + AEXPR_OP_ALL = 3; + AEXPR_DISTINCT = 4; + AEXPR_NOT_DISTINCT = 5; + AEXPR_NULLIF = 6; + AEXPR_IN = 7; + AEXPR_LIKE = 8; + AEXPR_ILIKE = 9; + AEXPR_SIMILAR = 10; + AEXPR_BETWEEN = 11; + AEXPR_NOT_BETWEEN = 12; + AEXPR_BETWEEN_SYM = 13; + AEXPR_NOT_BETWEEN_SYM = 14; +} + +enum RoleSpecType +{ + ROLE_SPEC_TYPE_UNDEFINED = 0; + ROLESPEC_CSTRING = 1; + ROLESPEC_CURRENT_ROLE = 2; + ROLESPEC_CURRENT_USER = 3; + 
ROLESPEC_SESSION_USER = 4; + ROLESPEC_PUBLIC = 5; +} + +enum TableLikeOption +{ + TABLE_LIKE_OPTION_UNDEFINED = 0; + CREATE_TABLE_LIKE_COMMENTS = 1; + CREATE_TABLE_LIKE_COMPRESSION = 2; + CREATE_TABLE_LIKE_CONSTRAINTS = 3; + CREATE_TABLE_LIKE_DEFAULTS = 4; + CREATE_TABLE_LIKE_GENERATED = 5; + CREATE_TABLE_LIKE_IDENTITY = 6; + CREATE_TABLE_LIKE_INDEXES = 7; + CREATE_TABLE_LIKE_STATISTICS = 8; + CREATE_TABLE_LIKE_STORAGE = 9; + CREATE_TABLE_LIKE_ALL = 10; +} + +enum DefElemAction +{ + DEF_ELEM_ACTION_UNDEFINED = 0; + DEFELEM_UNSPEC = 1; + DEFELEM_SET = 2; + DEFELEM_ADD = 3; + DEFELEM_DROP = 4; +} + +enum PartitionStrategy +{ + PARTITION_STRATEGY_UNDEFINED = 0; + PARTITION_STRATEGY_LIST = 1; + PARTITION_STRATEGY_RANGE = 2; + PARTITION_STRATEGY_HASH = 3; +} + +enum PartitionRangeDatumKind +{ + PARTITION_RANGE_DATUM_KIND_UNDEFINED = 0; + PARTITION_RANGE_DATUM_MINVALUE = 1; + PARTITION_RANGE_DATUM_VALUE = 2; + PARTITION_RANGE_DATUM_MAXVALUE = 3; +} + +enum RTEKind +{ + RTEKIND_UNDEFINED = 0; + RTE_RELATION = 1; + RTE_SUBQUERY = 2; + RTE_JOIN = 3; + RTE_FUNCTION = 4; + RTE_TABLEFUNC = 5; + RTE_VALUES = 6; + RTE_CTE = 7; + RTE_NAMEDTUPLESTORE = 8; + RTE_RESULT = 9; +} + +enum WCOKind +{ + WCOKIND_UNDEFINED = 0; + WCO_VIEW_CHECK = 1; + WCO_RLS_INSERT_CHECK = 2; + WCO_RLS_UPDATE_CHECK = 3; + WCO_RLS_CONFLICT_CHECK = 4; + WCO_RLS_MERGE_UPDATE_CHECK = 5; + WCO_RLS_MERGE_DELETE_CHECK = 6; +} + +enum GroupingSetKind +{ + GROUPING_SET_KIND_UNDEFINED = 0; + GROUPING_SET_EMPTY = 1; + GROUPING_SET_SIMPLE = 2; + GROUPING_SET_ROLLUP = 3; + GROUPING_SET_CUBE = 4; + GROUPING_SET_SETS = 5; +} + +enum CTEMaterialize +{ + CTEMATERIALIZE_UNDEFINED = 0; + CTEMaterializeDefault = 1; + CTEMaterializeAlways = 2; + CTEMaterializeNever = 3; +} + +enum JsonQuotes +{ + JSON_QUOTES_UNDEFINED = 0; + JS_QUOTES_UNSPEC = 1; + JS_QUOTES_KEEP = 2; + JS_QUOTES_OMIT = 3; +} + +enum JsonTableColumnType +{ + JSON_TABLE_COLUMN_TYPE_UNDEFINED = 0; + JTC_FOR_ORDINALITY = 1; + JTC_REGULAR = 2; + JTC_EXISTS = 3; + 
JTC_FORMATTED = 4; + JTC_NESTED = 5; +} + +enum SetOperation +{ + SET_OPERATION_UNDEFINED = 0; + SETOP_NONE = 1; + SETOP_UNION = 2; + SETOP_INTERSECT = 3; + SETOP_EXCEPT = 4; +} + +enum ObjectType +{ + OBJECT_TYPE_UNDEFINED = 0; + OBJECT_ACCESS_METHOD = 1; + OBJECT_AGGREGATE = 2; + OBJECT_AMOP = 3; + OBJECT_AMPROC = 4; + OBJECT_ATTRIBUTE = 5; + OBJECT_CAST = 6; + OBJECT_COLUMN = 7; + OBJECT_COLLATION = 8; + OBJECT_CONVERSION = 9; + OBJECT_DATABASE = 10; + OBJECT_DEFAULT = 11; + OBJECT_DEFACL = 12; + OBJECT_DOMAIN = 13; + OBJECT_DOMCONSTRAINT = 14; + OBJECT_EVENT_TRIGGER = 15; + OBJECT_EXTENSION = 16; + OBJECT_FDW = 17; + OBJECT_FOREIGN_SERVER = 18; + OBJECT_FOREIGN_TABLE = 19; + OBJECT_FUNCTION = 20; + OBJECT_INDEX = 21; + OBJECT_LANGUAGE = 22; + OBJECT_LARGEOBJECT = 23; + OBJECT_MATVIEW = 24; + OBJECT_OPCLASS = 25; + OBJECT_OPERATOR = 26; + OBJECT_OPFAMILY = 27; + OBJECT_PARAMETER_ACL = 28; + OBJECT_POLICY = 29; + OBJECT_PROCEDURE = 30; + OBJECT_PUBLICATION = 31; + OBJECT_PUBLICATION_NAMESPACE = 32; + OBJECT_PUBLICATION_REL = 33; + OBJECT_ROLE = 34; + OBJECT_ROUTINE = 35; + OBJECT_RULE = 36; + OBJECT_SCHEMA = 37; + OBJECT_SEQUENCE = 38; + OBJECT_SUBSCRIPTION = 39; + OBJECT_STATISTIC_EXT = 40; + OBJECT_TABCONSTRAINT = 41; + OBJECT_TABLE = 42; + OBJECT_TABLESPACE = 43; + OBJECT_TRANSFORM = 44; + OBJECT_TRIGGER = 45; + OBJECT_TSCONFIGURATION = 46; + OBJECT_TSDICTIONARY = 47; + OBJECT_TSPARSER = 48; + OBJECT_TSTEMPLATE = 49; + OBJECT_TYPE = 50; + OBJECT_USER_MAPPING = 51; + OBJECT_VIEW = 52; +} + +enum DropBehavior +{ + DROP_BEHAVIOR_UNDEFINED = 0; + DROP_RESTRICT = 1; + DROP_CASCADE = 2; +} + +enum AlterTableType +{ + ALTER_TABLE_TYPE_UNDEFINED = 0; + AT_AddColumn = 1; + AT_AddColumnToView = 2; + AT_ColumnDefault = 3; + AT_CookedColumnDefault = 4; + AT_DropNotNull = 5; + AT_SetNotNull = 6; + AT_SetExpression = 7; + AT_DropExpression = 8; + AT_CheckNotNull = 9; + AT_SetStatistics = 10; + AT_SetOptions = 11; + AT_ResetOptions = 12; + AT_SetStorage = 13; + 
AT_SetCompression = 14; + AT_DropColumn = 15; + AT_AddIndex = 16; + AT_ReAddIndex = 17; + AT_AddConstraint = 18; + AT_ReAddConstraint = 19; + AT_ReAddDomainConstraint = 20; + AT_AlterConstraint = 21; + AT_ValidateConstraint = 22; + AT_AddIndexConstraint = 23; + AT_DropConstraint = 24; + AT_ReAddComment = 25; + AT_AlterColumnType = 26; + AT_AlterColumnGenericOptions = 27; + AT_ChangeOwner = 28; + AT_ClusterOn = 29; + AT_DropCluster = 30; + AT_SetLogged = 31; + AT_SetUnLogged = 32; + AT_DropOids = 33; + AT_SetAccessMethod = 34; + AT_SetTableSpace = 35; + AT_SetRelOptions = 36; + AT_ResetRelOptions = 37; + AT_ReplaceRelOptions = 38; + AT_EnableTrig = 39; + AT_EnableAlwaysTrig = 40; + AT_EnableReplicaTrig = 41; + AT_DisableTrig = 42; + AT_EnableTrigAll = 43; + AT_DisableTrigAll = 44; + AT_EnableTrigUser = 45; + AT_DisableTrigUser = 46; + AT_EnableRule = 47; + AT_EnableAlwaysRule = 48; + AT_EnableReplicaRule = 49; + AT_DisableRule = 50; + AT_AddInherit = 51; + AT_DropInherit = 52; + AT_AddOf = 53; + AT_DropOf = 54; + AT_ReplicaIdentity = 55; + AT_EnableRowSecurity = 56; + AT_DisableRowSecurity = 57; + AT_ForceRowSecurity = 58; + AT_NoForceRowSecurity = 59; + AT_GenericOptions = 60; + AT_AttachPartition = 61; + AT_DetachPartition = 62; + AT_DetachPartitionFinalize = 63; + AT_AddIdentity = 64; + AT_SetIdentity = 65; + AT_DropIdentity = 66; + AT_ReAddStatistics = 67; +} + +enum GrantTargetType +{ + GRANT_TARGET_TYPE_UNDEFINED = 0; + ACL_TARGET_OBJECT = 1; + ACL_TARGET_ALL_IN_SCHEMA = 2; + ACL_TARGET_DEFAULTS = 3; +} + +enum VariableSetKind +{ + VARIABLE_SET_KIND_UNDEFINED = 0; + VAR_SET_VALUE = 1; + VAR_SET_DEFAULT = 2; + VAR_SET_CURRENT = 3; + VAR_SET_MULTI = 4; + VAR_RESET = 5; + VAR_RESET_ALL = 6; +} + +enum ConstrType +{ + CONSTR_TYPE_UNDEFINED = 0; + CONSTR_NULL = 1; + CONSTR_NOTNULL = 2; + CONSTR_DEFAULT = 3; + CONSTR_IDENTITY = 4; + CONSTR_GENERATED = 5; + CONSTR_CHECK = 6; + CONSTR_PRIMARY = 7; + CONSTR_UNIQUE = 8; + CONSTR_EXCLUSION = 9; + CONSTR_FOREIGN = 10; + 
CONSTR_ATTR_DEFERRABLE = 11; + CONSTR_ATTR_NOT_DEFERRABLE = 12; + CONSTR_ATTR_DEFERRED = 13; + CONSTR_ATTR_IMMEDIATE = 14; +} + +enum ImportForeignSchemaType +{ + IMPORT_FOREIGN_SCHEMA_TYPE_UNDEFINED = 0; + FDW_IMPORT_SCHEMA_ALL = 1; + FDW_IMPORT_SCHEMA_LIMIT_TO = 2; + FDW_IMPORT_SCHEMA_EXCEPT = 3; +} + +enum RoleStmtType +{ + ROLE_STMT_TYPE_UNDEFINED = 0; + ROLESTMT_ROLE = 1; + ROLESTMT_USER = 2; + ROLESTMT_GROUP = 3; +} + +enum FetchDirection +{ + FETCH_DIRECTION_UNDEFINED = 0; + FETCH_FORWARD = 1; + FETCH_BACKWARD = 2; + FETCH_ABSOLUTE = 3; + FETCH_RELATIVE = 4; +} + +enum FunctionParameterMode +{ + FUNCTION_PARAMETER_MODE_UNDEFINED = 0; + FUNC_PARAM_IN = 1; + FUNC_PARAM_OUT = 2; + FUNC_PARAM_INOUT = 3; + FUNC_PARAM_VARIADIC = 4; + FUNC_PARAM_TABLE = 5; + FUNC_PARAM_DEFAULT = 6; +} + +enum TransactionStmtKind +{ + TRANSACTION_STMT_KIND_UNDEFINED = 0; + TRANS_STMT_BEGIN = 1; + TRANS_STMT_START = 2; + TRANS_STMT_COMMIT = 3; + TRANS_STMT_ROLLBACK = 4; + TRANS_STMT_SAVEPOINT = 5; + TRANS_STMT_RELEASE = 6; + TRANS_STMT_ROLLBACK_TO = 7; + TRANS_STMT_PREPARE = 8; + TRANS_STMT_COMMIT_PREPARED = 9; + TRANS_STMT_ROLLBACK_PREPARED = 10; +} + +enum ViewCheckOption +{ + VIEW_CHECK_OPTION_UNDEFINED = 0; + NO_CHECK_OPTION = 1; + LOCAL_CHECK_OPTION = 2; + CASCADED_CHECK_OPTION = 3; +} + +enum DiscardMode +{ + DISCARD_MODE_UNDEFINED = 0; + DISCARD_ALL = 1; + DISCARD_PLANS = 2; + DISCARD_SEQUENCES = 3; + DISCARD_TEMP = 4; +} + +enum ReindexObjectType +{ + REINDEX_OBJECT_TYPE_UNDEFINED = 0; + REINDEX_OBJECT_INDEX = 1; + REINDEX_OBJECT_TABLE = 2; + REINDEX_OBJECT_SCHEMA = 3; + REINDEX_OBJECT_SYSTEM = 4; + REINDEX_OBJECT_DATABASE = 5; +} + +enum AlterTSConfigType +{ + ALTER_TSCONFIG_TYPE_UNDEFINED = 0; + ALTER_TSCONFIG_ADD_MAPPING = 1; + ALTER_TSCONFIG_ALTER_MAPPING_FOR_TOKEN = 2; + ALTER_TSCONFIG_REPLACE_DICT = 3; + ALTER_TSCONFIG_REPLACE_DICT_FOR_TOKEN = 4; + ALTER_TSCONFIG_DROP_MAPPING = 5; +} + +enum PublicationObjSpecType +{ + PUBLICATION_OBJ_SPEC_TYPE_UNDEFINED = 0; + 
PUBLICATIONOBJ_TABLE = 1; + PUBLICATIONOBJ_TABLES_IN_SCHEMA = 2; + PUBLICATIONOBJ_TABLES_IN_CUR_SCHEMA = 3; + PUBLICATIONOBJ_CONTINUATION = 4; +} + +enum AlterPublicationAction +{ + ALTER_PUBLICATION_ACTION_UNDEFINED = 0; + AP_AddObjects = 1; + AP_DropObjects = 2; + AP_SetObjects = 3; +} + +enum AlterSubscriptionType +{ + ALTER_SUBSCRIPTION_TYPE_UNDEFINED = 0; + ALTER_SUBSCRIPTION_OPTIONS = 1; + ALTER_SUBSCRIPTION_CONNECTION = 2; + ALTER_SUBSCRIPTION_SET_PUBLICATION = 3; + ALTER_SUBSCRIPTION_ADD_PUBLICATION = 4; + ALTER_SUBSCRIPTION_DROP_PUBLICATION = 5; + ALTER_SUBSCRIPTION_REFRESH = 6; + ALTER_SUBSCRIPTION_ENABLED = 7; + ALTER_SUBSCRIPTION_SKIP = 8; +} + +enum OverridingKind +{ + OVERRIDING_KIND_UNDEFINED = 0; + OVERRIDING_NOT_SET = 1; + OVERRIDING_USER_VALUE = 2; + OVERRIDING_SYSTEM_VALUE = 3; +} + +enum OnCommitAction +{ + ON_COMMIT_ACTION_UNDEFINED = 0; + ONCOMMIT_NOOP = 1; + ONCOMMIT_PRESERVE_ROWS = 2; + ONCOMMIT_DELETE_ROWS = 3; + ONCOMMIT_DROP = 4; +} + +enum TableFuncType +{ + TABLE_FUNC_TYPE_UNDEFINED = 0; + TFT_XMLTABLE = 1; + TFT_JSON_TABLE = 2; +} + +enum ParamKind +{ + PARAM_KIND_UNDEFINED = 0; + PARAM_EXTERN = 1; + PARAM_EXEC = 2; + PARAM_SUBLINK = 3; + PARAM_MULTIEXPR = 4; +} + +enum CoercionContext +{ + COERCION_CONTEXT_UNDEFINED = 0; + COERCION_IMPLICIT = 1; + COERCION_ASSIGNMENT = 2; + COERCION_PLPGSQL = 3; + COERCION_EXPLICIT = 4; +} + +enum CoercionForm +{ + COERCION_FORM_UNDEFINED = 0; + COERCE_EXPLICIT_CALL = 1; + COERCE_EXPLICIT_CAST = 2; + COERCE_IMPLICIT_CAST = 3; + COERCE_SQL_SYNTAX = 4; +} + +enum BoolExprType +{ + BOOL_EXPR_TYPE_UNDEFINED = 0; + AND_EXPR = 1; + OR_EXPR = 2; + NOT_EXPR = 3; +} + +enum SubLinkType +{ + SUB_LINK_TYPE_UNDEFINED = 0; + EXISTS_SUBLINK = 1; + ALL_SUBLINK = 2; + ANY_SUBLINK = 3; + ROWCOMPARE_SUBLINK = 4; + EXPR_SUBLINK = 5; + MULTIEXPR_SUBLINK = 6; + ARRAY_SUBLINK = 7; + CTE_SUBLINK = 8; +} + +enum RowCompareType +{ + ROW_COMPARE_TYPE_UNDEFINED = 0; + ROWCOMPARE_LT = 1; + ROWCOMPARE_LE = 2; + ROWCOMPARE_EQ = 3; 
+ ROWCOMPARE_GE = 4; + ROWCOMPARE_GT = 5; + ROWCOMPARE_NE = 6; +} + +enum MinMaxOp +{ + MIN_MAX_OP_UNDEFINED = 0; + IS_GREATEST = 1; + IS_LEAST = 2; +} + +enum SQLValueFunctionOp +{ + SQLVALUE_FUNCTION_OP_UNDEFINED = 0; + SVFOP_CURRENT_DATE = 1; + SVFOP_CURRENT_TIME = 2; + SVFOP_CURRENT_TIME_N = 3; + SVFOP_CURRENT_TIMESTAMP = 4; + SVFOP_CURRENT_TIMESTAMP_N = 5; + SVFOP_LOCALTIME = 6; + SVFOP_LOCALTIME_N = 7; + SVFOP_LOCALTIMESTAMP = 8; + SVFOP_LOCALTIMESTAMP_N = 9; + SVFOP_CURRENT_ROLE = 10; + SVFOP_CURRENT_USER = 11; + SVFOP_USER = 12; + SVFOP_SESSION_USER = 13; + SVFOP_CURRENT_CATALOG = 14; + SVFOP_CURRENT_SCHEMA = 15; +} + +enum XmlExprOp +{ + XML_EXPR_OP_UNDEFINED = 0; + IS_XMLCONCAT = 1; + IS_XMLELEMENT = 2; + IS_XMLFOREST = 3; + IS_XMLPARSE = 4; + IS_XMLPI = 5; + IS_XMLROOT = 6; + IS_XMLSERIALIZE = 7; + IS_DOCUMENT = 8; +} + +enum XmlOptionType +{ + XML_OPTION_TYPE_UNDEFINED = 0; + XMLOPTION_DOCUMENT = 1; + XMLOPTION_CONTENT = 2; +} + +enum JsonEncoding +{ + JSON_ENCODING_UNDEFINED = 0; + JS_ENC_DEFAULT = 1; + JS_ENC_UTF8 = 2; + JS_ENC_UTF16 = 3; + JS_ENC_UTF32 = 4; +} + +enum JsonFormatType +{ + JSON_FORMAT_TYPE_UNDEFINED = 0; + JS_FORMAT_DEFAULT = 1; + JS_FORMAT_JSON = 2; + JS_FORMAT_JSONB = 3; +} + +enum JsonConstructorType +{ + JSON_CONSTRUCTOR_TYPE_UNDEFINED = 0; + JSCTOR_JSON_OBJECT = 1; + JSCTOR_JSON_ARRAY = 2; + JSCTOR_JSON_OBJECTAGG = 3; + JSCTOR_JSON_ARRAYAGG = 4; + JSCTOR_JSON_PARSE = 5; + JSCTOR_JSON_SCALAR = 6; + JSCTOR_JSON_SERIALIZE = 7; +} + +enum JsonValueType +{ + JSON_VALUE_TYPE_UNDEFINED = 0; + JS_TYPE_ANY = 1; + JS_TYPE_OBJECT = 2; + JS_TYPE_ARRAY = 3; + JS_TYPE_SCALAR = 4; +} + +enum JsonWrapper +{ + JSON_WRAPPER_UNDEFINED = 0; + JSW_UNSPEC = 1; + JSW_NONE = 2; + JSW_CONDITIONAL = 3; + JSW_UNCONDITIONAL = 4; +} + +enum JsonBehaviorType +{ + JSON_BEHAVIOR_TYPE_UNDEFINED = 0; + JSON_BEHAVIOR_NULL = 1; + JSON_BEHAVIOR_ERROR = 2; + JSON_BEHAVIOR_EMPTY = 3; + JSON_BEHAVIOR_TRUE = 4; + JSON_BEHAVIOR_FALSE = 5; + JSON_BEHAVIOR_UNKNOWN = 6; + 
JSON_BEHAVIOR_EMPTY_ARRAY = 7; + JSON_BEHAVIOR_EMPTY_OBJECT = 8; + JSON_BEHAVIOR_DEFAULT = 9; +} + +enum JsonExprOp +{ + JSON_EXPR_OP_UNDEFINED = 0; + JSON_EXISTS_OP = 1; + JSON_QUERY_OP = 2; + JSON_VALUE_OP = 3; + JSON_TABLE_OP = 4; +} + +enum NullTestType +{ + NULL_TEST_TYPE_UNDEFINED = 0; + IS_NULL = 1; + IS_NOT_NULL = 2; +} + +enum BoolTestType +{ + BOOL_TEST_TYPE_UNDEFINED = 0; + IS_TRUE = 1; + IS_NOT_TRUE = 2; + IS_FALSE = 3; + IS_NOT_FALSE = 4; + IS_UNKNOWN = 5; + IS_NOT_UNKNOWN = 6; +} + +enum MergeMatchKind +{ + MERGE_MATCH_KIND_UNDEFINED = 0; + MERGE_WHEN_MATCHED = 1; + MERGE_WHEN_NOT_MATCHED_BY_SOURCE = 2; + MERGE_WHEN_NOT_MATCHED_BY_TARGET = 3; +} + +enum CmdType +{ + CMD_TYPE_UNDEFINED = 0; + CMD_UNKNOWN = 1; + CMD_SELECT = 2; + CMD_UPDATE = 3; + CMD_INSERT = 4; + CMD_DELETE = 5; + CMD_MERGE = 6; + CMD_UTILITY = 7; + CMD_NOTHING = 8; +} + +enum JoinType +{ + JOIN_TYPE_UNDEFINED = 0; + JOIN_INNER = 1; + JOIN_LEFT = 2; + JOIN_FULL = 3; + JOIN_RIGHT = 4; + JOIN_SEMI = 5; + JOIN_ANTI = 6; + JOIN_RIGHT_ANTI = 7; + JOIN_UNIQUE_OUTER = 8; + JOIN_UNIQUE_INNER = 9; +} + +enum AggStrategy +{ + AGG_STRATEGY_UNDEFINED = 0; + AGG_PLAIN = 1; + AGG_SORTED = 2; + AGG_HASHED = 3; + AGG_MIXED = 4; +} + +enum AggSplit +{ + AGG_SPLIT_UNDEFINED = 0; + AGGSPLIT_SIMPLE = 1; + AGGSPLIT_INITIAL_SERIAL = 2; + AGGSPLIT_FINAL_DESERIAL = 3; +} + +enum SetOpCmd +{ + SET_OP_CMD_UNDEFINED = 0; + SETOPCMD_INTERSECT = 1; + SETOPCMD_INTERSECT_ALL = 2; + SETOPCMD_EXCEPT = 3; + SETOPCMD_EXCEPT_ALL = 4; +} + +enum SetOpStrategy +{ + SET_OP_STRATEGY_UNDEFINED = 0; + SETOP_SORTED = 1; + SETOP_HASHED = 2; +} + +enum OnConflictAction +{ + ON_CONFLICT_ACTION_UNDEFINED = 0; + ONCONFLICT_NONE = 1; + ONCONFLICT_NOTHING = 2; + ONCONFLICT_UPDATE = 3; +} + +enum LimitOption +{ + LIMIT_OPTION_UNDEFINED = 0; + LIMIT_OPTION_DEFAULT = 1; + LIMIT_OPTION_COUNT = 2; + LIMIT_OPTION_WITH_TIES = 3; +} + +enum LockClauseStrength +{ + LOCK_CLAUSE_STRENGTH_UNDEFINED = 0; + LCS_NONE = 1; + LCS_FORKEYSHARE = 2; + 
LCS_FORSHARE = 3; + LCS_FORNOKEYUPDATE = 4; + LCS_FORUPDATE = 5; +} + +enum LockWaitPolicy +{ + LOCK_WAIT_POLICY_UNDEFINED = 0; + LockWaitBlock = 1; + LockWaitSkip = 2; + LockWaitError = 3; +} + +enum LockTupleMode +{ + LOCK_TUPLE_MODE_UNDEFINED = 0; + LockTupleKeyShare = 1; + LockTupleShare = 2; + LockTupleNoKeyExclusive = 3; + LockTupleExclusive = 4; +} + +message ScanToken { + int32 start = 1; + int32 end = 2; + Token token = 4; + KeywordKind keyword_kind = 5; +} + +enum KeywordKind { + NO_KEYWORD = 0; + UNRESERVED_KEYWORD = 1; + COL_NAME_KEYWORD = 2; + TYPE_FUNC_NAME_KEYWORD = 3; + RESERVED_KEYWORD = 4; +} + +enum Token { + NUL = 0; + // Single-character tokens that are returned 1:1 (identical with "self" list in scan.l) + // Either supporting syntax, or single-character operators (some can be both) + // Also see https://www.postgresql.org/docs/12/sql-syntax-lexical.html#SQL-SYNTAX-SPECIAL-CHARS + ASCII_36 = 36; // "$" + ASCII_37 = 37; // "%" + ASCII_40 = 40; // "(" + ASCII_41 = 41; // ")" + ASCII_42 = 42; // "*" + ASCII_43 = 43; // "+" + ASCII_44 = 44; // "," + ASCII_45 = 45; // "-" + ASCII_46 = 46; // "." + ASCII_47 = 47; // "/" + ASCII_58 = 58; // ":" + ASCII_59 = 59; // ";" + ASCII_60 = 60; // "<" + ASCII_61 = 61; // "=" + ASCII_62 = 62; // ">" + ASCII_63 = 63; // "?" 
+ ASCII_91 = 91; // "[" + ASCII_92 = 92; // "\" + ASCII_93 = 93; // "]" + ASCII_94 = 94; // "^" + // Named tokens in scan.l + IDENT = 258; + UIDENT = 259; + FCONST = 260; + SCONST = 261; + USCONST = 262; + BCONST = 263; + XCONST = 264; + Op = 265; + ICONST = 266; + PARAM = 267; + TYPECAST = 268; + DOT_DOT = 269; + COLON_EQUALS = 270; + EQUALS_GREATER = 271; + LESS_EQUALS = 272; + GREATER_EQUALS = 273; + NOT_EQUALS = 274; + SQL_COMMENT = 275; + C_COMMENT = 276; + ABORT_P = 277; + ABSENT = 278; + ABSOLUTE_P = 279; + ACCESS = 280; + ACTION = 281; + ADD_P = 282; + ADMIN = 283; + AFTER = 284; + AGGREGATE = 285; + ALL = 286; + ALSO = 287; + ALTER = 288; + ALWAYS = 289; + ANALYSE = 290; + ANALYZE = 291; + AND = 292; + ANY = 293; + ARRAY = 294; + AS = 295; + ASC = 296; + ASENSITIVE = 297; + ASSERTION = 298; + ASSIGNMENT = 299; + ASYMMETRIC = 300; + ATOMIC = 301; + AT = 302; + ATTACH = 303; + ATTRIBUTE = 304; + AUTHORIZATION = 305; + BACKWARD = 306; + BEFORE = 307; + BEGIN_P = 308; + BETWEEN = 309; + BIGINT = 310; + BINARY = 311; + BIT = 312; + BOOLEAN_P = 313; + BOTH = 314; + BREADTH = 315; + BY = 316; + CACHE = 317; + CALL = 318; + CALLED = 319; + CASCADE = 320; + CASCADED = 321; + CASE = 322; + CAST = 323; + CATALOG_P = 324; + CHAIN = 325; + CHAR_P = 326; + CHARACTER = 327; + CHARACTERISTICS = 328; + CHECK = 329; + CHECKPOINT = 330; + CLASS = 331; + CLOSE = 332; + CLUSTER = 333; + COALESCE = 334; + COLLATE = 335; + COLLATION = 336; + COLUMN = 337; + COLUMNS = 338; + COMMENT = 339; + COMMENTS = 340; + COMMIT = 341; + COMMITTED = 342; + COMPRESSION = 343; + CONCURRENTLY = 344; + CONDITIONAL = 345; + CONFIGURATION = 346; + CONFLICT = 347; + CONNECTION = 348; + CONSTRAINT = 349; + CONSTRAINTS = 350; + CONTENT_P = 351; + CONTINUE_P = 352; + CONVERSION_P = 353; + COPY = 354; + COST = 355; + CREATE = 356; + CROSS = 357; + CSV = 358; + CUBE = 359; + CURRENT_P = 360; + CURRENT_CATALOG = 361; + CURRENT_DATE = 362; + CURRENT_ROLE = 363; + CURRENT_SCHEMA = 364; + CURRENT_TIME = 365; 
+ CURRENT_TIMESTAMP = 366; + CURRENT_USER = 367; + CURSOR = 368; + CYCLE = 369; + DATA_P = 370; + DATABASE = 371; + DAY_P = 372; + DEALLOCATE = 373; + DEC = 374; + DECIMAL_P = 375; + DECLARE = 376; + DEFAULT = 377; + DEFAULTS = 378; + DEFERRABLE = 379; + DEFERRED = 380; + DEFINER = 381; + DELETE_P = 382; + DELIMITER = 383; + DELIMITERS = 384; + DEPENDS = 385; + DEPTH = 386; + DESC = 387; + DETACH = 388; + DICTIONARY = 389; + DISABLE_P = 390; + DISCARD = 391; + DISTINCT = 392; + DO = 393; + DOCUMENT_P = 394; + DOMAIN_P = 395; + DOUBLE_P = 396; + DROP = 397; + EACH = 398; + ELSE = 399; + EMPTY_P = 400; + ENABLE_P = 401; + ENCODING = 402; + ENCRYPTED = 403; + END_P = 404; + ENUM_P = 405; + ERROR_P = 406; + ESCAPE = 407; + EVENT = 408; + EXCEPT = 409; + EXCLUDE = 410; + EXCLUDING = 411; + EXCLUSIVE = 412; + EXECUTE = 413; + EXISTS = 414; + EXPLAIN = 415; + EXPRESSION = 416; + EXTENSION = 417; + EXTERNAL = 418; + EXTRACT = 419; + FALSE_P = 420; + FAMILY = 421; + FETCH = 422; + FILTER = 423; + FINALIZE = 424; + FIRST_P = 425; + FLOAT_P = 426; + FOLLOWING = 427; + FOR = 428; + FORCE = 429; + FOREIGN = 430; + FORMAT = 431; + FORWARD = 432; + FREEZE = 433; + FROM = 434; + FULL = 435; + FUNCTION = 436; + FUNCTIONS = 437; + GENERATED = 438; + GLOBAL = 439; + GRANT = 440; + GRANTED = 441; + GREATEST = 442; + GROUP_P = 443; + GROUPING = 444; + GROUPS = 445; + HANDLER = 446; + HAVING = 447; + HEADER_P = 448; + HOLD = 449; + HOUR_P = 450; + IDENTITY_P = 451; + IF_P = 452; + ILIKE = 453; + IMMEDIATE = 454; + IMMUTABLE = 455; + IMPLICIT_P = 456; + IMPORT_P = 457; + IN_P = 458; + INCLUDE = 459; + INCLUDING = 460; + INCREMENT = 461; + INDENT = 462; + INDEX = 463; + INDEXES = 464; + INHERIT = 465; + INHERITS = 466; + INITIALLY = 467; + INLINE_P = 468; + INNER_P = 469; + INOUT = 470; + INPUT_P = 471; + INSENSITIVE = 472; + INSERT = 473; + INSTEAD = 474; + INT_P = 475; + INTEGER = 476; + INTERSECT = 477; + INTERVAL = 478; + INTO = 479; + INVOKER = 480; + IS = 481; + ISNULL = 482; + 
ISOLATION = 483; + JOIN = 484; + JSON = 485; + JSON_ARRAY = 486; + JSON_ARRAYAGG = 487; + JSON_EXISTS = 488; + JSON_OBJECT = 489; + JSON_OBJECTAGG = 490; + JSON_QUERY = 491; + JSON_SCALAR = 492; + JSON_SERIALIZE = 493; + JSON_TABLE = 494; + JSON_VALUE = 495; + KEEP = 496; + KEY = 497; + KEYS = 498; + LABEL = 499; + LANGUAGE = 500; + LARGE_P = 501; + LAST_P = 502; + LATERAL_P = 503; + LEADING = 504; + LEAKPROOF = 505; + LEAST = 506; + LEFT = 507; + LEVEL = 508; + LIKE = 509; + LIMIT = 510; + LISTEN = 511; + LOAD = 512; + LOCAL = 513; + LOCALTIME = 514; + LOCALTIMESTAMP = 515; + LOCATION = 516; + LOCK_P = 517; + LOCKED = 518; + LOGGED = 519; + MAPPING = 520; + MATCH = 521; + MATCHED = 522; + MATERIALIZED = 523; + MAXVALUE = 524; + MERGE = 525; + MERGE_ACTION = 526; + METHOD = 527; + MINUTE_P = 528; + MINVALUE = 529; + MODE = 530; + MONTH_P = 531; + MOVE = 532; + NAME_P = 533; + NAMES = 534; + NATIONAL = 535; + NATURAL = 536; + NCHAR = 537; + NESTED = 538; + NEW = 539; + NEXT = 540; + NFC = 541; + NFD = 542; + NFKC = 543; + NFKD = 544; + NO = 545; + NONE = 546; + NORMALIZE = 547; + NORMALIZED = 548; + NOT = 549; + NOTHING = 550; + NOTIFY = 551; + NOTNULL = 552; + NOWAIT = 553; + NULL_P = 554; + NULLIF = 555; + NULLS_P = 556; + NUMERIC = 557; + OBJECT_P = 558; + OF = 559; + OFF = 560; + OFFSET = 561; + OIDS = 562; + OLD = 563; + OMIT = 564; + ON = 565; + ONLY = 566; + OPERATOR = 567; + OPTION = 568; + OPTIONS = 569; + OR = 570; + ORDER = 571; + ORDINALITY = 572; + OTHERS = 573; + OUT_P = 574; + OUTER_P = 575; + OVER = 576; + OVERLAPS = 577; + OVERLAY = 578; + OVERRIDING = 579; + OWNED = 580; + OWNER = 581; + PARALLEL = 582; + PARAMETER = 583; + PARSER = 584; + PARTIAL = 585; + PARTITION = 586; + PASSING = 587; + PASSWORD = 588; + PATH = 589; + PLACING = 590; + PLAN = 591; + PLANS = 592; + POLICY = 593; + POSITION = 594; + PRECEDING = 595; + PRECISION = 596; + PRESERVE = 597; + PREPARE = 598; + PREPARED = 599; + PRIMARY = 600; + PRIOR = 601; + PRIVILEGES = 602; + 
PROCEDURAL = 603; + PROCEDURE = 604; + PROCEDURES = 605; + PROGRAM = 606; + PUBLICATION = 607; + QUOTE = 608; + QUOTES = 609; + RANGE = 610; + READ = 611; + REAL = 612; + REASSIGN = 613; + RECHECK = 614; + RECURSIVE = 615; + REF_P = 616; + REFERENCES = 617; + REFERENCING = 618; + REFRESH = 619; + REINDEX = 620; + RELATIVE_P = 621; + RELEASE = 622; + RENAME = 623; + REPEATABLE = 624; + REPLACE = 625; + REPLICA = 626; + RESET = 627; + RESTART = 628; + RESTRICT = 629; + RETURN = 630; + RETURNING = 631; + RETURNS = 632; + REVOKE = 633; + RIGHT = 634; + ROLE = 635; + ROLLBACK = 636; + ROLLUP = 637; + ROUTINE = 638; + ROUTINES = 639; + ROW = 640; + ROWS = 641; + RULE = 642; + SAVEPOINT = 643; + SCALAR = 644; + SCHEMA = 645; + SCHEMAS = 646; + SCROLL = 647; + SEARCH = 648; + SECOND_P = 649; + SECURITY = 650; + SELECT = 651; + SEQUENCE = 652; + SEQUENCES = 653; + SERIALIZABLE = 654; + SERVER = 655; + SESSION = 656; + SESSION_USER = 657; + SET = 658; + SETS = 659; + SETOF = 660; + SHARE = 661; + SHOW = 662; + SIMILAR = 663; + SIMPLE = 664; + SKIP = 665; + SMALLINT = 666; + SNAPSHOT = 667; + SOME = 668; + SOURCE = 669; + SQL_P = 670; + STABLE = 671; + STANDALONE_P = 672; + START = 673; + STATEMENT = 674; + STATISTICS = 675; + STDIN = 676; + STDOUT = 677; + STORAGE = 678; + STORED = 679; + STRICT_P = 680; + STRING_P = 681; + STRIP_P = 682; + SUBSCRIPTION = 683; + SUBSTRING = 684; + SUPPORT = 685; + SYMMETRIC = 686; + SYSID = 687; + SYSTEM_P = 688; + SYSTEM_USER = 689; + TABLE = 690; + TABLES = 691; + TABLESAMPLE = 692; + TABLESPACE = 693; + TARGET = 694; + TEMP = 695; + TEMPLATE = 696; + TEMPORARY = 697; + TEXT_P = 698; + THEN = 699; + TIES = 700; + TIME = 701; + TIMESTAMP = 702; + TO = 703; + TRAILING = 704; + TRANSACTION = 705; + TRANSFORM = 706; + TREAT = 707; + TRIGGER = 708; + TRIM = 709; + TRUE_P = 710; + TRUNCATE = 711; + TRUSTED = 712; + TYPE_P = 713; + TYPES_P = 714; + UESCAPE = 715; + UNBOUNDED = 716; + UNCONDITIONAL = 717; + UNCOMMITTED = 718; + UNENCRYPTED = 719; 
+ UNION = 720; + UNIQUE = 721; + UNKNOWN = 722; + UNLISTEN = 723; + UNLOGGED = 724; + UNTIL = 725; + UPDATE = 726; + USER = 727; + USING = 728; + VACUUM = 729; + VALID = 730; + VALIDATE = 731; + VALIDATOR = 732; + VALUE_P = 733; + VALUES = 734; + VARCHAR = 735; + VARIADIC = 736; + VARYING = 737; + VERBOSE = 738; + VERSION_P = 739; + VIEW = 740; + VIEWS = 741; + VOLATILE = 742; + WHEN = 743; + WHERE = 744; + WHITESPACE_P = 745; + WINDOW = 746; + WITH = 747; + WITHIN = 748; + WITHOUT = 749; + WORK = 750; + WRAPPER = 751; + WRITE = 752; + XML_P = 753; + XMLATTRIBUTES = 754; + XMLCONCAT = 755; + XMLELEMENT = 756; + XMLEXISTS = 757; + XMLFOREST = 758; + XMLNAMESPACES = 759; + XMLPARSE = 760; + XMLPI = 761; + XMLROOT = 762; + XMLSERIALIZE = 763; + XMLTABLE = 764; + YEAR_P = 765; + YES_P = 766; + ZONE = 767; + FORMAT_LA = 768; + NOT_LA = 769; + NULLS_LA = 770; + WITH_LA = 771; + WITHOUT_LA = 772; + MODE_TYPE_NAME = 773; + MODE_PLPGSQL_EXPR = 774; + MODE_PLPGSQL_ASSIGN1 = 775; + MODE_PLPGSQL_ASSIGN2 = 776; + MODE_PLPGSQL_ASSIGN3 = 777; + UMINUS = 778; +} diff --git a/crates/pgt_query_macros/src/iter_mut.rs b/crates/pgt_query_macros/src/iter_mut.rs new file mode 100644 index 000000000..b0bc10dea --- /dev/null +++ b/crates/pgt_query_macros/src/iter_mut.rs @@ -0,0 +1,142 @@ +use proc_macro2::TokenStream; +use quote::{format_ident, quote}; + +use crate::proto_analyser::{FieldType, Node, ProtoAnalyzer}; + +pub fn iter_mut_mod(analyser: ProtoAnalyzer) -> proc_macro2::TokenStream { + let enum_variants = analyser.enum_variants(); + let nodes = analyser.nodes(); + + let mut node_variant_names = Vec::new(); + let mut node_property_handlers = Vec::new(); + + // Create a map from type name to enum variant name + let mut type_to_variant: std::collections::HashMap = + std::collections::HashMap::new(); + for variant in &enum_variants { + type_to_variant.insert(variant.type_name.clone(), variant.name.clone()); + } + + for node in &nodes { + // Use the enum variant name from the Node enum 
+ if let Some(variant_name) = type_to_variant.get(&node.name) { + let variant_ident = format_ident!("{}", variant_name); + node_variant_names.push(variant_ident); + + let property_handlers = property_handlers(node); + node_property_handlers.push(property_handlers); + } + } + + quote! { + use std::collections::VecDeque; + + /// An iterator that provides mutable access to all nodes in an AST tree. + /// + /// This iterator performs a depth-first traversal of the AST, yielding mutable + /// references to each node. It uses unsafe operations internally to work with + /// raw pointers in the AST structure. + /// + /// # Safety Requirements + /// + /// Users of this iterator must ensure: + /// + /// - The root `NodeMut` passed to `new()` must point to a valid, properly + /// constructed AST that remains alive for the iterator's lifetime + /// - No other code concurrently accesses or modifies the AST while this + /// iterator is in use (exclusive access required) + /// - The AST structure must not be modified through other means while + /// iterating (e.g., don't modify parent nodes while iterating children) + /// + /// # Panics + /// + /// This iterator may panic or cause undefined behavior if the safety + /// requirements above are violated. + /// ``` + pub struct NodeMutIterator { + stack: VecDeque, + } + + impl NodeMutIterator { + /// Creates a new iterator starting from the given root node. + /// + /// # Safety + /// + /// The caller must ensure that `roots` points to valid AST nodes + /// and that the safety requirements documented on `NodeMutIterator` + /// are met throughout the iterator's lifetime. 
+ pub fn new(root: NodeMut) -> Self { + Self { + stack: VecDeque::from([root]), + } + } + } + + impl Iterator for NodeMutIterator { + type Item = NodeMut; + + fn next(&mut self) -> Option { + if self.stack.is_empty() { + return None; + } + + let node = self.stack.pop_front().unwrap(); + + unsafe { + match node { + #(NodeMut::#node_variant_names(n) => {#node_property_handlers}),*, + _ => { + // Some node types don't have any child nodes to traverse + } + }; + } + + Some(node) + } + } + } +} + +fn property_handlers(node: &Node) -> TokenStream { + let handlers: Vec = node + .fields + .iter() + .filter_map(|field| { + let field_name = format_ident!("{}", field.name.as_str()); + if matches!(field.r#type, FieldType::Node(_)) && field.repeated { + Some(quote! { + n.#field_name + .iter_mut() + .for_each(|x| { + if let Some(n) = x.node.as_mut() { + self.stack.push_back(n.to_mut()); + } + }); + }) + } else if matches!(field.r#type, FieldType::Node(_)) && !field.is_one_of { + if field.r#type == FieldType::Node(None) { + Some(quote! { + if let Some(n) = n.#field_name.as_mut() { + if let Some(n) = n.node.as_mut() { + self.stack.push_back(n.to_mut()); + } + } + }) + } else { + Some(quote! { + if let Some(field_node) = n.#field_name.as_mut() { + self.stack.push_back(field_node.to_mut()); + } + }) + } + } else { + None // Filter out non-node fields + } + }) + .collect(); + + quote! 
{ + let n = n.as_mut().unwrap(); + #(#handlers)* + } +} diff --git a/crates/pgt_query_macros/src/iter_ref.rs b/crates/pgt_query_macros/src/iter_ref.rs new file mode 100644 index 000000000..8e232340f --- /dev/null +++ b/crates/pgt_query_macros/src/iter_ref.rs @@ -0,0 +1,105 @@ +use proc_macro2::TokenStream; +use quote::{format_ident, quote}; + +use crate::proto_analyser::{FieldType, Node, ProtoAnalyzer}; + +pub fn iter_ref_mod(analyser: ProtoAnalyzer) -> proc_macro2::TokenStream { + let enum_variants = analyser.enum_variants(); + let nodes = analyser.nodes(); + + let mut node_variant_names = Vec::new(); + let mut node_property_handlers = Vec::new(); + + let mut type_to_variant: std::collections::HashMap = + std::collections::HashMap::new(); + for variant in &enum_variants { + type_to_variant.insert(variant.type_name.clone(), variant.name.clone()); + } + + for node in &nodes { + if let Some(variant_name) = type_to_variant.get(&node.name) { + let variant_ident = format_ident!("{}", variant_name); + node_variant_names.push(variant_ident); + + let property_handlers = property_handlers(node); + node_property_handlers.push(quote! { + #(#property_handlers)* + }); + } + } + + quote! 
{ + use std::collections::VecDeque; + + pub struct NodeRefIterator<'a>{ + stack: VecDeque>, + } + + impl<'a> NodeRefIterator<'a> { + pub fn new(root: NodeRef<'a>) -> Self { + Self { + stack: VecDeque::from([root]), + } + } + } + + impl<'a> Iterator for NodeRefIterator<'a> { + type Item = NodeRef<'a>; + + fn next(&mut self) -> Option { + if self.stack.is_empty() { + return None; + } + + let node = self.stack.pop_front().unwrap(); + + match &node { + #(NodeRef::#node_variant_names(n) => {#node_property_handlers}),*, + _ => { + // Some node types don't have any child nodes to traverse + } + }; + + Some(node) + } + } + } +} + +fn property_handlers(node: &Node) -> Vec { + node.fields + .iter() + .filter_map(|field| { + let field_name = format_ident!("{}", field.name.as_str()); + if matches!(field.r#type, FieldType::Node(_)) && field.repeated { + Some(quote! { + n.#field_name + .iter() + .for_each(|x| { + if let Some(n) = x.node.as_ref() { + self.stack.push_back(n.to_ref()); + } + }); + }) + } else if matches!(field.r#type, FieldType::Node(_)) && !field.is_one_of { + if field.r#type == FieldType::Node(None) { + Some(quote! { + if let Some(n) = &n.#field_name { + if let Some(n) = n.node.as_ref() { + self.stack.push_back(n.to_ref()); + } + } + }) + } else { + Some(quote! 
{ + if let Some(field_node) = &n.#field_name { + self.stack.push_back(field_node.to_ref()); + } + }) + } + } else { + None // Filter out non-node fields + } + }) + .collect() +} diff --git a/crates/pgt_query_macros/src/lib.rs b/crates/pgt_query_macros/src/lib.rs new file mode 100644 index 000000000..718da161e --- /dev/null +++ b/crates/pgt_query_macros/src/lib.rs @@ -0,0 +1,106 @@ +use iter_mut::iter_mut_mod; +use iter_ref::iter_ref_mod; +use node_enum::node_enum_mod; +use node_mut::node_mut_mod; +use node_ref::node_ref_mod; +use node_structs::node_structs_mod; +use proto_analyser::ProtoAnalyzer; +use quote::quote; +use std::path; + +mod iter_mut; +mod iter_ref; +mod node_enum; +mod node_mut; +mod node_ref; +mod node_structs; +mod proto_analyser; + +#[proc_macro] +pub fn node_ref_codegen(_input: proc_macro::TokenStream) -> proc_macro::TokenStream { + let analyser = ProtoAnalyzer::from(&proto_file_path()).unwrap(); + + let node_ref = node_ref_mod(analyser); + + quote! { + use crate::*; + + #node_ref + } + .into() +} + +#[proc_macro] +pub fn node_mut_codegen(_input: proc_macro::TokenStream) -> proc_macro::TokenStream { + let analyser = ProtoAnalyzer::from(&proto_file_path()).unwrap(); + + let node_mut = node_mut_mod(analyser); + + quote! { + use crate::*; + + #node_mut + } + .into() +} + +#[proc_macro] +pub fn node_structs_codegen(_input: proc_macro::TokenStream) -> proc_macro::TokenStream { + let analyser = ProtoAnalyzer::from(&proto_file_path()).unwrap(); + + let conversions = node_structs_mod(analyser); + + quote! { + use crate::*; + + #conversions + } + .into() +} + +#[proc_macro] +pub fn node_enum_codegen(_input: proc_macro::TokenStream) -> proc_macro::TokenStream { + let analyser = ProtoAnalyzer::from(&proto_file_path()).unwrap(); + + let node_enum = node_enum_mod(analyser); + + quote! 
{ + use crate::*; + + #node_enum + } + .into() +} + +#[proc_macro] +pub fn iter_ref_codegen(_input: proc_macro::TokenStream) -> proc_macro::TokenStream { + let analyser = ProtoAnalyzer::from(&proto_file_path()).unwrap(); + + let iterator = iter_ref_mod(analyser); + + quote! { + use crate::*; + + #iterator + } + .into() +} + +#[proc_macro] +pub fn iter_mut_codegen(_input: proc_macro::TokenStream) -> proc_macro::TokenStream { + let analyser = ProtoAnalyzer::from(&proto_file_path()).unwrap(); + + let iterator = iter_mut_mod(analyser); + + quote! { + use crate::*; + + #iterator + } + .into() +} + +fn proto_file_path() -> path::PathBuf { + // Use the path set by the build script + path::PathBuf::from(env!("PG_QUERY_PROTO_PATH")) +} diff --git a/crates/pgt_query_macros/src/node_enum.rs b/crates/pgt_query_macros/src/node_enum.rs new file mode 100644 index 000000000..0801bbab6 --- /dev/null +++ b/crates/pgt_query_macros/src/node_enum.rs @@ -0,0 +1,44 @@ +use quote::{format_ident, quote}; + +use crate::proto_analyser::ProtoAnalyzer; + +pub fn node_enum_mod(analyser: ProtoAnalyzer) -> proc_macro2::TokenStream { + let node_variants = analyser.enum_variants(); + + let mut to_ref_matches = Vec::new(); + let mut to_mut_matches = Vec::new(); + + for variant in &node_variants { + let variant_ident = format_ident!("{}", &variant.name); + + to_ref_matches.push(quote! { + NodeEnum::#variant_ident(n) => NodeRef::#variant_ident(&n) + }); + + if variant.boxed { + to_mut_matches.push(quote! { + NodeEnum::#variant_ident(n) => NodeMut::#variant_ident(&mut **n as *mut _) + }); + } else { + to_mut_matches.push(quote! { + NodeEnum::#variant_ident(n) => NodeMut::#variant_ident(n as *mut _) + }); + } + } + + quote! 
{ + impl NodeEnum { + pub fn to_ref(&self) -> NodeRef { + match self { + #(#to_ref_matches,)* + } + } + + pub fn to_mut(&mut self) -> NodeMut { + match self { + #(#to_mut_matches,)* + } + } + } + } +} diff --git a/crates/pgt_query_macros/src/node_mut.rs b/crates/pgt_query_macros/src/node_mut.rs new file mode 100644 index 000000000..52120c1a9 --- /dev/null +++ b/crates/pgt_query_macros/src/node_mut.rs @@ -0,0 +1,50 @@ +use quote::{format_ident, quote}; + +use crate::proto_analyser::ProtoAnalyzer; + +pub fn node_mut_mod(analyser: ProtoAnalyzer) -> proc_macro2::TokenStream { + let node_variants = analyser.enum_variants(); + + let mut to_enum_matches = Vec::new(); + let mut node_enum_variants = Vec::new(); + + for variant in &node_variants { + let variant_ident = format_ident!("{}", &variant.name); + let type_ident = format_ident!("{}", &variant.type_name); + + if variant.boxed { + // For boxed variants, we need to box the cloned value + to_enum_matches.push(quote! { + NodeMut::#variant_ident(n) => Ok(NodeEnum::#variant_ident(Box::new(n.as_ref().ok_or(err)?.clone()))) + }); + } else { + // For non-boxed variants, clone directly + to_enum_matches.push(quote! { + NodeMut::#variant_ident(n) => Ok(NodeEnum::#variant_ident(n.as_ref().ok_or(err)?.clone())) + }); + } + + node_enum_variants.push(quote! { + #variant_ident(*mut protobuf::#type_ident) + }); + } + + quote! 
{ + #[derive(Debug, Copy, Clone)] + pub enum NodeMut { + #(#node_enum_variants, )* + } + + impl NodeMut { + pub fn to_enum(self) -> Result { + unsafe { + let err = Error::InvalidPointer; + match self { + #(#to_enum_matches,)* + _ => Err(Error::InvalidPointer), + } + } + } + } + } +} diff --git a/crates/pgt_query_macros/src/node_ref.rs b/crates/pgt_query_macros/src/node_ref.rs new file mode 100644 index 000000000..64f9b7c49 --- /dev/null +++ b/crates/pgt_query_macros/src/node_ref.rs @@ -0,0 +1,46 @@ +use quote::{format_ident, quote}; + +use crate::proto_analyser::ProtoAnalyzer; + +pub fn node_ref_mod(analyser: ProtoAnalyzer) -> proc_macro2::TokenStream { + let node_variants = analyser.enum_variants(); + + let mut to_enum_matches = Vec::new(); + let mut node_enum_variants = Vec::new(); + + for variant in &node_variants { + let variant_ident = format_ident!("{}", &variant.name); + let type_ident = format_ident!("{}", &variant.type_name); + + if variant.boxed { + // For boxed variants, we need to box the cloned value + to_enum_matches.push(quote! { + NodeRef::#variant_ident(n) => NodeEnum::#variant_ident(::prost::alloc::boxed::Box::new((*n).clone())) + }); + } else { + // For non-boxed variants, clone directly + to_enum_matches.push(quote! { + NodeRef::#variant_ident(n) => NodeEnum::#variant_ident((*n).clone()) + }); + } + + node_enum_variants.push(quote! { + #variant_ident(&'a protobuf::#type_ident) + }); + } + + quote! 
{ + #[derive(Debug, Copy, Clone)] + pub enum NodeRef<'a> { + #(#node_enum_variants,)* + } + + impl<'a> NodeRef<'a> { + pub fn to_enum(self) -> NodeEnum { + match self { + #(#to_enum_matches,)* + } + } + } + } +} diff --git a/crates/pgt_query_macros/src/node_structs.rs b/crates/pgt_query_macros/src/node_structs.rs new file mode 100644 index 000000000..52fca2e00 --- /dev/null +++ b/crates/pgt_query_macros/src/node_structs.rs @@ -0,0 +1,30 @@ +use quote::{format_ident, quote}; + +use crate::proto_analyser::ProtoAnalyzer; + +pub fn node_structs_mod(analyser: ProtoAnalyzer) -> proc_macro2::TokenStream { + let node_variants = analyser.enum_variants(); + + let mut impls = Vec::new(); + + for variant in &node_variants { + let node_ident = format_ident!("{}", &variant.name); + let type_ident = format_ident!("{}", &variant.type_name); + + impls.push(quote! { + impl protobuf::#type_ident { + pub fn to_ref(&self) -> NodeRef { + NodeRef::#node_ident(self) + } + + pub fn to_mut(&mut self) -> NodeMut { + NodeMut::#node_ident(self) + } + } + }); + } + + quote! 
{ + #(#impls)* + } +} diff --git a/crates/pgt_query_macros/src/proto_analyser.rs b/crates/pgt_query_macros/src/proto_analyser.rs new file mode 100644 index 000000000..26a18b600 --- /dev/null +++ b/crates/pgt_query_macros/src/proto_analyser.rs @@ -0,0 +1,252 @@ +use std::{ + collections::{HashMap, HashSet}, + path::Path, +}; + +use convert_case::{Case, Casing}; +use prost_reflect::{ + DescriptorError, DescriptorPool, FieldDescriptor, MessageDescriptor, + prost_types::{ + FieldDescriptorProto, + field_descriptor_proto::{Label, Type}, + }, +}; + +pub(crate) struct ProtoAnalyzer { + pool: DescriptorPool, + message_graph: HashMap>, +} + +pub(crate) struct EnumVariant { + pub name: String, + pub type_name: String, + pub boxed: bool, +} + +#[derive(Debug, PartialEq, Eq)] +pub(crate) enum FieldType { + Node(Option), + Enum(String), + Literal, +} + +pub(crate) struct Field { + pub name: String, + pub r#type: FieldType, + pub repeated: bool, + pub is_one_of: bool, +} + +pub(crate) struct Node { + pub name: String, + #[allow(dead_code)] + pub enum_variant_name: String, + pub fields: Vec, +} + +impl ProtoAnalyzer { + pub fn from(proto_file: &Path) -> Result { + let include_path = proto_file + .parent() + .expect("Proto file must have a parent directory"); + + // protox::compile expects the proto file to be relative to the include path + let file_name = proto_file + .file_name() + .expect("Proto file must have a file name"); + + let pool = DescriptorPool::from_file_descriptor_set( + protox::compile([file_name], [include_path]).expect("unable to parse"), + )?; + + let mut analyzer = ProtoAnalyzer { + pool, + message_graph: HashMap::new(), + }; + + // Build the message graph + analyzer.build_message_graph(); + + Ok(analyzer) + } + + pub fn nodes(&self) -> Vec { + let mut nodes = Vec::new(); + + for msg in self.pool.all_messages() { + if ["ParseResult", "ScanResult", "Node", "ScanToken"].contains(&msg.name()) { + continue; + } + let fields = msg + .fields() + .map(|f| { + let 
field_type = match f.field_descriptor_proto().r#type() { + Type::Message => match f.field_descriptor_proto().type_name() { + ".pg_query.Node" => FieldType::Node(None), + name => { + FieldType::Node(Some(name.to_string().replace(".pg_query.", ""))) + } + }, + Type::Enum => FieldType::Enum( + f.field_descriptor_proto() + .type_name() + .to_string() + .replace(".pg_query.", ""), + ), + _ => FieldType::Literal, + }; + + Field { + name: f.name().to_string(), + r#type: field_type, + repeated: f.is_list(), + is_one_of: f.containing_oneof().is_some(), + } + }) + .collect(); + + nodes.push(Node { + name: msg.name().to_string(), + enum_variant_name: msg.name().to_case(Case::Pascal), // Convert to PascalCase for enum variant name + fields, + }); + } + + nodes + } + + pub fn enum_variants(&self) -> Vec { + let node = self + .pool + .get_message_by_name(".pg_query.Node") + .expect("Node message not found"); + + let mut variants = Vec::new(); + for field in node.fields() { + // The prost-generated variant name is derived from the field name using snake_case to PascalCase conversion + // For example: ctesearch_clause -> CtesearchClause + let field_name = field.name(); + let variant_name = field_name.to_case(Case::Pascal); + + // Get the actual proto type name (the message type) + let proto_type_name = field + .field_descriptor_proto() + .type_name() + .split('.') + .next_back() + .unwrap_or(&variant_name); + + // The Rust type name is the proto type name converted to PascalCase + // For example: CTESearchClause -> CteSearchClause + let type_name = proto_type_name.to_case(Case::Pascal); + + let boxed = self.is_field_boxed(&field, &node); + + variants.push(EnumVariant { + name: variant_name, + type_name, + boxed, + }); + } + + variants + } + + /// Build a graph of message dependencies for cycle detection + fn build_message_graph(&mut self) { + // Collect all messages first to avoid borrowing issues + let mut all_messages = Vec::new(); + for file in self.pool.files() { + for message 
in file.messages() { + all_messages.push(message); + } + } + + // Now add them to the graph + for message in all_messages { + self.add_message_to_graph(&message); + } + } + + /// Add a message and its dependencies to the graph + fn add_message_to_graph(&mut self, message: &MessageDescriptor) { + let msg_fq_name = format!(".{}", message.full_name()); + let mut dependencies = Vec::new(); + + // Check all fields for message type dependencies + for field in message.fields() { + if let Some(field_message) = field.kind().as_message() { + // Only add non-repeated message fields as dependencies + // since repeated fields are already heap allocated in Vec + if !field.is_list() { + let field_fq_name = format!(".{}", field_message.full_name()); + dependencies.push(field_fq_name); + } + } + } + + self.message_graph.insert(msg_fq_name, dependencies); + + // Recursively add nested messages + for nested in message.child_messages() { + self.add_message_to_graph(&nested); + } + } + + /// Detect if a field will be boxed by prost due to recursive nesting + fn is_field_boxed(&self, field: &FieldDescriptor, parent_message: &MessageDescriptor) -> bool { + // Check if this is a message field that should be boxed + let parent_fq_name = format!(".{}", parent_message.full_name()); + self.is_boxed(&parent_fq_name, field.field_descriptor_proto()) + } + + /// Check if there's a path from parent_message to field_type in the message graph + /// This indicates that field_type is transitively contained within parent_message + fn is_nested(&self, parent_message_name: &str, field_type_name: &str) -> bool { + self.has_path(parent_message_name, field_type_name, &mut HashSet::new()) + } + + /// Recursive helper to find if there's a path from 'from' to 'to' in the message graph + fn has_path(&self, from: &str, to: &str, visited: &mut HashSet) -> bool { + // If we've already visited this node, return false to avoid cycles + if visited.contains(from) { + return false; + } + + // If we've reached the 
target, we found a path + if from == to { + return true; + } + + visited.insert(from.to_string()); + + // Check all dependencies of the current message + if let Some(dependencies) = self.message_graph.get(from) { + for dep in dependencies { + if self.has_path(dep, to, visited) { + return true; + } + } + } + + false + } + + /// Returns whether the Rust type for this message field is `Box<_>`. + fn is_boxed(&self, fq_message_name: &str, field: &FieldDescriptorProto) -> bool { + if field.label() == Label::Repeated { + // Repeated field are stored in Vec, therefore it is already heap allocated + return false; + } + let fd_type = field.r#type(); + if fd_type == Type::Message || fd_type == Type::Group { + // The field should be boxed if the field type transitively contains the parent message + // This prevents infinitely sized type definitions + if let Some(field_type_name) = field.type_name.as_ref() { + // IMPORTANT: Check if field_type_name contains fq_message_name (not the other way around) + return self.is_nested(field_type_name, fq_message_name); + } + } + false + } +} diff --git a/crates/pgt_query_proto_parser/src/lib.rs b/crates/pgt_query_proto_parser/src/lib.rs deleted file mode 100644 index 12f8cf9cd..000000000 --- a/crates/pgt_query_proto_parser/src/lib.rs +++ /dev/null @@ -1,9 +0,0 @@ -//! A parser for the libg_query proto file -//! -//! This crate provides a parser for the libg_query proto file, and a struct to represent and interact with the parsed file. 
- -mod proto_file; -mod proto_parser; - -pub use crate::proto_file::{Field, FieldType, Node, ProtoFile, Token}; -pub use crate::proto_parser::ProtoParser; diff --git a/crates/pgt_query_proto_parser/src/proto_file.rs b/crates/pgt_query_proto_parser/src/proto_file.rs deleted file mode 100644 index 2cc327984..000000000 --- a/crates/pgt_query_proto_parser/src/proto_file.rs +++ /dev/null @@ -1,60 +0,0 @@ -/// The FieldTypes of a protobuf message -#[derive(Debug, Eq, PartialEq)] -pub enum FieldType { - Node, - Double, - Float, - Int64, - Uint64, - Int32, - Fixed64, - Fixed32, - Bool, - String, - Group, - Message, - Bytes, - Uint32, - Enum, - Sfixed32, - Sfixed64, - Sint32, - Sint64, -} - -/// A libg_query token -#[derive(Debug)] -pub struct Token { - pub name: String, - pub value: i32, -} - -/// A libg_query field -#[derive(Debug)] -pub struct Field { - pub name: String, - pub node_name: Option, - pub enum_variant_name: Option, - pub field_type: FieldType, - pub repeated: bool, - pub is_one_of: bool, -} - -/// A libg_query node -#[derive(Debug)] -pub struct Node { - pub name: String, - pub fields: Vec, -} - -/// The libg_query proto file -pub struct ProtoFile { - pub tokens: Vec, - pub nodes: Vec, -} - -impl ProtoFile { - pub fn node(&self, name: &str) -> Option<&Node> { - self.nodes.iter().find(|n| n.name == name) - } -} diff --git a/crates/pgt_query_proto_parser/src/proto_parser.rs b/crates/pgt_query_proto_parser/src/proto_parser.rs deleted file mode 100644 index 56f93c6e8..000000000 --- a/crates/pgt_query_proto_parser/src/proto_parser.rs +++ /dev/null @@ -1,179 +0,0 @@ -use convert_case::{Case, Casing}; -use protobuf::descriptor::{FileDescriptorProto, field_descriptor_proto::Label}; -use protobuf_parse::Parser; -use std::{ffi::OsStr, path::Path}; - -use crate::proto_file::{Field, FieldType, Node, ProtoFile, Token}; - -/// The parser for the libg_query proto file -pub struct ProtoParser { - inner: FileDescriptorProto, -} - -impl ProtoParser { - pub fn new(file_path: 
&impl AsRef) -> Self { - let proto_file = Path::new(file_path); - let proto_dir = proto_file.parent().unwrap(); - - let result = Parser::new() - .pure() - .include(proto_dir) - .input(proto_file) - .parse_and_typecheck() - .unwrap(); - - ProtoParser { - inner: result.file_descriptors[0].clone(), - } - } - - pub fn parse(&self) -> ProtoFile { - ProtoFile { - tokens: self.tokens(), - nodes: self.nodes(), - } - } - - fn tokens(&self) -> Vec { - self.inner - .enum_type - .iter() - .find(|e| e.name == Some("Token".into())) - .unwrap() - .value - .iter() - .map(|e| Token { - // token names in proto are UPPERCASE_SNAKE_CASE - name: e.name.clone().unwrap().to_case(Case::UpperCamel), - value: e.number.unwrap(), - }) - .collect() - } - - fn get_enum_variant_name(&self, type_name: &str) -> Option { - let variant = self - .inner - .message_type - .iter() - .find(|e| e.name == Some("Node".into())) - .unwrap() - .field - .iter() - .find(|e| e.type_name().split(".").last().unwrap() == type_name); - variant.map(|v| v.name.clone().unwrap().to_case(Case::UpperCamel)) - } - - fn nodes(&self) -> Vec { - self.inner - .message_type - .iter() - .find(|e| e.name == Some("Node".into())) - .unwrap() - .field - .iter() - .map(|e| { - let name: String = e.name.to_owned().unwrap().to_case(Case::UpperCamel); - let node = self - .inner - .message_type - .iter() - .find(|n| { - n.name.clone().unwrap().to_case(Case::UpperCamel) - == e.json_name.as_ref().unwrap().to_case(Case::UpperCamel) - }) - .unwrap(); - - let mut fields: Vec = Vec::new(); - // from node fields - fields.append(&mut - node - .field - .iter() - .filter_map(|e| { - // skip one of fields, they are handled separately - if e.has_oneof_index() { - return None; - } - // use label and type to get the field type - let type_name: FieldType = match e.type_name() { - "" => match e.type_() { - protobuf::descriptor::field_descriptor_proto::Type::TYPE_DOUBLE => FieldType::Double, - protobuf::descriptor::field_descriptor_proto::Type::TYPE_FLOAT 
=> FieldType::Float, - protobuf::descriptor::field_descriptor_proto::Type::TYPE_INT64 => FieldType::Int64, - protobuf::descriptor::field_descriptor_proto::Type::TYPE_UINT64 => FieldType::Uint64, - protobuf::descriptor::field_descriptor_proto::Type::TYPE_INT32 => FieldType::Int32, - protobuf::descriptor::field_descriptor_proto::Type::TYPE_FIXED64 => FieldType::Fixed64, - protobuf::descriptor::field_descriptor_proto::Type::TYPE_FIXED32 => FieldType::Fixed32, - protobuf::descriptor::field_descriptor_proto::Type::TYPE_BOOL => FieldType::Bool, - protobuf::descriptor::field_descriptor_proto::Type::TYPE_STRING => FieldType::String, - protobuf::descriptor::field_descriptor_proto::Type::TYPE_GROUP => FieldType::Group, - protobuf::descriptor::field_descriptor_proto::Type::TYPE_MESSAGE => FieldType::Message, - protobuf::descriptor::field_descriptor_proto::Type::TYPE_BYTES => FieldType::Bytes, - protobuf::descriptor::field_descriptor_proto::Type::TYPE_UINT32 => FieldType::Uint32, - protobuf::descriptor::field_descriptor_proto::Type::TYPE_ENUM => FieldType::Enum, - protobuf::descriptor::field_descriptor_proto::Type::TYPE_SFIXED32 => FieldType::Sfixed32, - protobuf::descriptor::field_descriptor_proto::Type::TYPE_SFIXED64 => FieldType::Sfixed64, - protobuf::descriptor::field_descriptor_proto::Type::TYPE_SINT32 => FieldType::Sint32, - protobuf::descriptor::field_descriptor_proto::Type::TYPE_SINT64 => FieldType::Sint64, - }, - _ => { - if !e.type_name().starts_with(".pg_query") { - panic!("Unknown type: {}", e.type_name()); - - } - if e.type_() == protobuf::descriptor::field_descriptor_proto::Type::TYPE_ENUM { - FieldType::Enum - } else { - FieldType::Node - } - }, - }; - let mut node_name = None; - let mut enum_variant_name = None; - if e.type_name().starts_with(".pg_query") { - let n = e.type_name().split(".").last().unwrap().to_string(); - node_name = Some(n.clone()); - if n != "Node" { - enum_variant_name = 
self.get_enum_variant_name(e.type_name().split(".").last().unwrap().to_string().as_str()); - } - } - // TODO: node name must be derived from the property name in the node - // enum - Some(Field { - name: e.name.clone().unwrap(), - node_name, - enum_variant_name, - field_type: type_name, - repeated: e.label() == Label::LABEL_REPEATED, - is_one_of: false, - }) - }) - .collect() - ); - - // one of declarations - fields.append(&mut - node - .oneof_decl - .iter() - .map(|e| { - Field { - name: e.name.clone().unwrap(), - node_name: Some("Node".to_string()), - enum_variant_name: None, - field_type: FieldType::Node, - repeated: false, - is_one_of: true, - } - }) - .collect() - ); - Node { - // token names in proto are UPPERCASE_SNAKE_CASE - name: name.clone(), - fields, - } - }) - .collect() - } -} diff --git a/crates/pgt_schema_cache/src/columns.rs b/crates/pgt_schema_cache/src/columns.rs index 60d422fd6..01f9b41ce 100644 --- a/crates/pgt_schema_cache/src/columns.rs +++ b/crates/pgt_schema_cache/src/columns.rs @@ -82,15 +82,12 @@ impl SchemaCacheItem for Column { #[cfg(test)] mod tests { - use pgt_test_utils::test_database::get_new_test_db; - use sqlx::Executor; + use sqlx::{Executor, PgPool}; use crate::{SchemaCache, columns::ColumnClassKind}; - #[tokio::test] - async fn loads_columns() { - let test_db = get_new_test_db().await; - + #[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] + async fn loads_columns(test_db: PgPool) { let setup = r#" create table public.users ( id serial primary key, @@ -129,7 +126,7 @@ mod tests { let public_schema_columns = cache .columns .iter() - .filter(|c| c.schema_name.as_str() == "public") + .filter(|c| c.schema_name.as_str() == "public" && !c.table_name.contains("migrations")) .count(); assert_eq!(public_schema_columns, 4); diff --git a/crates/pgt_schema_cache/src/extensions.rs b/crates/pgt_schema_cache/src/extensions.rs new file mode 100644 index 000000000..8494397cd --- /dev/null +++ b/crates/pgt_schema_cache/src/extensions.rs @@ 
-0,0 +1,22 @@ +use sqlx::PgPool; + +use crate::schema_cache::SchemaCacheItem; + +#[derive(Debug, Default)] +pub struct Extension { + pub name: String, + pub schema: Option, + pub default_version: String, + pub installed_version: Option, + pub comment: Option, +} + +impl SchemaCacheItem for Extension { + type Item = Extension; + + async fn load(pool: &PgPool) -> Result, sqlx::Error> { + sqlx::query_file_as!(Extension, "src/queries/extensions.sql") + .fetch_all(pool) + .await + } +} diff --git a/crates/pgt_schema_cache/src/functions.rs b/crates/pgt_schema_cache/src/functions.rs index 5e40709f1..4afaa76dd 100644 --- a/crates/pgt_schema_cache/src/functions.rs +++ b/crates/pgt_schema_cache/src/functions.rs @@ -4,6 +4,33 @@ use sqlx::types::JsonValue; use crate::schema_cache::SchemaCacheItem; +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Default)] +pub enum ProcKind { + #[default] + Function, + Aggregate, + Window, + Procedure, +} + +impl From for ProcKind { + fn from(value: char) -> Self { + match value { + 'f' => Self::Function, + 'p' => Self::Procedure, + 'w' => Self::Window, + 'a' => Self::Aggregate, + _ => unreachable!(), + } + } +} + +impl From for ProcKind { + fn from(value: i8) -> Self { + char::from(u8::try_from(value).unwrap()).into() + } +} + /// `Behavior` describes the characteristics of the function. Is it deterministic? Does it changed due to side effects, and if so, when? #[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Default)] pub enum Behavior { @@ -72,6 +99,8 @@ pub struct Function { /// e.g. `plpgsql/sql` or `internal`. pub language: String, + pub kind: ProcKind, + /// The body of the function – the `declare [..] begin [..] end [..]` block.` Not set for internal functions. pub body: Option, @@ -88,10 +117,10 @@ pub struct Function { pub identity_argument_types: Option, /// An ID identifying the return type. For example, `2275` refers to `cstring`. 2278 refers to `void`. 
- pub return_type_id: i64, + pub return_type_id: Option, /// The return type, for example "text", "trigger", or "void". - pub return_type: String, + pub return_type: Option, /// If the return type is a composite type, this will point the matching entry's `oid` column in the `pg_class` table. `None` if the function does not return a composite type. pub return_type_relation_id: Option, @@ -115,3 +144,114 @@ impl SchemaCacheItem for Function { .await } } + +#[cfg(test)] +mod tests { + use sqlx::{Executor, PgPool}; + + use crate::{Behavior, SchemaCache, functions::ProcKind}; + + #[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] + async fn loads(pool: PgPool) { + let setup = r#" + create table coos ( + id serial primary key, + name text + ); + + create or replace function my_cool_foo() + returns trigger + language plpgsql + security invoker + as $$ + begin + raise exception 'dont matter'; + end; + $$; + + create or replace procedure my_cool_proc() + language plpgsql + security invoker + as $$ + begin + raise exception 'dont matter'; + end; + $$; + + create or replace function string_concat_state( + state text, + value text, + separator text) + returns text + language plpgsql + as $$ + begin + if state is null then + return value; + else + return state || separator || value; + end if; + end; + $$; + + create aggregate string_concat(text, text) ( + sfunc = string_concat_state, + stype = text, + initcond = '' + ); + "#; + + pool.execute(setup).await.unwrap(); + + let cache = SchemaCache::load(&pool).await.unwrap(); + + // Find and check the function + let foo_fn = cache + .functions + .iter() + .find(|f| f.name == "my_cool_foo") + .unwrap(); + assert_eq!(foo_fn.schema, "public"); + assert_eq!(foo_fn.kind, ProcKind::Function); + assert_eq!(foo_fn.language, "plpgsql"); + assert_eq!(foo_fn.return_type.as_deref(), Some("trigger")); + assert!(!foo_fn.security_definer); + assert_eq!(foo_fn.behavior, Behavior::Volatile); + + // Find and check the procedure + let proc_fn = 
cache + .functions + .iter() + .find(|f| f.name == "my_cool_proc") + .unwrap(); + + assert_eq!(proc_fn.kind, ProcKind::Procedure); + assert_eq!(proc_fn.language, "plpgsql"); + assert!(!proc_fn.security_definer); + + // Find and check the aggregate + let agg_fn = cache + .functions + .iter() + .find(|f| f.name == "string_concat") + .unwrap(); + assert_eq!(agg_fn.kind, ProcKind::Aggregate); + assert_eq!(agg_fn.language, "internal"); // Aggregates are often "internal" + // The return type should be text + assert_eq!(agg_fn.return_type.as_deref(), Some("text")); + + // Find and check the state function for the aggregate + let state_fn = cache + .functions + .iter() + .find(|f| f.name == "string_concat_state") + .unwrap(); + + assert_eq!(state_fn.kind, ProcKind::Function); + assert_eq!(state_fn.language, "plpgsql"); + assert_eq!(state_fn.return_type.as_deref(), Some("text")); + assert_eq!(state_fn.args.args.len(), 3); + let arg_names: Vec<_> = state_fn.args.args.iter().map(|a| a.name.as_str()).collect(); + assert_eq!(arg_names, &["state", "value", "separator"]); + } +} diff --git a/crates/pgt_schema_cache/src/lib.rs b/crates/pgt_schema_cache/src/lib.rs index 186fbdb92..6440cd01a 100644 --- a/crates/pgt_schema_cache/src/lib.rs +++ b/crates/pgt_schema_cache/src/lib.rs @@ -3,6 +3,7 @@ #![allow(dead_code)] mod columns; +mod extensions; mod functions; mod policies; mod roles; @@ -14,11 +15,12 @@ mod types; mod versions; pub use columns::*; -pub use functions::{Behavior, Function, FunctionArg, FunctionArgs}; +pub use extensions::Extension; +pub use functions::{Behavior, Function, FunctionArg, FunctionArgs, ProcKind}; pub use policies::{Policy, PolicyCommand}; pub use roles::*; pub use schema_cache::SchemaCache; pub use schemas::Schema; -pub use tables::{ReplicaIdentity, Table}; +pub use tables::{ReplicaIdentity, Table, TableKind}; pub use triggers::{Trigger, TriggerAffected, TriggerEvent}; pub use types::{PostgresType, PostgresTypeAttribute}; diff --git 
a/crates/pgt_schema_cache/src/policies.rs b/crates/pgt_schema_cache/src/policies.rs index 85cd78219..8e2ee4d70 100644 --- a/crates/pgt_schema_cache/src/policies.rs +++ b/crates/pgt_schema_cache/src/policies.rs @@ -80,27 +80,14 @@ impl SchemaCacheItem for Policy { #[cfg(test)] mod tests { - use pgt_test_utils::test_database::get_new_test_db; - use sqlx::Executor; - use crate::{SchemaCache, policies::PolicyCommand}; + use sqlx::{Executor, PgPool}; - #[tokio::test] - async fn loads_policies() { - let test_db = get_new_test_db().await; + use crate::{SchemaCache, policies::PolicyCommand}; + #[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] + async fn loads_policies(test_db: PgPool) { let setup = r#" - do $$ - begin - if not exists ( - select from pg_catalog.pg_roles - where rolname = 'admin' - ) then - create role admin; - end if; - end $$; - - create table public.users ( id serial primary key, name varchar(255) not null @@ -125,22 +112,12 @@ mod tests { to public with check (true); - create policy admin_policy + create policy owner_policy on public.users for all - to admin + to owner with check (true); - do $$ - begin - if not exists ( - select from pg_catalog.pg_roles - where rolname = 'owner' - ) then - create role owner; - end if; - end $$; - create schema real_estate; create table real_estate.properties ( @@ -148,10 +125,10 @@ mod tests { owner_id int not null ); - create policy owner_policy + create policy test_nologin_policy on real_estate.properties for update - to owner + to test_nologin using (owner_id = current_user::int); "#; @@ -193,29 +170,29 @@ mod tests { assert_eq!(public_policy.security_qualification, Some("true".into())); assert_eq!(public_policy.with_check, None); - let admin_policy = cache + let owner_policy = cache .policies .iter() - .find(|p| p.name == "admin_policy") + .find(|p| p.name == "owner_policy") .unwrap(); - assert_eq!(admin_policy.table_name, "users"); - assert_eq!(admin_policy.schema_name, "public"); - 
assert!(admin_policy.is_permissive); - assert_eq!(admin_policy.command, PolicyCommand::All); - assert_eq!(admin_policy.role_names, vec!["admin"]); - assert_eq!(admin_policy.security_qualification, None); - assert_eq!(admin_policy.with_check, Some("true".into())); + assert_eq!(owner_policy.table_name, "users"); + assert_eq!(owner_policy.schema_name, "public"); + assert!(owner_policy.is_permissive); + assert_eq!(owner_policy.command, PolicyCommand::All); + assert_eq!(owner_policy.role_names, vec!["owner"]); + assert_eq!(owner_policy.security_qualification, None); + assert_eq!(owner_policy.with_check, Some("true".into())); let owner_policy = cache .policies .iter() - .find(|p| p.name == "owner_policy") + .find(|p| p.name == "test_nologin_policy") .unwrap(); assert_eq!(owner_policy.table_name, "properties"); assert_eq!(owner_policy.schema_name, "real_estate"); assert!(owner_policy.is_permissive); assert_eq!(owner_policy.command, PolicyCommand::Update); - assert_eq!(owner_policy.role_names, vec!["owner"]); + assert_eq!(owner_policy.role_names, vec!["test_nologin"]); assert_eq!( owner_policy.security_qualification, Some("(owner_id = (CURRENT_USER)::integer)".into()) diff --git a/crates/pgt_schema_cache/src/queries/extensions.sql b/crates/pgt_schema_cache/src/queries/extensions.sql new file mode 100644 index 000000000..aedc71b21 --- /dev/null +++ b/crates/pgt_schema_cache/src/queries/extensions.sql @@ -0,0 +1,10 @@ +SELECT + e.name as "name!", + n.nspname AS schema, + e.default_version as "default_version!", + x.extversion AS installed_version, + e.comment +FROM + pg_available_extensions() e(name, default_version, comment) + LEFT JOIN pg_extension x ON e.name = x.extname + LEFT JOIN pg_namespace n ON x.extnamespace = n.oid diff --git a/crates/pgt_schema_cache/src/queries/functions.sql b/crates/pgt_schema_cache/src/queries/functions.sql index f78ba91e6..9be1992db 100644 --- a/crates/pgt_schema_cache/src/queries/functions.sql +++ 
b/crates/pgt_schema_cache/src/queries/functions.sql @@ -10,6 +10,7 @@ with functions as ( prolang, pronamespace, proconfig, + prokind, -- proargmodes is null when all arg modes are IN coalesce( p.proargmodes, @@ -27,21 +28,20 @@ with functions as ( ) ) as arg_names, -- proallargtypes is null when all arg modes are IN - coalesce(p.proallargtypes, p.proargtypes) as arg_types, + coalesce(p.proallargtypes, string_to_array(proargtypes::text, ' ')::int[]) as arg_types, array_cat( array_fill(false, array [pronargs - pronargdefaults]), array_fill(true, array [pronargdefaults]) ) as arg_has_defaults from pg_proc as p - where - p.prokind = 'f' ) select f.oid :: int8 as "id!", n.nspname as "schema!", f.proname as "name!", l.lanname as "language!", + f.prokind as "kind!", case when l.lanname = 'internal' then null else f.prosrc @@ -53,16 +53,16 @@ select coalesce(f_args.args, '[]') as args, nullif(pg_get_function_arguments(f.oid), '') as argument_types, nullif(pg_get_function_identity_arguments(f.oid), '') as identity_argument_types, - f.prorettype :: int8 as "return_type_id!", - pg_get_function_result(f.oid) as "return_type!", + f.prorettype :: int8 as return_type_id, + pg_get_function_result(f.oid) as return_type, nullif(rt.typrelid :: int8, 0) as return_type_relation_id, - f.proretset as is_set_returning_function, + f.proretset as "is_set_returning_function!", case when f.provolatile = 'i' then 'IMMUTABLE' when f.provolatile = 's' then 'STABLE' when f.provolatile = 'v' then 'VOLATILE' end as behavior, - f.prosecdef as security_definer + f.prosecdef as "security_definer!" 
from functions f left join pg_namespace n on f.pronamespace = n.oid @@ -106,12 +106,13 @@ from ( select oid, - unnest(arg_modes) as mode, - unnest(arg_names) as name, - unnest(arg_types) :: int8 as type_id, - unnest(arg_has_defaults) as has_default + arg_modes[i] as mode, + arg_names[i] as name, + arg_types[i] :: int8 as type_id, + arg_has_defaults[i] as has_default from - functions + functions, + pg_catalog.generate_subscripts(arg_names, 1) as i ) as t1, lateral ( select diff --git a/crates/pgt_schema_cache/src/queries/tables.sql b/crates/pgt_schema_cache/src/queries/tables.sql index bcce4fcc7..6e6865a2c 100644 --- a/crates/pgt_schema_cache/src/queries/tables.sql +++ b/crates/pgt_schema_cache/src/queries/tables.sql @@ -2,6 +2,7 @@ select c.oid :: int8 as "id!", nc.nspname as schema, c.relname as name, + c.relkind as table_kind, c.relrowsecurity as rls_enabled, c.relforcerowsecurity as rls_forced, case @@ -21,7 +22,7 @@ from pg_namespace nc join pg_class c on nc.oid = c.relnamespace where - c.relkind in ('r', 'p') + c.relkind in ('r', 'p', 'v', 'm') and not pg_is_other_temp_schema(nc.oid) and ( pg_has_role(c.relowner, 'USAGE') diff --git a/crates/pgt_schema_cache/src/queries/triggers.sql b/crates/pgt_schema_cache/src/queries/triggers.sql index c28cc39fa..895d1be0d 100644 --- a/crates/pgt_schema_cache/src/queries/triggers.sql +++ b/crates/pgt_schema_cache/src/queries/triggers.sql @@ -1,17 +1,18 @@ --- we need to join tables from the pg_catalog since "TRUNCATE" triggers are +-- we need to join tables from the pg_catalog since "TRUNCATE" triggers are -- not available in the information_schema.trigger table. -select - t.tgname as "name!", - c.relname as "table_name!", - p.proname as "proc_name!", - n.nspname as "schema_name!", - t.tgtype as "details_bitmask!" 
-from - pg_catalog.pg_trigger t - left join pg_catalog.pg_proc p on t.tgfoid = p.oid - left join pg_catalog.pg_class c on t.tgrelid = c.oid - left join pg_catalog.pg_namespace n on c.relnamespace = n.oid -where - -- triggers enforcing constraints (e.g. unique fields) should not be included. - t.tgisinternal = false and - t.tgconstraint = 0; +select + t.tgname as "name!", + c.relname as "table_name!", + p.proname as "proc_name!", + proc_ns.nspname as "proc_schema!", + table_ns.nspname as "table_schema!", + t.tgtype as "details_bitmask!" +from + pg_catalog.pg_trigger t +left join pg_catalog.pg_proc p on t.tgfoid = p.oid +left join pg_catalog.pg_class c on t.tgrelid = c.oid +left join pg_catalog.pg_namespace table_ns on c.relnamespace = table_ns.oid +left join pg_catalog.pg_namespace proc_ns on p.pronamespace = proc_ns.oid +where + t.tgisinternal = false and + t.tgconstraint = 0; diff --git a/crates/pgt_schema_cache/src/roles.rs b/crates/pgt_schema_cache/src/roles.rs index c212b7919..7ced66f97 100644 --- a/crates/pgt_schema_cache/src/roles.rs +++ b/crates/pgt_schema_cache/src/roles.rs @@ -21,50 +21,19 @@ impl SchemaCacheItem for Role { #[cfg(test)] mod tests { - use crate::SchemaCache; - use pgt_test_utils::test_database::get_new_test_db; - use sqlx::Executor; - - #[tokio::test] - async fn loads_roles() { - let test_db = get_new_test_db().await; - - let setup = r#" - do $$ - begin - if not exists ( - select from pg_catalog.pg_roles - where rolname = 'test_super' - ) then - create role test_super superuser createdb login bypassrls; - end if; - if not exists ( - select from pg_catalog.pg_roles - where rolname = 'test_nologin' - ) then - create role test_nologin; - end if; - if not exists ( - select from pg_catalog.pg_roles - where rolname = 'test_login' - ) then - create role test_login login; - end if; - end $$; - "#; + use sqlx::PgPool; - test_db - .execute(setup) - .await - .expect("Failed to setup test database"); + use crate::SchemaCache; + #[sqlx::test(migrator = 
"pgt_test_utils::MIGRATIONS")] + async fn loads_roles(test_db: PgPool) { let cache = SchemaCache::load(&test_db) .await .expect("Failed to load Schema Cache"); let roles = &cache.roles; - let super_role = roles.iter().find(|r| r.name == "test_super").unwrap(); + let super_role = roles.iter().find(|r| r.name == "owner").unwrap(); assert!(super_role.is_super_user); assert!(super_role.can_create_db); assert!(super_role.can_login); diff --git a/crates/pgt_schema_cache/src/schema_cache.rs b/crates/pgt_schema_cache/src/schema_cache.rs index 516b37e6d..84bcd77c8 100644 --- a/crates/pgt_schema_cache/src/schema_cache.rs +++ b/crates/pgt_schema_cache/src/schema_cache.rs @@ -7,7 +7,7 @@ use crate::schemas::Schema; use crate::tables::Table; use crate::types::PostgresType; use crate::versions::Version; -use crate::{Role, Trigger}; +use crate::{Extension, Role, Trigger}; #[derive(Debug, Default)] pub struct SchemaCache { @@ -18,13 +18,25 @@ pub struct SchemaCache { pub versions: Vec, pub columns: Vec, pub policies: Vec, + pub extensions: Vec, pub triggers: Vec, pub roles: Vec, } impl SchemaCache { pub async fn load(pool: &PgPool) -> Result { - let (schemas, tables, functions, types, versions, columns, policies, triggers, roles) = futures_util::try_join!( + let ( + schemas, + tables, + functions, + types, + versions, + columns, + policies, + triggers, + roles, + extensions, + ) = futures_util::try_join!( Schema::load(pool), Table::load(pool), Function::load(pool), @@ -33,7 +45,8 @@ impl SchemaCache { Column::load(pool), Policy::load(pool), Trigger::load(pool), - Role::load(pool) + Role::load(pool), + Extension::load(pool), )?; Ok(SchemaCache { @@ -46,17 +59,10 @@ impl SchemaCache { policies, triggers, roles, + extensions, }) } - /// Applies an AST node to the repository - /// - /// For example, alter table add column will add the column to the table if it does not exist - /// yet - pub fn mutate(&mut self) { - unimplemented!(); - } - pub fn find_table(&self, name: &str, schema: 
Option<&str>) -> Option<&Table> { self.tables .iter() @@ -93,14 +99,12 @@ pub trait SchemaCacheItem { #[cfg(test)] mod tests { - use pgt_test_utils::test_database::get_new_test_db; + use sqlx::PgPool; use crate::SchemaCache; - #[tokio::test] - async fn it_loads() { - let test_db = get_new_test_db().await; - + #[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] + async fn it_loads(test_db: PgPool) { SchemaCache::load(&test_db) .await .expect("Couldnt' load Schema Cache"); diff --git a/crates/pgt_schema_cache/src/tables.rs b/crates/pgt_schema_cache/src/tables.rs index 99061384c..16b86c54a 100644 --- a/crates/pgt_schema_cache/src/tables.rs +++ b/crates/pgt_schema_cache/src/tables.rs @@ -23,6 +23,34 @@ impl From for ReplicaIdentity { } } +#[derive(Debug, Clone, PartialEq, Eq, Default)] +pub enum TableKind { + #[default] + Ordinary, + View, + MaterializedView, + Partitioned, +} + +impl From for TableKind { + fn from(s: char) -> Self { + match s { + 'r' => Self::Ordinary, + 'p' => Self::Partitioned, + 'v' => Self::View, + 'm' => Self::MaterializedView, + _ => panic!("Invalid table kind"), + } + } +} + +impl From for TableKind { + fn from(s: i8) -> Self { + let c = char::from(u8::try_from(s).unwrap()); + c.into() + } +} + #[derive(Debug, Default, PartialEq, Eq)] pub struct Table { pub id: i64, @@ -31,6 +59,7 @@ pub struct Table { pub rls_enabled: bool, pub rls_forced: bool, pub replica_identity: ReplicaIdentity, + pub table_kind: TableKind, pub bytes: i64, pub size: String, pub live_rows_estimate: i64, @@ -47,3 +76,72 @@ impl SchemaCacheItem for Table { .await } } + +#[cfg(test)] +mod tests { + use sqlx::{Executor, PgPool}; + + use crate::{SchemaCache, tables::TableKind}; + + #[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] + async fn includes_views_in_query(test_db: PgPool) { + let setup = r#" + create table public.base_table ( + id serial primary key, + value text + ); + + create view public.my_view as + select * from public.base_table; + "#; + + test_db + 
.execute(setup) + .await + .expect("Failed to setup test database"); + + let cache = SchemaCache::load(&test_db) + .await + .expect("Failed to load Schema Cache"); + + let view = cache + .tables + .iter() + .find(|t| t.name == "my_view") + .expect("View not found"); + + assert_eq!(view.table_kind, TableKind::View); + assert_eq!(view.schema, "public"); + } + + #[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] + async fn includes_materialized_views_in_query(test_db: PgPool) { + let setup = r#" + create table public.base_table ( + id serial primary key, + value text + ); + + create materialized view public.my_mat_view as + select * from public.base_table; + "#; + + test_db + .execute(setup) + .await + .expect("Failed to setup test database"); + + let cache = SchemaCache::load(&test_db) + .await + .expect("Failed to load Schema Cache"); + + let mat_view = cache + .tables + .iter() + .find(|t| t.name == "my_mat_view") + .expect("Materialized view not found"); + + assert_eq!(mat_view.table_kind, TableKind::MaterializedView); + assert_eq!(mat_view.schema, "public"); + } +} diff --git a/crates/pgt_schema_cache/src/triggers.rs b/crates/pgt_schema_cache/src/triggers.rs index 0a5241d60..d0a4788a0 100644 --- a/crates/pgt_schema_cache/src/triggers.rs +++ b/crates/pgt_schema_cache/src/triggers.rs @@ -82,20 +82,22 @@ impl TryFrom for TriggerTiming { pub struct TriggerQueried { name: String, table_name: String, - schema_name: String, + table_schema: String, proc_name: String, + proc_schema: String, details_bitmask: i16, } #[derive(Debug, PartialEq, Eq)] pub struct Trigger { - name: String, - table_name: String, - schema_name: String, - proc_name: String, - affected: TriggerAffected, - timing: TriggerTiming, - events: Vec, + pub name: String, + pub table_name: String, + pub table_schema: String, + pub proc_name: String, + pub proc_schema: String, + pub affected: TriggerAffected, + pub timing: TriggerTiming, + pub events: Vec, } impl From for Trigger { @@ -104,7 +106,8 @@ impl 
From for Trigger { name: value.name, table_name: value.table_name, proc_name: value.proc_name, - schema_name: value.schema_name, + proc_schema: value.proc_schema, + table_schema: value.table_schema, affected: value.details_bitmask.into(), timing: value.details_bitmask.try_into().unwrap(), events: TriggerEvents::from(value.details_bitmask).0, @@ -126,24 +129,22 @@ impl SchemaCacheItem for Trigger { #[cfg(test)] mod tests { - use pgt_test_utils::test_database::get_new_test_db; - use sqlx::Executor; + + use sqlx::{Executor, PgPool}; use crate::{ SchemaCache, triggers::{TriggerAffected, TriggerEvent, TriggerTiming}, }; - #[tokio::test] - async fn loads_triggers() { - let test_db = get_new_test_db().await; - + #[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] + async fn loads_triggers(test_db: PgPool) { let setup = r#" create table public.users ( id serial primary key, name text ); - + create or replace function public.log_user_insert() returns trigger as $$ begin @@ -151,17 +152,17 @@ mod tests { return new; end; $$ language plpgsql; - + create trigger trg_users_insert before insert on public.users for each row execute function public.log_user_insert(); - + create trigger trg_users_update after update or insert on public.users for each statement execute function public.log_user_insert(); - + create trigger trg_users_delete before delete on public.users for each row @@ -188,7 +189,7 @@ mod tests { .iter() .find(|t| t.name == "trg_users_insert") .unwrap(); - assert_eq!(insert_trigger.schema_name, "public"); + assert_eq!(insert_trigger.table_schema, "public"); assert_eq!(insert_trigger.table_name, "users"); assert_eq!(insert_trigger.timing, TriggerTiming::Before); assert_eq!(insert_trigger.affected, TriggerAffected::Row); @@ -199,7 +200,7 @@ mod tests { .iter() .find(|t| t.name == "trg_users_update") .unwrap(); - assert_eq!(insert_trigger.schema_name, "public"); + assert_eq!(insert_trigger.table_schema, "public"); assert_eq!(insert_trigger.table_name, "users"); 
assert_eq!(update_trigger.timing, TriggerTiming::After); assert_eq!(update_trigger.affected, TriggerAffected::Statement); @@ -211,7 +212,7 @@ mod tests { .iter() .find(|t| t.name == "trg_users_delete") .unwrap(); - assert_eq!(insert_trigger.schema_name, "public"); + assert_eq!(insert_trigger.table_schema, "public"); assert_eq!(insert_trigger.table_name, "users"); assert_eq!(delete_trigger.timing, TriggerTiming::Before); assert_eq!(delete_trigger.affected, TriggerAffected::Row); @@ -219,10 +220,8 @@ mod tests { assert_eq!(delete_trigger.proc_name, "log_user_insert"); } - #[tokio::test] - async fn loads_instead_and_truncate_triggers() { - let test_db = get_new_test_db().await; - + #[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] + async fn loads_instead_and_truncate_triggers(test_db: PgPool) { let setup = r#" create table public.docs ( id serial primary key, @@ -279,7 +278,7 @@ mod tests { .iter() .find(|t| t.name == "trg_docs_instead_update") .unwrap(); - assert_eq!(instead_trigger.schema_name, "public"); + assert_eq!(instead_trigger.table_schema, "public"); assert_eq!(instead_trigger.table_name, "docs_view"); assert_eq!(instead_trigger.timing, TriggerTiming::Instead); assert_eq!(instead_trigger.affected, TriggerAffected::Row); @@ -290,7 +289,7 @@ mod tests { .iter() .find(|t| t.name == "trg_docs_truncate") .unwrap(); - assert_eq!(truncate_trigger.schema_name, "public"); + assert_eq!(truncate_trigger.table_schema, "public"); assert_eq!(truncate_trigger.table_name, "docs"); assert_eq!(truncate_trigger.timing, TriggerTiming::After); assert_eq!(truncate_trigger.affected, TriggerAffected::Statement); diff --git a/crates/pgt_statement_splitter/Cargo.toml b/crates/pgt_statement_splitter/Cargo.toml index deea07bb1..45a42ebc6 100644 --- a/crates/pgt_statement_splitter/Cargo.toml +++ b/crates/pgt_statement_splitter/Cargo.toml @@ -14,9 +14,14 @@ version = "0.0.0" [dependencies] pgt_diagnostics = { workspace = true } pgt_lexer.workspace = true -pgt_query_ext.workspace = 
true +pgt_query.workspace = true pgt_text_size.workspace = true regex.workspace = true [dev-dependencies] -ntest = "0.9.3" +criterion = "0.3" +ntest = "0.9.3" + +[[bench]] +harness = false +name = "splitter" diff --git a/crates/pgt_statement_splitter/benches/splitter.rs b/crates/pgt_statement_splitter/benches/splitter.rs new file mode 100644 index 000000000..e7cdeeef6 --- /dev/null +++ b/crates/pgt_statement_splitter/benches/splitter.rs @@ -0,0 +1,85 @@ +use criterion::{Criterion, black_box, criterion_group, criterion_main}; +use pgt_statement_splitter::split; + +pub fn splitter_benchmark(c: &mut Criterion) { + let large_statement = r#"with + available_tables as ( + select + c.relname as table_name, + c.oid as table_oid, + c.relkind as class_kind, + n.nspname as schema_name + from + pg_catalog.pg_class c + join pg_catalog.pg_namespace n on n.oid = c.relnamespace + where + -- r: normal tables + -- v: views + -- m: materialized views + -- f: foreign tables + -- p: partitioned tables + c.relkind in ('r', 'v', 'm', 'f', 'p') + ), + available_indexes as ( + select + unnest (ix.indkey) as attnum, + ix.indisprimary as is_primary, + ix.indisunique as is_unique, + ix.indrelid as table_oid + from + pg_catalog.pg_class c + join pg_catalog.pg_index ix on c.oid = ix.indexrelid + where + c.relkind = 'i' + ) +select + atts.attname as name, + ts.table_name, + ts.table_oid :: int8 as "table_oid!", + ts.class_kind :: char as "class_kind!", + ts.schema_name, + atts.atttypid :: int8 as "type_id!", + not atts.attnotnull as "is_nullable!", + nullif( + information_schema._pg_char_max_length (atts.atttypid, atts.atttypmod), + -1 + ) as varchar_length, + pg_get_expr (def.adbin, def.adrelid) as default_expr, + coalesce(ix.is_primary, false) as "is_primary_key!", + coalesce(ix.is_unique, false) as "is_unique!", + pg_catalog.col_description (ts.table_oid, atts.attnum) as comment +from + pg_catalog.pg_attribute atts + join available_tables ts on atts.attrelid = ts.table_oid + left join 
available_indexes ix on atts.attrelid = ix.table_oid + and atts.attnum = ix.attnum + left join pg_catalog.pg_attrdef def on atts.attrelid = def.adrelid + and atts.attnum = def.adnum +where + -- system columns, such as `cmax` or `tableoid`, have negative `attnum`s + atts.attnum >= 0; + +"#; + + let large_content = large_statement.repeat(500); + + c.bench_function( + format!("large statement with length {}", large_content.len()).as_str(), + |b| { + b.iter(|| black_box(split(&large_content))); + }, + ); + + let small_statement = r#"select 1 from public.user where id = 1"#; + let small_content = small_statement.repeat(500); + + c.bench_function( + format!("small statement with length {}", small_content.len()).as_str(), + |b| { + b.iter(|| black_box(split(&small_content))); + }, + ); +} + +criterion_group!(benches, splitter_benchmark); +criterion_main!(benches); diff --git a/crates/pgt_statement_splitter/src/diagnostics.rs b/crates/pgt_statement_splitter/src/diagnostics.rs index bcff6e800..d543d4e51 100644 --- a/crates/pgt_statement_splitter/src/diagnostics.rs +++ b/crates/pgt_statement_splitter/src/diagnostics.rs @@ -1,6 +1,9 @@ use pgt_diagnostics::{Diagnostic, MessageAndDescription}; +use pgt_lexer::{LexDiagnostic, Lexed}; use pgt_text_size::TextRange; +use crate::splitter::SplitError; + /// A specialized diagnostic for the statement splitter parser. /// /// Parser diagnostics are always **errors**. 
@@ -23,3 +26,22 @@ impl SplitDiagnostic { } } } + +impl From for SplitDiagnostic { + fn from(lex_diagnostic: LexDiagnostic) -> Self { + Self { + span: Some(lex_diagnostic.span), + message: lex_diagnostic.message, + } + } +} + +impl SplitDiagnostic { + pub fn from_split_error(split_error: SplitError, lexed: &Lexed) -> Self { + let range = lexed.range(split_error.token); + Self { + span: Some(range), + message: MessageAndDescription::from(split_error.msg), + } + } +} diff --git a/crates/pgt_statement_splitter/src/lib.rs b/crates/pgt_statement_splitter/src/lib.rs index e43a1095e..02ca1b304 100644 --- a/crates/pgt_statement_splitter/src/lib.rs +++ b/crates/pgt_statement_splitter/src/lib.rs @@ -2,19 +2,40 @@ //! //! This crate provides a function to split a SQL source string into individual statements. pub mod diagnostics; -mod parser; +mod splitter; -use parser::{Parser, ParserResult, source}; -use pgt_lexer::diagnostics::ScanError; +use diagnostics::SplitDiagnostic; +use pgt_lexer::Lexer; +use pgt_text_size::TextRange; +use splitter::{Splitter, source}; -pub fn split(sql: &str) -> Result> { - let tokens = pgt_lexer::lex(sql)?; +pub struct SplitResult { + pub ranges: Vec, + pub errors: Vec, +} + +pub fn split(sql: &str) -> SplitResult { + let lexed = Lexer::new(sql).lex(); + + let mut splitter = Splitter::new(&lexed); - let mut parser = Parser::new(tokens); + source(&mut splitter); - source(&mut parser); + let split_result = splitter.finish(); - Ok(parser.finish()) + let mut errors: Vec = lexed.errors().into_iter().map(Into::into).collect(); + + errors.extend( + split_result + .errors + .into_iter() + .map(|err| SplitDiagnostic::from_split_error(err, &lexed)), + ); + + SplitResult { + ranges: split_result.ranges, + errors, + } } #[cfg(test)] @@ -28,13 +49,13 @@ mod tests { struct Tester { input: String, - parse: ParserResult, + result: SplitResult, } impl From<&str> for Tester { fn from(input: &str) -> Self { Tester { - parse: split(input).expect("Failed to split"), + 
result: split(input), input: input.to_string(), } } @@ -43,24 +64,25 @@ mod tests { impl Tester { fn expect_statements(&self, expected: Vec<&str>) -> &Self { assert_eq!( - self.parse.ranges.len(), + self.result.ranges.len(), expected.len(), - "Expected {} statements, got {}: {:?}", + "Expected {} statements for input {}, got {}: {:?}", expected.len(), - self.parse.ranges.len(), - self.parse + self.input, + self.result.ranges.len(), + self.result .ranges .iter() .map(|r| &self.input[*r]) .collect::>() ); - for (range, expected) in self.parse.ranges.iter().zip(expected.iter()) { + for (range, expected) in self.result.ranges.iter().zip(expected.iter()) { assert_eq!(*expected, self.input[*range].to_string()); } assert!( - self.parse.ranges.is_sorted_by_key(|r| r.start()), + self.result.ranges.is_sorted_by_key(|r| r.start()), "Ranges are not sorted" ); @@ -69,15 +91,15 @@ mod tests { fn expect_errors(&self, expected: Vec) -> &Self { assert_eq!( - self.parse.errors.len(), + self.result.errors.len(), expected.len(), "Expected {} errors, got {}: {:?}", expected.len(), - self.parse.errors.len(), - self.parse.errors + self.result.errors.len(), + self.result.errors ); - for (err, expected) in self.parse.errors.iter().zip(expected.iter()) { + for (err, expected) in self.result.errors.iter().zip(expected.iter()) { assert_eq!(expected, err); } @@ -93,10 +115,11 @@ mod tests { } #[test] - fn failing_lexer() { - let input = "select 1443ddwwd33djwdkjw13331333333333"; - let res = split(input).unwrap_err(); - assert!(!res.is_empty()); + fn test_crash_eof() { + Tester::from("CREATE INDEX \"idx_analytics_read_ratio\" ON \"public\".\"message\" USING \"btree\" (\"inbox_id\", \"timestamp\") INCLUDE (\"status\") WHERE (\"is_inbound\" = false and channel_type not in ('postal'', 'sms'));") + .expect_statements(vec![ + "CREATE INDEX \"idx_analytics_read_ratio\" ON \"public\".\"message\" USING \"btree\" (\"inbox_id\", \"timestamp\") INCLUDE (\"status\") WHERE (\"is_inbound\" = false and 
channel_type not in ('postal'', 'sms'));", + ]); } #[test] @@ -114,9 +137,46 @@ mod tests { #[test] fn grant() { - Tester::from("GRANT SELECT ON TABLE \"public\".\"my_table\" TO \"my_role\";") + let stmts = vec![ + "GRANT SELECT ON TABLE \"public\".\"my_table\" TO \"my_role\";", + "GRANT UPDATE ON TABLE \"public\".\"my_table\" TO \"my_role\";", + "GRANT DELETE ON TABLE \"public\".\"my_table\" TO \"my_role\";", + "GRANT INSERT ON TABLE \"public\".\"my_table\" TO \"my_role\";", + "GRANT CREATE ON SCHEMA \"public\" TO \"my_role\";", + "GRANT ALL PRIVILEGES ON DATABASE \"my_database\" TO \"my_role\";", + "GRANT USAGE ON SCHEMA \"public\" TO \"my_role\";", + "GRANT EXECUTE ON FUNCTION \"public\".\"my_function\"() TO \"my_role\";", + "GRANT REFERENCES ON TABLE \"public\".\"my_table\" TO \"my_role\";", + "GRANT SELECT, UPDATE ON ALL TABLES IN SCHEMA \"public\" TO \"my_role\";", + "GRANT SELECT, INSERT ON public.users TO anon WITH GRANT OPION GRANTED BY owner;", + "GRANT owner, admin to anon WITH ADMIN;", + ]; + + for stmt in stmts { + Tester::from(stmt).expect_statements(vec![stmt]); + } + } + + #[test] + fn revoke() { + Tester::from("revoke delete on table \"public\".\"voice_call\" from \"anon\";") + .expect_statements(vec![ + "revoke delete on table \"public\".\"voice_call\" from \"anon\";", + ]); + + Tester::from("revoke select on table \"public\".\"voice_call\" from \"anon\";") + .expect_statements(vec![ + "revoke select on table \"public\".\"voice_call\" from \"anon\";", + ]); + + Tester::from("revoke update on table \"public\".\"voice_call\" from \"anon\";") + .expect_statements(vec![ + "revoke update on table \"public\".\"voice_call\" from \"anon\";", + ]); + + Tester::from("revoke insert on table \"public\".\"voice_call\" from \"anon\";") .expect_statements(vec![ - "GRANT SELECT ON TABLE \"public\".\"my_table\" TO \"my_role\";", + "revoke insert on table \"public\".\"voice_call\" from \"anon\";", ]); } @@ -146,7 +206,7 @@ mod tests { Tester::from("\ninsert select 
1\n\nselect 3") .expect_statements(vec!["insert select 1", "select 3"]) .expect_errors(vec![SplitDiagnostic::new( - format!("Expected {:?}", SyntaxKind::Into), + format!("Expected {:?}", SyntaxKind::INTO_KW), TextRange::new(8.into(), 14.into()), )]); } diff --git a/crates/pgt_statement_splitter/src/parser.rs b/crates/pgt_statement_splitter/src/parser.rs deleted file mode 100644 index 241d0c703..000000000 --- a/crates/pgt_statement_splitter/src/parser.rs +++ /dev/null @@ -1,237 +0,0 @@ -mod common; -mod data; -mod ddl; -mod dml; - -pub use common::source; - -use pgt_lexer::{SyntaxKind, Token, WHITESPACE_TOKENS}; -use pgt_text_size::{TextRange, TextSize}; - -use crate::diagnostics::SplitDiagnostic; - -/// Main parser that exposes the `cstree` api, and collects errors and statements -/// It is modelled after a Pratt Parser. For a gentle introduction to Pratt Parsing, see https://matklad.github.io/2020/04/13/simple-but-powerful-pratt-parsing.html -pub struct Parser { - /// The statement ranges are defined by the indices of the start/end tokens - stmt_ranges: Vec<(usize, usize)>, - - /// The syntax errors accumulated during parsing - errors: Vec, - - current_stmt_start: Option, - - tokens: Vec, - - eof_token: Token, - - current_pos: usize, -} - -#[derive(Debug)] -pub struct ParserResult { - /// The ranges of the parsed statements - pub ranges: Vec, - /// The syntax errors accumulated during parsing - pub errors: Vec, -} - -impl Parser { - pub fn new(tokens: Vec) -> Self { - let eof_token = Token::eof(usize::from( - tokens - .last() - .map(|t| t.span.end()) - .unwrap_or(TextSize::from(0)), - )); - - // Place `current_pos` on the first relevant token - let mut current_pos = 0; - while is_irrelevant_token(tokens.get(current_pos).unwrap_or(&eof_token)) { - current_pos += 1; - } - - Self { - stmt_ranges: Vec::new(), - eof_token, - errors: Vec::new(), - current_stmt_start: None, - tokens, - current_pos, - } - } - - pub fn finish(self) -> ParserResult { - ParserResult { - 
ranges: self - .stmt_ranges - .iter() - .map(|(start_token_pos, end_token_pos)| { - let from = self.tokens.get(*start_token_pos); - let to = self.tokens.get(*end_token_pos).unwrap_or(&self.eof_token); - - TextRange::new(from.unwrap().span.start(), to.span.end()) - }) - .collect(), - errors: self.errors, - } - } - - pub fn start_stmt(&mut self) { - assert!( - self.current_stmt_start.is_none(), - "cannot start statement within statement at {:?}", - self.tokens.get(self.current_stmt_start.unwrap()) - ); - self.current_stmt_start = Some(self.current_pos); - } - - pub fn close_stmt(&mut self) { - assert!( - self.current_stmt_start.is_some(), - "Must start statement before closing it." - ); - - let start_token_pos = self.current_stmt_start.unwrap(); - - assert!( - self.current_pos > start_token_pos, - "Must close the statement on a token that's later than the start token." - ); - - let (end_token_pos, _) = self.find_last_relevant().unwrap(); - - self.stmt_ranges.push((start_token_pos, end_token_pos)); - - self.current_stmt_start = None; - } - - fn current(&self) -> &Token { - match self.tokens.get(self.current_pos) { - Some(token) => token, - None => &self.eof_token, - } - } - - /// Advances the parser to the next relevant token and returns it. - /// - /// NOTE: This will skip irrelevant tokens. - fn advance(&mut self) -> &Token { - // can't reuse any `find_next_relevant` logic because of Mr. Borrow Checker - let (pos, token) = self - .tokens - .iter() - .enumerate() - .skip(self.current_pos + 1) - .find(|(_, t)| is_relevant(t)) - .unwrap_or((self.tokens.len(), &self.eof_token)); - - self.current_pos = pos; - token - } - - fn look_ahead(&self) -> Option<&Token> { - self.tokens - .iter() - .skip(self.current_pos + 1) - .find(|t| is_relevant(t)) - } - - /// Returns `None` if there are no previous relevant tokens - fn look_back(&self) -> Option<&Token> { - self.find_last_relevant().map(|it| it.1) - } - - /// Will advance if the `kind` matches the current token. 
- /// Otherwise, will add a diagnostic to the internal `errors`. - pub fn expect(&mut self, kind: SyntaxKind) { - if self.current().kind == kind { - self.advance(); - } else { - self.errors.push(SplitDiagnostic::new( - format!("Expected {:#?}", kind), - self.current().span, - )); - } - } - - fn find_last_relevant(&self) -> Option<(usize, &Token)> { - self.tokens - .iter() - .enumerate() - .take(self.current_pos) - .rfind(|(_, t)| is_relevant(t)) - } -} - -#[cfg(windows)] -/// Returns true if the token is relevant for the parsing process -/// -/// On windows, a newline is represented by `\r\n` which is two characters. -fn is_irrelevant_token(t: &Token) -> bool { - WHITESPACE_TOKENS.contains(&t.kind) - // double new lines are relevant, single ones are not - && (t.kind != SyntaxKind::Newline || t.text == "\r\n" || t.text.chars().count() == 1) -} - -#[cfg(not(windows))] -/// Returns true if the token is relevant for the parsing process -fn is_irrelevant_token(t: &Token) -> bool { - WHITESPACE_TOKENS.contains(&t.kind) - // double new lines are relevant, single ones are not - && (t.kind != SyntaxKind::Newline || t.text.chars().count() == 1) -} - -fn is_relevant(t: &Token) -> bool { - !is_irrelevant_token(t) -} - -#[cfg(test)] -mod tests { - use pgt_lexer::SyntaxKind; - - use crate::parser::Parser; - - #[test] - fn advance_works_as_expected() { - let sql = r#" - create table users ( - id serial primary key, - name text, - email text - ); - "#; - let tokens = pgt_lexer::lex(sql).unwrap(); - let total_num_tokens = tokens.len(); - - let mut parser = Parser::new(tokens); - - let expected = vec![ - (SyntaxKind::Create, 2), - (SyntaxKind::Table, 4), - (SyntaxKind::Ident, 6), - (SyntaxKind::Ascii40, 8), - (SyntaxKind::Ident, 11), - (SyntaxKind::Ident, 13), - (SyntaxKind::Primary, 15), - (SyntaxKind::Key, 17), - (SyntaxKind::Ascii44, 18), - (SyntaxKind::NameP, 21), - (SyntaxKind::TextP, 23), - (SyntaxKind::Ascii44, 24), - (SyntaxKind::Ident, 27), - (SyntaxKind::TextP, 29), - 
(SyntaxKind::Ascii41, 32), - (SyntaxKind::Ascii59, 33), - ]; - - for (kind, pos) in expected { - assert_eq!(parser.current().kind, kind); - assert_eq!(parser.current_pos, pos); - parser.advance(); - } - - assert_eq!(parser.current().kind, SyntaxKind::Eof); - assert_eq!(parser.current_pos, total_num_tokens); - } -} diff --git a/crates/pgt_statement_splitter/src/parser/common.rs b/crates/pgt_statement_splitter/src/parser/common.rs deleted file mode 100644 index a5d68df18..000000000 --- a/crates/pgt_statement_splitter/src/parser/common.rs +++ /dev/null @@ -1,283 +0,0 @@ -use pgt_lexer::{SyntaxKind, Token, TokenType, WHITESPACE_TOKENS}; - -use super::{ - Parser, - data::at_statement_start, - ddl::{alter, create}, - dml::{cte, delete, insert, select, update}, -}; - -pub fn source(p: &mut Parser) { - loop { - match p.current() { - Token { - kind: SyntaxKind::Eof, - .. - } => { - break; - } - Token { - // we might want to ignore TokenType::NoKeyword here too - // but this will lead to invalid statements to not being picked up - token_type: TokenType::Whitespace, - .. - } => { - p.advance(); - } - Token { - kind: SyntaxKind::Ascii92, - .. 
- } => { - plpgsql_command(p); - } - _ => { - statement(p); - } - } - } -} - -pub(crate) fn statement(p: &mut Parser) { - p.start_stmt(); - match p.current().kind { - SyntaxKind::With => { - cte(p); - } - SyntaxKind::Select => { - select(p); - } - SyntaxKind::Insert => { - insert(p); - } - SyntaxKind::Update => { - update(p); - } - SyntaxKind::DeleteP => { - delete(p); - } - SyntaxKind::Create => { - create(p); - } - SyntaxKind::Alter => { - alter(p); - } - _ => { - unknown(p, &[]); - } - } - p.close_stmt(); -} - -pub(crate) fn parenthesis(p: &mut Parser) { - p.expect(SyntaxKind::Ascii40); - - let mut depth = 1; - - loop { - match p.current().kind { - SyntaxKind::Ascii40 => { - p.advance(); - depth += 1; - } - SyntaxKind::Ascii41 | SyntaxKind::Eof => { - p.advance(); - depth -= 1; - if depth == 0 { - break; - } - } - _ => { - p.advance(); - } - } - } -} - -pub(crate) fn plpgsql_command(p: &mut Parser) { - p.expect(SyntaxKind::Ascii92); - - loop { - match p.current().kind { - SyntaxKind::Newline => { - p.advance(); - break; - } - _ => { - // advance the parser to the next token without ignoring irrelevant tokens - // we would skip a newline with `advance()` - p.current_pos += 1; - } - } - } -} - -pub(crate) fn case(p: &mut Parser) { - p.expect(SyntaxKind::Case); - - loop { - match p.current().kind { - SyntaxKind::EndP => { - p.advance(); - break; - } - _ => { - p.advance(); - } - } - } -} - -pub(crate) fn unknown(p: &mut Parser, exclude: &[SyntaxKind]) { - loop { - match p.current() { - Token { - kind: SyntaxKind::Ascii59, - .. - } => { - p.advance(); - break; - } - Token { - kind: SyntaxKind::Eof, - .. - } => { - break; - } - Token { - kind: SyntaxKind::Newline, - .. - } => { - if p.look_back().is_some_and(|t| t.kind == SyntaxKind::Ascii44) { - p.advance(); - } else { - break; - } - } - Token { - kind: SyntaxKind::Case, - .. - } => { - case(p); - } - Token { - kind: SyntaxKind::Ascii92, - .. - } => { - // pgsql commands e.g. 
- // - // ``` - // \if test - // ``` - // - // we wait for "\" and check if the previous token is a newline - - // newline is a whitespace, but we do not want to ignore it here - let irrelevant = WHITESPACE_TOKENS - .iter() - .filter(|t| **t != SyntaxKind::Newline) - .collect::>(); - - // go back from the current position without ignoring irrelevant tokens - if p.tokens - .iter() - .take(p.current_pos) - .rev() - .find(|t| !irrelevant.contains(&&t.kind)) - .is_some_and(|t| t.kind == SyntaxKind::Newline) - { - break; - } - p.advance(); - } - Token { - kind: SyntaxKind::Ascii40, - .. - } => { - parenthesis(p); - } - t => match at_statement_start(t.kind, exclude) { - Some(SyntaxKind::Select) => { - let prev = p.look_back().map(|t| t.kind); - if [ - // for policies, with for select - SyntaxKind::For, - // for create view / table as - SyntaxKind::As, - // for create rule - SyntaxKind::On, - // for create rule - SyntaxKind::Also, - // for create rule - SyntaxKind::Instead, - // for UNION - SyntaxKind::Union, - // for UNION ALL - SyntaxKind::All, - // for UNION ... EXCEPT - SyntaxKind::Except, - // for grant - SyntaxKind::Grant, - ] - .iter() - .all(|x| Some(x) != prev.as_ref()) - { - break; - } - - p.advance(); - } - Some(SyntaxKind::Insert) | Some(SyntaxKind::Update) | Some(SyntaxKind::DeleteP) => { - let prev = p.look_back().map(|t| t.kind); - if [ - // for create trigger - SyntaxKind::Before, - SyntaxKind::After, - // for policies, e.g. for insert - SyntaxKind::For, - // e.g. on insert or delete - SyntaxKind::Or, - // e.g. 
INSTEAD OF INSERT - SyntaxKind::Of, - // for create rule - SyntaxKind::On, - // for create rule - SyntaxKind::Also, - // for create rule - SyntaxKind::Instead, - // for grant - SyntaxKind::Grant, - ] - .iter() - .all(|x| Some(x) != prev.as_ref()) - { - break; - } - p.advance(); - } - Some(SyntaxKind::With) => { - let next = p.look_ahead().map(|t| t.kind); - if [ - // WITH ORDINALITY should not start a new statement - SyntaxKind::Ordinality, - // WITH CHECK should not start a new statement - SyntaxKind::Check, - // TIMESTAMP WITH TIME ZONE should not start a new statement - SyntaxKind::Time, - ] - .iter() - .all(|x| Some(x) != next.as_ref()) - { - break; - } - p.advance(); - } - Some(_) => { - break; - } - None => { - p.advance(); - } - }, - } - } -} diff --git a/crates/pgt_statement_splitter/src/parser/ddl.rs b/crates/pgt_statement_splitter/src/parser/ddl.rs deleted file mode 100644 index d9f233c20..000000000 --- a/crates/pgt_statement_splitter/src/parser/ddl.rs +++ /dev/null @@ -1,15 +0,0 @@ -use pgt_lexer::SyntaxKind; - -use super::{Parser, common::unknown}; - -pub(crate) fn create(p: &mut Parser) { - p.expect(SyntaxKind::Create); - - unknown(p, &[SyntaxKind::With]); -} - -pub(crate) fn alter(p: &mut Parser) { - p.expect(SyntaxKind::Alter); - - unknown(p, &[SyntaxKind::Alter]); -} diff --git a/crates/pgt_statement_splitter/src/parser/dml.rs b/crates/pgt_statement_splitter/src/parser/dml.rs deleted file mode 100644 index 015c50b63..000000000 --- a/crates/pgt_statement_splitter/src/parser/dml.rs +++ /dev/null @@ -1,59 +0,0 @@ -use pgt_lexer::SyntaxKind; - -use super::{ - Parser, - common::{parenthesis, unknown}, -}; - -pub(crate) fn cte(p: &mut Parser) { - p.expect(SyntaxKind::With); - - loop { - p.expect(SyntaxKind::Ident); - p.expect(SyntaxKind::As); - parenthesis(p); - - if p.current().kind == SyntaxKind::Ascii44 { - p.advance(); - } else { - break; - } - } - - unknown( - p, - &[ - SyntaxKind::Select, - SyntaxKind::Insert, - SyntaxKind::Update, - 
SyntaxKind::DeleteP, - SyntaxKind::Merge, - ], - ); -} - -pub(crate) fn select(p: &mut Parser) { - p.expect(SyntaxKind::Select); - - unknown(p, &[]); -} - -pub(crate) fn insert(p: &mut Parser) { - p.expect(SyntaxKind::Insert); - p.expect(SyntaxKind::Into); - - unknown(p, &[SyntaxKind::Select]); -} - -pub(crate) fn update(p: &mut Parser) { - p.expect(SyntaxKind::Update); - - unknown(p, &[]); -} - -pub(crate) fn delete(p: &mut Parser) { - p.expect(SyntaxKind::DeleteP); - p.expect(SyntaxKind::From); - - unknown(p, &[]); -} diff --git a/crates/pgt_statement_splitter/src/splitter.rs b/crates/pgt_statement_splitter/src/splitter.rs new file mode 100644 index 000000000..cfb4716d5 --- /dev/null +++ b/crates/pgt_statement_splitter/src/splitter.rs @@ -0,0 +1,168 @@ +mod common; +mod data; +mod ddl; +mod dml; + +pub use common::source; + +use pgt_lexer::{Lexed, SyntaxKind}; +use pgt_text_size::TextRange; + +pub struct SplitResult { + pub ranges: Vec, + pub errors: Vec, +} + +pub static TRIVIA_TOKENS: &[SyntaxKind] = &[ + SyntaxKind::SPACE, + SyntaxKind::TAB, + SyntaxKind::VERTICAL_TAB, + SyntaxKind::FORM_FEED, + SyntaxKind::COMMENT, + // LINE_ENDING is relevant +]; + +/// Internal error type used during splitting +#[derive(Debug, Clone)] +pub struct SplitError { + pub msg: String, + pub token: usize, +} + +pub struct Splitter<'a> { + lexed: &'a Lexed<'a>, + current_pos: usize, + stmt_ranges: Vec<(usize, usize)>, + errors: Vec, + current_stmt_start: Option, +} + +impl<'a> Splitter<'a> { + pub fn new(lexed: &'a Lexed<'a>) -> Self { + Self { + lexed, + current_pos: 0, + stmt_ranges: Vec::new(), + errors: Vec::new(), + current_stmt_start: None, + } + } + + pub fn finish(self) -> SplitResult { + let ranges = self + .stmt_ranges + .iter() + .map(|(start_token_pos, end_token_pos)| { + let from = self.lexed.range(*start_token_pos).start(); + let to = self.lexed.range(*end_token_pos).end(); + TextRange::new(from, to) + }) + .collect(); + + SplitResult { + ranges, + errors: self.errors, 
+ } + } + + pub fn start_stmt(&mut self) { + assert!( + self.current_stmt_start.is_none(), + "cannot start statement within statement", + ); + self.current_stmt_start = Some(self.current_pos); + } + + pub fn close_stmt(&mut self) { + assert!( + self.current_stmt_start.is_some(), + "Must start statement before closing it." + ); + + let start_token_pos = self.current_stmt_start.unwrap(); + + assert!( + self.current_pos > start_token_pos, + "Must close the statement on a token that's later than the start token: {} > {}", + self.current_pos, + start_token_pos + ); + + let end_token_pos = (0..self.current_pos) + .rev() + .find(|&idx| !self.is_trivia(idx)) + .unwrap(); + + self.stmt_ranges.push((start_token_pos, end_token_pos)); + + self.current_stmt_start = None; + } + + fn current(&self) -> SyntaxKind { + self.lexed.kind(self.current_pos) + } + + fn kind(&self, idx: usize) -> SyntaxKind { + self.lexed.kind(idx) + } + + /// Advances the parser to the next relevant token and returns it. + /// + /// NOTE: This will skip trivia tokens. 
+ fn advance(&mut self) -> SyntaxKind { + let pos = (self.current_pos + 1..self.lexed.len()) + .find(|&idx| !self.is_trivia(idx)) + .expect("lexed should have non-trivia eof token"); + + self.current_pos = pos; + self.lexed.kind(pos) + } + + fn look_ahead(&self, ignore_trivia: bool) -> SyntaxKind { + let pos = if ignore_trivia { + (self.current_pos + 1..self.lexed.len()) + .find(|&idx| !self.is_trivia(idx)) + .expect("lexed should have non-trivia eof token") + } else { + (self.current_pos + 1..self.lexed.len()) + .next() + .expect("lexed should have a eof token") + }; + self.lexed.kind(pos) + } + + /// Returns `None` if there are no previous relevant tokens + fn look_back(&self, ignore_trivia: bool) -> Option { + if ignore_trivia { + (0..self.current_pos) + .rev() + .find(|&idx| !self.is_trivia(idx)) + .map(|idx| self.lexed.kind(idx)) + } else { + (0..self.current_pos) + .next_back() + .map(|idx| self.lexed.kind(idx)) + } + } + + fn is_trivia(&self, idx: usize) -> bool { + match self.lexed.kind(idx) { + k if TRIVIA_TOKENS.contains(&k) => true, + SyntaxKind::LINE_ENDING => self.lexed.line_ending_count(idx) < 2, + _ => false, + } + } + + /// Will advance if the `kind` matches the current token. + /// Otherwise, will add a diagnostic to the internal `errors`. 
+ fn expect(&mut self, kind: SyntaxKind) { + if self.current() == kind { + self.advance(); + } else { + self.errors.push(SplitError { + msg: format!("Expected {:#?}", kind), + token: self.current_pos, + }); + } + } +} diff --git a/crates/pgt_statement_splitter/src/splitter/common.rs b/crates/pgt_statement_splitter/src/splitter/common.rs new file mode 100644 index 000000000..786c24788 --- /dev/null +++ b/crates/pgt_statement_splitter/src/splitter/common.rs @@ -0,0 +1,281 @@ +use super::TRIVIA_TOKENS; +use pgt_lexer::SyntaxKind; + +use super::{ + Splitter, + data::at_statement_start, + ddl::{alter, create}, + dml::{cte, delete, insert, select, update}, +}; + +pub fn source(p: &mut Splitter) { + loop { + match p.current() { + SyntaxKind::EOF => { + break; + } + kind if TRIVIA_TOKENS.contains(&kind) || kind == SyntaxKind::LINE_ENDING => { + p.advance(); + } + SyntaxKind::BACKSLASH => { + plpgsql_command(p); + } + _ => { + statement(p); + } + } + } +} + +pub(crate) fn statement(p: &mut Splitter) { + p.start_stmt(); + match p.current() { + SyntaxKind::WITH_KW => { + cte(p); + } + SyntaxKind::SELECT_KW => { + select(p); + } + SyntaxKind::INSERT_KW => { + insert(p); + } + SyntaxKind::UPDATE_KW => { + update(p); + } + SyntaxKind::DELETE_KW => { + delete(p); + } + SyntaxKind::CREATE_KW => { + create(p); + } + SyntaxKind::ALTER_KW => { + alter(p); + } + _ => { + unknown(p, &[]); + } + } + p.close_stmt(); +} + +pub(crate) fn parenthesis(p: &mut Splitter) { + p.expect(SyntaxKind::L_PAREN); + + let mut depth = 1; + + loop { + match p.current() { + SyntaxKind::L_PAREN => { + p.advance(); + depth += 1; + } + SyntaxKind::R_PAREN | SyntaxKind::EOF => { + if p.current() == SyntaxKind::R_PAREN { + p.advance(); + } + depth -= 1; + if depth == 0 { + break; + } + } + _ => { + p.advance(); + } + } + } +} + +pub(crate) fn plpgsql_command(p: &mut Splitter) { + p.expect(SyntaxKind::BACKSLASH); + + loop { + match p.current() { + SyntaxKind::LINE_ENDING => { + p.advance(); + break; + } + _ => 
{ + // advance the splitter to the next token without ignoring irrelevant tokens + // we would skip a newline with `advance()` + p.current_pos += 1; + } + } + } +} + +pub(crate) fn case(p: &mut Splitter) { + p.expect(SyntaxKind::CASE_KW); + + loop { + match p.current() { + SyntaxKind::END_KW => { + p.advance(); + break; + } + _ => { + p.advance(); + } + } + } +} + +pub(crate) fn unknown(p: &mut Splitter, exclude: &[SyntaxKind]) { + loop { + match p.current() { + SyntaxKind::SEMICOLON => { + p.advance(); + break; + } + SyntaxKind::EOF => { + break; + } + SyntaxKind::LINE_ENDING => { + if p.look_back(true).is_some_and(|t| t == SyntaxKind::COMMA) { + p.advance(); + } else { + break; + } + } + SyntaxKind::CASE_KW => { + case(p); + } + SyntaxKind::BACKSLASH => { + // pgsql commands + // we want to check if the previous token non-trivia token is a LINE_ENDING + // we cannot use the is_trivia() method because that would exclude LINE_ENDINGs + // with count > 1 + if (0..p.current_pos) + .rev() + .find_map(|idx| { + let kind = p.kind(idx); + if !TRIVIA_TOKENS.contains(&kind) { + Some(kind) + } else { + None + } + }) + .is_some_and(|t| t == SyntaxKind::LINE_ENDING) + { + break; + } + p.advance(); + } + SyntaxKind::L_PAREN => { + parenthesis(p); + } + t => match at_statement_start(t, exclude) { + Some(SyntaxKind::SELECT_KW) => { + let prev = p.look_back(true); + if [ + // for policies, with for select + SyntaxKind::FOR_KW, + // for create view / table as + SyntaxKind::AS_KW, + // for create rule + SyntaxKind::ON_KW, + // for create rule + SyntaxKind::ALSO_KW, + // for create rule + SyntaxKind::INSTEAD_KW, + // for UNION + SyntaxKind::UNION_KW, + // for UNION ALL + SyntaxKind::ALL_KW, + // for UNION ... 
EXCEPT + SyntaxKind::EXCEPT_KW, + // for grant + SyntaxKind::GRANT_KW, + // for revoke + SyntaxKind::REVOKE_KW, + SyntaxKind::COMMA, + ] + .iter() + .all(|x| Some(x) != prev.as_ref()) + { + break; + } + + p.advance(); + } + Some(SyntaxKind::INSERT_KW) + | Some(SyntaxKind::UPDATE_KW) + | Some(SyntaxKind::DELETE_KW) => { + let prev = p.look_back(true); + if [ + // for create trigger + SyntaxKind::BEFORE_KW, + SyntaxKind::AFTER_KW, + // for policies, e.g. for insert + SyntaxKind::FOR_KW, + // e.g. on insert or delete + SyntaxKind::OR_KW, + // e.g. INSTEAD OF INSERT + SyntaxKind::OF_KW, + // for create rule + SyntaxKind::ON_KW, + // for create rule + SyntaxKind::ALSO_KW, + // for create rule + SyntaxKind::INSTEAD_KW, + // for grant + SyntaxKind::GRANT_KW, + // for revoke + SyntaxKind::REVOKE_KW, + SyntaxKind::COMMA, + // Do update in INSERT stmt + SyntaxKind::DO_KW, + ] + .iter() + .all(|x| Some(x) != prev.as_ref()) + { + break; + } + p.advance(); + } + Some(SyntaxKind::WITH_KW) => { + let next = p.look_ahead(true); + if [ + // WITH ORDINALITY should not start a new statement + SyntaxKind::ORDINALITY_KW, + // WITH CHECK should not start a new statement + SyntaxKind::CHECK_KW, + // TIMESTAMP WITH TIME ZONE should not start a new statement + SyntaxKind::TIME_KW, + SyntaxKind::GRANT_KW, + SyntaxKind::ADMIN_KW, + SyntaxKind::INHERIT_KW, + SyntaxKind::SET_KW, + ] + .iter() + .all(|x| x != &next) + { + break; + } + p.advance(); + } + + Some(SyntaxKind::CREATE_KW) => { + let prev = p.look_back(true); + if [ + // for grant + SyntaxKind::GRANT_KW, + SyntaxKind::COMMA, + ] + .iter() + .all(|x| Some(x) != prev.as_ref()) + { + break; + } + + p.advance(); + } + Some(_) => { + break; + } + None => { + p.advance(); + } + }, + } + } +} diff --git a/crates/pgt_statement_splitter/src/parser/data.rs b/crates/pgt_statement_splitter/src/splitter/data.rs similarity index 62% rename from crates/pgt_statement_splitter/src/parser/data.rs rename to 
crates/pgt_statement_splitter/src/splitter/data.rs index c0792c39d..0827484be 100644 --- a/crates/pgt_statement_splitter/src/parser/data.rs +++ b/crates/pgt_statement_splitter/src/splitter/data.rs @@ -3,15 +3,15 @@ use pgt_lexer::SyntaxKind; // All tokens listed here must be explicitly handled in the `unknown` function to ensure that we do // not break in the middle of another statement that contains a statement start token. // -// All of these statements must have a dedicated parser function called from the `statement` function +// All of these statements must have a dedicated splitter function called from the `statement` function static STATEMENT_START_TOKENS: &[SyntaxKind] = &[ - SyntaxKind::With, - SyntaxKind::Select, - SyntaxKind::Insert, - SyntaxKind::Update, - SyntaxKind::DeleteP, - SyntaxKind::Create, - SyntaxKind::Alter, + SyntaxKind::WITH_KW, + SyntaxKind::SELECT_KW, + SyntaxKind::INSERT_KW, + SyntaxKind::UPDATE_KW, + SyntaxKind::DELETE_KW, + SyntaxKind::CREATE_KW, + SyntaxKind::ALTER_KW, ]; pub(crate) fn at_statement_start(kind: SyntaxKind, exclude: &[SyntaxKind]) -> Option<&SyntaxKind> { diff --git a/crates/pgt_statement_splitter/src/splitter/ddl.rs b/crates/pgt_statement_splitter/src/splitter/ddl.rs new file mode 100644 index 000000000..449288aab --- /dev/null +++ b/crates/pgt_statement_splitter/src/splitter/ddl.rs @@ -0,0 +1,15 @@ +use pgt_lexer::SyntaxKind; + +use super::{Splitter, common::unknown}; + +pub(crate) fn create(p: &mut Splitter) { + p.expect(SyntaxKind::CREATE_KW); + + unknown(p, &[SyntaxKind::WITH_KW]); +} + +pub(crate) fn alter(p: &mut Splitter) { + p.expect(SyntaxKind::ALTER_KW); + + unknown(p, &[SyntaxKind::ALTER_KW]); +} diff --git a/crates/pgt_statement_splitter/src/splitter/dml.rs b/crates/pgt_statement_splitter/src/splitter/dml.rs new file mode 100644 index 000000000..9c8333016 --- /dev/null +++ b/crates/pgt_statement_splitter/src/splitter/dml.rs @@ -0,0 +1,59 @@ +use pgt_lexer::SyntaxKind; + +use super::{ + Splitter, + 
common::{parenthesis, unknown}, +}; + +pub(crate) fn cte(p: &mut Splitter) { + p.expect(SyntaxKind::WITH_KW); + + loop { + p.expect(SyntaxKind::IDENT); + p.expect(SyntaxKind::AS_KW); + parenthesis(p); + + if p.current() == SyntaxKind::COMMA { + p.advance(); + } else { + break; + } + } + + unknown( + p, + &[ + SyntaxKind::SELECT_KW, + SyntaxKind::INSERT_KW, + SyntaxKind::UPDATE_KW, + SyntaxKind::DELETE_KW, + SyntaxKind::MERGE_KW, + ], + ); +} + +pub(crate) fn select(p: &mut Splitter) { + p.expect(SyntaxKind::SELECT_KW); + + unknown(p, &[]); +} + +pub(crate) fn insert(p: &mut Splitter) { + p.expect(SyntaxKind::INSERT_KW); + p.expect(SyntaxKind::INTO_KW); + + unknown(p, &[SyntaxKind::SELECT_KW]); +} + +pub(crate) fn update(p: &mut Splitter) { + p.expect(SyntaxKind::UPDATE_KW); + + unknown(p, &[]); +} + +pub(crate) fn delete(p: &mut Splitter) { + p.expect(SyntaxKind::DELETE_KW); + p.expect(SyntaxKind::FROM_KW); + + unknown(p, &[]); +} diff --git a/crates/pgt_statement_splitter/tests/data/grant_statements__4.sql b/crates/pgt_statement_splitter/tests/data/grant_statements__4.sql new file mode 100644 index 000000000..5811810e0 --- /dev/null +++ b/crates/pgt_statement_splitter/tests/data/grant_statements__4.sql @@ -0,0 +1,11 @@ +GRANT CREATE ON SCHEMA public TO anon; + +GRANT SELECT, INSERT ON public.users TO anon WITH GRANT OPTION GRANTED BY Owner; + +GRANT read_access, write_access TO user_role + WITH INHERIT TRUE + GRANTED BY security_admin; + +GRANT manager_role TO employee_role + WITH ADMIN OPTION + GRANTED BY admin_role; diff --git a/crates/pgt_statement_splitter/tests/data/on_conflict_do_update__1.sql b/crates/pgt_statement_splitter/tests/data/on_conflict_do_update__1.sql new file mode 100644 index 000000000..0b62366b5 --- /dev/null +++ b/crates/pgt_statement_splitter/tests/data/on_conflict_do_update__1.sql @@ -0,0 +1,7 @@ +INSERT INTO foo.bar ( + pk +) VALUES ( + $1 +) ON CONFLICT (pk) DO UPDATE SET + date_deleted = DEFAULT, + date_created = DEFAULT; \ No newline 
at end of file diff --git a/crates/pgt_statement_splitter/tests/statement_splitter_tests.rs b/crates/pgt_statement_splitter/tests/statement_splitter_tests.rs index e05347253..a4cf32599 100644 --- a/crates/pgt_statement_splitter/tests/statement_splitter_tests.rs +++ b/crates/pgt_statement_splitter/tests/statement_splitter_tests.rs @@ -22,7 +22,7 @@ fn test_statement_splitter() { let contents = fs::read_to_string(&path).unwrap(); - let split = pgt_statement_splitter::split(&contents).expect("Failed to split"); + let split = pgt_statement_splitter::split(&contents); assert_eq!( split.ranges.len(), diff --git a/crates/pgt_suppressions/Cargo.toml b/crates/pgt_suppressions/Cargo.toml new file mode 100644 index 000000000..ee723b3bf --- /dev/null +++ b/crates/pgt_suppressions/Cargo.toml @@ -0,0 +1,18 @@ + +[package] +authors.workspace = true +categories.workspace = true +description = "Provides an API that parses suppressions from SQL files, and provides a way to check if a diagnostic is suppressed." 
+edition.workspace = true +homepage.workspace = true +keywords.workspace = true +license.workspace = true +name = "pgt_suppressions" +repository.workspace = true +version = "0.0.0" + +[dependencies] +pgt_analyse = { workspace = true } +pgt_diagnostics = { workspace = true } +pgt_text_size = { workspace = true } +tracing = { workspace = true } diff --git a/crates/pgt_suppressions/src/lib.rs b/crates/pgt_suppressions/src/lib.rs new file mode 100644 index 000000000..2577ea412 --- /dev/null +++ b/crates/pgt_suppressions/src/lib.rs @@ -0,0 +1,351 @@ +use std::collections::HashMap; +pub mod parser; +pub mod suppression; + +use pgt_analyse::RuleFilter; +use pgt_diagnostics::{Diagnostic, MessageAndDescription}; + +pub mod line_index; + +use line_index::LineIndex; + +use crate::{ + parser::SuppressionsParser, + suppression::{RangeSuppression, RuleSpecifier, Suppression, SuppressionDiagnostic}, +}; + +type Line = usize; + +#[derive(Debug, Default, Clone)] +pub struct Suppressions { + file_suppressions: Vec, + line_suppressions: std::collections::HashMap, + range_suppressions: Vec, + pub diagnostics: Vec, + line_index: LineIndex, +} + +impl From<&str> for Suppressions { + fn from(doc: &str) -> Self { + SuppressionsParser::parse(doc) + } +} +impl From for Suppressions { + fn from(doc: String) -> Self { + SuppressionsParser::parse(doc.as_str()) + } +} + +impl Suppressions { + /// Some diagnostics can be turned off via the configuration. + /// This will mark suppressions that try to suppress these disabled diagnostics as errors. 
+ pub fn get_disabled_diagnostic_suppressions_as_errors( + &self, + disabled_rules: &[RuleFilter<'_>], + ) -> Vec { + let mut diagnostics = vec![]; + + { + let disabled = self + .file_suppressions + .iter() + .filter(|s| s.rule_specifier.is_disabled(disabled_rules)); + + for suppr in disabled { + diagnostics.push(suppr.to_disabled_diagnostic()); + } + } + + { + let disabled = self + .line_suppressions + .iter() + .filter(|(_, s)| s.rule_specifier.is_disabled(disabled_rules)); + + for (_, suppr) in disabled { + diagnostics.push(suppr.to_disabled_diagnostic()); + } + } + + { + let disabled = self.range_suppressions.iter().filter(|s| { + s.start_suppression + .rule_specifier + .is_disabled(disabled_rules) + }); + + for range_suppr in disabled { + diagnostics.push(range_suppr.start_suppression.to_disabled_diagnostic()); + } + } + + diagnostics + } + + pub fn get_unused_suppressions_as_errors( + &self, + diagnostics: &[D], + ) -> Vec { + let mut results = vec![]; + + let mut diagnostics_by_line: HashMap> = HashMap::new(); + for diag in diagnostics { + if let Some(line) = diag + .location() + .span + .and_then(|sp| self.line_index.line_for_offset(sp.start())) + { + let entry = diagnostics_by_line.entry(line); + entry + .and_modify(|current| { + current.push(diag); + }) + .or_insert(vec![diag]); + } + } + + // Users may use many suppressions for a single diagnostic, like so: + // ``` + // -- pgt-ignore lint/safety/banDropTable + // -- pgt-ignore lint/safety/banDropColumn + // + // ``` + // So to find a matching diagnostic for any suppression, we're moving + // down lines until we find a line where there's no suppression. 
+ for (line, suppr) in &self.line_suppressions { + let mut expected_diagnostic_line = line + 1; + while self + .line_suppressions + .contains_key(&expected_diagnostic_line) + { + expected_diagnostic_line += 1; + } + + if diagnostics_by_line + .get(&expected_diagnostic_line) + .is_some_and(|diags| { + diags.iter().any(|d| { + d.category() + .is_some_and(|cat| match RuleSpecifier::try_from(cat.name()) { + Ok(spec) => suppr.matches(&spec), + Err(_) => false, + }) + }) + }) + { + continue; + } else { + results.push(SuppressionDiagnostic { + span: suppr.suppression_range, + message: MessageAndDescription::from( + "This suppression has no effect.".to_string(), + ), + }) + } + } + + results + } + + pub fn is_suppressed(&self, diagnostic: &D) -> bool { + diagnostic + .category() + .map(|c| match RuleSpecifier::try_from(c.name()) { + Ok(specifier) => { + self.by_file_suppression(&specifier) + || self.by_range_suppression(diagnostic, &specifier) + || self.by_line_suppression(diagnostic, &specifier) + } + Err(_) => false, + }) + .unwrap_or(false) + } + + fn by_file_suppression(&self, specifier: &RuleSpecifier) -> bool { + self.file_suppressions.iter().any(|s| s.matches(specifier)) + } + + fn by_line_suppression( + &self, + diagnostic: &D, + specifier: &RuleSpecifier, + ) -> bool { + self.get_eligible_line_suppressions_for_diagnostic(diagnostic) + .iter() + .any(|s| s.matches(specifier)) + } + + fn by_range_suppression( + &self, + diagnostic: &D, + specifier: &RuleSpecifier, + ) -> bool { + self.range_suppressions.iter().any(|range_suppr| { + range_suppr.start_suppression.matches(specifier) + && diagnostic + .location() + .span + .is_some_and(|sp| range_suppr.suppressed_range.contains_range(sp)) + }) + } + + fn get_eligible_line_suppressions_for_diagnostic( + &self, + diagnostic: &D, + ) -> Vec<&Suppression> { + diagnostic + .location() + .span + .and_then(|span| self.line_index.line_for_offset(span.start())) + .filter(|line_no| *line_no > 0) + .map(|mut line_no| { + let mut 
eligible = vec![]; + + // one-for-one, we're checking the lines above a diagnostic location + // until there are no more suppressions + line_no -= 1; + while let Some(suppr) = self.line_suppressions.get(&line_no) { + eligible.push(suppr); + line_no -= 1; + } + + eligible + }) + .unwrap_or_default() + } +} + +#[cfg(test)] +mod tests { + use pgt_diagnostics::{Diagnostic, MessageAndDescription}; + use pgt_text_size::TextRange; + + use crate::suppression::SuppressionDiagnostic; + + #[derive(Clone, Debug, Diagnostic)] + #[diagnostic(category = "lint", severity = Error)] + pub struct TestDiagnostic { + #[location(span)] + pub span: TextRange, + } + + #[test] + fn correctly_suppresses_diagnostics_at_top_level() { + let doc = r#" + -- pgt-ignore-all lint + + select 1; + "#; + + let len_doc: u32 = doc.len().try_into().unwrap(); + + let suppressions = super::Suppressions::from(doc); + + assert!(suppressions.is_suppressed(&TestDiagnostic { + span: TextRange::new((len_doc - 10).into(), len_doc.into()), + })); + } + + #[test] + fn correctly_suppresses_diagnostics_at_line() { + let doc = r#" + select 2; + + -- pgt-ignore lint + select 1; + "#; + + let suppressions = super::Suppressions::from(doc); + + assert!(suppressions.is_suppressed(&TestDiagnostic { + span: TextRange::new(67.into(), 76.into()), + })); + } + + #[test] + fn correctly_suppresses_with_multiple_line_diagnostics() { + let doc = r#" + select 2; + + -- pgt-ignore lint + -- pgt-ignore syntax + select 1; + "#; + + let suppressions = super::Suppressions::from(doc); + + assert!(suppressions.is_suppressed(&TestDiagnostic { + span: TextRange::new(100.into(), 109.into()), + })); + } + + #[test] + fn correctly_suppresses_diagnostics_with_ranges() { + let doc = r#" + select 2; + + -- pgt-ignore-start lint + select 1; + -- pgt-ignore-end lint + "#; + + let suppressions = super::Suppressions::from(doc); + + assert!(suppressions.is_suppressed(&TestDiagnostic { + span: TextRange::new(73.into(), 82.into()), + })); + } + + #[test] 
+ fn marks_disabled_rule_suppressions_as_errors() { + let doc = r#" + select 2; + + -- pgt-ignore lint/safety/banDropTable + select 1; + "#; + + let suppressions = super::Suppressions::from(doc); + + let disabled_diagnostics = suppressions.get_disabled_diagnostic_suppressions_as_errors(&[ + pgt_analyse::RuleFilter::Group("safety"), + ]); + + assert_eq!(disabled_diagnostics.len(), 1); + + assert_eq!( + disabled_diagnostics[0], + SuppressionDiagnostic { + span: TextRange::new(36.into(), 74.into()), + message: MessageAndDescription::from("This rule has been disabled via the configuration. The suppression has no effect.".to_string()) + } + ); + } + + #[test] + fn marks_unused_suppressions_as_errors() { + let doc = r#" + select 2; + + -- pgt-ignore lint + select 1; + "#; + + // no diagnostics + let diagnostics: Vec = vec![]; + + let suppressions = super::Suppressions::from(doc); + + let unused_diagnostics = suppressions.get_unused_suppressions_as_errors(&diagnostics); + + assert_eq!(unused_diagnostics.len(), 1); + + assert_eq!( + unused_diagnostics[0], + SuppressionDiagnostic { + span: TextRange::new(36.into(), 54.into()), + message: MessageAndDescription::from("This suppression has no effect.".to_string()) + } + ); + } +} diff --git a/crates/pgt_suppressions/src/line_index.rs b/crates/pgt_suppressions/src/line_index.rs new file mode 100644 index 000000000..16af72dde --- /dev/null +++ b/crates/pgt_suppressions/src/line_index.rs @@ -0,0 +1,43 @@ +use pgt_text_size::TextSize; + +#[derive(Debug, Default, Clone)] +pub(crate) struct LineIndex { + line_offset: Vec, +} + +impl LineIndex { + pub fn new(doc: &str) -> Self { + let line_offset = std::iter::once(0) + .chain(doc.match_indices(&['\n', '\r']).filter_map(|(i, _)| { + let bytes = doc.as_bytes(); + + match bytes[i] { + // Filter out the `\r` in `\r\n` to avoid counting the line break twice + b'\r' if i + 1 < bytes.len() && bytes[i + 1] == b'\n' => None, + _ => Some(i + 1), + } + })) + .map(|i| 
TextSize::try_from(i).expect("integer overflow")) + .collect(); + + Self { line_offset } + } + + pub fn offset_for_line(&self, idx: usize) -> Option<&pgt_text_size::TextSize> { + self.line_offset.get(idx) + } + + pub fn line_for_offset(&self, offset: TextSize) -> Option { + self.line_offset + .iter() + .enumerate() + .filter_map(|(i, line_offset)| { + if offset >= *line_offset { + Some(i) + } else { + None + } + }) + .next_back() + } +} diff --git a/crates/pgt_suppressions/src/parser.rs b/crates/pgt_suppressions/src/parser.rs new file mode 100644 index 000000000..663e52fef --- /dev/null +++ b/crates/pgt_suppressions/src/parser.rs @@ -0,0 +1,353 @@ +use std::{ + iter::{Enumerate, Peekable}, + str::Lines, +}; + +use pgt_diagnostics::MessageAndDescription; +use pgt_text_size::TextRange; + +use crate::{ + Suppressions, + line_index::LineIndex, + suppression::{RangeSuppression, Suppression, SuppressionDiagnostic, SuppressionKind}, +}; + +#[derive(Debug)] +pub(crate) struct SuppressionsParser<'a> { + file_suppressions: Vec, + line_suppressions: std::collections::HashMap, + range_suppressions: Vec, + diagnostics: Vec, + lines: Peekable>>, + line_index: LineIndex, + + start_suppressions_stack: Vec, +} + +impl<'a> SuppressionsParser<'a> { + pub fn new(doc: &'a str) -> Self { + let lines = doc.lines().enumerate().peekable(); + + Self { + file_suppressions: vec![], + line_suppressions: std::collections::HashMap::default(), + range_suppressions: vec![], + diagnostics: vec![], + lines, + line_index: LineIndex::new(doc), + start_suppressions_stack: vec![], + } + } + + pub fn parse(doc: &str) -> Suppressions { + let mut parser = SuppressionsParser::new(doc); + + parser.parse_file_suppressions(); + parser.parse_suppressions(); + parser.handle_unmatched_start_suppressions(); + + Suppressions { + file_suppressions: parser.file_suppressions, + line_suppressions: parser.line_suppressions, + range_suppressions: parser.range_suppressions, + diagnostics: parser.diagnostics, + line_index: 
parser.line_index, + } + } + + /// Will parse the suppressions at the start of the file. + /// As soon as anything is encountered that's not a `pgt-ignore-all` + /// suppression or an empty line, this will stop. + fn parse_file_suppressions(&mut self) { + while let Some((_, preview)) = self.lines.peek() { + if preview.trim().is_empty() { + self.lines.next(); + continue; + } + + if !preview.trim().starts_with("-- pgt-ignore-all") { + return; + } + + let (idx, line) = self.lines.next().unwrap(); + + let offset = self.line_index.offset_for_line(idx).unwrap(); + + match Suppression::from_line(line, offset) { + Ok(suppr) => self.file_suppressions.push(suppr), + Err(diag) => self.diagnostics.push(diag), + } + } + } + + fn parse_suppressions(&mut self) { + for (idx, line) in self.lines.by_ref() { + if !line.trim().starts_with("-- pgt-ignore") { + continue; + } + + let offset = self.line_index.offset_for_line(idx).unwrap(); + + let suppr = match Suppression::from_line(line, offset) { + Ok(suppr) => suppr, + Err(diag) => { + self.diagnostics.push(diag); + continue; + } + }; + + match suppr.kind { + SuppressionKind::File => { + self.diagnostics.push(SuppressionDiagnostic { + span: suppr.suppression_range, + message: MessageAndDescription::from( + "File suppressions should be at the top of the file.".to_string(), + ), + }); + } + + SuppressionKind::Line => { + self.line_suppressions.insert(idx, suppr); + } + + SuppressionKind::Start => self.start_suppressions_stack.push(suppr), + SuppressionKind::End => { + let matching_start_idx = self + .start_suppressions_stack + .iter() + .enumerate() + .filter_map(|(idx, s)| { + if s.rule_specifier == suppr.rule_specifier { + Some(idx) + } else { + None + } + }) + .next_back(); + + if let Some(start_idx) = matching_start_idx { + let start = self.start_suppressions_stack.remove(start_idx); + + let full_range = TextRange::new( + start.suppression_range.start(), + suppr.suppression_range.end(), + ); + + 
self.range_suppressions.push(RangeSuppression { + suppressed_range: full_range, + start_suppression: start, + }); + } else { + self.diagnostics.push(SuppressionDiagnostic { + span: suppr.suppression_range, + message: MessageAndDescription::from( + "This end suppression does not have a matching start.".to_string(), + ), + }); + } + } + } + } + } + + /// If we have `pgt-ignore-start` suppressions without matching end tags after parsing the entire file, + /// we'll report diagnostics for those. + fn handle_unmatched_start_suppressions(&mut self) { + let start_suppressions = std::mem::take(&mut self.start_suppressions_stack); + + for suppr in start_suppressions { + self.diagnostics.push(SuppressionDiagnostic { + span: suppr.suppression_range, + message: MessageAndDescription::from( + "This start suppression does not have a matching end.".to_string(), + ), + }); + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::suppression::{RuleSpecifier, SuppressionKind}; + + #[test] + fn test_parse_line_suppressions() { + let doc = r#" +SELECT 1; +-- pgt-ignore lint/safety/banDropColumn +SELECT 2; +"#; + let suppressions = SuppressionsParser::parse(doc); + + // Should have a line suppression on line 1 (0-based index) + let suppression = suppressions + .line_suppressions + .get(&2) + .expect("no suppression found"); + + assert_eq!(suppression.kind, SuppressionKind::Line); + assert_eq!( + suppression.rule_specifier, + RuleSpecifier::Rule( + "lint".to_string(), + "safety".to_string(), + "banDropColumn".to_string() + ) + ); + } + + #[test] + fn test_parse_multiple_line_suppressions() { + let doc = r#" +SELECT 1; +-- pgt-ignore lint/safety/banDropColumn +-- pgt-ignore lint/safety/banDropTable +-- pgt-ignore lint/safety/banDropNotNull +"#; + + let suppressions = SuppressionsParser::parse(doc); + + assert_eq!(suppressions.line_suppressions.len(), 3); + + assert_eq!( + suppressions + .line_suppressions + .get(&2) + .unwrap() + .rule_specifier + .rule(), + 
Some("banDropColumn") + ); + + assert_eq!( + suppressions + .line_suppressions + .get(&3) + .unwrap() + .rule_specifier + .rule(), + Some("banDropTable") + ); + + assert_eq!( + suppressions + .line_suppressions + .get(&4) + .unwrap() + .rule_specifier + .rule(), + Some("banDropNotNull") + ); + } + + #[test] + fn parses_file_level_suppressions() { + let doc = r#" +-- pgt-ignore-all lint +-- pgt-ignore-all typecheck + +SELECT 1; +-- pgt-ignore-all lint/safety +"#; + + let suppressions = SuppressionsParser::parse(doc); + + assert_eq!(suppressions.diagnostics.len(), 1); + assert_eq!(suppressions.file_suppressions.len(), 2); + + assert_eq!( + suppressions.file_suppressions[0].rule_specifier, + RuleSpecifier::Category("lint".to_string()) + ); + assert_eq!( + suppressions.file_suppressions[1].rule_specifier, + RuleSpecifier::Category("typecheck".to_string()) + ); + + assert_eq!( + suppressions.diagnostics[0].message.to_string(), + String::from("File suppressions should be at the top of the file.") + ); + } + + #[test] + fn parses_range_suppressions() { + let doc = r#" +-- pgt-ignore-start lint/safety/banDropTable +drop table users; +drop table auth; +drop table posts; +-- pgt-ignore-end lint/safety/banDropTable +"#; + + let suppressions = SuppressionsParser::parse(doc); + + assert_eq!(suppressions.range_suppressions.len(), 1); + + assert_eq!( + suppressions.range_suppressions[0], + RangeSuppression { + suppressed_range: TextRange::new(1.into(), 141.into()), + start_suppression: Suppression { + kind: SuppressionKind::Start, + rule_specifier: RuleSpecifier::Rule( + "lint".to_string(), + "safety".to_string(), + "banDropTable".to_string() + ), + suppression_range: TextRange::new(1.into(), 45.into()), + explanation: None, + }, + } + ); + } + + #[test] + fn parses_range_suppressions_with_errors() { + let doc = r#" +-- pgt-ignore-start lint/safety/banDropTable +drop table users; +-- pgt-ignore-start lint/safety/banDropTable +drop table auth; +drop table posts; +-- pgt-ignore-end 
lint/safety/banDropTable +-- pgt-ignore-end lint/safety/banDropColumn +"#; + + let suppressions = SuppressionsParser::parse(doc); + + assert_eq!(suppressions.range_suppressions.len(), 1); + assert_eq!(suppressions.diagnostics.len(), 2); + + // the inner, nested start/end combination is recognized. + assert_eq!( + suppressions.range_suppressions[0], + RangeSuppression { + suppressed_range: TextRange::new(64.into(), 186.into()), + start_suppression: Suppression { + kind: SuppressionKind::Start, + rule_specifier: RuleSpecifier::Rule( + "lint".to_string(), + "safety".to_string(), + "banDropTable".to_string() + ), + suppression_range: TextRange::new(64.into(), 108.into()), + explanation: None, + }, + } + ); + + // the outer end is an error + assert_eq!( + suppressions.diagnostics[0].message.to_string(), + String::from("This end suppression does not have a matching start.") + ); + + // the outer start is an error + assert_eq!( + suppressions.diagnostics[1].message.to_string(), + String::from("This start suppression does not have a matching end.") + ); + } +} diff --git a/crates/pgt_suppressions/src/suppression.rs b/crates/pgt_suppressions/src/suppression.rs new file mode 100644 index 000000000..6ebaf25c8 --- /dev/null +++ b/crates/pgt_suppressions/src/suppression.rs @@ -0,0 +1,459 @@ +use pgt_analyse::RuleFilter; +use pgt_diagnostics::{Category, Diagnostic, MessageAndDescription}; +use pgt_text_size::{TextRange, TextSize}; + +/// A specialized diagnostic for the typechecker. +/// +/// Type diagnostics are always **errors**. 
+#[derive(Clone, Debug, Diagnostic, PartialEq)] +#[diagnostic(category = "lint", severity = Warning)] +pub struct SuppressionDiagnostic { + #[location(span)] + pub span: TextRange, + #[description] + #[message] + pub message: MessageAndDescription, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub(crate) enum SuppressionKind { + File, + Line, + Start, + End, +} + +#[derive(Debug, PartialEq, Clone, Eq)] +/// Represents the suppressed rule, as written in the suppression comment. +/// e.g. `lint/safety/banDropColumn`, or `lint/safety`, or just `lint`. +/// The format of a rule specifier string is `(/(/))`. +/// +/// `RuleSpecifier` can only be constructed from a `&str` that matches a valid +/// [pgt_diagnostics::Category]. +pub(crate) enum RuleSpecifier { + Category(String), + Group(String, String), + Rule(String, String, String), +} + +impl RuleSpecifier { + pub(crate) fn category(&self) -> &str { + match self { + RuleSpecifier::Category(rule_category) => rule_category, + RuleSpecifier::Group(rule_category, _) => rule_category, + RuleSpecifier::Rule(rule_category, _, _) => rule_category, + } + } + + pub(crate) fn group(&self) -> Option<&str> { + match self { + RuleSpecifier::Category(_) => None, + RuleSpecifier::Group(_, gr) => Some(gr), + RuleSpecifier::Rule(_, gr, _) => Some(gr), + } + } + + pub(crate) fn rule(&self) -> Option<&str> { + match self { + RuleSpecifier::Rule(_, _, ru) => Some(ru), + _ => None, + } + } + + pub(crate) fn is_disabled(&self, disabled_rules: &[RuleFilter<'_>]) -> bool { + // note: it is not possible to disable entire categories via the config + let group = self.group(); + let rule = self.rule(); + + disabled_rules.iter().any(|r| match r { + RuleFilter::Group(gr) => group.is_some_and(|specifier_group| specifier_group == *gr), + RuleFilter::Rule(gr, ru) => group.is_some_and(|specifier_group| { + rule.is_some_and(|specifier_rule| specifier_group == *gr && specifier_rule == *ru) + }), + }) + } +} + +impl From<&Category> for RuleSpecifier { + fn 
from(category: &Category) -> Self { + let mut specifiers = category.name().split('/').map(|s| s.to_string()); + + let category_str = specifiers.next(); + let group = specifiers.next(); + let rule = specifiers.next(); + + match (category_str, group, rule) { + (Some(c), Some(g), Some(r)) => RuleSpecifier::Rule(c, g, r), + (Some(c), Some(g), None) => RuleSpecifier::Group(c, g), + (Some(c), None, None) => RuleSpecifier::Category(c), + _ => unreachable!(), + } + } +} + +impl TryFrom<&str> for RuleSpecifier { + type Error = String; + + fn try_from(specifier_str: &str) -> Result { + let cat = specifier_str + .parse::<&Category>() + .map_err(|_| "Invalid rule.".to_string())?; + + Ok(RuleSpecifier::from(cat)) + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub(crate) struct Suppression { + pub(crate) suppression_range: TextRange, + pub(crate) kind: SuppressionKind, + pub(crate) rule_specifier: RuleSpecifier, + #[allow(unused)] + pub(crate) explanation: Option, +} + +impl Suppression { + /// Creates a suppression from a suppression comment line. + /// The line start must match `-- pgt-ignore`, otherwise, this will panic. + /// Leading whitespace is ignored. + pub(crate) fn from_line(line: &str, offset: &TextSize) -> Result { + let start_trimmed = line.trim_ascii_start(); + let leading_whitespace_offset = line.len() - start_trimmed.len(); + let trimmed = start_trimmed.trim_ascii_end(); + + assert!( + start_trimmed.starts_with("-- pgt-ignore"), + "Only try parsing suppressions from lines starting with `-- pgt-ignore`." 
+ ); + + let full_offset = *offset + TextSize::new(leading_whitespace_offset.try_into().unwrap()); + let span = TextRange::new( + full_offset, + pgt_text_size::TextSize::new(trimmed.len().try_into().unwrap()) + full_offset, + ); + + let (line, explanation) = match trimmed.split_once(':') { + Some((suppr, explanation)) => (suppr, Some(explanation.trim())), + None => (trimmed, None), + }; + + let mut parts = line.split_ascii_whitespace(); + + let _ = parts.next(); + let kind = match parts.next().unwrap() { + "pgt-ignore-all" => SuppressionKind::File, + "pgt-ignore-start" => SuppressionKind::Start, + "pgt-ignore-end" => SuppressionKind::End, + "pgt-ignore" => SuppressionKind::Line, + k => { + return Err(SuppressionDiagnostic { + span, + message: MessageAndDescription::from(format!( + "'{}' is not a valid suppression tag.", + k, + )), + }); + } + }; + + let specifier_str = match parts.next() { + Some(it) => it, + None => { + return Err(SuppressionDiagnostic { + span, + message: MessageAndDescription::from( + "You must specify which lints to suppress.".to_string(), + ), + }); + } + }; + + let rule_specifier = + RuleSpecifier::try_from(specifier_str).map_err(|e| SuppressionDiagnostic { + span, + message: MessageAndDescription::from(e), + })?; + + Ok(Self { + rule_specifier, + kind, + suppression_range: span, + explanation: explanation.map(|e| e.to_string()), + }) + } + + pub(crate) fn matches(&self, diagnostic_specifier: &RuleSpecifier) -> bool { + let d_category = diagnostic_specifier.category(); + let d_group = diagnostic_specifier.group(); + let d_rule = diagnostic_specifier.rule(); + + match &self.rule_specifier { + // Check if we suppress the entire category + RuleSpecifier::Category(cat) if cat == d_category => return true, + + // Check if we suppress the category & group + RuleSpecifier::Group(cat, group) => { + if cat == d_category && Some(group.as_str()) == d_group { + return true; + } + } + + // Check if we suppress the category & group & specific rule + 
RuleSpecifier::Rule(cat, group, rule) => { + if cat == d_category + && Some(group.as_str()) == d_group + && Some(rule.as_str()) == d_rule + { + return true; + } + } + + _ => {} + } + + false + } + + pub(crate) fn to_disabled_diagnostic(&self) -> SuppressionDiagnostic { + SuppressionDiagnostic { + span: self.suppression_range, + message: MessageAndDescription::from( + "This rule has been disabled via the configuration. The suppression has no effect." + .to_string(), + ), + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub(crate) struct RangeSuppression { + pub(crate) suppressed_range: TextRange, + pub(crate) start_suppression: Suppression, +} + +#[cfg(test)] +mod tests { + use super::*; + use pgt_text_size::{TextRange, TextSize}; + + #[test] + fn test_suppression_from_line_rule() { + let line = "-- pgt-ignore lint/safety/banDropColumn: explanation"; + let offset = &TextSize::new(0); + let suppression = Suppression::from_line(line, offset).unwrap(); + + assert_eq!(suppression.kind, SuppressionKind::Line); + assert_eq!( + suppression.rule_specifier, + RuleSpecifier::Rule( + "lint".to_string(), + "safety".to_string(), + "banDropColumn".to_string() + ) + ); + assert_eq!(suppression.explanation.as_deref(), Some("explanation")); + } + + #[test] + fn test_suppression_from_line_group() { + let line = "-- pgt-ignore lint/safety: explanation"; + let offset = &TextSize::new(0); + let suppression = Suppression::from_line(line, offset).unwrap(); + + assert_eq!(suppression.kind, SuppressionKind::Line); + assert_eq!( + suppression.rule_specifier, + RuleSpecifier::Group("lint".to_string(), "safety".to_string()) + ); + assert_eq!(suppression.explanation.as_deref(), Some("explanation")); + } + + #[test] + fn test_suppression_from_line_category() { + let line = "-- pgt-ignore lint"; + let offset = &TextSize::new(0); + let suppression = Suppression::from_line(line, offset).unwrap(); + + assert_eq!(suppression.kind, SuppressionKind::Line); + assert_eq!( + 
suppression.rule_specifier, + RuleSpecifier::Category("lint".to_string()) + ); + } + + #[test] + fn test_suppression_from_line_category_with_explanation() { + let line = "-- pgt-ignore lint: explanation"; + let offset = &TextSize::new(0); + let suppression = Suppression::from_line(line, offset).unwrap(); + + assert_eq!(suppression.kind, SuppressionKind::Line); + assert_eq!( + suppression.rule_specifier, + RuleSpecifier::Category("lint".to_string()) + ); + assert_eq!(suppression.explanation.as_deref(), Some("explanation")); + } + + #[test] + fn test_suppression_from_line_file_kind() { + let line = "-- pgt-ignore-all lint/safety/banDropColumn: explanation"; + let offset = &TextSize::new(0); + let suppression = Suppression::from_line(line, offset).unwrap(); + + assert_eq!(suppression.kind, SuppressionKind::File); + assert_eq!( + suppression.rule_specifier, + RuleSpecifier::Rule( + "lint".to_string(), + "safety".to_string(), + "banDropColumn".to_string() + ) + ); + assert_eq!(suppression.explanation.as_deref(), Some("explanation")); + } + + #[test] + fn test_suppression_from_line_start_kind() { + let line = "-- pgt-ignore-start lint/safety/banDropColumn: explanation"; + let offset = &TextSize::new(0); + let suppression = Suppression::from_line(line, offset).unwrap(); + + assert_eq!(suppression.kind, SuppressionKind::Start); + assert_eq!( + suppression.rule_specifier, + RuleSpecifier::Rule( + "lint".to_string(), + "safety".to_string(), + "banDropColumn".to_string() + ) + ); + assert_eq!(suppression.explanation.as_deref(), Some("explanation")); + } + + #[test] + fn test_suppression_from_line_end_kind() { + let line = "-- pgt-ignore-end lint/safety/banDropColumn: explanation"; + let offset = &TextSize::new(0); + let suppression = Suppression::from_line(line, offset).unwrap(); + + assert_eq!(suppression.kind, SuppressionKind::End); + assert_eq!( + suppression.rule_specifier, + RuleSpecifier::Rule( + "lint".to_string(), + "safety".to_string(), + "banDropColumn".to_string() 
+ ) + ); + assert_eq!(suppression.explanation.as_deref(), Some("explanation")); + } + + #[test] + fn test_suppression_span_with_offset() { + let line = " \n-- pgt-ignore lint/safety/banDropColumn: explanation"; + let offset = TextSize::new(5); + let suppression = Suppression::from_line(line, &offset).unwrap(); + + let expected_start = offset + TextSize::new(5); + let expected_len = TextSize::new(line.trim_ascii().len() as u32); + + let expected_end = expected_start + expected_len; + let expected_span = TextRange::new(expected_start, expected_end); + + assert_eq!(suppression.suppression_range, expected_span); + } + + #[test] + fn test_suppression_from_line_invalid_tag_and_missing_specifier() { + let lines = vec![ + "-- pgt-ignore-foo lint/safety/banDropColumn: explanation", + "-- pgt-ignore foo lint/safety/banDropColumn: explanation", + "-- pgt-ignore xyz lint/safety/banDropColumn: explanation", + "-- pgt-ignore", + ]; + let offset = &TextSize::new(0); + for line in lines { + let result = Suppression::from_line(line, offset); + assert!(result.is_err(), "Expected error for line: {}", line); + } + } + + #[test] + fn test_suppression_matches() { + let cases = vec![ + // the category works for all groups & rules + ("-- pgt-ignore lint", "lint/safety/banDropNotNull", true), + ("-- pgt-ignore lint", "lint/safety/banDropColumn", true), + // the group works for all rules in that group + ( + "-- pgt-ignore lint/safety", + "lint/safety/banDropColumn", + true, + ), + ("-- pgt-ignore lint", "typecheck", false), + ("-- pgt-ignore lint/safety", "typecheck", false), + // a specific supppression only works for that same rule + ( + "-- pgt-ignore lint/safety/banDropColumn", + "lint/safety/banDropColumn", + true, + ), + ( + "-- pgt-ignore lint/safety/banDropColumn", + "lint/safety/banDropTable", + false, + ), + ]; + + let offset = &TextSize::new(0); + + for (suppr_line, specifier_str, expected) in cases { + let suppression = Suppression::from_line(suppr_line, offset).unwrap(); + let 
specifier = RuleSpecifier::try_from(specifier_str).unwrap(); + assert_eq!( + suppression.matches(&specifier), + expected, + "Suppression line '{}' vs specifier '{}' should be {}", + suppr_line, + specifier_str, + expected + ); + } + } + + #[test] + fn test_rule_specifier_is_disabled() { + use pgt_analyse::RuleFilter; + + // Group filter disables all rules in that group + let spec = RuleSpecifier::Rule( + "lint".to_string(), + "safety".to_string(), + "banDropColumn".to_string(), + ); + let disabled = vec![RuleFilter::Group("safety")]; + assert!(spec.is_disabled(&disabled)); + + let spec2 = RuleSpecifier::Rule( + "lint".to_string(), + "safety".to_string(), + "banDropColumn".to_string(), + ); + let disabled2 = vec![RuleFilter::Rule("safety", "banDropColumn")]; + assert!(spec2.is_disabled(&disabled2)); + + let disabled3 = vec![RuleFilter::Rule("safety", "otherRule")]; + assert!(!spec2.is_disabled(&disabled3)); + + let disabled4 = vec![RuleFilter::Group("perf")]; + assert!(!spec.is_disabled(&disabled4)); + + // one match is enough + let disabled5 = vec![ + RuleFilter::Group("perf"), + RuleFilter::Rule("safety", "banDropColumn"), + ]; + assert!(spec.is_disabled(&disabled5)); + } +} diff --git a/crates/pgt_test_utils/src/lib.rs b/crates/pgt_test_utils/src/lib.rs index 4d6d3070d..11bb1aebe 100644 --- a/crates/pgt_test_utils/src/lib.rs +++ b/crates/pgt_test_utils/src/lib.rs @@ -1 +1,85 @@ -pub mod test_database; +use std::fmt::Display; + +pub static MIGRATIONS: sqlx::migrate::Migrator = sqlx::migrate!("./testdb_migrations"); + +static CURSOR_POS: char = '€'; + +#[derive(Clone)] +pub struct QueryWithCursorPosition { + sql: String, + position: usize, +} + +impl QueryWithCursorPosition { + pub fn cursor_marker() -> char { + CURSOR_POS + } + + pub fn get_text_and_position(&self) -> (usize, String) { + (self.position, self.sql.clone()) + } +} + +impl From for QueryWithCursorPosition { + fn from(value: String) -> Self { + value.as_str().into() + } +} + +impl From<&str> for 
QueryWithCursorPosition { + fn from(value: &str) -> Self { + let position = value + .find(CURSOR_POS) + .expect("Use `QueryWithCursorPosition::cursor_marker()` to insert cursor position into your Query."); + + QueryWithCursorPosition { + sql: value.replace(CURSOR_POS, "").trim().to_string(), + position, + } + } +} + +impl Display for QueryWithCursorPosition { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.sql) + } +} + +#[cfg(test)] +mod tests { + + use super::QueryWithCursorPosition; + + #[test] + fn input_query_should_extract_correct_position() { + struct TestCase { + query: String, + expected_pos: usize, + expected_sql_len: usize, + } + + let cases = vec![ + TestCase { + query: format!("select * from{}", QueryWithCursorPosition::cursor_marker()), + expected_pos: 13, + expected_sql_len: 13, + }, + TestCase { + query: format!("{}select * from", QueryWithCursorPosition::cursor_marker()), + expected_pos: 0, + expected_sql_len: 13, + }, + TestCase { + query: format!("select {} from", QueryWithCursorPosition::cursor_marker()), + expected_pos: 7, + expected_sql_len: 12, + }, + ]; + + for case in cases { + let query = QueryWithCursorPosition::from(case.query.as_str()); + assert_eq!(query.position, case.expected_pos); + assert_eq!(query.sql.len(), case.expected_sql_len); + } + } +} diff --git a/crates/pgt_test_utils/src/test_database.rs b/crates/pgt_test_utils/src/test_database.rs deleted file mode 100644 index 67415c4a8..000000000 --- a/crates/pgt_test_utils/src/test_database.rs +++ /dev/null @@ -1,42 +0,0 @@ -use sqlx::{Executor, PgPool, postgres::PgConnectOptions}; -use uuid::Uuid; - -// TODO: Work with proper config objects instead of a connection_string. -// With the current implementation, we can't parse the password from the connection string. 
-pub async fn get_new_test_db() -> PgPool { - dotenv::dotenv().expect("Unable to load .env file for tests"); - - let connection_string = std::env::var("DATABASE_URL").expect("DATABASE_URL not set"); - let password = std::env::var("DB_PASSWORD").unwrap_or("postgres".into()); - - let options_from_conn_str: PgConnectOptions = connection_string - .parse() - .expect("Invalid Connection String"); - - let host = options_from_conn_str.get_host(); - assert!( - host == "localhost" || host == "127.0.0.1", - "Running tests against non-local database!" - ); - - let options_without_db_name = PgConnectOptions::new() - .host(host) - .port(options_from_conn_str.get_port()) - .username(options_from_conn_str.get_username()) - .password(&password); - - let postgres = sqlx::PgPool::connect_with(options_without_db_name.clone()) - .await - .expect("Unable to connect to test postgres instance"); - - let database_name = Uuid::new_v4().to_string(); - - postgres - .execute(format!(r#"create database "{}";"#, database_name).as_str()) - .await - .expect("Failed to create test database."); - - sqlx::PgPool::connect_with(options_without_db_name.database(&database_name)) - .await - .expect("Could not connect to test database") -} diff --git a/crates/pgt_test_utils/testdb_migrations/0001_setup-roles.sql b/crates/pgt_test_utils/testdb_migrations/0001_setup-roles.sql new file mode 100644 index 000000000..1f1d50b3f --- /dev/null +++ b/crates/pgt_test_utils/testdb_migrations/0001_setup-roles.sql @@ -0,0 +1,32 @@ +do $$ +begin + +begin + create role owner superuser createdb login bypassrls; +exception + when duplicate_object then + null; + when unique_violation then + null; +end; + +begin + create role test_login login; +exception + when duplicate_object then + null; + when unique_violation then + null; +end; + +begin + create role test_nologin; +exception + when duplicate_object then + null; + when unique_violation then + null; +end; + +end +$$; \ No newline at end of file diff --git 
a/crates/pgt_query_proto_parser/Cargo.toml b/crates/pgt_tokenizer/Cargo.toml similarity index 68% rename from crates/pgt_query_proto_parser/Cargo.toml rename to crates/pgt_tokenizer/Cargo.toml index 729c94b42..9cd4bf5e8 100644 --- a/crates/pgt_query_proto_parser/Cargo.toml +++ b/crates/pgt_tokenizer/Cargo.toml @@ -6,15 +6,14 @@ edition.workspace = true homepage.workspace = true keywords.workspace = true license.workspace = true -name = "pgt_query_proto_parser" +name = "pgt_tokenizer" repository.workspace = true version = "0.0.0" [dependencies] -convert_case = "0.6.0" -protobuf = "3.3.0" -protobuf-parse = "3.3.0" + +[dev-dependencies] +insta.workspace = true [lib] -doctest = false diff --git a/crates/pgt_tokenizer/README.md b/crates/pgt_tokenizer/README.md new file mode 100644 index 000000000..8fc21d349 --- /dev/null +++ b/crates/pgt_tokenizer/README.md @@ -0,0 +1 @@ +Heavily inspired by and copied from [squawk_lexer](https://github.com/sbdchd/squawk/tree/9acfecbbb7f3c7eedcbaf060e7b25f9afa136db3/crates/squawk_lexer). Thanks for making all the hard work MIT-licensed! diff --git a/crates/pgt_tokenizer/src/cursor.rs b/crates/pgt_tokenizer/src/cursor.rs new file mode 100644 index 000000000..64710f29c --- /dev/null +++ b/crates/pgt_tokenizer/src/cursor.rs @@ -0,0 +1,73 @@ +use std::str::Chars; + +/// Peekable iterator over a char sequence. +/// +/// Next characters can be peeked via `first` method, +/// and position can be shifted forward via `bump` method. +/// based on: +/// - +/// - +/// +pub(crate) struct Cursor<'a> { + /// Iterator over chars. Slightly faster than a &str. + chars: Chars<'a>, + len_remaining: usize, +} + +pub(crate) const EOF_CHAR: char = '\0'; + +impl<'a> Cursor<'a> { + pub(crate) fn new(input: &'a str) -> Cursor<'a> { + Cursor { + len_remaining: input.len(), + chars: input.chars(), + } + } + + /// Peeks the next symbol from the input stream without consuming it. + /// If requested position doesn't exist, `EOF_CHAR` is returned. 
+ /// However, getting `EOF_CHAR` doesn't always mean actual end of file, + /// it should be checked with `is_eof` method. + pub(crate) fn first(&self) -> char { + // `.next()` optimizes better than `.nth(0)` + self.chars.clone().next().unwrap_or(EOF_CHAR) + } + + /// Peeks the second next symbol from the input stream without consuming it. + /// If requested position doesn't exist, `EOF_CHAR` is returned. + /// However, getting `EOF_CHAR` doesn't always mean actual end of file, + /// it should be checked with `is_eof` method. + pub(crate) fn second(&self) -> char { + self.chars.clone().nth(1).unwrap_or(EOF_CHAR) + } + + /// Checks if there is nothing more to consume. + pub(crate) fn is_eof(&self) -> bool { + self.chars.as_str().is_empty() + } + + /// Returns amount of already consumed symbols. + pub(crate) fn pos_within_token(&self) -> u32 { + (self.len_remaining - self.chars.as_str().len()) as u32 + } + + /// Resets the number of bytes consumed to 0. + pub(crate) fn reset_pos_within_token(&mut self) { + self.len_remaining = self.chars.as_str().len(); + } + + /// Moves to the next character. + pub(crate) fn bump(&mut self) -> Option { + let c = self.chars.next()?; + Some(c) + } + + /// Eats symbols while predicate returns true or until the end of file is reached. + pub(crate) fn eat_while(&mut self, mut predicate: impl FnMut(char) -> bool) { + // It was tried making optimized version of this for eg. line comments, but + // LLVM can inline all of this and compile it down to fast iteration over bytes. 
+ while predicate(self.first()) && !self.is_eof() { + self.bump(); + } + } +} diff --git a/crates/pgt_tokenizer/src/lib.rs b/crates/pgt_tokenizer/src/lib.rs new file mode 100644 index 000000000..16093db8f --- /dev/null +++ b/crates/pgt_tokenizer/src/lib.rs @@ -0,0 +1,926 @@ +mod cursor; +mod token; +use cursor::{Cursor, EOF_CHAR}; +pub use token::{Base, LiteralKind, NamedParamKind, Token, TokenKind}; + +// via: https://github.com/postgres/postgres/blob/db0c96cc18aec417101e37e59fcc53d4bf647915/src/backend/parser/scan.l#L346 +// ident_start [A-Za-z\200-\377_] +const fn is_ident_start(c: char) -> bool { + matches!(c, 'a'..='z' | 'A'..='Z' | '_' | '\u{80}'..='\u{FF}') +} + +// ident_cont [A-Za-z\200-\377_0-9\$] +const fn is_ident_cont(c: char) -> bool { + matches!(c, 'a'..='z' | 'A'..='Z' | '_' | '0'..='9' | '$' | '\u{80}'..='\u{FF}') +} + +// whitespace +// - https://github.com/postgres/postgres/blob/db0c96cc18aec417101e37e59fcc53d4bf647915/src/backend/parser/scansup.c#L107-L128 +// - https://github.com/postgres/postgres/blob/db0c96cc18aec417101e37e59fcc53d4bf647915/src/backend/parser/scan.l#L204-L229 + +const fn is_space(c: char) -> bool { + matches!( + c, ' ' // space + ) +} + +const fn is_tab(c: char) -> bool { + matches!( + c, '\t' // tab + ) +} + +const fn is_line_ending(c: char) -> bool { + matches!( + c, + '\n' | '\r' // newline or carriage return + ) +} + +const fn is_vertical_tab(c: char) -> bool { + matches!( + c, '\u{000B}' // vertical tab + ) +} + +const fn is_form_feed(c: char) -> bool { + matches!( + c, '\u{000C}' // form feed + ) +} + +impl Cursor<'_> { + // see: https://github.com/rust-lang/rust/blob/ba1d7f4a083e6402679105115ded645512a7aea8/compiler/rustc_lexer/src/lib.rs#L339 + pub(crate) fn advance_token(&mut self) -> Token { + let Some(first_char) = self.bump() else { + return Token::new(TokenKind::Eof, 0); + }; + let token_kind = match first_char { + // Slash, comment or block comment. 
+ '/' => match self.first() { + '*' => self.block_comment(), + _ => TokenKind::Slash, + }, + '-' => match self.first() { + '-' => self.line_comment(), + _ => TokenKind::Minus, + }, + + c if is_space(c) => { + self.eat_while(is_space); + TokenKind::Space + } + + c if is_tab(c) => { + self.eat_while(is_tab); + TokenKind::Tab + } + + c if is_line_ending(c) => self.line_ending_sequence(c), + + c if is_vertical_tab(c) => { + self.eat_while(is_vertical_tab); + TokenKind::VerticalTab + } + + c if is_form_feed(c) => { + self.eat_while(is_form_feed); + TokenKind::FormFeed + } + + // https://www.postgresql.org/docs/current/sql-syntax-lexical.html#SQL-SYNTAX-STRINGS-UESCAPE + 'u' | 'U' => match self.first() { + '&' => { + self.bump(); + self.prefixed_string( + |terminated| LiteralKind::UnicodeEscStr { terminated }, + true, + ) + } + _ => self.ident_or_unknown_prefix(), + }, + + // escaped strings + 'e' | 'E' => { + self.prefixed_string(|terminated| LiteralKind::EscStr { terminated }, false) + } + + // bit string + 'b' | 'B' => { + self.prefixed_string(|terminated| LiteralKind::BitStr { terminated }, false) + } + + // hexadecimal byte string + 'x' | 'X' => { + self.prefixed_string(|terminated| LiteralKind::ByteStr { terminated }, false) + } + + // Identifier (this should be checked after other variant that can + // start as identifier). + c if is_ident_start(c) => self.ident(), + + // Numeric literal. + // see: https://www.postgresql.org/docs/current/sql-syntax-lexical.html#SQL-SYNTAX-CONSTANTS-NUMERIC + c @ '0'..='9' => { + let literal_kind = self.number(c); + TokenKind::Literal { kind: literal_kind } + } + '.' => match self.first() { + '0'..='9' => { + let literal_kind = self.number('.'); + TokenKind::Literal { kind: literal_kind } + } + _ => TokenKind::Dot, + }, + '@' => { + if is_ident_start(self.first()) { + // Named parameter with @ prefix. 
+ self.eat_while(is_ident_cont); + TokenKind::NamedParam { + kind: NamedParamKind::AtPrefix, + } + } else { + TokenKind::At + } + } + ':' => { + if self.first() == ':' { + self.bump(); + TokenKind::DoubleColon + } else { + // Named parameters in psql with different substitution styles. + // + // https://www.postgresql.org/docs/current/app-psql.html#APP-PSQL-INTERPOLATION + match self.first() { + '\'' => { + // Named parameter with colon prefix and single quotes. + self.bump(); + let terminated = self.single_quoted_string(); + let kind = NamedParamKind::ColonString { terminated }; + TokenKind::NamedParam { kind } + } + '"' => { + // Named parameter with colon prefix and double quotes. + self.bump(); + let terminated = self.double_quoted_string(); + let kind = NamedParamKind::ColonIdentifier { terminated }; + TokenKind::NamedParam { kind } + } + c if is_ident_start(c) => { + // Named parameter with colon prefix. + self.eat_while(is_ident_cont); + TokenKind::NamedParam { + kind: NamedParamKind::ColonRaw, + } + } + _ => TokenKind::Colon, + } + } + } + // One-symbol tokens. + ';' => TokenKind::Semi, + '\\' => TokenKind::Backslash, + ',' => TokenKind::Comma, + '(' => TokenKind::OpenParen, + ')' => TokenKind::CloseParen, + '[' => TokenKind::OpenBracket, + ']' => TokenKind::CloseBracket, + '#' => TokenKind::Pound, + '~' => TokenKind::Tilde, + '?' 
=> TokenKind::Question, + '$' => { + // Dollar quoted strings + if is_ident_start(self.first()) || self.first() == '$' { + // Get the start sequence of the dollar quote, i.e., 'foo' in $foo$hello$foo$ + // if ident does not continue and there is no terminating dollar + // sign, we have a positional param `$name` + let mut start = vec![]; + loop { + match self.first() { + '$' => { + self.bump(); + break self.dollar_quoted_string(start); + } + c if is_ident_cont(c) => { + self.bump(); + start.push(c); + } + _ => { + break TokenKind::NamedParam { + kind: NamedParamKind::DollarRaw, + }; + } + } + } + } else { + // positional parameter, e.g. `$1` + while self.first().is_ascii_digit() { + self.bump(); + } + TokenKind::PositionalParam + } + } + '`' => TokenKind::Backtick, + '=' => TokenKind::Eq, + '!' => TokenKind::Bang, + '<' => TokenKind::Lt, + '>' => TokenKind::Gt, + '&' => TokenKind::And, + '|' => TokenKind::Or, + '+' => TokenKind::Plus, + '*' => TokenKind::Star, + '^' => TokenKind::Caret, + '%' => TokenKind::Percent, + + // String literal + '\'' => { + let terminated = self.single_quoted_string(); + let kind = LiteralKind::Str { terminated }; + TokenKind::Literal { kind } + } + + // Quoted indentifiers + '"' => { + let terminated = self.double_quoted_string(); + TokenKind::QuotedIdent { terminated } + } + _ => TokenKind::Unknown, + }; + let res = Token::new(token_kind, self.pos_within_token()); + self.reset_pos_within_token(); + res + } + pub(crate) fn ident(&mut self) -> TokenKind { + self.eat_while(is_ident_cont); + TokenKind::Ident + } + + fn ident_or_unknown_prefix(&mut self) -> TokenKind { + // Start is already eaten, eat the rest of identifier. + self.eat_while(is_ident_cont); + // Known prefixes must have been handled earlier. So if + // we see a prefix here, it is definitely an unknown prefix. 
+ match self.first() { + '#' | '"' | '\'' => TokenKind::UnknownPrefix, + _ => TokenKind::Ident, + } + } + + // see: https://github.com/postgres/postgres/blob/db0c96cc18aec417101e37e59fcc53d4bf647915/src/backend/parser/scan.l#L227 + // comment ("--"{non_newline}*) + pub(crate) fn line_comment(&mut self) -> TokenKind { + self.bump(); + + self.eat_while(|c| c != '\n'); + TokenKind::LineComment + } + + // see: https://github.com/postgres/postgres/blob/db0c96cc18aec417101e37e59fcc53d4bf647915/src/backend/parser/scan.l#L324-L344 + pub(crate) fn block_comment(&mut self) -> TokenKind { + self.bump(); + + let mut depth = 1usize; + while let Some(c) = self.bump() { + match c { + '/' if self.first() == '*' => { + self.bump(); + depth += 1; + } + '*' if self.first() == '/' => { + self.bump(); + depth -= 1; + if depth == 0 { + // This block comment is closed, so for a construction like "/* */ */" + // there will be a successfully parsed block comment "/* */" + // and " */" will be processed separately. + break; + } + } + _ => (), + } + } + + TokenKind::BlockComment { + terminated: depth == 0, + } + } + + // invariant: we care about the number of consecutive newlines so we count them. 
+ // + // Postgres considers a DOS-style \r\n sequence as two successive newlines, but we care about + // logical line breaks and consider \r\n as one logical line break + fn line_ending_sequence(&mut self, prev: char) -> TokenKind { + // already consumed first line ending character (\n or \r) + let mut line_breaks = 1; + + // started with \r, check if it's part of \r\n + if prev == '\r' && self.first() == '\n' { + // consume the \n - \r\n still counts as 1 logical line break + self.bump(); + } + + // continue checking for more line endings + loop { + match self.first() { + '\r' if self.second() == '\n' => { + self.bump(); // consume \r + self.bump(); // consume \n + line_breaks += 1; + } + '\n' => { + self.bump(); + line_breaks += 1; + } + '\r' => { + self.bump(); + line_breaks += 1; + } + _ => break, + } + } + + TokenKind::LineEnding { count: line_breaks } + } + + fn prefixed_string( + &mut self, + mk_kind: fn(bool) -> LiteralKind, + allows_double: bool, + ) -> TokenKind { + match self.first() { + '\'' => { + self.bump(); + let terminated = self.single_quoted_string(); + let kind = mk_kind(terminated); + TokenKind::Literal { kind } + } + '"' if allows_double => { + self.bump(); + let terminated = self.double_quoted_string(); + TokenKind::QuotedIdent { terminated } + } + _ => self.ident_or_unknown_prefix(), + } + } + + fn number(&mut self, first_digit: char) -> LiteralKind { + let mut base = Base::Decimal; + if first_digit == '0' { + // Attempt to parse encoding base. 
+ match self.first() { + // https://github.com/postgres/postgres/blob/db0c96cc18aec417101e37e59fcc53d4bf647915/src/backend/parser/scan.l#L403 + 'b' | 'B' => { + base = Base::Binary; + self.bump(); + if !self.eat_decimal_digits() { + return LiteralKind::Int { + base, + empty_int: true, + }; + } + } + // https://github.com/postgres/postgres/blob/db0c96cc18aec417101e37e59fcc53d4bf647915/src/backend/parser/scan.l#L402 + 'o' | 'O' => { + base = Base::Octal; + self.bump(); + if !self.eat_decimal_digits() { + return LiteralKind::Int { + base, + empty_int: true, + }; + } + } + // https://github.com/postgres/postgres/blob/db0c96cc18aec417101e37e59fcc53d4bf647915/src/backend/parser/scan.l#L401 + 'x' | 'X' => { + base = Base::Hexadecimal; + self.bump(); + if !self.eat_hexadecimal_digits() { + return LiteralKind::Int { + base, + empty_int: true, + }; + } + } + // Not a base prefix; consume additional digits. + '0'..='9' | '_' => { + self.eat_decimal_digits(); + } + + // Also not a base prefix; nothing more to do here. + '.' | 'e' | 'E' => {} + + // Just a 0. + _ => { + return LiteralKind::Int { + base, + empty_int: false, + }; + } + } + } else { + // No base prefix, parse number in the usual way. + self.eat_decimal_digits(); + }; + + match self.first() { + '.' 
=> { + // might have stuff after the ., and if it does, it needs to start + // with a number + self.bump(); + let mut empty_exponent = false; + if self.first().is_ascii_digit() { + self.eat_decimal_digits(); + match self.first() { + 'e' | 'E' => { + self.bump(); + empty_exponent = !self.eat_float_exponent(); + } + _ => (), + } + } else { + match self.first() { + 'e' | 'E' => { + self.bump(); + empty_exponent = !self.eat_float_exponent(); + } + _ => (), + } + } + LiteralKind::Float { + base, + empty_exponent, + } + } + 'e' | 'E' => { + self.bump(); + let empty_exponent = !self.eat_float_exponent(); + LiteralKind::Float { + base, + empty_exponent, + } + } + _ => LiteralKind::Int { + base, + empty_int: false, + }, + } + } + + fn single_quoted_string(&mut self) -> bool { + // Parse until either quotes are terminated or error is detected. + loop { + match self.first() { + // Quotes might be terminated. + '\'' => { + self.bump(); + + match self.first() { + // encountered an escaped quote '' + '\'' => { + self.bump(); + } + // encountered terminating quote + _ => return true, + } + } + // End of file, stop parsing. + EOF_CHAR if self.is_eof() => break, + // Skip the character. + _ => { + self.bump(); + } + } + } + // String was not terminated. + false + } + + /// Eats double-quoted string and returns true + /// if string is terminated. + fn double_quoted_string(&mut self) -> bool { + while let Some(c) = self.bump() { + match c { + '"' if self.first() == '"' => { + // Bump again to skip escaped character. + self.bump(); + } + '"' => { + return true; + } + _ => (), + } + } + // End of file reached. 
+ false + } + + // https://www.postgresql.org/docs/16/sql-syntax-lexical.html#SQL-SYNTAX-DOLLAR-QUOTING + fn dollar_quoted_string(&mut self, start: Vec) -> TokenKind { + // we have a dollar quoted string deliminated with `$$` + if start.is_empty() { + loop { + self.eat_while(|c| c != '$'); + if self.is_eof() { + return TokenKind::Literal { + kind: LiteralKind::DollarQuotedString { terminated: false }, + }; + } + // eat $ + self.bump(); + if self.first() == '$' { + self.bump(); + return TokenKind::Literal { + kind: LiteralKind::DollarQuotedString { terminated: true }, + }; + } + } + } else { + loop { + self.eat_while(|c| c != start[0]); + if self.is_eof() { + return TokenKind::Literal { + kind: LiteralKind::DollarQuotedString { terminated: false }, + }; + } + + // might be the start of our start/end sequence + let mut match_count = 0; + for start_char in &start { + if self.first() == *start_char { + self.bump(); + match_count += 1; + } else { + self.bump(); + break; + } + } + + // closing '$' + let terminated = match_count == start.len(); + if self.first() == '$' && terminated { + self.bump(); + return TokenKind::Literal { + kind: LiteralKind::DollarQuotedString { terminated }, + }; + } + } + } + } + + fn eat_decimal_digits(&mut self) -> bool { + let mut has_digits = false; + loop { + match self.first() { + '_' => { + self.bump(); + } + '0'..='9' => { + has_digits = true; + self.bump(); + } + _ => break, + } + } + has_digits + } + + fn eat_hexadecimal_digits(&mut self) -> bool { + let mut has_digits = false; + loop { + match self.first() { + '_' => { + self.bump(); + } + '0'..='9' | 'a'..='f' | 'A'..='F' => { + has_digits = true; + self.bump(); + } + _ => break, + } + } + has_digits + } + + /// Eats the float exponent. Returns true if at least one digit was met, + /// and returns false otherwise. 
+ fn eat_float_exponent(&mut self) -> bool { + if self.first() == '-' || self.first() == '+' { + self.bump(); + } + self.eat_decimal_digits() + } +} + +/// Creates an iterator that produces tokens from the input string. +pub fn tokenize(input: &str) -> impl Iterator + '_ { + let mut cursor = Cursor::new(input); + std::iter::from_fn(move || { + let token = cursor.advance_token(); + if token.kind != TokenKind::Eof { + Some(token) + } else { + None + } + }) +} + +#[cfg(test)] +mod tests { + use std::fmt; + + use super::*; + use insta::assert_debug_snapshot; + + struct TokenDebug<'a> { + content: &'a str, + token: Token, + } + impl fmt::Debug for TokenDebug<'_> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{:?} @ {:?}", self.content, self.token.kind) + } + } + + impl<'a> TokenDebug<'a> { + fn new(token: Token, input: &'a str, start: u32) -> TokenDebug<'a> { + TokenDebug { + token, + content: &input[start as usize..(start + token.len) as usize], + } + } + } + + fn lex(input: &str) -> Vec { + let mut tokens = vec![]; + let mut start = 0; + + for token in tokenize(input) { + let length = token.len; + tokens.push(TokenDebug::new(token, input, start)); + start += length; + } + tokens + } + + #[test] + fn named_param_at() { + let result = lex("select 1 from c where id = @id;"); + assert_debug_snapshot!(result); + } + + #[test] + fn named_param_dollar_raw() { + let result = lex("select 1 from c where id = $id;"); + assert_debug_snapshot!(result); + } + + #[test] + fn named_param_colon_raw() { + let result = lex("select 1 from c where id = :id;"); + assert_debug_snapshot!(result); + } + + #[test] + fn debug_simple_cast() { + let result = lex("::test"); + assert_debug_snapshot!(result, @r###" + [ + "::" @ DoubleColon, + "test" @ Ident, + ] + "###); + } + + #[test] + fn named_param_colon_raw_vs_cast() { + let result = lex("select 1 from c where id::test = :id;"); + assert_debug_snapshot!(result); + } + + #[test] + fn named_param_colon_string() { + 
let result = lex("select 1 from c where id = :'id';"); + assert_debug_snapshot!(result); + } + + #[test] + fn named_param_colon_identifier() { + let result = lex("select 1 from c where id = :\"id\";"); + assert_debug_snapshot!(result); + } + + #[test] + fn lex_statement() { + let result = lex("select 1;"); + assert_debug_snapshot!(result); + } + + #[test] + fn block_comment() { + let result = lex(r#" +/* + * foo + * bar +*/"#); + assert_debug_snapshot!(result); + } + + #[test] + fn block_comment_unterminated() { + let result = lex(r#" +/* + * foo + * bar + /* +*/"#); + assert_debug_snapshot!(result); + } + + #[test] + fn line_comment() { + let result = lex(r#" +-- foooooooooooo bar buzz +"#); + assert_debug_snapshot!(result); + } + + #[test] + fn line_comment_whitespace() { + assert_debug_snapshot!(lex(r#" +select 'Hello' -- This is a comment +' World';"#)) + } + + #[test] + fn dollar_quoting() { + assert_debug_snapshot!(lex(r#" +$$Dianne's horse$$ +$SomeTag$Dianne's horse$SomeTag$ + +-- with dollar inside and matching tags +$foo$hello$world$bar$ +"#)) + } + + #[test] + fn dollar_strings_part2() { + assert_debug_snapshot!(lex(r#" +DO $doblock$ +end +$doblock$;"#)) + } + + #[test] + fn dollar_quote_mismatch_tags_simple() { + assert_debug_snapshot!(lex(r#" +-- dollar quoting with mismatched tags +$foo$hello world$bar$ +"#)); + } + + #[test] + fn dollar_quote_mismatch_tags_complex() { + assert_debug_snapshot!(lex(r#" +-- with dollar inside but mismatched tags +$foo$hello$world$bar$ +"#)); + } + + #[test] + fn numeric() { + assert_debug_snapshot!(lex(r#" +42 +3.5 +4. 
+.001 +.123e10 +5e2 +1.925e-3 +1e-10 +1e+10 +1e10 +4664.E+5 +"#)) + } + + #[test] + fn numeric_non_decimal() { + assert_debug_snapshot!(lex(r#" +0b100101 +0B10011001 +0o273 +0O755 +0x42f +0XFFFF +"#)) + } + + #[test] + fn numeric_with_seperators() { + assert_debug_snapshot!(lex(r#" +1_500_000_000 +0b10001000_00000000 +0o_1_755 +0xFFFF_FFFF +1.618_034 +"#)) + } + + #[test] + fn select_with_period() { + assert_debug_snapshot!(lex(r#" +select public.users; +"#)) + } + + #[test] + fn bitstring() { + assert_debug_snapshot!(lex(r#" +B'1001' +b'1001' +X'1FF' +x'1FF' +"#)) + } + + #[test] + fn string() { + assert_debug_snapshot!(lex(r#" +'Dianne''s horse' + +select 'foo '' +bar'; + +select 'foooo' + 'bar'; + + +'foo \\ \n \tbar' + +'forgot to close the string +"#)) + } + + #[test] + fn params() { + assert_debug_snapshot!(lex(r#" +select $1 + $2; + +select $1123123123123; + +select $; +"#)) + } + + #[test] + fn string_with_escapes() { + // https://www.postgresql.org/docs/current/sql-syntax-lexical.html#SQL-SYNTAX-STRINGS-ESCAPE + + assert_debug_snapshot!(lex(r#" +E'foo' + +e'bar' + +e'\b\f\n\r\t' + +e'\0\11\777' + +e'\x0\x11\xFF' + +e'\uAAAA \UFFFFFFFF' + +"#)) + } + + #[test] + fn string_unicode_escape() { + // https://www.postgresql.org/docs/current/sql-syntax-lexical.html#SQL-SYNTAX-STRINGS-UESCAPE + + assert_debug_snapshot!(lex(r#" +U&"d\0061t\+000061" + +U&"\0441\043B\043E\043D" + +u&'\0441\043B' + +U&"d!0061t!+000061" UESCAPE '!' 
+"#)) + } + + #[test] + fn quoted_ident() { + assert_debug_snapshot!(lex(r#" +"hello &1 -world"; + + +"hello-world +"#)) + } + + #[test] + fn quoted_ident_with_escape_quote() { + assert_debug_snapshot!(lex(r#" +"foo "" bar" +"#)) + } +} diff --git a/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__bitstring.snap b/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__bitstring.snap new file mode 100644 index 000000000..ff3eec09d --- /dev/null +++ b/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__bitstring.snap @@ -0,0 +1,16 @@ +--- +source: crates/pgt_tokenizer/src/lib.rs +expression: "lex(r#\"\nB'1001'\nb'1001'\nX'1FF'\nx'1FF'\n\"#)" +snapshot_kind: text +--- +[ + "\n" @ LineEnding { count: 1 }, + "B'1001'" @ Literal { kind: BitStr { terminated: true } }, + "\n" @ LineEnding { count: 1 }, + "b'1001'" @ Literal { kind: BitStr { terminated: true } }, + "\n" @ LineEnding { count: 1 }, + "X'1FF'" @ Literal { kind: ByteStr { terminated: true } }, + "\n" @ LineEnding { count: 1 }, + "x'1FF'" @ Literal { kind: ByteStr { terminated: true } }, + "\n" @ LineEnding { count: 1 }, +] diff --git a/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__block_comment.snap b/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__block_comment.snap new file mode 100644 index 000000000..22961ecfe --- /dev/null +++ b/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__block_comment.snap @@ -0,0 +1,9 @@ +--- +source: crates/pgt_tokenizer/src/lib.rs +expression: result +snapshot_kind: text +--- +[ + "\n" @ LineEnding { count: 1 }, + "/*\n * foo\n * bar\n*/" @ BlockComment { terminated: true }, +] diff --git a/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__block_comment_unterminated.snap b/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__block_comment_unterminated.snap new file mode 100644 index 000000000..4dd6957e5 --- /dev/null +++ b/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__block_comment_unterminated.snap @@ -0,0 +1,9 @@ 
+--- +source: crates/pgt_tokenizer/src/lib.rs +expression: result +snapshot_kind: text +--- +[ + "\n" @ LineEnding { count: 1 }, + "/*\n * foo\n * bar\n /*\n*/" @ BlockComment { terminated: false }, +] diff --git a/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__dollar_quote_mismatch_tags_complex.snap b/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__dollar_quote_mismatch_tags_complex.snap new file mode 100644 index 000000000..7f6a66496 --- /dev/null +++ b/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__dollar_quote_mismatch_tags_complex.snap @@ -0,0 +1,11 @@ +--- +source: crates/pgt_tokenizer/src/lib.rs +expression: "lex(r#\"\n-- with dollar inside but mismatched tags\n$foo$hello$world$bar$\n\"#)" +snapshot_kind: text +--- +[ + "\n" @ LineEnding { count: 1 }, + "-- with dollar inside but mismatched tags" @ LineComment, + "\n" @ LineEnding { count: 1 }, + "$foo$hello$world$bar$\n" @ Literal { kind: DollarQuotedString { terminated: false } }, +] diff --git a/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__dollar_quote_mismatch_tags_simple.snap b/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__dollar_quote_mismatch_tags_simple.snap new file mode 100644 index 000000000..9d6d43a06 --- /dev/null +++ b/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__dollar_quote_mismatch_tags_simple.snap @@ -0,0 +1,11 @@ +--- +source: crates/pgt_tokenizer/src/lib.rs +expression: "lex(r#\"\n-- dollar quoting with mismatched tags\n$foo$hello world$bar$\n\"#)" +snapshot_kind: text +--- +[ + "\n" @ LineEnding { count: 1 }, + "-- dollar quoting with mismatched tags" @ LineComment, + "\n" @ LineEnding { count: 1 }, + "$foo$hello world$bar$\n" @ Literal { kind: DollarQuotedString { terminated: false } }, +] diff --git a/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__dollar_quoting.snap b/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__dollar_quoting.snap new file mode 100644 index 000000000..ad1aa07de --- /dev/null +++ 
b/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__dollar_quoting.snap @@ -0,0 +1,15 @@ +--- +source: crates/pgt_tokenizer/src/lib.rs +expression: "lex(r#\"\n$$Dianne's horse$$\n$SomeTag$Dianne's horse$SomeTag$\n\n-- with dollar inside and matching tags\n$foo$hello$world$bar$\n\"#)" +snapshot_kind: text +--- +[ + "\n" @ LineEnding { count: 1 }, + "$$Dianne's horse$$" @ Literal { kind: DollarQuotedString { terminated: true } }, + "\n" @ LineEnding { count: 1 }, + "$SomeTag$Dianne's horse$SomeTag$" @ Literal { kind: DollarQuotedString { terminated: true } }, + "\n\n" @ LineEnding { count: 2 }, + "-- with dollar inside and matching tags" @ LineComment, + "\n" @ LineEnding { count: 1 }, + "$foo$hello$world$bar$\n" @ Literal { kind: DollarQuotedString { terminated: false } }, +] diff --git a/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__dollar_strings_part2.snap b/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__dollar_strings_part2.snap new file mode 100644 index 000000000..9aa494465 --- /dev/null +++ b/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__dollar_strings_part2.snap @@ -0,0 +1,12 @@ +--- +source: crates/pgt_tokenizer/src/lib.rs +expression: "lex(r#\"\nDO $doblock$\nend\n$doblock$;\"#)" +snapshot_kind: text +--- +[ + "\n" @ LineEnding { count: 1 }, + "DO" @ Ident, + " " @ Space, + "$doblock$\nend\n$doblock$" @ Literal { kind: DollarQuotedString { terminated: true } }, + ";" @ Semi, +] diff --git a/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__lex_statement.snap b/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__lex_statement.snap new file mode 100644 index 000000000..5679f2a73 --- /dev/null +++ b/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__lex_statement.snap @@ -0,0 +1,11 @@ +--- +source: crates/pgt_tokenizer/src/lib.rs +expression: result +snapshot_kind: text +--- +[ + "select" @ Ident, + " " @ Space, + "1" @ Literal { kind: Int { base: Decimal, empty_int: false } }, + ";" @ Semi, +] diff 
--git a/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__line_comment.snap b/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__line_comment.snap new file mode 100644 index 000000000..1cd8782a8 --- /dev/null +++ b/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__line_comment.snap @@ -0,0 +1,10 @@ +--- +source: crates/pgt_tokenizer/src/lib.rs +expression: result +snapshot_kind: text +--- +[ + "\n" @ LineEnding { count: 1 }, + "-- foooooooooooo bar buzz" @ LineComment, + "\n" @ LineEnding { count: 1 }, +] diff --git a/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__line_comment_whitespace.snap b/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__line_comment_whitespace.snap new file mode 100644 index 000000000..3cf5fb502 --- /dev/null +++ b/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__line_comment_whitespace.snap @@ -0,0 +1,16 @@ +--- +source: crates/pgt_tokenizer/src/lib.rs +expression: "lex(r#\"\nselect 'Hello' -- This is a comment\n' World';\"#)" +snapshot_kind: text +--- +[ + "\n" @ LineEnding { count: 1 }, + "select" @ Ident, + " " @ Space, + "'Hello'" @ Literal { kind: Str { terminated: true } }, + " " @ Space, + "-- This is a comment" @ LineComment, + "\n" @ LineEnding { count: 1 }, + "' World'" @ Literal { kind: Str { terminated: true } }, + ";" @ Semi, +] diff --git a/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__named_param_at.snap b/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__named_param_at.snap new file mode 100644 index 000000000..30bbe87fb --- /dev/null +++ b/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__named_param_at.snap @@ -0,0 +1,23 @@ +--- +source: crates/pgt_tokenizer/src/lib.rs +expression: result +snapshot_kind: text +--- +[ + "select" @ Ident, + " " @ Space, + "1" @ Literal { kind: Int { base: Decimal, empty_int: false } }, + " " @ Space, + "from" @ Ident, + " " @ Space, + "c" @ Ident, + " " @ Space, + "where" @ Ident, + " " @ Space, + "id" @ Ident, + " " @ 
Space, + "=" @ Eq, + " " @ Space, + "@id" @ NamedParam { kind: AtPrefix }, + ";" @ Semi, +] diff --git a/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__named_param_colon_identifier.snap b/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__named_param_colon_identifier.snap new file mode 100644 index 000000000..6986ab0e0 --- /dev/null +++ b/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__named_param_colon_identifier.snap @@ -0,0 +1,23 @@ +--- +source: crates/pgt_tokenizer/src/lib.rs +expression: result +snapshot_kind: text +--- +[ + "select" @ Ident, + " " @ Space, + "1" @ Literal { kind: Int { base: Decimal, empty_int: false } }, + " " @ Space, + "from" @ Ident, + " " @ Space, + "c" @ Ident, + " " @ Space, + "where" @ Ident, + " " @ Space, + "id" @ Ident, + " " @ Space, + "=" @ Eq, + " " @ Space, + ":\"id\"" @ NamedParam { kind: ColonIdentifier { terminated: true } }, + ";" @ Semi, +] diff --git a/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__named_param_colon_raw.snap b/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__named_param_colon_raw.snap new file mode 100644 index 000000000..f6db199d9 --- /dev/null +++ b/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__named_param_colon_raw.snap @@ -0,0 +1,23 @@ +--- +source: crates/pgt_tokenizer/src/lib.rs +expression: result +snapshot_kind: text +--- +[ + "select" @ Ident, + " " @ Space, + "1" @ Literal { kind: Int { base: Decimal, empty_int: false } }, + " " @ Space, + "from" @ Ident, + " " @ Space, + "c" @ Ident, + " " @ Space, + "where" @ Ident, + " " @ Space, + "id" @ Ident, + " " @ Space, + "=" @ Eq, + " " @ Space, + ":id" @ NamedParam { kind: ColonRaw }, + ";" @ Semi, +] diff --git a/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__named_param_colon_raw_vs_cast.snap b/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__named_param_colon_raw_vs_cast.snap new file mode 100644 index 000000000..ecfd48212 --- /dev/null +++ 
b/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__named_param_colon_raw_vs_cast.snap @@ -0,0 +1,25 @@ +--- +source: crates/pgt_tokenizer/src/lib.rs +expression: result +snapshot_kind: text +--- +[ + "select" @ Ident, + " " @ Space, + "1" @ Literal { kind: Int { base: Decimal, empty_int: false } }, + " " @ Space, + "from" @ Ident, + " " @ Space, + "c" @ Ident, + " " @ Space, + "where" @ Ident, + " " @ Space, + "id" @ Ident, + "::" @ DoubleColon, + "test" @ Ident, + " " @ Space, + "=" @ Eq, + " " @ Space, + ":id" @ NamedParam { kind: ColonRaw }, + ";" @ Semi, +] diff --git a/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__named_param_colon_string.snap b/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__named_param_colon_string.snap new file mode 100644 index 000000000..d91500831 --- /dev/null +++ b/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__named_param_colon_string.snap @@ -0,0 +1,23 @@ +--- +source: crates/pgt_tokenizer/src/lib.rs +expression: result +snapshot_kind: text +--- +[ + "select" @ Ident, + " " @ Space, + "1" @ Literal { kind: Int { base: Decimal, empty_int: false } }, + " " @ Space, + "from" @ Ident, + " " @ Space, + "c" @ Ident, + " " @ Space, + "where" @ Ident, + " " @ Space, + "id" @ Ident, + " " @ Space, + "=" @ Eq, + " " @ Space, + ":'id'" @ NamedParam { kind: ColonString { terminated: true } }, + ";" @ Semi, +] diff --git a/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__named_param_dollar_raw.snap b/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__named_param_dollar_raw.snap new file mode 100644 index 000000000..db0f94128 --- /dev/null +++ b/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__named_param_dollar_raw.snap @@ -0,0 +1,23 @@ +--- +source: crates/pgt_tokenizer/src/lib.rs +expression: result +snapshot_kind: text +--- +[ + "select" @ Ident, + " " @ Space, + "1" @ Literal { kind: Int { base: Decimal, empty_int: false } }, + " " @ Space, + "from" @ Ident, + " " @ Space, + "c" @ 
Ident, + " " @ Space, + "where" @ Ident, + " " @ Space, + "id" @ Ident, + " " @ Space, + "=" @ Eq, + " " @ Space, + "$id" @ NamedParam { kind: DollarRaw }, + ";" @ Semi, +] diff --git a/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__numeric.snap b/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__numeric.snap new file mode 100644 index 000000000..95fdb27a0 --- /dev/null +++ b/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__numeric.snap @@ -0,0 +1,30 @@ +--- +source: crates/pgt_tokenizer/src/lib.rs +expression: "lex(r#\"\n42\n3.5\n4.\n.001\n.123e10\n5e2\n1.925e-3\n1e-10\n1e+10\n1e10\n4664.E+5\n\"#)" +snapshot_kind: text +--- +[ + "\n" @ LineEnding { count: 1 }, + "42" @ Literal { kind: Int { base: Decimal, empty_int: false } }, + "\n" @ LineEnding { count: 1 }, + "3.5" @ Literal { kind: Float { base: Decimal, empty_exponent: false } }, + "\n" @ LineEnding { count: 1 }, + "4." @ Literal { kind: Float { base: Decimal, empty_exponent: false } }, + "\n" @ LineEnding { count: 1 }, + ".001" @ Literal { kind: Int { base: Decimal, empty_int: false } }, + "\n" @ LineEnding { count: 1 }, + ".123e10" @ Literal { kind: Float { base: Decimal, empty_exponent: false } }, + "\n" @ LineEnding { count: 1 }, + "5e2" @ Literal { kind: Float { base: Decimal, empty_exponent: false } }, + "\n" @ LineEnding { count: 1 }, + "1.925e-3" @ Literal { kind: Float { base: Decimal, empty_exponent: false } }, + "\n" @ LineEnding { count: 1 }, + "1e-10" @ Literal { kind: Float { base: Decimal, empty_exponent: false } }, + "\n" @ LineEnding { count: 1 }, + "1e+10" @ Literal { kind: Float { base: Decimal, empty_exponent: false } }, + "\n" @ LineEnding { count: 1 }, + "1e10" @ Literal { kind: Float { base: Decimal, empty_exponent: false } }, + "\n" @ LineEnding { count: 1 }, + "4664.E+5" @ Literal { kind: Float { base: Decimal, empty_exponent: false } }, + "\n" @ LineEnding { count: 1 }, +] diff --git 
a/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__numeric_non_decimal.snap b/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__numeric_non_decimal.snap new file mode 100644 index 000000000..e44303487 --- /dev/null +++ b/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__numeric_non_decimal.snap @@ -0,0 +1,20 @@ +--- +source: crates/pgt_tokenizer/src/lib.rs +expression: "lex(r#\"\n0b100101\n0B10011001\n0o273\n0O755\n0x42f\n0XFFFF\n\"#)" +snapshot_kind: text +--- +[ + "\n" @ LineEnding { count: 1 }, + "0b100101" @ Literal { kind: Int { base: Binary, empty_int: false } }, + "\n" @ LineEnding { count: 1 }, + "0B10011001" @ Literal { kind: Int { base: Binary, empty_int: false } }, + "\n" @ LineEnding { count: 1 }, + "0o273" @ Literal { kind: Int { base: Octal, empty_int: false } }, + "\n" @ LineEnding { count: 1 }, + "0O755" @ Literal { kind: Int { base: Octal, empty_int: false } }, + "\n" @ LineEnding { count: 1 }, + "0x42f" @ Literal { kind: Int { base: Hexadecimal, empty_int: false } }, + "\n" @ LineEnding { count: 1 }, + "0XFFFF" @ Literal { kind: Int { base: Hexadecimal, empty_int: false } }, + "\n" @ LineEnding { count: 1 }, +] diff --git a/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__numeric_with_seperators.snap b/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__numeric_with_seperators.snap new file mode 100644 index 000000000..cd0ecb210 --- /dev/null +++ b/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__numeric_with_seperators.snap @@ -0,0 +1,18 @@ +--- +source: crates/pgt_tokenizer/src/lib.rs +expression: "lex(r#\"\n1_500_000_000\n0b10001000_00000000\n0o_1_755\n0xFFFF_FFFF\n1.618_034\n\"#)" +snapshot_kind: text +--- +[ + "\n" @ LineEnding { count: 1 }, + "1_500_000_000" @ Literal { kind: Int { base: Decimal, empty_int: false } }, + "\n" @ LineEnding { count: 1 }, + "0b10001000_00000000" @ Literal { kind: Int { base: Binary, empty_int: false } }, + "\n" @ LineEnding { count: 1 }, + "0o_1_755" @ Literal { kind: 
Int { base: Octal, empty_int: false } }, + "\n" @ LineEnding { count: 1 }, + "0xFFFF_FFFF" @ Literal { kind: Int { base: Hexadecimal, empty_int: false } }, + "\n" @ LineEnding { count: 1 }, + "1.618_034" @ Literal { kind: Float { base: Decimal, empty_exponent: false } }, + "\n" @ LineEnding { count: 1 }, +] diff --git a/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__params.snap b/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__params.snap new file mode 100644 index 000000000..6a4364173 --- /dev/null +++ b/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__params.snap @@ -0,0 +1,27 @@ +--- +source: crates/pgt_tokenizer/src/lib.rs +expression: "lex(r#\"\nselect $1 + $2;\n\nselect $1123123123123;\n\nselect $;\n\"#)" +snapshot_kind: text +--- +[ + "\n" @ LineEnding { count: 1 }, + "select" @ Ident, + " " @ Space, + "$1" @ PositionalParam, + " " @ Space, + "+" @ Plus, + " " @ Space, + "$2" @ PositionalParam, + ";" @ Semi, + "\n\n" @ LineEnding { count: 2 }, + "select" @ Ident, + " " @ Space, + "$1123123123123" @ PositionalParam, + ";" @ Semi, + "\n\n" @ LineEnding { count: 2 }, + "select" @ Ident, + " " @ Space, + "$" @ PositionalParam, + ";" @ Semi, + "\n" @ LineEnding { count: 1 }, +] diff --git a/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__quoted_ident.snap b/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__quoted_ident.snap new file mode 100644 index 000000000..e1dffb066 --- /dev/null +++ b/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__quoted_ident.snap @@ -0,0 +1,12 @@ +--- +source: crates/pgt_tokenizer/src/lib.rs +expression: "lex(r#\"\n\"hello &1 -world\";\n\n\n\"hello-world\n\"#)" +snapshot_kind: text +--- +[ + "\n" @ LineEnding { count: 1 }, + "\"hello &1 -world\"" @ QuotedIdent { terminated: true }, + ";" @ Semi, + "\n\n\n" @ LineEnding { count: 3 }, + "\"hello-world\n" @ QuotedIdent { terminated: false }, +] diff --git 
a/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__quoted_ident_with_escape_quote.snap b/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__quoted_ident_with_escape_quote.snap new file mode 100644 index 000000000..44ff06e5c --- /dev/null +++ b/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__quoted_ident_with_escape_quote.snap @@ -0,0 +1,10 @@ +--- +source: crates/pgt_tokenizer/src/lib.rs +expression: "lex(r#\"\n\"foo \"\" bar\"\n\"#)" +snapshot_kind: text +--- +[ + "\n" @ LineEnding { count: 1 }, + "\"foo \"\" bar\"" @ QuotedIdent { terminated: true }, + "\n" @ LineEnding { count: 1 }, +] diff --git a/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__select_with_period.snap b/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__select_with_period.snap new file mode 100644 index 000000000..bc03da6a9 --- /dev/null +++ b/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__select_with_period.snap @@ -0,0 +1,15 @@ +--- +source: crates/pgt_tokenizer/src/lib.rs +expression: "lex(r#\"\nselect public.users;\n\"#)" +snapshot_kind: text +--- +[ + "\n" @ LineEnding { count: 1 }, + "select" @ Ident, + " " @ Space, + "public" @ Ident, + "." 
@ Dot, + "users" @ Ident, + ";" @ Semi, + "\n" @ LineEnding { count: 1 }, +] diff --git a/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__string.snap b/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__string.snap new file mode 100644 index 000000000..c7e5b8bac --- /dev/null +++ b/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__string.snap @@ -0,0 +1,26 @@ +--- +source: crates/pgt_tokenizer/src/lib.rs +expression: "lex(r#\"\n'Dianne''s horse'\n\nselect 'foo ''\nbar';\n\nselect 'foooo'\n 'bar';\n\n\n'foo \\\\ \\n \\tbar'\n\n'forgot to close the string\n\"#)" +snapshot_kind: text +--- +[ + "\n" @ LineEnding { count: 1 }, + "'Dianne''s horse'" @ Literal { kind: Str { terminated: true } }, + "\n\n" @ LineEnding { count: 2 }, + "select" @ Ident, + " " @ Space, + "'foo ''\nbar'" @ Literal { kind: Str { terminated: true } }, + ";" @ Semi, + "\n\n" @ LineEnding { count: 2 }, + "select" @ Ident, + " " @ Space, + "'foooo'" @ Literal { kind: Str { terminated: true } }, + "\n" @ LineEnding { count: 1 }, + " " @ Space, + "'bar'" @ Literal { kind: Str { terminated: true } }, + ";" @ Semi, + "\n\n\n" @ LineEnding { count: 3 }, + "'foo \\\\ \\n \\tbar'" @ Literal { kind: Str { terminated: true } }, + "\n\n" @ LineEnding { count: 2 }, + "'forgot to close the string\n" @ Literal { kind: Str { terminated: false } }, +] diff --git a/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__string_unicode_escape.snap b/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__string_unicode_escape.snap new file mode 100644 index 000000000..225a208a0 --- /dev/null +++ b/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__string_unicode_escape.snap @@ -0,0 +1,20 @@ +--- +source: crates/pgt_tokenizer/src/lib.rs +expression: "lex(r#\"\nU&\"d\\0061t\\+000061\"\n\nU&\"\\0441\\043B\\043E\\043D\"\n\nu&'\\0441\\043B'\n\nU&\"d!0061t!+000061\" UESCAPE '!'\n\"#)" +snapshot_kind: text +--- +[ + "\n" @ LineEnding { count: 1 }, + "U&\"d\\0061t\\+000061\"" @ QuotedIdent { 
terminated: true }, + "\n\n" @ LineEnding { count: 2 }, + "U&\"\\0441\\043B\\043E\\043D\"" @ QuotedIdent { terminated: true }, + "\n\n" @ LineEnding { count: 2 }, + "u&'\\0441\\043B'" @ Literal { kind: UnicodeEscStr { terminated: true } }, + "\n\n" @ LineEnding { count: 2 }, + "U&\"d!0061t!+000061\"" @ QuotedIdent { terminated: true }, + " " @ Space, + "UESCAPE" @ Ident, + " " @ Space, + "'!'" @ Literal { kind: Str { terminated: true } }, + "\n" @ LineEnding { count: 1 }, +] diff --git a/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__string_with_escapes.snap b/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__string_with_escapes.snap new file mode 100644 index 000000000..bbc940481 --- /dev/null +++ b/crates/pgt_tokenizer/src/snapshots/pgt_tokenizer__tests__string_with_escapes.snap @@ -0,0 +1,20 @@ +--- +source: crates/pgt_tokenizer/src/lib.rs +expression: "lex(r#\"\nE'foo'\n\ne'bar'\n\ne'\\b\\f\\n\\r\\t'\n\ne'\\0\\11\\777'\n\ne'\\x0\\x11\\xFF'\n\ne'\\uAAAA \\UFFFFFFFF'\n\n\"#)" +snapshot_kind: text +--- +[ + "\n" @ LineEnding { count: 1 }, + "E'foo'" @ Literal { kind: EscStr { terminated: true } }, + "\n\n" @ LineEnding { count: 2 }, + "e'bar'" @ Literal { kind: EscStr { terminated: true } }, + "\n\n" @ LineEnding { count: 2 }, + "e'\\b\\f\\n\\r\\t'" @ Literal { kind: EscStr { terminated: true } }, + "\n\n" @ LineEnding { count: 2 }, + "e'\\0\\11\\777'" @ Literal { kind: EscStr { terminated: true } }, + "\n\n" @ LineEnding { count: 2 }, + "e'\\x0\\x11\\xFF'" @ Literal { kind: EscStr { terminated: true } }, + "\n\n" @ LineEnding { count: 2 }, + "e'\\uAAAA \\UFFFFFFFF'" @ Literal { kind: EscStr { terminated: true } }, + "\n\n" @ LineEnding { count: 2 }, +] diff --git a/crates/pgt_tokenizer/src/token.rs b/crates/pgt_tokenizer/src/token.rs new file mode 100644 index 000000000..1312773d9 --- /dev/null +++ b/crates/pgt_tokenizer/src/token.rs @@ -0,0 +1,205 @@ +// based on: 
https://github.com/rust-lang/rust/blob/d1b7355d3d7b4ead564dbecb1d240fcc74fff21b/compiler/rustc_lexer/src/lib.rs#L58 +#[derive(Debug, PartialEq, Clone, Copy)] +pub enum TokenKind { + /// Used when there's an error of some sort while lexing. + Unknown, + /// Examples: `12u8`, `1.0e-40`, `b"123"`. Note that `_` is an invalid + /// suffix, but may be present here on string and float literals. Users of + /// this type will need to check for and reject that case. + /// + /// See [`LiteralKind`] for more details. + Literal { + kind: LiteralKind, + }, + /// Whitespace characters. + Space, + Tab, + VerticalTab, + FormFeed, + // Handles \n, \r, and sequences + LineEnding { + count: usize, + }, + /// Identifier + /// + /// case-sensitive + Ident, + /// `;` + Semi, + /// End of file + Eof, + /// `/` + Slash, + /// `\` + Backslash, + /// `-- foo` + LineComment, + /// ``` + /// /* + /// foo + /// */ + /// ``` + BlockComment { + terminated: bool, + }, + /// `-` + Minus, + /// `:` + Colon, + /// `::` + DoubleColon, + /// `.` + Dot, + /// `=` + Eq, + /// `>` + Gt, + /// `&` + And, + /// `<` + Lt, + /// `!` + Bang, + /// `+` + Plus, + /// `~` + Tilde, + /// `#` + Pound, + /// `?` + Question, + /// `|` + Or, + /// `%` + Percent, + /// `^` + Caret, + /// `*` + Star, + /// `` ` `` + Backtick, + /// `@` + At, + /// `]` + CloseBracket, + /// `[` + OpenBracket, + /// `)` + CloseParen, + /// `(` + OpenParen, + /// `,` + Comma, + /// Error case that we need to report later on. + UnknownPrefix, + /// Positional Parameter, e.g., `$1` + /// + /// see: + PositionalParam, + /// Named Parameter, e.g., `@name` + /// + /// This is used in some ORMs and query builders, like sqlc. 
+ NamedParam { + kind: NamedParamKind, + }, + /// Quoted Identifier, e.g., `"update"` in `update "my_table" set "a" = 5;` + /// + /// These are case-sensitive, unlike [`TokenKind::Ident`] + /// + /// see: + QuotedIdent { + terminated: bool, + }, +} + +#[derive(Debug, PartialEq, Clone, Copy)] +pub enum NamedParamKind { + /// e.g. `@name` + /// + /// Used in: + /// - sqlc: https://docs.sqlc.dev/en/latest/howto/named_parameters.html + AtPrefix, + + /// e.g. `:name` (raw substitution) + /// + /// Used in: psql + ColonRaw, + + /// e.g. `:'name'` (quoted string substitution) + /// + /// Used in: psql + ColonString { terminated: bool }, + + /// e.g. `:"name"` (quoted identifier substitution) + /// + /// Used in: psql + ColonIdentifier { terminated: bool }, + + /// e.g. `$name` + DollarRaw, +} + +/// Parsed token. +/// It doesn't contain information about data that has been parsed, +/// only the type of the token and its size. +#[derive(Debug, Clone, Copy)] +pub struct Token { + pub kind: TokenKind, + pub len: u32, +} + +impl Token { + pub(crate) fn new(kind: TokenKind, len: u32) -> Token { + Token { kind, len } + } +} + +/// Base of numeric literal encoding according to its prefix. +#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)] +pub enum Base { + /// Literal starts with "0b". + Binary = 2, + /// Literal starts with "0o". + Octal = 8, + /// Literal doesn't contain a prefix. + Decimal = 10, + /// Literal starts with "0x". + Hexadecimal = 16, +} + +// Enum representing the literal types supported by the lexer. 
+#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)] +pub enum LiteralKind { + /// Integer Numeric, e.g., `42` + /// + /// see: + Int { base: Base, empty_int: bool }, + /// Float Numeric, e.g., `1.925e-3` + /// + /// see: + Float { base: Base, empty_exponent: bool }, + /// String, e.g., `'foo'` + /// + /// see: + Str { terminated: bool }, + /// Hexidecimal Bit String, e.g., `X'1FF'` + /// + /// see: + ByteStr { terminated: bool }, + /// Bit String, e.g., `B'1001'` + /// + /// see: + BitStr { terminated: bool }, + /// Dollar Quoted String, e.g., `$$Dianne's horse$$` + /// + /// see: + DollarQuotedString { terminated: bool }, + /// Unicode Escape String, e.g., `U&'d\0061t\+000061'` + /// + /// see: + UnicodeEscStr { terminated: bool }, + /// Escape String, e.g, `E'foo'` + /// + /// see: + EscStr { terminated: bool }, +} diff --git a/crates/pgt_treesitter_queries/Cargo.toml b/crates/pgt_treesitter/Cargo.toml similarity index 54% rename from crates/pgt_treesitter_queries/Cargo.toml rename to crates/pgt_treesitter/Cargo.toml index 5806861f5..f2d8b46e1 100644 --- a/crates/pgt_treesitter_queries/Cargo.toml +++ b/crates/pgt_treesitter/Cargo.toml @@ -6,17 +6,20 @@ edition.workspace = true homepage.workspace = true keywords.workspace = true license.workspace = true -name = "pgt_treesitter_queries" +name = "pgt_treesitter" repository.workspace = true version = "0.0.0" [dependencies] -clap = { version = "4.5.23", features = ["derive"] } -tree-sitter.workspace = true -tree_sitter_sql.workspace = true +clap = { version = "4.5.23", features = ["derive"] } +pgt_schema_cache.workspace = true +pgt_text_size.workspace = true +tree-sitter.workspace = true +tree_sitter_sql.workspace = true [dev-dependencies] +pgt_test_utils.workspace = true [lib] doctest = false diff --git a/crates/pgt_treesitter/src/context/base_parser.rs b/crates/pgt_treesitter/src/context/base_parser.rs new file mode 100644 index 000000000..83b315828 --- /dev/null +++ 
b/crates/pgt_treesitter/src/context/base_parser.rs @@ -0,0 +1,273 @@ +use pgt_text_size::{TextRange, TextSize}; +use std::iter::Peekable; + +pub(crate) struct TokenNavigator { + tokens: Peekable>, + pub previous_token: Option, + pub current_token: Option, +} + +impl TokenNavigator { + pub(crate) fn next_matches(&mut self, options: &[&str]) -> bool { + self.tokens + .peek() + .is_some_and(|c| options.contains(&c.get_word_without_quotes().as_str())) + } + + pub(crate) fn prev_matches(&self, options: &[&str]) -> bool { + self.previous_token + .as_ref() + .is_some_and(|t| options.contains(&t.get_word_without_quotes().as_str())) + } + + pub(crate) fn advance(&mut self) -> Option { + // we can't peek back n an iterator, so we'll have to keep track manually. + self.previous_token = self.current_token.take(); + self.current_token = self.tokens.next(); + self.current_token.clone() + } +} + +impl From> for TokenNavigator { + fn from(tokens: Vec) -> Self { + TokenNavigator { + tokens: tokens.into_iter().peekable(), + previous_token: None, + current_token: None, + } + } +} + +pub(crate) trait CompletionStatementParser: Sized { + type Context: Default; + const NAME: &'static str; + + fn looks_like_matching_stmt(sql: &str) -> bool; + fn parse(self) -> Self::Context; + fn make_parser(tokens: Vec, cursor_position: usize) -> Self; + + fn get_context(sql: &str, cursor_position: usize) -> Self::Context { + assert!( + Self::looks_like_matching_stmt(sql), + "Using {} for a wrong statement! 
Developer Error!", + Self::NAME + ); + + match sql_to_words(sql) { + Ok(tokens) => { + let parser = Self::make_parser(tokens, cursor_position); + parser.parse() + } + Err(_) => Self::Context::default(), + } + } +} + +pub(crate) fn schema_and_table_name(token: &WordWithIndex) -> (String, Option) { + let word = token.get_word_without_quotes(); + let mut parts = word.split('.'); + + ( + parts.next().unwrap().into(), + parts.next().map(|tb| tb.into()), + ) +} + +#[derive(Clone, Debug, PartialEq, Eq)] +pub(crate) struct WordWithIndex { + word: String, + start: usize, + end: usize, +} + +impl WordWithIndex { + pub(crate) fn is_under_cursor(&self, cursor_pos: usize) -> bool { + self.start <= cursor_pos && self.end > cursor_pos + } + + pub(crate) fn get_range(&self) -> TextRange { + let start: u32 = self.start.try_into().expect("Text too long"); + let end: u32 = self.end.try_into().expect("Text too long"); + TextRange::new(TextSize::from(start), TextSize::from(end)) + } + + pub(crate) fn get_word_without_quotes(&self) -> String { + self.word.replace('"', "") + } + + pub(crate) fn get_word(&self) -> String { + self.word.clone() + } +} + +pub(crate) struct SubStatementParser { + start_of_word: Option, + current_word: String, + in_quotation_marks: bool, + is_fn_call: bool, + words: Vec, +} + +impl SubStatementParser { + pub(crate) fn parse(sql: &str) -> Result, String> { + let mut parser = SubStatementParser { + start_of_word: None, + current_word: String::new(), + in_quotation_marks: false, + is_fn_call: false, + words: vec![], + }; + + parser.collect_words(sql); + + if parser.in_quotation_marks { + Err("String was not closed properly.".into()) + } else { + Ok(parser.words) + } + } + + pub fn collect_words(&mut self, sql: &str) { + for (pos, c) in sql.char_indices() { + match c { + '"' => { + if !self.has_started_word() { + self.in_quotation_marks = true; + self.add_char(c); + self.start_word(pos); + } else { + self.in_quotation_marks = false; + self.add_char(c); + } + } + + 
'(' => { + if !self.has_started_word() { + self.push_char_as_word(c, pos); + } else { + self.add_char(c); + self.is_fn_call = true; + } + } + + ')' => { + if self.is_fn_call { + self.add_char(c); + self.is_fn_call = false; + } else { + if self.has_started_word() { + self.push_word(pos); + } + self.push_char_as_word(c, pos); + } + } + + _ => { + if c.is_ascii_whitespace() || c == ';' { + if self.in_quotation_marks { + self.add_char(c); + } else if !self.is_empty() && self.has_started_word() { + self.push_word(pos); + } + } else if self.has_started_word() { + self.add_char(c); + } else { + self.start_word(pos); + self.add_char(c) + } + } + } + } + + if self.has_started_word() && !self.is_empty() { + self.push_word(sql.len()) + } + } + + fn is_empty(&self) -> bool { + self.current_word.is_empty() + } + + fn add_char(&mut self, c: char) { + self.current_word.push(c) + } + + fn start_word(&mut self, pos: usize) { + self.start_of_word = Some(pos); + } + + fn has_started_word(&self) -> bool { + self.start_of_word.is_some() + } + + fn push_char_as_word(&mut self, c: char, pos: usize) { + self.words.push(WordWithIndex { + word: String::from(c), + start: pos, + end: pos + 1, + }); + } + + fn push_word(&mut self, current_position: usize) { + self.words.push(WordWithIndex { + word: self.current_word.clone(), + start: self.start_of_word.unwrap(), + end: current_position, + }); + self.current_word = String::new(); + self.start_of_word = None; + } +} + +/// Note: A policy name within quotation marks will be considered a single word. 
+pub(crate) fn sql_to_words(sql: &str) -> Result, String> { + SubStatementParser::parse(sql) +} + +#[cfg(test)] +mod tests { + use crate::context::base_parser::{SubStatementParser, WordWithIndex, sql_to_words}; + + #[test] + fn determines_positions_correctly() { + let query = "\ncreate policy \"my cool pol\"\n\ton auth.users\n\tas permissive\n\tfor select\n\t\tto public\n\t\tusing (auth.uid());".to_string(); + + let words = SubStatementParser::parse(query.as_str()).unwrap(); + + assert_eq!(words[0], to_word("create", 1, 7)); + assert_eq!(words[1], to_word("policy", 8, 14)); + assert_eq!(words[2], to_word("\"my cool pol\"", 15, 28)); + assert_eq!(words[3], to_word("on", 30, 32)); + assert_eq!(words[4], to_word("auth.users", 33, 43)); + assert_eq!(words[5], to_word("as", 45, 47)); + assert_eq!(words[6], to_word("permissive", 48, 58)); + assert_eq!(words[7], to_word("for", 60, 63)); + assert_eq!(words[8], to_word("select", 64, 70)); + assert_eq!(words[9], to_word("to", 73, 75)); + assert_eq!(words[10], to_word("public", 78, 84)); + assert_eq!(words[11], to_word("using", 87, 92)); + assert_eq!(words[12], to_word("(", 93, 94)); + assert_eq!(words[13], to_word("auth.uid()", 94, 104)); + assert_eq!(words[14], to_word(")", 104, 105)); + } + + #[test] + fn handles_schemas_in_quotation_marks() { + let query = r#"grant select on "public"."users""#.to_string(); + + let words = sql_to_words(query.as_str()).unwrap(); + + assert_eq!(words[0], to_word("grant", 0, 5)); + assert_eq!(words[1], to_word("select", 6, 12)); + assert_eq!(words[2], to_word("on", 13, 15)); + assert_eq!(words[3], to_word(r#""public"."users""#, 16, 32)); + } + + fn to_word(word: &str, start: usize, end: usize) -> WordWithIndex { + WordWithIndex { + word: word.into(), + start, + end, + } + } +} diff --git a/crates/pgt_treesitter/src/context/grant_parser.rs b/crates/pgt_treesitter/src/context/grant_parser.rs new file mode 100644 index 000000000..c9aebc33b --- /dev/null +++ 
b/crates/pgt_treesitter/src/context/grant_parser.rs @@ -0,0 +1,418 @@ +use pgt_text_size::{TextRange, TextSize}; + +use crate::context::base_parser::{ + CompletionStatementParser, TokenNavigator, WordWithIndex, schema_and_table_name, +}; + +#[derive(Default, Debug, PartialEq, Eq)] +pub(crate) struct GrantContext { + pub table_name: Option, + pub schema_name: Option, + pub node_text: String, + pub node_range: TextRange, + pub node_kind: String, +} + +/// Simple parser that'll turn a policy-related statement into a context object required for +/// completions. +/// The parser will only work if the (trimmed) sql starts with `create policy`, `drop policy`, or `alter policy`. +/// It can only parse policy statements. +pub(crate) struct GrantParser { + navigator: TokenNavigator, + context: GrantContext, + cursor_position: usize, + in_roles_list: bool, +} + +impl CompletionStatementParser for GrantParser { + type Context = GrantContext; + const NAME: &'static str = "GrantParser"; + + fn looks_like_matching_stmt(sql: &str) -> bool { + let lowercased = sql.to_ascii_lowercase(); + let trimmed = lowercased.trim(); + trimmed.starts_with("grant") + } + + fn parse(mut self) -> Self::Context { + while let Some(token) = self.navigator.advance() { + if token.is_under_cursor(self.cursor_position) { + self.handle_token_under_cursor(token); + } else { + self.handle_token(token); + } + } + + self.context + } + + fn make_parser(tokens: Vec, cursor_position: usize) -> Self { + Self { + navigator: tokens.into(), + context: GrantContext::default(), + cursor_position, + in_roles_list: false, + } + } +} + +impl GrantParser { + fn handle_token_under_cursor(&mut self, token: WordWithIndex) { + if self.navigator.previous_token.is_none() { + return; + } + + let previous = self.navigator.previous_token.take().unwrap(); + let current = self + .navigator + .current_token + .as_ref() + .map(|w| w.get_word_without_quotes()); + + match previous + .get_word_without_quotes() + .to_ascii_lowercase() + 
.as_str() + { + "grant" => { + self.context.node_range = token.get_range(); + self.context.node_kind = "grant_role".into(); + self.context.node_text = token.get_word(); + } + "on" if !matches!(current.as_deref(), Some("table")) => self.handle_table(&token), + + "table" => { + self.handle_table(&token); + } + "to" => { + self.context.node_range = token.get_range(); + self.context.node_kind = "grant_role".into(); + self.context.node_text = token.get_word(); + } + t => { + if self.in_roles_list && t.ends_with(',') { + self.context.node_kind = "grant_role".into(); + } + + self.context.node_range = token.get_range(); + self.context.node_text = token.get_word(); + } + } + } + + fn handle_table(&mut self, token: &WordWithIndex) { + if token.get_word_without_quotes().contains('.') { + let (schema_name, table_name) = schema_and_table_name(token); + + let schema_name_len = schema_name.len(); + self.context.schema_name = Some(schema_name); + + let offset: u32 = schema_name_len.try_into().expect("Text too long"); + let range_without_schema = token + .get_range() + .checked_expand_start( + TextSize::new(offset + 1), // kill the dot as well + ) + .expect("Text too long"); + + self.context.node_range = range_without_schema; + self.context.node_kind = "grant_table".into(); + + // In practice, we should always have a table name. + // The completion sanitization will add a word after a `.` if nothing follows it; + // the token_text will then look like `schema.REPLACED_TOKEN`. 
+ self.context.node_text = table_name.unwrap_or_default(); + } else { + self.context.node_range = token.get_range(); + self.context.node_text = token.get_word(); + self.context.node_kind = "grant_table".into(); + } + } + + fn handle_token(&mut self, token: WordWithIndex) { + match token.get_word_without_quotes().as_str() { + "on" if !self.navigator.next_matches(&[ + "table", + "schema", + "foreign", + "domain", + "sequence", + "database", + "function", + "procedure", + "routine", + "language", + "large", + "parameter", + "schema", + "tablespace", + "type", + ]) => + { + self.table_with_schema() + } + "table" => self.table_with_schema(), + + "to" => { + self.in_roles_list = true; + } + + t => { + if self.in_roles_list && !t.ends_with(',') { + self.in_roles_list = false; + } + } + } + } + + fn table_with_schema(&mut self) { + if let Some(token) = self.navigator.advance() { + if token.is_under_cursor(self.cursor_position) { + self.handle_token_under_cursor(token); + } else if token.get_word_without_quotes().contains('.') { + let (schema, maybe_table) = schema_and_table_name(&token); + self.context.schema_name = Some(schema); + self.context.table_name = maybe_table; + } else { + self.context.table_name = Some(token.get_word()); + } + }; + } +} + +#[cfg(test)] +mod tests { + use pgt_text_size::{TextRange, TextSize}; + + use crate::{ + context::base_parser::CompletionStatementParser, + context::grant_parser::{GrantContext, GrantParser}, + }; + + use pgt_test_utils::QueryWithCursorPosition; + + fn with_pos(query: String) -> (usize, String) { + let mut pos: Option = None; + + for (p, c) in query.char_indices() { + if c == QueryWithCursorPosition::cursor_marker() { + pos = Some(p); + break; + } + } + + ( + pos.expect("Please add cursor position!"), + query + .replace(QueryWithCursorPosition::cursor_marker(), "REPLACED_TOKEN") + .to_string(), + ) + } + + #[test] + fn infers_grant_keyword() { + let (pos, query) = with_pos(format!( + r#" + grant {} + "#, + 
QueryWithCursorPosition::cursor_marker() + )); + + let context = GrantParser::get_context(query.as_str(), pos); + + assert_eq!( + context, + GrantContext { + table_name: None, + schema_name: None, + node_text: "REPLACED_TOKEN".into(), + node_range: TextRange::new(TextSize::new(19), TextSize::new(33)), + node_kind: "grant_role".into(), + } + ); + } + + #[test] + fn infers_table_name() { + let (pos, query) = with_pos(format!( + r#" + grant select on {} + "#, + QueryWithCursorPosition::cursor_marker() + )); + + let context = GrantParser::get_context(query.as_str(), pos); + + assert_eq!( + context, + GrantContext { + table_name: None, + schema_name: None, + node_text: "REPLACED_TOKEN".into(), + node_range: TextRange::new(TextSize::new(29), TextSize::new(43)), + node_kind: "grant_table".into(), + } + ); + } + + #[test] + fn infers_table_name_with_keyword() { + let (pos, query) = with_pos(format!( + r#" + grant select on table {} + "#, + QueryWithCursorPosition::cursor_marker() + )); + + let context = GrantParser::get_context(query.as_str(), pos); + + assert_eq!( + context, + GrantContext { + table_name: None, + schema_name: None, + node_text: "REPLACED_TOKEN".into(), + node_range: TextRange::new(TextSize::new(35), TextSize::new(49)), + node_kind: "grant_table".into(), + } + ); + } + + #[test] + fn infers_schema_and_table_name() { + let (pos, query) = with_pos(format!( + r#" + grant select on public.{} + "#, + QueryWithCursorPosition::cursor_marker() + )); + + let context = GrantParser::get_context(query.as_str(), pos); + + assert_eq!( + context, + GrantContext { + table_name: None, + schema_name: Some("public".into()), + node_text: "REPLACED_TOKEN".into(), + node_range: TextRange::new(TextSize::new(36), TextSize::new(50)), + node_kind: "grant_table".into(), + } + ); + } + + #[test] + fn infers_schema_and_table_name_with_keyword() { + let (pos, query) = with_pos(format!( + r#" + grant select on table public.{} + "#, + QueryWithCursorPosition::cursor_marker() + )); + + 
let context = GrantParser::get_context(query.as_str(), pos); + + assert_eq!( + context, + GrantContext { + table_name: None, + schema_name: Some("public".into()), + node_text: "REPLACED_TOKEN".into(), + node_range: TextRange::new(TextSize::new(42), TextSize::new(56)), + node_kind: "grant_table".into(), + } + ); + } + + #[test] + fn infers_role_name() { + let (pos, query) = with_pos(format!( + r#" + grant select on public.users to {} + "#, + QueryWithCursorPosition::cursor_marker() + )); + + let context = GrantParser::get_context(query.as_str(), pos); + + assert_eq!( + context, + GrantContext { + table_name: Some("users".into()), + schema_name: Some("public".into()), + node_text: "REPLACED_TOKEN".into(), + node_range: TextRange::new(TextSize::new(45), TextSize::new(59)), + node_kind: "grant_role".into(), + } + ); + } + + #[test] + fn determines_table_name_after_schema() { + let (pos, query) = with_pos(format!( + r#" + grant select on public.{} to test_role + "#, + QueryWithCursorPosition::cursor_marker() + )); + + let context = GrantParser::get_context(query.as_str(), pos); + + assert_eq!( + context, + GrantContext { + table_name: None, + schema_name: Some("public".into()), + node_text: "REPLACED_TOKEN".into(), + node_range: TextRange::new(TextSize::new(36), TextSize::new(50)), + node_kind: "grant_table".into(), + } + ); + } + + #[test] + fn infers_quoted_schema_and_table() { + let (pos, query) = with_pos(format!( + r#" + grant select on "MySchema"."MyTable" to {} + "#, + QueryWithCursorPosition::cursor_marker() + )); + + let context = GrantParser::get_context(query.as_str(), pos); + + assert_eq!( + context, + GrantContext { + table_name: Some("MyTable".into()), + schema_name: Some("MySchema".into()), + node_text: "REPLACED_TOKEN".into(), + node_range: TextRange::new(TextSize::new(53), TextSize::new(67)), + node_kind: "grant_role".into(), + } + ); + } + + #[test] + fn infers_multiple_roles() { + let (pos, query) = with_pos(format!( + r#" + grant select on 
public.users to alice, {} + "#, + QueryWithCursorPosition::cursor_marker() + )); + + let context = GrantParser::get_context(query.as_str(), pos); + + assert_eq!( + context, + GrantContext { + table_name: Some("users".into()), + schema_name: Some("public".into()), + node_text: "REPLACED_TOKEN".into(), + node_range: TextRange::new(TextSize::new(52), TextSize::new(66)), + node_kind: "grant_role".into(), + } + ); + } +} diff --git a/crates/pgt_completions/src/context/mod.rs b/crates/pgt_treesitter/src/context/mod.rs similarity index 64% rename from crates/pgt_completions/src/context/mod.rs rename to crates/pgt_treesitter/src/context/mod.rs index 0bb190a96..9cfaadea1 100644 --- a/crates/pgt_completions/src/context/mod.rs +++ b/crates/pgt_treesitter/src/context/mod.rs @@ -2,19 +2,19 @@ use std::{ cmp, collections::{HashMap, HashSet}, }; +mod base_parser; +mod grant_parser; mod policy_parser; +mod revoke_parser; -use pgt_schema_cache::SchemaCache; -use pgt_text_size::TextRange; -use pgt_treesitter_queries::{ - TreeSitterQueriesExecutor, - queries::{self, QueryResult}, -}; +use crate::queries::{self, QueryResult, TreeSitterQueriesExecutor}; +use pgt_text_size::{TextRange, TextSize}; -use crate::{ - NodeText, - context::policy_parser::{PolicyParser, PolicyStmtKind}, - sanitization::SanitizedCompletionParams, +use crate::context::{ + base_parser::CompletionStatementParser, + grant_parser::GrantParser, + policy_parser::{PolicyParser, PolicyStmtKind}, + revoke_parser::RevokeParser, }; #[derive(Debug, PartialEq, Eq, Hash, Clone)] @@ -31,14 +31,29 @@ pub enum WrappingClause<'a> { Insert, AlterTable, DropTable, + DropColumn, + AlterColumn, + RenameColumn, PolicyName, ToRoleAssignment, + SetStatement, + AlterRole, + DropRole, + + /// `PolicyCheck` refers to either the `WITH CHECK` or the `USING` clause + /// in a policy statement. + /// ```sql + /// CREATE POLICY "my pol" ON PUBLIC.USERS + /// FOR SELECT + /// USING (...) -- this one! 
+ /// ``` + PolicyCheck, } #[derive(PartialEq, Eq, Hash, Debug, Clone)] -pub(crate) struct MentionedColumn { - pub(crate) column: String, - pub(crate) alias: Option, +pub struct MentionedColumn { + pub column: String, + pub alias: Option, } /// We can map a few nodes, such as the "update" node, to actual SQL clauses. @@ -58,12 +73,13 @@ pub enum WrappingNode { } #[derive(Debug)] -pub(crate) enum NodeUnderCursor<'a> { +pub enum NodeUnderCursor<'a> { TsNode(tree_sitter::Node<'a>), CustomNode { - text: NodeText, + text: String, range: TextRange, kind: String, + previous_node_kind: Option, }, } @@ -126,13 +142,18 @@ impl TryFrom for WrappingNode { } } +pub struct TreeSitterContextParams<'a> { + pub position: TextSize, + pub text: &'a str, + pub tree: &'a tree_sitter::Tree, +} + #[derive(Debug)] -pub(crate) struct CompletionContext<'a> { +pub struct TreesitterContext<'a> { pub node_under_cursor: Option>, pub tree: &'a tree_sitter::Tree, pub text: &'a str, - pub schema_cache: &'a SchemaCache, pub position: usize, /// If the cursor is on a node that uses dot notation @@ -154,6 +175,7 @@ pub(crate) struct CompletionContext<'a> { /// on u.id = i.user_id; /// ``` pub schema_or_alias_name: Option, + pub wrapping_clause_type: Option>, pub wrapping_node_kind: Option, @@ -166,12 +188,11 @@ pub(crate) struct CompletionContext<'a> { pub mentioned_columns: HashMap>, HashSet>, } -impl<'a> CompletionContext<'a> { - pub fn new(params: &'a SanitizedCompletionParams) -> Self { +impl<'a> TreesitterContext<'a> { + pub fn new(params: TreeSitterContextParams<'a>) -> Self { let mut ctx = Self { - tree: params.tree.as_ref(), - text: ¶ms.text, - schema_cache: params.schema, + tree: params.tree, + text: params.text, position: usize::from(params.position), node_under_cursor: None, schema_or_alias_name: None, @@ -187,8 +208,12 @@ impl<'a> CompletionContext<'a> { // policy handling is important to Supabase, but they are a PostgreSQL specific extension, // so the tree_sitter_sql language does not 
support it. // We infer the context manually. - if PolicyParser::looks_like_policy_stmt(¶ms.text) { + if PolicyParser::looks_like_matching_stmt(params.text) { ctx.gather_policy_context(); + } else if GrantParser::looks_like_matching_stmt(params.text) { + ctx.gather_grant_context(); + } else if RevokeParser::looks_like_matching_stmt(params.text) { + ctx.gather_revoke_context(); } else { ctx.gather_tree_context(); ctx.gather_info_from_ts_queries(); @@ -197,13 +222,70 @@ impl<'a> CompletionContext<'a> { ctx } + fn gather_revoke_context(&mut self) { + let revoke_context = RevokeParser::get_context(self.text, self.position); + + self.node_under_cursor = Some(NodeUnderCursor::CustomNode { + text: revoke_context.node_text, + range: revoke_context.node_range, + kind: revoke_context.node_kind.clone(), + previous_node_kind: None, + }); + + if revoke_context.node_kind == "revoke_table" { + self.schema_or_alias_name = revoke_context.schema_name.clone(); + } + + if revoke_context.table_name.is_some() { + let mut new = HashSet::new(); + new.insert(revoke_context.table_name.unwrap()); + self.mentioned_relations + .insert(revoke_context.schema_name, new); + } + + self.wrapping_clause_type = match revoke_context.node_kind.as_str() { + "revoke_role" => Some(WrappingClause::ToRoleAssignment), + "revoke_table" => Some(WrappingClause::From), + _ => None, + }; + } + + fn gather_grant_context(&mut self) { + let grant_context = GrantParser::get_context(self.text, self.position); + + self.node_under_cursor = Some(NodeUnderCursor::CustomNode { + text: grant_context.node_text, + range: grant_context.node_range, + kind: grant_context.node_kind.clone(), + previous_node_kind: None, + }); + + if grant_context.node_kind == "grant_table" { + self.schema_or_alias_name = grant_context.schema_name.clone(); + } + + if grant_context.table_name.is_some() { + let mut new = HashSet::new(); + new.insert(grant_context.table_name.unwrap()); + self.mentioned_relations + .insert(grant_context.schema_name, 
new); + } + + self.wrapping_clause_type = match grant_context.node_kind.as_str() { + "grant_role" => Some(WrappingClause::ToRoleAssignment), + "grant_table" => Some(WrappingClause::From), + _ => None, + }; + } + fn gather_policy_context(&mut self) { let policy_context = PolicyParser::get_context(self.text, self.position); self.node_under_cursor = Some(NodeUnderCursor::CustomNode { - text: policy_context.node_text.into(), + text: policy_context.node_text, range: policy_context.node_range, kind: policy_context.node_kind.clone(), + previous_node_kind: Some(policy_context.previous_node_kind), }); if policy_context.node_kind == "policy_table" { @@ -223,7 +305,13 @@ impl<'a> CompletionContext<'a> { } "policy_role" => Some(WrappingClause::ToRoleAssignment), "policy_table" => Some(WrappingClause::From), - _ => None, + _ => { + if policy_context.in_check_or_using_clause { + Some(WrappingClause::PolicyCheck) + } else { + None + } + } }; } @@ -306,29 +394,18 @@ impl<'a> CompletionContext<'a> { } } - fn get_ts_node_content(&self, ts_node: &tree_sitter::Node<'a>) -> Option { + fn get_ts_node_content(&self, ts_node: &tree_sitter::Node<'a>) -> Option { let source = self.text; - ts_node.utf8_text(source.as_bytes()).ok().map(|txt| { - if SanitizedCompletionParams::is_sanitized_token(txt) { - NodeText::Replaced - } else { - NodeText::Original(txt.into()) - } - }) + ts_node + .utf8_text(source.as_bytes()) + .ok() + .map(|txt| txt.into()) } pub fn get_node_under_cursor_content(&self) -> Option { match self.node_under_cursor.as_ref()? { - NodeUnderCursor::TsNode(node) => { - self.get_ts_node_content(node).and_then(|nt| match nt { - NodeText::Replaced => None, - NodeText::Original(c) => Some(c.to_string()), - }) - } - NodeUnderCursor::CustomNode { text, .. } => match text { - NodeText::Replaced => None, - NodeText::Original(c) => Some(c.to_string()), - }, + NodeUnderCursor::TsNode(node) => self.get_ts_node_content(node), + NodeUnderCursor::CustomNode { text, .. 
} => Some(text.clone()), } } @@ -410,21 +487,17 @@ impl<'a> CompletionContext<'a> { match current_node_kind { "object_reference" | "field" => { let content = self.get_ts_node_content(¤t_node); - if let Some(node_txt) = content { - match node_txt { - NodeText::Original(txt) => { - let parts: Vec<&str> = txt.split('.').collect(); - if parts.len() == 2 { - self.schema_or_alias_name = Some(parts[0].to_string()); - } - } - NodeText::Replaced => {} + if let Some(txt) = content { + let parts: Vec<&str> = txt.split('.').collect(); + if parts.len() == 2 { + self.schema_or_alias_name = Some(parts[0].to_string()); } } } "where" | "update" | "select" | "delete" | "from" | "join" | "column_definitions" - | "drop_table" | "alter_table" => { + | "alter_role" | "drop_role" | "set_statement" | "drop_table" | "alter_table" + | "drop_column" | "alter_column" | "rename_column" => { self.wrapping_clause_type = self.get_wrapping_clause_from_current_node(current_node, &mut cursor); } @@ -515,6 +588,8 @@ impl<'a> CompletionContext<'a> { (WrappingClause::From, &["from"]), (WrappingClause::Join { on_node: None }, &["join"]), (WrappingClause::AlterTable, &["alter", "table"]), + (WrappingClause::AlterColumn, &["alter", "table", "alter"]), + (WrappingClause::RenameColumn, &["alter", "table", "rename"]), ( WrappingClause::AlterTable, &["alter", "table", "if", "exists"], @@ -544,12 +619,7 @@ impl<'a> CompletionContext<'a> { break; } - if let Some(sibling_content) = - self.get_ts_node_content(&sib).and_then(|txt| match txt { - NodeText::Original(txt) => Some(txt), - NodeText::Replaced => None, - }) - { + if let Some(sibling_content) = self.get_ts_node_content(&sib) { if sibling_content == tokens[idx] { idx += 1; } @@ -575,11 +645,51 @@ impl<'a> CompletionContext<'a> { let mut first_sibling = self.get_first_sibling(node); if let Some(clause) = self.wrapping_clause_type.as_ref() { - if clause == &WrappingClause::Insert { - while let Some(sib) = first_sibling.next_sibling() { - match sib.kind() { - 
"object_reference" => { - if let Some(NodeText::Original(txt)) = self.get_ts_node_content(&sib) { + match *clause { + WrappingClause::Insert => { + while let Some(sib) = first_sibling.next_sibling() { + match sib.kind() { + "object_reference" => { + if let Some(txt) = self.get_ts_node_content(&sib) { + let mut iter = txt.split('.').rev(); + let table = iter.next().unwrap().to_string(); + let schema = iter.next().map(|s| s.to_string()); + self.mentioned_relations + .entry(schema) + .and_modify(|s| { + s.insert(table.clone()); + }) + .or_insert(HashSet::from([table])); + } + } + + "column" => { + if let Some(txt) = self.get_ts_node_content(&sib) { + let entry = MentionedColumn { + column: txt, + alias: None, + }; + + self.mentioned_columns + .entry(Some(WrappingClause::Insert)) + .and_modify(|s| { + s.insert(entry.clone()); + }) + .or_insert(HashSet::from([entry])); + } + } + + _ => {} + } + + first_sibling = sib; + } + } + + WrappingClause::AlterColumn => { + while let Some(sib) = first_sibling.next_sibling() { + if sib.kind() == "object_reference" { + if let Some(txt) = self.get_ts_node_content(&sib) { let mut iter = txt.split('.').rev(); let table = iter.next().unwrap().to_string(); let schema = iter.next().map(|s| s.to_string()); @@ -591,27 +701,12 @@ impl<'a> CompletionContext<'a> { .or_insert(HashSet::from([table])); } } - "column" => { - if let Some(NodeText::Original(txt)) = self.get_ts_node_content(&sib) { - let entry = MentionedColumn { - column: txt, - alias: None, - }; - - self.mentioned_columns - .entry(Some(WrappingClause::Insert)) - .and_modify(|s| { - s.insert(entry.clone()); - }) - .or_insert(HashSet::from([entry])); - } - } - _ => {} + first_sibling = sib; } - - first_sibling = sib; } + + _ => {} } } } @@ -628,7 +723,13 @@ impl<'a> CompletionContext<'a> { "delete" => Some(WrappingClause::Delete), "from" => Some(WrappingClause::From), "drop_table" => Some(WrappingClause::DropTable), + "alter_role" => Some(WrappingClause::AlterRole), + "drop_role" => 
Some(WrappingClause::DropRole), + "drop_column" => Some(WrappingClause::DropColumn), + "alter_column" => Some(WrappingClause::AlterColumn), + "rename_column" => Some(WrappingClause::RenameColumn), "alter_table" => Some(WrappingClause::AlterTable), + "set_statement" => Some(WrappingClause::SetStatement), "column_definitions" => Some(WrappingClause::ColumnDefinitions), "insert" => Some(WrappingClause::Insert), "join" => { @@ -648,7 +749,7 @@ impl<'a> CompletionContext<'a> { } } - pub(crate) fn parent_matches_one_of_kind(&self, kinds: &[&'static str]) -> bool { + pub fn parent_matches_one_of_kind(&self, kinds: &[&'static str]) -> bool { self.node_under_cursor .as_ref() .is_some_and(|under_cursor| match under_cursor { @@ -659,7 +760,7 @@ impl<'a> CompletionContext<'a> { NodeUnderCursor::CustomNode { .. } => false, }) } - pub(crate) fn before_cursor_matches_kind(&self, kinds: &[&'static str]) -> bool { + pub fn before_cursor_matches_kind(&self, kinds: &[&'static str]) -> bool { self.node_under_cursor.as_ref().is_some_and(|under_cursor| { match under_cursor { NodeUnderCursor::TsNode(node) => { @@ -675,7 +776,11 @@ impl<'a> CompletionContext<'a> { .is_some_and(|sib| kinds.contains(&sib.kind())) } - NodeUnderCursor::CustomNode { .. } => false, + NodeUnderCursor::CustomNode { + previous_node_kind, .. 
+ } => previous_node_kind + .as_ref() + .is_some_and(|k| kinds.contains(&k.as_str())), } }) } @@ -683,12 +788,9 @@ impl<'a> CompletionContext<'a> { #[cfg(test)] mod tests { - use crate::{ - NodeText, - context::{CompletionContext, WrappingClause}, - sanitization::SanitizedCompletionParams, - test_helper::{CURSOR_POS, get_text_and_position}, - }; + use crate::context::{TreeSitterContextParams, TreesitterContext, WrappingClause}; + + use pgt_test_utils::QueryWithCursorPosition; use super::NodeUnderCursor; @@ -705,56 +807,82 @@ mod tests { fn identifies_clauses() { let test_cases = vec![ ( - format!("Select {}* from users;", CURSOR_POS), + format!( + "Select {}* from users;", + QueryWithCursorPosition::cursor_marker() + ), WrappingClause::Select, ), ( - format!("Select * from u{};", CURSOR_POS), + format!( + "Select * from u{};", + QueryWithCursorPosition::cursor_marker() + ), WrappingClause::From, ), ( - format!("Select {}* from users where n = 1;", CURSOR_POS), + format!( + "Select {}* from users where n = 1;", + QueryWithCursorPosition::cursor_marker() + ), WrappingClause::Select, ), ( - format!("Select * from users where {}n = 1;", CURSOR_POS), + format!( + "Select * from users where {}n = 1;", + QueryWithCursorPosition::cursor_marker() + ), WrappingClause::Where, ), ( - format!("update users set u{} = 1 where n = 2;", CURSOR_POS), + format!( + "update users set u{} = 1 where n = 2;", + QueryWithCursorPosition::cursor_marker() + ), WrappingClause::Update, ), ( - format!("update users set u = 1 where n{} = 2;", CURSOR_POS), + format!( + "update users set u = 1 where n{} = 2;", + QueryWithCursorPosition::cursor_marker() + ), WrappingClause::Where, ), ( - format!("delete{} from users;", CURSOR_POS), + format!( + "delete{} from users;", + QueryWithCursorPosition::cursor_marker() + ), WrappingClause::Delete, ), ( - format!("delete from {}users;", CURSOR_POS), + format!( + "delete from {}users;", + QueryWithCursorPosition::cursor_marker() + ), WrappingClause::From, ), ( 
- format!("select name, age, location from public.u{}sers", CURSOR_POS), + format!( + "select name, age, location from public.u{}sers", + QueryWithCursorPosition::cursor_marker() + ), WrappingClause::From, ), ]; for (query, expected_clause) in test_cases { - let (position, text) = get_text_and_position(query.as_str().into()); + let (position, text) = QueryWithCursorPosition::from(query).get_text_and_position(); let tree = get_tree(text.as_str()); - let params = SanitizedCompletionParams { + let params = TreeSitterContextParams { position: (position as u32).into(), - text, - tree: std::borrow::Cow::Owned(tree), - schema: &pgt_schema_cache::SchemaCache::default(), + text: &text, + tree: &tree, }; - let ctx = CompletionContext::new(¶ms); + let ctx = TreesitterContext::new(params); assert_eq!(ctx.wrapping_clause_type, Some(expected_clause)); } @@ -764,29 +892,46 @@ mod tests { fn identifies_schema() { let test_cases = vec![ ( - format!("Select * from private.u{}", CURSOR_POS), + format!( + "Select * from private.u{}", + QueryWithCursorPosition::cursor_marker() + ), Some("private"), ), ( - format!("Select * from private.u{}sers()", CURSOR_POS), + format!( + "Select * from private.u{}sers()", + QueryWithCursorPosition::cursor_marker() + ), Some("private"), ), - (format!("Select * from u{}sers", CURSOR_POS), None), - (format!("Select * from u{}sers()", CURSOR_POS), None), + ( + format!( + "Select * from u{}sers", + QueryWithCursorPosition::cursor_marker() + ), + None, + ), + ( + format!( + "Select * from u{}sers()", + QueryWithCursorPosition::cursor_marker() + ), + None, + ), ]; for (query, expected_schema) in test_cases { - let (position, text) = get_text_and_position(query.as_str().into()); + let (position, text) = QueryWithCursorPosition::from(query).get_text_and_position(); let tree = get_tree(text.as_str()); - let params = SanitizedCompletionParams { + let params = TreeSitterContextParams { position: (position as u32).into(), - text, - tree: 
std::borrow::Cow::Owned(tree), - schema: &pgt_schema_cache::SchemaCache::default(), + text: &text, + tree: &tree, }; - let ctx = CompletionContext::new(¶ms); + let ctx = TreesitterContext::new(params); assert_eq!( ctx.schema_or_alias_name, @@ -798,32 +943,55 @@ mod tests { #[test] fn identifies_invocation() { let test_cases = vec![ - (format!("Select * from u{}sers", CURSOR_POS), false), - (format!("Select * from u{}sers()", CURSOR_POS), true), - (format!("Select cool{};", CURSOR_POS), false), - (format!("Select cool{}();", CURSOR_POS), true), ( - format!("Select upp{}ercase as title from users;", CURSOR_POS), + format!( + "Select * from u{}sers", + QueryWithCursorPosition::cursor_marker() + ), + false, + ), + ( + format!( + "Select * from u{}sers()", + QueryWithCursorPosition::cursor_marker() + ), + true, + ), + ( + format!("Select cool{};", QueryWithCursorPosition::cursor_marker()), false, ), ( - format!("Select upp{}ercase(name) as title from users;", CURSOR_POS), + format!("Select cool{}();", QueryWithCursorPosition::cursor_marker()), + true, + ), + ( + format!( + "Select upp{}ercase as title from users;", + QueryWithCursorPosition::cursor_marker() + ), + false, + ), + ( + format!( + "Select upp{}ercase(name) as title from users;", + QueryWithCursorPosition::cursor_marker() + ), true, ), ]; for (query, is_invocation) in test_cases { - let (position, text) = get_text_and_position(query.as_str().into()); + let (position, text) = QueryWithCursorPosition::from(query).get_text_and_position(); let tree = get_tree(text.as_str()); - let params = SanitizedCompletionParams { + let params = TreeSitterContextParams { position: (position as u32).into(), - text, - tree: std::borrow::Cow::Owned(tree), - schema: &pgt_schema_cache::SchemaCache::default(), + text: text.as_str(), + tree: &tree, }; - let ctx = CompletionContext::new(¶ms); + let ctx = TreesitterContext::new(params); assert_eq!(ctx.is_invocation, is_invocation); } @@ -832,32 +1000,34 @@ mod tests { #[test] fn 
does_not_fail_on_leading_whitespace() { let cases = vec![ - format!("{} select * from", CURSOR_POS), - format!(" {} select * from", CURSOR_POS), + format!( + "{} select * from", + QueryWithCursorPosition::cursor_marker() + ), + format!( + " {} select * from", + QueryWithCursorPosition::cursor_marker() + ), ]; for query in cases { - let (position, text) = get_text_and_position(query.as_str().into()); + let (position, text) = QueryWithCursorPosition::from(query).get_text_and_position(); let tree = get_tree(text.as_str()); - let params = SanitizedCompletionParams { + let params = TreeSitterContextParams { position: (position as u32).into(), - text, - tree: std::borrow::Cow::Owned(tree), - schema: &pgt_schema_cache::SchemaCache::default(), + text: &text, + tree: &tree, }; - let ctx = CompletionContext::new(¶ms); + let ctx = TreesitterContext::new(params); let node = ctx.node_under_cursor.as_ref().unwrap(); match node { NodeUnderCursor::TsNode(node) => { - assert_eq!( - ctx.get_ts_node_content(node), - Some(NodeText::Original("select".into())) - ); + assert_eq!(ctx.get_ts_node_content(node), Some("select".into())); assert_eq!( ctx.wrapping_clause_type, @@ -871,29 +1041,28 @@ mod tests { #[test] fn does_not_fail_on_trailing_whitespace() { - let query = format!("select * from {}", CURSOR_POS); + let query = format!( + "select * from {}", + QueryWithCursorPosition::cursor_marker() + ); - let (position, text) = get_text_and_position(query.as_str().into()); + let (position, text) = QueryWithCursorPosition::from(query).get_text_and_position(); let tree = get_tree(text.as_str()); - let params = SanitizedCompletionParams { + let params = TreeSitterContextParams { position: (position as u32).into(), - text, - tree: std::borrow::Cow::Owned(tree), - schema: &pgt_schema_cache::SchemaCache::default(), + text: &text, + tree: &tree, }; - let ctx = CompletionContext::new(¶ms); + let ctx = TreesitterContext::new(params); let node = ctx.node_under_cursor.as_ref().unwrap(); match node { 
NodeUnderCursor::TsNode(node) => { - assert_eq!( - ctx.get_ts_node_content(node), - Some(NodeText::Original("from".into())) - ); + assert_eq!(ctx.get_ts_node_content(node), Some("from".into())); } _ => unreachable!(), } @@ -901,29 +1070,25 @@ mod tests { #[test] fn does_not_fail_with_empty_statements() { - let query = format!("{}", CURSOR_POS); + let query = format!("{}", QueryWithCursorPosition::cursor_marker()); - let (position, text) = get_text_and_position(query.as_str().into()); + let (position, text) = QueryWithCursorPosition::from(query).get_text_and_position(); let tree = get_tree(text.as_str()); - let params = SanitizedCompletionParams { + let params = TreeSitterContextParams { position: (position as u32).into(), - text, - tree: std::borrow::Cow::Owned(tree), - schema: &pgt_schema_cache::SchemaCache::default(), + text: &text, + tree: &tree, }; - let ctx = CompletionContext::new(¶ms); + let ctx = TreesitterContext::new(params); let node = ctx.node_under_cursor.as_ref().unwrap(); match node { NodeUnderCursor::TsNode(node) => { - assert_eq!( - ctx.get_ts_node_content(node), - Some(NodeText::Original("".into())) - ); + assert_eq!(ctx.get_ts_node_content(node), Some("".into())); assert_eq!(ctx.wrapping_clause_type, None); } _ => unreachable!(), @@ -934,29 +1099,25 @@ mod tests { fn does_not_fail_on_incomplete_keywords() { // Instead of autocompleting "FROM", we'll assume that the user // is selecting a certain column name, such as `frozen_account`. 
- let query = format!("select * fro{}", CURSOR_POS); + let query = format!("select * fro{}", QueryWithCursorPosition::cursor_marker()); - let (position, text) = get_text_and_position(query.as_str().into()); + let (position, text) = QueryWithCursorPosition::from(query).get_text_and_position(); let tree = get_tree(text.as_str()); - let params = SanitizedCompletionParams { + let params = TreeSitterContextParams { position: (position as u32).into(), - text, - tree: std::borrow::Cow::Owned(tree), - schema: &pgt_schema_cache::SchemaCache::default(), + text: &text, + tree: &tree, }; - let ctx = CompletionContext::new(¶ms); + let ctx = TreesitterContext::new(params); let node = ctx.node_under_cursor.as_ref().unwrap(); match node { NodeUnderCursor::TsNode(node) => { - assert_eq!( - ctx.get_ts_node_content(node), - Some(NodeText::Original("fro".into())) - ); + assert_eq!(ctx.get_ts_node_content(node), Some("fro".into())); assert_eq!(ctx.wrapping_clause_type, Some(WrappingClause::Select)); } _ => unreachable!(), diff --git a/crates/pgt_completions/src/context/policy_parser.rs b/crates/pgt_treesitter/src/context/policy_parser.rs similarity index 53% rename from crates/pgt_completions/src/context/policy_parser.rs rename to crates/pgt_treesitter/src/context/policy_parser.rs index db37a13f3..776645163 100644 --- a/crates/pgt_completions/src/context/policy_parser.rs +++ b/crates/pgt_treesitter/src/context/policy_parser.rs @@ -1,7 +1,9 @@ -use std::iter::Peekable; - use pgt_text_size::{TextRange, TextSize}; +use crate::context::base_parser::{ + CompletionStatementParser, TokenNavigator, WordWithIndex, schema_and_table_name, +}; + #[derive(Default, Debug, PartialEq, Eq)] pub(crate) enum PolicyStmtKind { #[default] @@ -11,90 +13,6 @@ pub(crate) enum PolicyStmtKind { Drop, } -#[derive(Clone, Debug, PartialEq, Eq)] -struct WordWithIndex { - word: String, - start: usize, - end: usize, -} - -impl WordWithIndex { - fn is_under_cursor(&self, cursor_pos: usize) -> bool { - self.start <= 
cursor_pos && self.end > cursor_pos - } - - fn get_range(&self) -> TextRange { - let start: u32 = self.start.try_into().expect("Text too long"); - let end: u32 = self.end.try_into().expect("Text too long"); - TextRange::new(TextSize::from(start), TextSize::from(end)) - } -} - -/// Note: A policy name within quotation marks will be considered a single word. -fn sql_to_words(sql: &str) -> Result, String> { - let mut words = vec![]; - - let mut start_of_word: Option = None; - let mut current_word = String::new(); - let mut in_quotation_marks = false; - - for (current_position, current_char) in sql.char_indices() { - if (current_char.is_ascii_whitespace() || current_char == ';') - && !current_word.is_empty() - && start_of_word.is_some() - && !in_quotation_marks - { - words.push(WordWithIndex { - word: current_word, - start: start_of_word.unwrap(), - end: current_position, - }); - - current_word = String::new(); - start_of_word = None; - } else if (current_char.is_ascii_whitespace() || current_char == ';') - && current_word.is_empty() - { - // do nothing - } else if current_char == '"' && start_of_word.is_none() { - in_quotation_marks = true; - current_word.push(current_char); - start_of_word = Some(current_position); - } else if current_char == '"' && start_of_word.is_some() { - current_word.push(current_char); - words.push(WordWithIndex { - word: current_word, - start: start_of_word.unwrap(), - end: current_position + 1, - }); - in_quotation_marks = false; - start_of_word = None; - current_word = String::new() - } else if start_of_word.is_some() { - current_word.push(current_char) - } else { - start_of_word = Some(current_position); - current_word.push(current_char); - } - } - - if let Some(start_of_word) = start_of_word { - if !current_word.is_empty() { - words.push(WordWithIndex { - word: current_word, - start: start_of_word, - end: sql.len(), - }); - } - } - - if in_quotation_marks { - Err("String was not closed properly.".into()) - } else { - Ok(words) - } -} - 
#[derive(Default, Debug, PartialEq, Eq)] pub(crate) struct PolicyContext { pub policy_name: Option, @@ -104,6 +22,10 @@ pub(crate) struct PolicyContext { pub node_text: String, pub node_range: TextRange, pub node_kind: String, + pub previous_node_text: String, + pub previous_node_range: TextRange, + pub previous_node_kind: String, + pub in_check_or_using_clause: bool, } /// Simple parser that'll turn a policy-related statement into a context object required for @@ -111,15 +33,17 @@ pub(crate) struct PolicyContext { /// The parser will only work if the (trimmed) sql starts with `create policy`, `drop policy`, or `alter policy`. /// It can only parse policy statements. pub(crate) struct PolicyParser { - tokens: Peekable>, - previous_token: Option, - current_token: Option, + navigator: TokenNavigator, context: PolicyContext, cursor_position: usize, + in_check_or_using_clause: bool, } -impl PolicyParser { - pub(crate) fn looks_like_policy_stmt(sql: &str) -> bool { +impl CompletionStatementParser for PolicyParser { + type Context = PolicyContext; + const NAME: &'static str = "PolicyParser"; + + fn looks_like_matching_stmt(sql: &str) -> bool { let lowercased = sql.to_ascii_lowercase(); let trimmed = lowercased.trim(); trimmed.starts_with("create policy") @@ -127,30 +51,8 @@ impl PolicyParser { || trimmed.starts_with("alter policy") } - pub(crate) fn get_context(sql: &str, cursor_position: usize) -> PolicyContext { - assert!( - Self::looks_like_policy_stmt(sql), - "PolicyParser should only be used for policy statements. Developer error!" 
- ); - - match sql_to_words(sql) { - Ok(tokens) => { - let parser = PolicyParser { - tokens: tokens.into_iter().peekable(), - context: PolicyContext::default(), - previous_token: None, - current_token: None, - cursor_position, - }; - - parser.parse() - } - Err(_) => PolicyContext::default(), - } - } - - fn parse(mut self) -> PolicyContext { - while let Some(token) = self.advance() { + fn parse(mut self) -> Self::Context { + while let Some(token) = self.navigator.advance() { if token.is_under_cursor(self.cursor_position) { self.handle_token_under_cursor(token); } else { @@ -161,22 +63,41 @@ impl PolicyParser { self.context } + fn make_parser(tokens: Vec, cursor_position: usize) -> Self { + Self { + navigator: tokens.into(), + context: PolicyContext::default(), + cursor_position, + in_check_or_using_clause: false, + } + } +} + +impl PolicyParser { fn handle_token_under_cursor(&mut self, token: WordWithIndex) { - if self.previous_token.is_none() { + if self.navigator.previous_token.is_none() { return; } - let previous = self.previous_token.take().unwrap(); + self.context.in_check_or_using_clause = self.in_check_or_using_clause; - match previous.word.to_ascii_lowercase().as_str() { + let previous = self.navigator.previous_token.take().unwrap(); + + match previous + .get_word_without_quotes() + .to_ascii_lowercase() + .as_str() + { "policy" => { self.context.node_range = token.get_range(); self.context.node_kind = "policy_name".into(); - self.context.node_text = token.word; + self.context.node_text = token.get_word(); + + self.context.previous_node_kind = "keyword_policy".into(); } "on" => { - if token.word.contains('.') { - let (schema_name, table_name) = self.schema_and_table_name(&token); + if token.get_word_without_quotes().contains('.') { + let (schema_name, table_name) = schema_and_table_name(&token); let schema_name_len = schema_name.len(); self.context.schema_name = Some(schema_name); @@ -198,85 +119,90 @@ impl PolicyParser { self.context.node_text = 
table_name.unwrap_or_default(); } else { self.context.node_range = token.get_range(); - self.context.node_text = token.word; + self.context.node_text = token.get_word(); self.context.node_kind = "policy_table".into(); } + + self.context.previous_node_kind = "keyword_on".into(); } "to" => { self.context.node_range = token.get_range(); self.context.node_kind = "policy_role".into(); - self.context.node_text = token.word; + self.context.node_text = token.get_word(); + + self.context.previous_node_kind = "keyword_to".into(); } - _ => { + + other => { self.context.node_range = token.get_range(); - self.context.node_text = token.word; + self.context.node_text = token.get_word(); + + self.context.previous_node_range = previous.get_range(); + self.context.previous_node_text = previous.get_word(); + + match other { + "(" | "=" => self.context.previous_node_kind = other.into(), + "and" => self.context.previous_node_kind = "keyword_and".into(), + + _ => self.context.previous_node_kind = "".into(), + } } } + + self.context.previous_node_range = previous.get_range(); + self.context.previous_node_text = previous.get_word(); } fn handle_token(&mut self, token: WordWithIndex) { - match token.word.to_ascii_lowercase().as_str() { - "create" if self.next_matches("policy") => { + match token + .get_word_without_quotes() + .to_ascii_lowercase() + .as_str() + { + "create" if self.navigator.next_matches(&["policy"]) => { self.context.statement_kind = PolicyStmtKind::Create; } - "alter" if self.next_matches("policy") => { + "alter" if self.navigator.next_matches(&["policy"]) => { self.context.statement_kind = PolicyStmtKind::Alter; } - "drop" if self.next_matches("policy") => { + "drop" if self.navigator.next_matches(&["policy"]) => { self.context.statement_kind = PolicyStmtKind::Drop; } "on" => self.table_with_schema(), + "(" if self.navigator.prev_matches(&["using", "check"]) => { + self.in_check_or_using_clause = true; + } + ")" => { + self.in_check_or_using_clause = false; + } + // 
skip the "to" so we don't parse it as the TO rolename when it's under the cursor - "rename" if self.next_matches("to") => { - self.advance(); + "rename" if self.navigator.next_matches(&["to"]) => { + self.navigator.advance(); } _ => { - if self.prev_matches("policy") { - self.context.policy_name = Some(token.word); + if self.navigator.prev_matches(&["policy"]) { + self.context.policy_name = Some(token.get_word()); } } } } - fn next_matches(&mut self, it: &str) -> bool { - self.tokens.peek().is_some_and(|c| c.word.as_str() == it) - } - - fn prev_matches(&self, it: &str) -> bool { - self.previous_token.as_ref().is_some_and(|t| t.word == it) - } - - fn advance(&mut self) -> Option { - // we can't peek back n an iterator, so we'll have to keep track manually. - self.previous_token = self.current_token.take(); - self.current_token = self.tokens.next(); - self.current_token.clone() - } - fn table_with_schema(&mut self) { - if let Some(token) = self.advance() { + if let Some(token) = self.navigator.advance() { if token.is_under_cursor(self.cursor_position) { self.handle_token_under_cursor(token); - } else if token.word.contains('.') { - let (schema, maybe_table) = self.schema_and_table_name(&token); + } else if token.get_word_without_quotes().contains('.') { + let (schema, maybe_table) = schema_and_table_name(&token); self.context.schema_name = Some(schema); self.context.table_name = maybe_table; } else { - self.context.table_name = Some(token.word); + self.context.table_name = Some(token.get_word()); } }; } - - fn schema_and_table_name(&self, token: &WordWithIndex) -> (String, Option) { - let mut parts = token.word.split('.'); - - ( - parts.next().unwrap().into(), - parts.next().map(|tb| tb.into()), - ) - } } #[cfg(test)] @@ -284,17 +210,19 @@ mod tests { use pgt_text_size::{TextRange, TextSize}; use crate::{ - context::policy_parser::{PolicyContext, PolicyStmtKind, WordWithIndex}, - test_helper::CURSOR_POS, + context::base_parser::CompletionStatementParser, + 
context::policy_parser::{PolicyContext, PolicyStmtKind}, }; - use super::{PolicyParser, sql_to_words}; + use pgt_test_utils::QueryWithCursorPosition; + + use super::PolicyParser; fn with_pos(query: String) -> (usize, String) { let mut pos: Option = None; for (p, c) in query.char_indices() { - if c == CURSOR_POS { + if c == QueryWithCursorPosition::cursor_marker() { pos = Some(p); break; } @@ -302,7 +230,9 @@ mod tests { ( pos.expect("Please add cursor position!"), - query.replace(CURSOR_POS, "REPLACED_TOKEN").to_string(), + query + .replace(QueryWithCursorPosition::cursor_marker(), "REPLACED_TOKEN") + .to_string(), ) } @@ -312,7 +242,7 @@ mod tests { r#" create policy {} "#, - CURSOR_POS + QueryWithCursorPosition::cursor_marker() )); let context = PolicyParser::get_context(query.as_str(), pos); @@ -326,7 +256,11 @@ mod tests { statement_kind: PolicyStmtKind::Create, node_text: "REPLACED_TOKEN".into(), node_range: TextRange::new(TextSize::new(25), TextSize::new(39)), - node_kind: "policy_name".into() + node_kind: "policy_name".into(), + in_check_or_using_clause: false, + previous_node_kind: "keyword_policy".into(), + previous_node_range: TextRange::new(18.into(), 24.into()), + previous_node_text: "policy".into(), } ); @@ -334,7 +268,7 @@ mod tests { r#" create policy "my cool policy" {} "#, - CURSOR_POS + QueryWithCursorPosition::cursor_marker() )); let context = PolicyParser::get_context(query.as_str(), pos); @@ -349,6 +283,10 @@ mod tests { node_text: "REPLACED_TOKEN".into(), node_kind: "".into(), node_range: TextRange::new(TextSize::new(42), TextSize::new(56)), + in_check_or_using_clause: false, + previous_node_kind: "".into(), + previous_node_range: TextRange::new(25.into(), 41.into()), + previous_node_text: "\"my cool policy\"".into(), } ); @@ -356,7 +294,7 @@ mod tests { r#" create policy "my cool policy" on {} "#, - CURSOR_POS + QueryWithCursorPosition::cursor_marker() )); let context = PolicyParser::get_context(query.as_str(), pos); @@ -371,6 +309,10 @@ mod 
tests { node_text: "REPLACED_TOKEN".into(), node_kind: "policy_table".into(), node_range: TextRange::new(TextSize::new(45), TextSize::new(59)), + in_check_or_using_clause: false, + previous_node_kind: "keyword_on".into(), + previous_node_range: TextRange::new(42.into(), 44.into()), + previous_node_text: "on".into(), } ); @@ -378,7 +320,7 @@ mod tests { r#" create policy "my cool policy" on auth.{} "#, - CURSOR_POS + QueryWithCursorPosition::cursor_marker() )); let context = PolicyParser::get_context(query.as_str(), pos); @@ -393,6 +335,10 @@ mod tests { node_text: "REPLACED_TOKEN".into(), node_kind: "policy_table".into(), node_range: TextRange::new(TextSize::new(50), TextSize::new(64)), + in_check_or_using_clause: false, + previous_node_kind: "keyword_on".into(), + previous_node_range: TextRange::new(42.into(), 44.into()), + previous_node_text: "on".into(), } ); @@ -401,7 +347,7 @@ mod tests { create policy "my cool policy" on auth.users as {} "#, - CURSOR_POS + QueryWithCursorPosition::cursor_marker() )); let context = PolicyParser::get_context(query.as_str(), pos); @@ -416,6 +362,10 @@ mod tests { node_text: "REPLACED_TOKEN".into(), node_kind: "".into(), node_range: TextRange::new(TextSize::new(72), TextSize::new(86)), + in_check_or_using_clause: false, + previous_node_kind: "".into(), + previous_node_range: TextRange::new(69.into(), 71.into()), + previous_node_text: "as".into(), } ); @@ -425,7 +375,7 @@ mod tests { as permissive {} "#, - CURSOR_POS + QueryWithCursorPosition::cursor_marker() )); let context = PolicyParser::get_context(query.as_str(), pos); @@ -440,6 +390,10 @@ mod tests { node_text: "REPLACED_TOKEN".into(), node_kind: "".into(), node_range: TextRange::new(TextSize::new(95), TextSize::new(109)), + in_check_or_using_clause: false, + previous_node_kind: "".into(), + previous_node_range: TextRange::new(72.into(), 82.into()), + previous_node_text: "permissive".into(), } ); @@ -449,7 +403,7 @@ mod tests { as permissive to {} "#, - CURSOR_POS + 
QueryWithCursorPosition::cursor_marker() )); let context = PolicyParser::get_context(query.as_str(), pos); @@ -464,6 +418,10 @@ mod tests { node_text: "REPLACED_TOKEN".into(), node_kind: "policy_role".into(), node_range: TextRange::new(TextSize::new(98), TextSize::new(112)), + in_check_or_using_clause: false, + previous_node_kind: "keyword_to".into(), + previous_node_range: TextRange::new(95.into(), 97.into()), + previous_node_text: "to".into(), } ); } @@ -477,7 +435,7 @@ mod tests { to all using (true); "#, - CURSOR_POS + QueryWithCursorPosition::cursor_marker() )); let context = PolicyParser::get_context(query.as_str(), pos); @@ -491,7 +449,11 @@ mod tests { statement_kind: PolicyStmtKind::Create, node_text: "REPLACED_TOKEN".into(), node_range: TextRange::new(TextSize::new(57), TextSize::new(71)), - node_kind: "policy_table".into() + node_kind: "policy_table".into(), + in_check_or_using_clause: false, + previous_node_kind: "keyword_on".into(), + previous_node_range: TextRange::new(54.into(), 56.into()), + previous_node_text: "on".into(), } ) } @@ -505,7 +467,7 @@ mod tests { to all using (true); "#, - CURSOR_POS + QueryWithCursorPosition::cursor_marker() )); let context = PolicyParser::get_context(query.as_str(), pos); @@ -519,7 +481,11 @@ mod tests { statement_kind: PolicyStmtKind::Create, node_text: "REPLACED_TOKEN".into(), node_range: TextRange::new(TextSize::new(62), TextSize::new(76)), - node_kind: "policy_table".into() + node_kind: "policy_table".into(), + in_check_or_using_clause: false, + previous_node_kind: "keyword_on".into(), + previous_node_range: TextRange::new(54.into(), 56.into()), + previous_node_text: "on".into(), } ) } @@ -530,7 +496,7 @@ mod tests { r#" drop policy {} on auth.users; "#, - CURSOR_POS + QueryWithCursorPosition::cursor_marker() )); let context = PolicyParser::get_context(query.as_str(), pos); @@ -544,7 +510,11 @@ mod tests { statement_kind: PolicyStmtKind::Drop, node_text: "REPLACED_TOKEN".into(), node_range: 
TextRange::new(TextSize::new(23), TextSize::new(37)), - node_kind: "policy_name".into() + node_kind: "policy_name".into(), + in_check_or_using_clause: false, + previous_node_kind: "keyword_policy".into(), + previous_node_range: TextRange::new(16.into(), 22.into()), + previous_node_text: "policy".into(), } ); @@ -553,7 +523,7 @@ mod tests { r#" drop policy "{}" on auth.users; "#, - CURSOR_POS + QueryWithCursorPosition::cursor_marker() )); let context = PolicyParser::get_context(query.as_str(), pos); @@ -567,7 +537,11 @@ mod tests { statement_kind: PolicyStmtKind::Drop, node_text: "\"REPLACED_TOKEN\"".into(), node_range: TextRange::new(TextSize::new(23), TextSize::new(39)), - node_kind: "policy_name".into() + node_kind: "policy_name".into(), + in_check_or_using_clause: false, + previous_node_kind: "keyword_policy".into(), + previous_node_range: TextRange::new(16.into(), 22.into()), + previous_node_text: "policy".into(), } ); } @@ -578,7 +552,7 @@ mod tests { r#" drop policy "{} on auth.users; "#, - CURSOR_POS + QueryWithCursorPosition::cursor_marker() )); let context = PolicyParser::get_context(query.as_str(), pos); @@ -586,32 +560,99 @@ mod tests { assert_eq!(context, PolicyContext::default()); } - fn to_word(word: &str, start: usize, end: usize) -> WordWithIndex { - WordWithIndex { - word: word.into(), - start, - end, + #[test] + fn correctly_determines_we_are_inside_checks() { + { + let (pos, query) = with_pos(format!( + r#" + create policy "my cool policy" + on auth.users + to all + using (id = {}) + "#, + QueryWithCursorPosition::cursor_marker() + )); + + let context = PolicyParser::get_context(query.as_str(), pos); + + assert_eq!( + context, + PolicyContext { + policy_name: Some(r#""my cool policy""#.into()), + table_name: Some("users".into()), + schema_name: Some("auth".into()), + statement_kind: PolicyStmtKind::Create, + node_text: "REPLACED_TOKEN".into(), + node_range: TextRange::new(TextSize::new(112), TextSize::new(126)), + node_kind: "".into(), + 
in_check_or_using_clause: true, + previous_node_kind: "=".into(), + previous_node_range: TextRange::new(110.into(), 111.into()), + previous_node_text: "=".into(), + } + ); } - } - #[test] - fn determines_positions_correctly() { - let query = "\ncreate policy \"my cool pol\"\n\ton auth.users\n\tas permissive\n\tfor select\n\t\tto public\n\t\tusing (true);".to_string(); - - let words = sql_to_words(query.as_str()).unwrap(); - - assert_eq!(words[0], to_word("create", 1, 7)); - assert_eq!(words[1], to_word("policy", 8, 14)); - assert_eq!(words[2], to_word("\"my cool pol\"", 15, 28)); - assert_eq!(words[3], to_word("on", 30, 32)); - assert_eq!(words[4], to_word("auth.users", 33, 43)); - assert_eq!(words[5], to_word("as", 45, 47)); - assert_eq!(words[6], to_word("permissive", 48, 58)); - assert_eq!(words[7], to_word("for", 60, 63)); - assert_eq!(words[8], to_word("select", 64, 70)); - assert_eq!(words[9], to_word("to", 73, 75)); - assert_eq!(words[10], to_word("public", 78, 84)); - assert_eq!(words[11], to_word("using", 87, 92)); - assert_eq!(words[12], to_word("(true)", 93, 99)); + { + let (pos, query) = with_pos(format!( + r#" + create policy "my cool policy" + on auth.users + to all + using ({} + "#, + QueryWithCursorPosition::cursor_marker() + )); + + let context = PolicyParser::get_context(query.as_str(), pos); + + assert_eq!( + context, + PolicyContext { + policy_name: Some(r#""my cool policy""#.into()), + table_name: Some("users".into()), + schema_name: Some("auth".into()), + statement_kind: PolicyStmtKind::Create, + node_text: "REPLACED_TOKEN".into(), + node_range: TextRange::new(TextSize::new(106), TextSize::new(120)), + node_kind: "".into(), + in_check_or_using_clause: true, + previous_node_kind: "(".into(), + previous_node_range: TextRange::new(105.into(), 106.into()), + previous_node_text: "(".into(), + } + ) + } + + { + let (pos, query) = with_pos(format!( + r#" + create policy "my cool policy" + on auth.users + to all + with check ({} + "#, + 
QueryWithCursorPosition::cursor_marker() + )); + + let context = PolicyParser::get_context(query.as_str(), pos); + + assert_eq!( + context, + PolicyContext { + policy_name: Some(r#""my cool policy""#.into()), + table_name: Some("users".into()), + schema_name: Some("auth".into()), + statement_kind: PolicyStmtKind::Create, + node_text: "REPLACED_TOKEN".into(), + node_range: TextRange::new(TextSize::new(111), TextSize::new(125)), + node_kind: "".into(), + in_check_or_using_clause: true, + previous_node_kind: "(".into(), + previous_node_range: TextRange::new(110.into(), 111.into()), + previous_node_text: "(".into(), + } + ) + } + } +} diff --git a/crates/pgt_treesitter/src/context/revoke_parser.rs b/crates/pgt_treesitter/src/context/revoke_parser.rs new file mode 100644 index 000000000..4f5b09ec8 --- /dev/null +++ b/crates/pgt_treesitter/src/context/revoke_parser.rs @@ -0,0 +1,342 @@ +use pgt_text_size::{TextRange, TextSize}; + +use crate::context::base_parser::{ + CompletionStatementParser, TokenNavigator, WordWithIndex, schema_and_table_name, +}; + +#[derive(Default, Debug, PartialEq, Eq)] +pub(crate) struct RevokeContext { + pub table_name: Option<String>, + pub schema_name: Option<String>, + pub node_text: String, + pub node_range: TextRange, + pub node_kind: String, +} + +/// Simple parser that'll turn a revoke statement into a context object required for +/// completions. +/// The parser will only work if the (trimmed) sql starts with `revoke`. +/// It can only parse revoke statements.
+pub(crate) struct RevokeParser { + navigator: TokenNavigator, + context: RevokeContext, + cursor_position: usize, + in_roles_list: bool, + is_revoking_role: bool, +} + +impl CompletionStatementParser for RevokeParser { + type Context = RevokeContext; + const NAME: &'static str = "RevokeParser"; + + fn looks_like_matching_stmt(sql: &str) -> bool { + let lowercased = sql.to_ascii_lowercase(); + let trimmed = lowercased.trim(); + trimmed.starts_with("revoke") + } + + fn parse(mut self) -> Self::Context { + while let Some(token) = self.navigator.advance() { + if token.is_under_cursor(self.cursor_position) { + self.handle_token_under_cursor(token); + } else { + self.handle_token(token); + } + } + + self.context + } + + fn make_parser(tokens: Vec, cursor_position: usize) -> Self { + Self { + navigator: tokens.into(), + context: RevokeContext::default(), + cursor_position, + in_roles_list: false, + is_revoking_role: false, + } + } +} + +impl RevokeParser { + fn handle_token_under_cursor(&mut self, token: WordWithIndex) { + if self.navigator.previous_token.is_none() { + return; + } + + let previous = self.navigator.previous_token.take().unwrap(); + let current = self + .navigator + .current_token + .as_ref() + .map(|w| w.get_word_without_quotes()); + + match previous + .get_word_without_quotes() + .to_ascii_lowercase() + .as_str() + { + "on" if !matches!(current.as_deref(), Some("table")) => self.handle_table(&token), + + "table" => { + self.handle_table(&token); + } + + "from" | "revoke" => { + self.context.node_range = token.get_range(); + self.context.node_kind = "revoke_role".into(); + self.context.node_text = token.get_word(); + } + + "for" if self.is_revoking_role => { + self.context.node_range = token.get_range(); + self.context.node_kind = "revoke_role".into(); + self.context.node_text = token.get_word(); + } + + t => { + if self.in_roles_list && t.ends_with(',') { + self.context.node_kind = "revoke_role".into(); + } + + self.context.node_range = 
token.get_range(); + self.context.node_text = token.get_word(); + } + } + } + + fn handle_table(&mut self, token: &WordWithIndex) { + if token.get_word_without_quotes().contains('.') { + let (schema_name, table_name) = schema_and_table_name(token); + + let schema_name_len = schema_name.len(); + self.context.schema_name = Some(schema_name); + + let offset: u32 = schema_name_len.try_into().expect("Text too long"); + let range_without_schema = token + .get_range() + .checked_expand_start( + TextSize::new(offset + 1), // kill the dot as well + ) + .expect("Text too long"); + + self.context.node_range = range_without_schema; + self.context.node_kind = "revoke_table".into(); + + // In practice, we should always have a table name. + // The completion sanitization will add a word after a `.` if nothing follows it; + // the token_text will then look like `schema.REPLACED_TOKEN`. + self.context.node_text = table_name.unwrap_or_default(); + } else { + self.context.node_range = token.get_range(); + self.context.node_text = token.get_word(); + self.context.node_kind = "revoke_table".into(); + } + } + + fn handle_token(&mut self, token: WordWithIndex) { + match token.get_word_without_quotes().as_str() { + "on" if !self.navigator.next_matches(&["table"]) => self.table_with_schema(), + + // This is the only case where there is no "GRANT" before the option: + // REVOKE [ { ADMIN | INHERIT | SET } OPTION FOR ] role_name + "option" if !self.navigator.prev_matches(&["grant"]) => { + self.is_revoking_role = true; + } + + "table" => self.table_with_schema(), + + "from" => { + self.in_roles_list = true; + } + + t => { + if self.in_roles_list && !t.ends_with(',') { + self.in_roles_list = false; + } + } + } + } + + fn table_with_schema(&mut self) { + if let Some(token) = self.navigator.advance() { + if token.is_under_cursor(self.cursor_position) { + self.handle_token_under_cursor(token); + } else if token.get_word_without_quotes().contains('.') { + let (schema, maybe_table) = 
schema_and_table_name(&token); + self.context.schema_name = Some(schema); + self.context.table_name = maybe_table; + } else { + self.context.table_name = Some(token.get_word()); + } + }; + } +} + +#[cfg(test)] +mod tests { + use pgt_text_size::{TextRange, TextSize}; + + use crate::{ + context::base_parser::CompletionStatementParser, + context::revoke_parser::{RevokeContext, RevokeParser}, + }; + + use pgt_test_utils::QueryWithCursorPosition; + + fn with_pos(query: String) -> (usize, String) { + let mut pos: Option = None; + + for (p, c) in query.char_indices() { + if c == QueryWithCursorPosition::cursor_marker() { + pos = Some(p); + break; + } + } + + ( + pos.expect("Please add cursor position!"), + query + .replace(QueryWithCursorPosition::cursor_marker(), "REPLACED_TOKEN") + .to_string(), + ) + } + + #[test] + fn infers_revoke_keyword() { + let (pos, query) = with_pos(format!( + r#" + revoke {} + "#, + QueryWithCursorPosition::cursor_marker() + )); + + let context = RevokeParser::get_context(query.as_str(), pos); + + assert_eq!( + context, + RevokeContext { + table_name: None, + schema_name: None, + node_text: "REPLACED_TOKEN".into(), + node_range: TextRange::new(TextSize::new(20), TextSize::new(34)), + node_kind: "revoke_role".into(), + } + ); + } + + #[test] + fn infers_table_name() { + let (pos, query) = with_pos(format!( + r#" + revoke select on {} + "#, + QueryWithCursorPosition::cursor_marker() + )); + + let context = RevokeParser::get_context(query.as_str(), pos); + + assert_eq!( + context, + RevokeContext { + table_name: None, + schema_name: None, + node_text: "REPLACED_TOKEN".into(), + node_range: TextRange::new(TextSize::new(30), TextSize::new(44)), + node_kind: "revoke_table".into(), + } + ); + } + + #[test] + fn infers_schema_and_table_name() { + let (pos, query) = with_pos(format!( + r#" + revoke select on public.{} + "#, + QueryWithCursorPosition::cursor_marker() + )); + + let context = RevokeParser::get_context(query.as_str(), pos); + + assert_eq!( 
+ context, + RevokeContext { + table_name: None, + schema_name: Some("public".into()), + node_text: "REPLACED_TOKEN".into(), + node_range: TextRange::new(TextSize::new(37), TextSize::new(51)), + node_kind: "revoke_table".into(), + } + ); + } + + #[test] + fn infers_role_name() { + let (pos, query) = with_pos(format!( + r#" + revoke select on public.users from {} + "#, + QueryWithCursorPosition::cursor_marker() + )); + + let context = RevokeParser::get_context(query.as_str(), pos); + + assert_eq!( + context, + RevokeContext { + table_name: Some("users".into()), + schema_name: Some("public".into()), + node_text: "REPLACED_TOKEN".into(), + node_range: TextRange::new(TextSize::new(48), TextSize::new(62)), + node_kind: "revoke_role".into(), + } + ); + } + + #[test] + fn infers_multiple_roles() { + let (pos, query) = with_pos(format!( + r#" + revoke select on public.users from alice, {} + "#, + QueryWithCursorPosition::cursor_marker() + )); + + let context = RevokeParser::get_context(query.as_str(), pos); + + assert_eq!( + context, + RevokeContext { + table_name: Some("users".into()), + schema_name: Some("public".into()), + node_text: "REPLACED_TOKEN".into(), + node_range: TextRange::new(TextSize::new(55), TextSize::new(69)), + node_kind: "revoke_role".into(), + } + ); + } + + #[test] + fn infers_quoted_schema_and_table() { + let (pos, query) = with_pos(format!( + r#" + revoke select on "MySchema"."MyTable" from {} + "#, + QueryWithCursorPosition::cursor_marker() + )); + + let context = RevokeParser::get_context(query.as_str(), pos); + + assert_eq!( + context, + RevokeContext { + table_name: Some("MyTable".into()), + schema_name: Some("MySchema".into()), + node_text: "REPLACED_TOKEN".into(), + node_range: TextRange::new(TextSize::new(56), TextSize::new(70)), + node_kind: "revoke_role".into(), + } + ); + } +} diff --git a/crates/pgt_treesitter/src/lib.rs b/crates/pgt_treesitter/src/lib.rs new file mode 100644 index 000000000..6b19db53a --- /dev/null +++ 
b/crates/pgt_treesitter/src/lib.rs @@ -0,0 +1,5 @@ +pub mod context; +pub mod queries; + +pub use context::*; +pub use queries::*; diff --git a/crates/pgt_treesitter_queries/src/queries/insert_columns.rs b/crates/pgt_treesitter/src/queries/insert_columns.rs similarity index 97% rename from crates/pgt_treesitter_queries/src/queries/insert_columns.rs rename to crates/pgt_treesitter/src/queries/insert_columns.rs index 3e88d998f..94d67b690 100644 --- a/crates/pgt_treesitter_queries/src/queries/insert_columns.rs +++ b/crates/pgt_treesitter/src/queries/insert_columns.rs @@ -1,6 +1,6 @@ use std::sync::LazyLock; -use crate::{Query, QueryResult}; +use crate::queries::{Query, QueryResult}; use super::QueryTryFrom; @@ -51,7 +51,7 @@ impl<'a> QueryTryFrom<'a> for InsertColumnMatch<'a> { } impl<'a> Query<'a> for InsertColumnMatch<'a> { - fn execute(root_node: tree_sitter::Node<'a>, stmt: &'a str) -> Vec> { + fn execute(root_node: tree_sitter::Node<'a>, stmt: &'a str) -> Vec> { let mut cursor = tree_sitter::QueryCursor::new(); let matches = cursor.matches(&TS_QUERY, root_node, stmt.as_bytes()); @@ -73,7 +73,7 @@ impl<'a> Query<'a> for InsertColumnMatch<'a> { #[cfg(test)] mod tests { use super::InsertColumnMatch; - use crate::TreeSitterQueriesExecutor; + use crate::queries::TreeSitterQueriesExecutor; #[test] fn finds_all_insert_columns() { diff --git a/crates/pgt_treesitter_queries/src/lib.rs b/crates/pgt_treesitter/src/queries/mod.rs similarity index 72% rename from crates/pgt_treesitter_queries/src/lib.rs rename to crates/pgt_treesitter/src/queries/mod.rs index 4bf71e744..1d24f07a4 100644 --- a/crates/pgt_treesitter_queries/src/lib.rs +++ b/crates/pgt_treesitter/src/queries/mod.rs @@ -1,8 +1,91 @@ -pub mod queries; +mod insert_columns; +mod parameters; +mod relations; +mod select_columns; +mod table_aliases; +mod where_columns; use std::slice::Iter; -use queries::{Query, QueryResult}; +pub use insert_columns::*; +pub use parameters::*; +pub use relations::*; +pub use 
select_columns::*; +pub use table_aliases::*; +pub use where_columns::*; + +#[derive(Debug)] +pub enum QueryResult<'a> { + Relation(RelationMatch<'a>), + Parameter(ParameterMatch<'a>), + TableAliases(TableAliasMatch<'a>), + SelectClauseColumns(SelectColumnMatch<'a>), + InsertClauseColumns(InsertColumnMatch<'a>), + WhereClauseColumns(WhereColumnMatch<'a>), +} + +impl QueryResult<'_> { + pub fn within_range(&self, range: &tree_sitter::Range) -> bool { + match self { + QueryResult::Relation(rm) => { + let start = match rm.schema { + Some(s) => s.start_position(), + None => rm.table.start_position(), + }; + + let end = rm.table.end_position(); + + start >= range.start_point && end <= range.end_point + } + Self::Parameter(pm) => { + let node_range = pm.node.range(); + + node_range.start_point >= range.start_point + && node_range.end_point <= range.end_point + } + QueryResult::TableAliases(m) => { + let start = m.table.start_position(); + let end = m.alias.end_position(); + start >= range.start_point && end <= range.end_point + } + Self::SelectClauseColumns(cm) => { + let start = match cm.alias { + Some(n) => n.start_position(), + None => cm.column.start_position(), + }; + + let end = cm.column.end_position(); + + start >= range.start_point && end <= range.end_point + } + Self::WhereClauseColumns(cm) => { + let start = match cm.alias { + Some(n) => n.start_position(), + None => cm.column.start_position(), + }; + + let end = cm.column.end_position(); + + start >= range.start_point && end <= range.end_point + } + Self::InsertClauseColumns(cm) => { + let start = cm.column.start_position(); + let end = cm.column.end_position(); + start >= range.start_point && end <= range.end_point + } + } + } +} + +// This trait enforces that for any `Self` that implements `Query`, +// its &Self must implement TryFrom<&QueryResult> +pub(crate) trait QueryTryFrom<'a>: Sized { + type Ref: for<'any> TryFrom<&'a QueryResult<'a>, Error = String>; +} + +pub(crate) trait Query<'a>: 
QueryTryFrom<'a> { + fn execute(root_node: tree_sitter::Node<'a>, stmt: &'a str) -> Vec>; +} pub struct TreeSitterQueriesExecutor<'a> { root_node: tree_sitter::Node<'a>, @@ -68,9 +151,8 @@ impl<'a> Iterator for QueryResultIter<'a> { #[cfg(test)] mod tests { - use crate::{ - TreeSitterQueriesExecutor, - queries::{ParameterMatch, RelationMatch, TableAliasMatch}, + use crate::queries::{ + ParameterMatch, RelationMatch, TableAliasMatch, TreeSitterQueriesExecutor, }; #[test] diff --git a/crates/pgt_treesitter_queries/src/queries/parameters.rs b/crates/pgt_treesitter/src/queries/parameters.rs similarity index 96% rename from crates/pgt_treesitter_queries/src/queries/parameters.rs rename to crates/pgt_treesitter/src/queries/parameters.rs index 85ea9ad25..0b7f2e3df 100644 --- a/crates/pgt_treesitter_queries/src/queries/parameters.rs +++ b/crates/pgt_treesitter/src/queries/parameters.rs @@ -1,6 +1,6 @@ use std::sync::LazyLock; -use crate::{Query, QueryResult}; +use crate::queries::{Query, QueryResult}; use super::QueryTryFrom; @@ -59,7 +59,7 @@ impl<'a> QueryTryFrom<'a> for ParameterMatch<'a> { } impl<'a> Query<'a> for ParameterMatch<'a> { - fn execute(root_node: tree_sitter::Node<'a>, stmt: &'a str) -> Vec> { + fn execute(root_node: tree_sitter::Node<'a>, stmt: &'a str) -> Vec> { let mut cursor = tree_sitter::QueryCursor::new(); let matches = cursor.matches(&TS_QUERY, root_node, stmt.as_bytes()); diff --git a/crates/pgt_treesitter_queries/src/queries/relations.rs b/crates/pgt_treesitter/src/queries/relations.rs similarity index 75% rename from crates/pgt_treesitter_queries/src/queries/relations.rs rename to crates/pgt_treesitter/src/queries/relations.rs index 38fd0513b..cb6a6bea9 100644 --- a/crates/pgt_treesitter_queries/src/queries/relations.rs +++ b/crates/pgt_treesitter/src/queries/relations.rs @@ -1,6 +1,6 @@ use std::sync::LazyLock; -use crate::{Query, QueryResult}; +use crate::queries::{Query, QueryResult}; use super::QueryTryFrom; @@ -22,6 +22,16 @@ static 
TS_QUERY: LazyLock = LazyLock::new(|| { (identifier)? @table )+ ) + (alter_table + (keyword_alter) + (keyword_table) + (object_reference + . + (identifier) @schema_or_table + "."? + (identifier)? @table + )+ + ) "#; tree_sitter::Query::new(tree_sitter_sql::language(), QUERY_STR).expect("Invalid TS Query") }); @@ -69,7 +79,7 @@ impl<'a> QueryTryFrom<'a> for RelationMatch<'a> { } impl<'a> Query<'a> for RelationMatch<'a> { - fn execute(root_node: tree_sitter::Node<'a>, stmt: &'a str) -> Vec> { + fn execute(root_node: tree_sitter::Node<'a>, stmt: &'a str) -> Vec> { let mut cursor = tree_sitter::QueryCursor::new(); let matches = cursor.matches(&TS_QUERY, root_node, stmt.as_bytes()); @@ -102,8 +112,9 @@ impl<'a> Query<'a> for RelationMatch<'a> { #[cfg(test)] mod tests { + use crate::queries::TreeSitterQueriesExecutor; + use super::RelationMatch; - use crate::TreeSitterQueriesExecutor; #[test] fn finds_table_without_schema() { @@ -196,4 +207,50 @@ mod tests { assert_eq!(results[0].get_schema(sql), None); assert_eq!(results[0].get_table(sql), "users"); } + + #[test] + fn finds_alter_table_with_schema() { + let sql = r#"alter table public.users alter some_col set default 15;"#; + + let mut parser = tree_sitter::Parser::new(); + parser.set_language(tree_sitter_sql::language()).unwrap(); + + let tree = parser.parse(sql, None).unwrap(); + + let mut executor = TreeSitterQueriesExecutor::new(tree.root_node(), sql); + + executor.add_query_results::(); + + let results: Vec<&RelationMatch> = executor + .get_iter(None) + .filter_map(|q| q.try_into().ok()) + .collect(); + + assert_eq!(results.len(), 1); + assert_eq!(results[0].get_schema(sql), Some("public".into())); + assert_eq!(results[0].get_table(sql), "users"); + } + + #[test] + fn finds_alter_table_without_schema() { + let sql = r#"alter table users alter some_col set default 15;"#; + + let mut parser = tree_sitter::Parser::new(); + parser.set_language(tree_sitter_sql::language()).unwrap(); + + let tree = parser.parse(sql, 
None).unwrap(); + + let mut executor = TreeSitterQueriesExecutor::new(tree.root_node(), sql); + + executor.add_query_results::(); + + let results: Vec<&RelationMatch> = executor + .get_iter(None) + .filter_map(|q| q.try_into().ok()) + .collect(); + + assert_eq!(results.len(), 1); + assert_eq!(results[0].get_schema(sql), None); + assert_eq!(results[0].get_table(sql), "users"); + } } diff --git a/crates/pgt_treesitter_queries/src/queries/select_columns.rs b/crates/pgt_treesitter/src/queries/select_columns.rs similarity index 97% rename from crates/pgt_treesitter_queries/src/queries/select_columns.rs rename to crates/pgt_treesitter/src/queries/select_columns.rs index 00b6977d0..f232abc38 100644 --- a/crates/pgt_treesitter_queries/src/queries/select_columns.rs +++ b/crates/pgt_treesitter/src/queries/select_columns.rs @@ -1,6 +1,6 @@ use std::sync::LazyLock; -use crate::{Query, QueryResult}; +use crate::queries::{Query, QueryResult}; use super::QueryTryFrom; @@ -63,7 +63,7 @@ impl<'a> QueryTryFrom<'a> for SelectColumnMatch<'a> { } impl<'a> Query<'a> for SelectColumnMatch<'a> { - fn execute(root_node: tree_sitter::Node<'a>, stmt: &'a str) -> Vec> { + fn execute(root_node: tree_sitter::Node<'a>, stmt: &'a str) -> Vec> { let mut cursor = tree_sitter::QueryCursor::new(); let matches = cursor.matches(&TS_QUERY, root_node, stmt.as_bytes()); @@ -96,7 +96,7 @@ impl<'a> Query<'a> for SelectColumnMatch<'a> { #[cfg(test)] mod tests { - use crate::TreeSitterQueriesExecutor; + use crate::queries::TreeSitterQueriesExecutor; use super::SelectColumnMatch; diff --git a/crates/pgt_treesitter_queries/src/queries/table_aliases.rs b/crates/pgt_treesitter/src/queries/table_aliases.rs similarity index 97% rename from crates/pgt_treesitter_queries/src/queries/table_aliases.rs rename to crates/pgt_treesitter/src/queries/table_aliases.rs index 4297a2186..70d4d52ef 100644 --- a/crates/pgt_treesitter_queries/src/queries/table_aliases.rs +++ b/crates/pgt_treesitter/src/queries/table_aliases.rs @@ 
-1,6 +1,6 @@ use std::sync::LazyLock; -use crate::{Query, QueryResult}; +use crate::queries::{Query, QueryResult}; use super::QueryTryFrom; @@ -69,7 +69,7 @@ impl<'a> QueryTryFrom<'a> for TableAliasMatch<'a> { } impl<'a> Query<'a> for TableAliasMatch<'a> { - fn execute(root_node: tree_sitter::Node<'a>, stmt: &'a str) -> Vec> { + fn execute(root_node: tree_sitter::Node<'a>, stmt: &'a str) -> Vec> { let mut cursor = tree_sitter::QueryCursor::new(); let matches = cursor.matches(&TS_QUERY, root_node, stmt.as_bytes()); diff --git a/crates/pgt_treesitter_queries/src/queries/where_columns.rs b/crates/pgt_treesitter/src/queries/where_columns.rs similarity index 97% rename from crates/pgt_treesitter_queries/src/queries/where_columns.rs rename to crates/pgt_treesitter/src/queries/where_columns.rs index 8e19590de..b683300b6 100644 --- a/crates/pgt_treesitter_queries/src/queries/where_columns.rs +++ b/crates/pgt_treesitter/src/queries/where_columns.rs @@ -1,6 +1,6 @@ use std::sync::LazyLock; -use crate::{Query, QueryResult}; +use crate::queries::{Query, QueryResult}; use super::QueryTryFrom; @@ -64,7 +64,7 @@ impl<'a> QueryTryFrom<'a> for WhereColumnMatch<'a> { } impl<'a> Query<'a> for WhereColumnMatch<'a> { - fn execute(root_node: tree_sitter::Node<'a>, stmt: &'a str) -> Vec> { + fn execute(root_node: tree_sitter::Node<'a>, stmt: &'a str) -> Vec> { let mut cursor = tree_sitter::QueryCursor::new(); let matches = cursor.matches(&TS_QUERY, root_node, stmt.as_bytes()); diff --git a/crates/pgt_treesitter_queries/src/queries/mod.rs b/crates/pgt_treesitter_queries/src/queries/mod.rs deleted file mode 100644 index b9f39aed8..000000000 --- a/crates/pgt_treesitter_queries/src/queries/mod.rs +++ /dev/null @@ -1,86 +0,0 @@ -mod insert_columns; -mod parameters; -mod relations; -mod select_columns; -mod table_aliases; -mod where_columns; - -pub use insert_columns::*; -pub use parameters::*; -pub use relations::*; -pub use select_columns::*; -pub use table_aliases::*; -pub use 
where_columns::*; - -#[derive(Debug)] -pub enum QueryResult<'a> { - Relation(RelationMatch<'a>), - Parameter(ParameterMatch<'a>), - TableAliases(TableAliasMatch<'a>), - SelectClauseColumns(SelectColumnMatch<'a>), - InsertClauseColumns(InsertColumnMatch<'a>), - WhereClauseColumns(WhereColumnMatch<'a>), -} - -impl QueryResult<'_> { - pub fn within_range(&self, range: &tree_sitter::Range) -> bool { - match self { - QueryResult::Relation(rm) => { - let start = match rm.schema { - Some(s) => s.start_position(), - None => rm.table.start_position(), - }; - - let end = rm.table.end_position(); - - start >= range.start_point && end <= range.end_point - } - Self::Parameter(pm) => { - let node_range = pm.node.range(); - - node_range.start_point >= range.start_point - && node_range.end_point <= range.end_point - } - QueryResult::TableAliases(m) => { - let start = m.table.start_position(); - let end = m.alias.end_position(); - start >= range.start_point && end <= range.end_point - } - Self::SelectClauseColumns(cm) => { - let start = match cm.alias { - Some(n) => n.start_position(), - None => cm.column.start_position(), - }; - - let end = cm.column.end_position(); - - start >= range.start_point && end <= range.end_point - } - Self::WhereClauseColumns(cm) => { - let start = match cm.alias { - Some(n) => n.start_position(), - None => cm.column.start_position(), - }; - - let end = cm.column.end_position(); - - start >= range.start_point && end <= range.end_point - } - Self::InsertClauseColumns(cm) => { - let start = cm.column.start_position(); - let end = cm.column.end_position(); - start >= range.start_point && end <= range.end_point - } - } - } -} - -// This trait enforces that for any `Self` that implements `Query`, -// its &Self must implement TryFrom<&QueryResult> -pub(crate) trait QueryTryFrom<'a>: Sized { - type Ref: for<'any> TryFrom<&'a QueryResult<'a>, Error = String>; -} - -pub(crate) trait Query<'a>: QueryTryFrom<'a> { - fn execute(root_node: tree_sitter::Node<'a>, 
stmt: &'a str) -> Vec>; -} diff --git a/crates/pgt_type_resolver/Cargo.toml b/crates/pgt_type_resolver/Cargo.toml index 5d2a8eb15..9a190fdf4 100644 --- a/crates/pgt_type_resolver/Cargo.toml +++ b/crates/pgt_type_resolver/Cargo.toml @@ -12,7 +12,7 @@ version = "0.0.0" [dependencies] -pgt_query_ext.workspace = true +pgt_query.workspace = true pgt_schema_cache.workspace = true [dev-dependencies] diff --git a/crates/pgt_type_resolver/src/functions.rs b/crates/pgt_type_resolver/src/functions.rs index 1b0036b56..208af30dd 100644 --- a/crates/pgt_type_resolver/src/functions.rs +++ b/crates/pgt_type_resolver/src/functions.rs @@ -6,7 +6,7 @@ use crate::{ }; pub fn resolve_func_call<'b>( - node: &pgt_query_ext::protobuf::FuncCall, + node: &pgt_query::protobuf::FuncCall, schema_cache: &'b SchemaCache, ) -> Option<&'b Function> { let (schema, name) = resolve_func_identifier(node); @@ -30,7 +30,7 @@ pub fn resolve_func_call<'b>( if fns.len() == 1 { Some(fns[0]) } else { None } } -fn resolve_func_identifier(node: &pgt_query_ext::protobuf::FuncCall) -> (Option, String) { +fn resolve_func_identifier(node: &pgt_query::protobuf::FuncCall) -> (Option, String) { match node.funcname.as_slice() { [name] => (None, get_string_from_node(name)), [schema, name] => ( diff --git a/crates/pgt_type_resolver/src/types.rs b/crates/pgt_type_resolver/src/types.rs index b5560114b..85e1d8d2d 100644 --- a/crates/pgt_type_resolver/src/types.rs +++ b/crates/pgt_type_resolver/src/types.rs @@ -5,9 +5,9 @@ pub(crate) enum PossibleType { AnyOf(Vec), } -pub fn resolve_type(node: &pgt_query_ext::NodeEnum, schema_cache: &SchemaCache) -> PossibleType { +pub fn resolve_type(node: &pgt_query::NodeEnum, schema_cache: &SchemaCache) -> PossibleType { match node { - pgt_query_ext::NodeEnum::AConst(n) => { + pgt_query::NodeEnum::AConst(n) => { if n.isnull { PossibleType::Null } else { @@ -16,7 +16,7 @@ pub fn resolve_type(node: &pgt_query_ext::NodeEnum, schema_cache: &SchemaCache) .as_ref() .expect("expected 
non-nullable AConst to have a value") { - pgt_query_ext::protobuf::a_const::Val::Ival(_) => { + pgt_query::protobuf::a_const::Val::Ival(_) => { let types: Vec = ["int2", "int4", "int8"] .iter() .map(|s| s.to_string()) @@ -33,7 +33,7 @@ pub fn resolve_type(node: &pgt_query_ext::NodeEnum, schema_cache: &SchemaCache) .collect(), ) } - pgt_query_ext::protobuf::a_const::Val::Fval(_) => { + pgt_query::protobuf::a_const::Val::Fval(_) => { let types: Vec = ["float4", "float8"].iter().map(|s| s.to_string()).collect(); @@ -46,7 +46,7 @@ pub fn resolve_type(node: &pgt_query_ext::NodeEnum, schema_cache: &SchemaCache) .collect(), ) } - pgt_query_ext::protobuf::a_const::Val::Boolval(_) => PossibleType::AnyOf( + pgt_query::protobuf::a_const::Val::Boolval(_) => PossibleType::AnyOf( schema_cache .types .iter() @@ -54,7 +54,7 @@ pub fn resolve_type(node: &pgt_query_ext::NodeEnum, schema_cache: &SchemaCache) .map(|t| t.id) .collect(), ), - pgt_query_ext::protobuf::a_const::Val::Sval(v) => { + pgt_query::protobuf::a_const::Val::Sval(v) => { let types: Vec = ["text", "varchar"].iter().map(|s| s.to_string()).collect(); @@ -70,7 +70,7 @@ pub fn resolve_type(node: &pgt_query_ext::NodeEnum, schema_cache: &SchemaCache) .collect(), ) } - pgt_query_ext::protobuf::a_const::Val::Bsval(_) => todo!(), + pgt_query::protobuf::a_const::Val::Bsval(_) => todo!(), } } } diff --git a/crates/pgt_type_resolver/src/util.rs b/crates/pgt_type_resolver/src/util.rs index f10cf5bbc..d31d1fa8f 100644 --- a/crates/pgt_type_resolver/src/util.rs +++ b/crates/pgt_type_resolver/src/util.rs @@ -1,6 +1,6 @@ -pub(crate) fn get_string_from_node(node: &pgt_query_ext::protobuf::Node) -> String { +pub(crate) fn get_string_from_node(node: &pgt_query::protobuf::Node) -> String { match &node.node { - Some(pgt_query_ext::NodeEnum::String(s)) => s.sval.to_string(), + Some(pgt_query::NodeEnum::String(s)) => s.sval.to_string(), _ => "".to_string(), } } diff --git a/crates/pgt_typecheck/Cargo.toml b/crates/pgt_typecheck/Cargo.toml 
index caacc6d17..f61f6a37e 100644 --- a/crates/pgt_typecheck/Cargo.toml +++ b/crates/pgt_typecheck/Cargo.toml @@ -12,16 +12,16 @@ version = "0.0.0" [dependencies] -pgt_console.workspace = true -pgt_diagnostics.workspace = true -pgt_query_ext.workspace = true -pgt_schema_cache.workspace = true -pgt_text_size.workspace = true -pgt_treesitter_queries.workspace = true -sqlx.workspace = true -tokio.workspace = true -tree-sitter.workspace = true -tree_sitter_sql.workspace = true +pgt_console.workspace = true +pgt_diagnostics.workspace = true +pgt_query.workspace = true +pgt_schema_cache.workspace = true +pgt_text_size.workspace = true +pgt_treesitter.workspace = true +sqlx.workspace = true +tokio.workspace = true +tree-sitter.workspace = true +tree_sitter_sql.workspace = true [dev-dependencies] insta.workspace = true diff --git a/crates/pgt_typecheck/src/lib.rs b/crates/pgt_typecheck/src/lib.rs index e1dcd259f..ceb36b94e 100644 --- a/crates/pgt_typecheck/src/lib.rs +++ b/crates/pgt_typecheck/src/lib.rs @@ -3,7 +3,6 @@ mod typed_identifier; pub use diagnostics::TypecheckDiagnostic; use diagnostics::create_type_error; -use pgt_text_size::TextRange; use sqlx::postgres::PgDatabaseError; pub use sqlx::postgres::PgSeverity; use sqlx::{Executor, PgPool}; @@ -14,36 +13,23 @@ pub use typed_identifier::{IdentifierType, TypedIdentifier}; pub struct TypecheckParams<'a> { pub conn: &'a PgPool, pub sql: &'a str, - pub ast: &'a pgt_query_ext::NodeEnum, + pub ast: &'a pgt_query::NodeEnum, pub tree: &'a tree_sitter::Tree, pub schema_cache: &'a pgt_schema_cache::SchemaCache, pub identifiers: Vec, } -#[derive(Debug, Clone)] -pub struct TypeError { - pub message: String, - pub code: String, - pub severity: PgSeverity, - pub position: Option, - pub range: Option, - pub table: Option, - pub column: Option, - pub data_type: Option, - pub constraint: Option, -} - pub async fn check_sql( params: TypecheckParams<'_>, ) -> Result, sqlx::Error> { // Check if the AST is not a supported statement 
type if !matches!( params.ast, - pgt_query_ext::NodeEnum::SelectStmt(_) - | pgt_query_ext::NodeEnum::InsertStmt(_) - | pgt_query_ext::NodeEnum::UpdateStmt(_) - | pgt_query_ext::NodeEnum::DeleteStmt(_) - | pgt_query_ext::NodeEnum::CommonTableExpr(_) + pgt_query::NodeEnum::SelectStmt(_) + | pgt_query::NodeEnum::InsertStmt(_) + | pgt_query::NodeEnum::UpdateStmt(_) + | pgt_query::NodeEnum::DeleteStmt(_) + | pgt_query::NodeEnum::CommonTableExpr(_) ) { return Ok(None); } diff --git a/crates/pgt_typecheck/src/typed_identifier.rs b/crates/pgt_typecheck/src/typed_identifier.rs index 5efe04211..1ee4095dc 100644 --- a/crates/pgt_typecheck/src/typed_identifier.rs +++ b/crates/pgt_typecheck/src/typed_identifier.rs @@ -1,5 +1,5 @@ use pgt_schema_cache::PostgresType; -use pgt_treesitter_queries::{TreeSitterQueriesExecutor, queries::ParameterMatch}; +use pgt_treesitter::queries::{ParameterMatch, TreeSitterQueriesExecutor}; /// A typed identifier is a parameter that has a type associated with it. /// It is used to replace parameters within the SQL string. 
@@ -231,11 +231,10 @@ fn resolve_type<'a>( #[cfg(test)] mod tests { - use pgt_test_utils::test_database::get_new_test_db; - use sqlx::Executor; + use sqlx::{Executor, PgPool}; - #[tokio::test] - async fn test_apply_identifiers() { + #[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] + async fn test_apply_identifiers(test_db: PgPool) { let input = "select v_test + fn_name.custom_type.v_test2 + $3 + custom_type.v_test3 + fn_name.v_test2 + enum_type"; let identifiers = vec![ @@ -295,8 +294,6 @@ mod tests { }, ]; - let test_db = get_new_test_db().await; - let setup = r#" CREATE TYPE "public"."custom_type" AS ( v_test2 integer, diff --git a/crates/pgt_typecheck/tests/diagnostics.rs b/crates/pgt_typecheck/tests/diagnostics.rs index 9628962de..f21d9ef9b 100644 --- a/crates/pgt_typecheck/tests/diagnostics.rs +++ b/crates/pgt_typecheck/tests/diagnostics.rs @@ -3,13 +3,10 @@ use pgt_console::{ markup, }; use pgt_diagnostics::PrintDiagnostic; -use pgt_test_utils::test_database::get_new_test_db; use pgt_typecheck::{TypecheckParams, check_sql}; -use sqlx::Executor; - -async fn test(name: &str, query: &str, setup: Option<&str>) { - let test_db = get_new_test_db().await; +use sqlx::{Executor, PgPool}; +async fn test(name: &str, query: &str, setup: Option<&str>, test_db: &PgPool) { if let Some(setup) = setup { test_db .execute(setup) @@ -22,11 +19,14 @@ async fn test(name: &str, query: &str, setup: Option<&str>) { .set_language(tree_sitter_sql::language()) .expect("Error loading sql language"); - let schema_cache = pgt_schema_cache::SchemaCache::load(&test_db) + let schema_cache = pgt_schema_cache::SchemaCache::load(test_db) .await .expect("Failed to load Schema Cache"); - let root = pgt_query_ext::parse(query).unwrap(); + let root = pgt_query::parse(query) + .unwrap() + .into_root() + .expect("Failed to parse query"); let tree = parser.parse(query, None).unwrap(); let conn = &test_db; @@ -58,8 +58,8 @@ async fn test(name: &str, query: &str, setup: Option<&str>) { }); } 
-#[tokio::test] -async fn invalid_column() { +#[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] +async fn invalid_column(pool: PgPool) { test( "invalid_column", "select id, unknown from contacts;", @@ -73,6 +73,7 @@ async fn invalid_column() { ); "#, ), + &pool, ) .await; } diff --git a/crates/pgt_workspace/Cargo.toml b/crates/pgt_workspace/Cargo.toml index 5f598b2d9..860b51331 100644 --- a/crates/pgt_workspace/Cargo.toml +++ b/crates/pgt_workspace/Cargo.toml @@ -13,9 +13,9 @@ version = "0.0.0" [dependencies] biome_deserialize = "0.6.0" -dashmap = "5.5.3" futures = "0.3.31" globset = "0.4.16" +lru = "0.12" ignore = { workspace = true } pgt_analyse = { workspace = true, features = ["serde"] } @@ -26,15 +26,21 @@ pgt_console = { workspace = true } pgt_diagnostics = { workspace = true } pgt_fs = { workspace = true, features = ["serde"] } pgt_lexer = { workspace = true } +pgt_plpgsql_check = { workspace = true } +pgt_query = { workspace = true } pgt_query_ext = { workspace = true } pgt_schema_cache = { workspace = true } pgt_statement_splitter = { workspace = true } +pgt_suppressions = { workspace = true } pgt_text_size.workspace = true +pgt_tokenizer = { workspace = true } pgt_typecheck = { workspace = true } +pgt_workspace_macros = { workspace = true } rustc-hash = { workspace = true } schemars = { workspace = true, optional = true } serde = { workspace = true, features = ["derive"] } serde_json = { workspace = true, features = ["raw_value"] } +slotmap = { workspace = true, features = ["serde"] } sqlx.workspace = true strum = { workspace = true } tokio = { workspace = true, features = ["rt", "rt-multi-thread"] } @@ -60,7 +66,9 @@ schema = [ ] [dev-dependencies] -tempfile = "3.15.0" +pgt_test_utils = { workspace = true } +sqlx = { workspace = true } +tempfile = "3.15.0" [lib] doctest = false diff --git a/crates/pgt_workspace/src/configuration.rs b/crates/pgt_workspace/src/configuration.rs index 88c04eecd..87e77b0ca 100644 --- 
a/crates/pgt_workspace/src/configuration.rs +++ b/crates/pgt_workspace/src/configuration.rs @@ -1,14 +1,17 @@ use std::{ + ffi::OsStr, io::ErrorKind, ops::Deref, path::{Path, PathBuf}, }; +use biome_deserialize::Merge; use pgt_analyse::AnalyserRules; use pgt_configuration::{ ConfigurationDiagnostic, ConfigurationPathHint, ConfigurationPayload, PartialConfiguration, - VERSION, push_to_analyser_rules, + VERSION, diagnostics::CantLoadExtendFile, push_to_analyser_rules, }; +use pgt_console::markup; use pgt_fs::{AutoSearchResult, ConfigName, FileSystem, OpenOptions}; use crate::{DynRef, WorkspaceError, settings::Settings}; @@ -28,34 +31,41 @@ pub struct LoadedConfiguration { } impl LoadedConfiguration { - /// Return the path of the **directory** where the configuration is - pub fn directory_path(&self) -> Option<&Path> { - self.directory_path.as_deref() - } - - /// Return the path of the **file** where the configuration is - pub fn file_path(&self) -> Option<&Path> { - self.file_path.as_deref() - } -} - -impl From> for LoadedConfiguration { - fn from(value: Option) -> Self { + fn try_from_payload( + value: Option, + fs: &DynRef<'_, dyn FileSystem>, + ) -> Result { let Some(value) = value else { - return LoadedConfiguration::default(); + return Ok(LoadedConfiguration::default()); }; let ConfigurationPayload { + external_resolution_base_path, configuration_file_path, - deserialized: partial_configuration, - .. 
+ deserialized: mut partial_configuration, } = value; - LoadedConfiguration { + partial_configuration.apply_extends( + fs, + &configuration_file_path, + &external_resolution_base_path, + )?; + + Ok(Self { configuration: partial_configuration, directory_path: configuration_file_path.parent().map(PathBuf::from), file_path: Some(configuration_file_path), - } + }) + } + + /// Return the path of the **directory** where the configuration is + pub fn directory_path(&self) -> Option<&Path> { + self.directory_path.as_deref() + } + + /// Return the path of the **file** where the configuration is + pub fn file_path(&self) -> Option<&Path> { + self.file_path.as_deref() } } @@ -65,7 +75,7 @@ pub fn load_configuration( config_path: ConfigurationPathHint, ) -> Result { let config = load_config(fs, config_path)?; - Ok(LoadedConfiguration::from(config)) + LoadedConfiguration::try_from_payload(config, fs) } /// - [Result]: if an error occurred while loading the configuration file. @@ -120,7 +130,7 @@ fn load_config( ConfigurationPathHint::None => file_system.working_directory().unwrap_or_default(), }; - // We first search for `postgrestools.jsonc` + // We first search for `postgrestools.jsonc` files if let Some(auto_search_result) = file_system.auto_search( &configuration_directory, ConfigName::file_names().as_slice(), @@ -176,9 +186,9 @@ pub fn create_config( configuration.schema = node_schema_path.to_str().map(String::from); } else if VERSION == "0.0.0" { // VERSION is 0.0.0 if it has not been explicitly set (e.g local dev, as fallback) - configuration.schema = Some("https://pgtools.dev/schemas/latest/schema.json".to_string()); + configuration.schema = Some("https://pgtools.dev/latest/schema.json".to_string()); } else { - configuration.schema = Some(format!("https://pgtools.dev/schemas/{VERSION}/schema.json")); + configuration.schema = Some(format!("https://pgtools.dev/{VERSION}/schema.json")); } let contents = serde_json::to_string_pretty(&configuration) @@ -265,10 +275,276 @@ 
pub fn strip_jsonc_comments(jsonc_input: &str) -> String { json_output } +pub trait PartialConfigurationExt { + fn apply_extends( + &mut self, + fs: &DynRef<'_, dyn FileSystem>, + file_path: &Path, + external_resolution_base_path: &Path, + ) -> Result<(), WorkspaceError>; + + fn deserialize_extends( + &mut self, + fs: &DynRef<'_, dyn FileSystem>, + relative_resolution_base_path: &Path, + external_resolution_base_path: &Path, + ) -> Result, WorkspaceError>; + + fn retrieve_gitignore_matches( + &self, + file_system: &DynRef<'_, dyn FileSystem>, + vcs_base_path: Option<&Path>, + ) -> Result<(Option, Vec), WorkspaceError>; +} + +impl PartialConfigurationExt for PartialConfiguration { + /// Mutates the configuration so that any fields that have not been configured explicitly are + /// filled in with their values from configs listed in the `extends` field. + /// + /// The `extends` configs are applied from left to right. + /// + /// If a configuration can't be resolved from the file system, the operation will fail. + fn apply_extends( + &mut self, + fs: &DynRef<'_, dyn FileSystem>, + file_path: &Path, + external_resolution_base_path: &Path, + ) -> Result<(), WorkspaceError> { + let configurations = self.deserialize_extends( + fs, + file_path.parent().expect("file path should have a parent"), + external_resolution_base_path, + )?; + + let extended_configuration = configurations.into_iter().reduce( + |mut previous_configuration, current_configuration| { + previous_configuration.merge_with(current_configuration); + previous_configuration + }, + ); + if let Some(mut extended_configuration) = extended_configuration { + // We swap them to avoid having to clone `self.configuration` to merge it. 
+ std::mem::swap(self, &mut extended_configuration); + self.merge_with(extended_configuration) + } + + Ok(()) + } + + /// It attempts to deserialize all the configuration files that were specified in the `extends` property + fn deserialize_extends( + &mut self, + fs: &DynRef<'_, dyn FileSystem>, + relative_resolution_base_path: &Path, + external_resolution_base_path: &Path, + ) -> Result, WorkspaceError> { + let Some(extends) = &self.extends else { + return Ok(Vec::new()); + }; + + let mut deserialized_configurations = vec![]; + for extend_entry in extends.iter() { + let extend_entry_as_path = Path::new(extend_entry); + + let extend_configuration_file_path = if extend_entry_as_path.starts_with(".") + || matches!( + extend_entry_as_path + .extension() + .map(OsStr::as_encoded_bytes), + Some(b"jsonc") + ) { + // Normalize the path to handle relative segments like "../" + normalize_path(&relative_resolution_base_path.join(extend_entry)) + } else { + fs.resolve_configuration(extend_entry.as_str(), external_resolution_base_path) + .map_err(|error| { + ConfigurationDiagnostic::cant_resolve( + external_resolution_base_path.display().to_string(), + error, + ) + })? + .into_path_buf() + }; + + let mut file = fs + .open_with_options( + extend_configuration_file_path.as_path(), + OpenOptions::default().read(true), + ) + .map_err(|err| { + CantLoadExtendFile::new( + extend_configuration_file_path.display().to_string(), + err.to_string(), + ) + .with_verbose_advice(markup! { + "Postgres Tools tried to load the configuration file \""{ + extend_configuration_file_path.display().to_string() + }"\" in \"extends\" using \""{ + external_resolution_base_path.display().to_string() + }"\" as the base path." 
+ }) + })?; + + let mut content = String::new(); + file.read_to_string(&mut content).map_err(|err| { + CantLoadExtendFile::new(extend_configuration_file_path.display().to_string(), err.to_string()).with_verbose_advice( + markup!{ + "It's possible that the file was created with a different user/group. Make sure you have the rights to read the file." + } + ) + + })?; + + let deserialized = serde_json::from_str::(&content) + .map_err(ConfigurationDiagnostic::new_deserialization_error)?; + deserialized_configurations.push(deserialized) + } + Ok(deserialized_configurations) + } + + /// This function checks if the VCS integration is enabled, and if so, it will attempts to resolve the + /// VCS root directory and the `.gitignore` file. + /// + /// ## Returns + /// + /// A tuple with VCS root folder and the contents of the `.gitignore` file + fn retrieve_gitignore_matches( + &self, + file_system: &DynRef<'_, dyn FileSystem>, + vcs_base_path: Option<&Path>, + ) -> Result<(Option, Vec), WorkspaceError> { + let Some(vcs) = &self.vcs else { + return Ok((None, vec![])); + }; + if vcs.is_enabled() { + let vcs_base_path = match (vcs_base_path, &vcs.root) { + (Some(vcs_base_path), Some(root)) => vcs_base_path.join(root), + (None, Some(root)) => PathBuf::from(root), + (Some(vcs_base_path), None) => PathBuf::from(vcs_base_path), + (None, None) => return Err(WorkspaceError::vcs_disabled()), + }; + if let Some(client_kind) = &vcs.client_kind { + if !vcs.ignore_file_disabled() { + let result = file_system + .auto_search(&vcs_base_path, &[client_kind.ignore_file()], false) + .map_err(WorkspaceError::from)?; + + if let Some(result) = result { + return Ok(( + result.file_path.parent().map(PathBuf::from), + result + .content + .lines() + .map(String::from) + .collect::>(), + )); + } + } + } + } + Ok((None, vec![])) + } +} + +/// Normalizes a path, resolving '..' and '.' 
segments without requiring the path to exist +fn normalize_path(path: &Path) -> PathBuf { + let mut components = Vec::new(); + let mut prefix_component = None; + let mut is_absolute = false; + + for component in path.components() { + match component { + std::path::Component::Prefix(_prefix) => { + prefix_component = Some(component); + components.clear(); + } + std::path::Component::RootDir => { + is_absolute = true; + components.clear(); + } + std::path::Component::ParentDir => { + if !components.is_empty() { + components.pop(); + } else if !is_absolute && prefix_component.is_none() { + // Only keep parent dir if we're not absolute and have no prefix + components.push(component.as_os_str()); + } + } + std::path::Component::Normal(c) => { + components.push(c); + } + std::path::Component::CurDir => { + // Skip current directory components + } + } + } + + let mut result = PathBuf::new(); + + // Add prefix component (like C: on Windows) + if let Some(prefix) = prefix_component { + result.push(prefix.as_os_str()); + } + + // Add root directory if path is absolute + if is_absolute { + result.push(std::path::Component::RootDir.as_os_str()); + } + + // Add normalized components + for component in components { + result.push(component); + } + + // Handle edge cases + if result.as_os_str().is_empty() { + if prefix_component.is_some() || is_absolute { + // This shouldn't happen with proper input, but fallback to original path's root + return path + .ancestors() + .last() + .unwrap_or(Path::new("")) + .to_path_buf(); + } else { + return PathBuf::from("."); + } + } + + result +} + #[cfg(test)] mod tests { use super::*; + #[test] + fn test_normalize_path_windows_drive() { + if cfg!(windows) { + let path = Path::new(r"z:\workspace\test_one\..\postgrestools.jsonc"); + let normalized = normalize_path(path); + assert_eq!( + normalized, + PathBuf::from(r"z:\workspace\postgrestools.jsonc") + ); + } + } + + #[test] + fn test_normalize_path_relative() { + let path = 
Path::new("workspace/test_one/../postgrestools.jsonc"); + let normalized = normalize_path(path); + assert_eq!(normalized, PathBuf::from("workspace/postgrestools.jsonc")); + } + + #[test] + fn test_normalize_path_multiple_parent_dirs() { + if cfg!(windows) { + let path = Path::new(r"c:\a\b\c\..\..\d"); + let normalized = normalize_path(path); + assert_eq!(normalized, PathBuf::from(r"c:\a\d")); + } + } + #[test] fn test_strip_jsonc_comments_line_comments() { let input = r#"{ diff --git a/crates/pgt_workspace/src/diagnostics.rs b/crates/pgt_workspace/src/diagnostics.rs index 9ba02a1a5..5020cc62e 100644 --- a/crates/pgt_workspace/src/diagnostics.rs +++ b/crates/pgt_workspace/src/diagnostics.rs @@ -1,4 +1,5 @@ use pgt_configuration::ConfigurationDiagnostic; +use pgt_configuration::diagnostics::CantLoadExtendFile; use pgt_console::fmt::Bytes; use pgt_console::markup; use pgt_diagnostics::{ @@ -354,3 +355,9 @@ impl Diagnostic for FileTooLarge { ) } } + +impl From for WorkspaceError { + fn from(value: CantLoadExtendFile) -> Self { + WorkspaceError::Configuration(ConfigurationDiagnostic::CantLoadExtendFile(value)) + } +} diff --git a/crates/pgt_workspace/src/features/code_actions.rs b/crates/pgt_workspace/src/features/code_actions.rs index 22223dd3c..cd1706d30 100644 --- a/crates/pgt_workspace/src/features/code_actions.rs +++ b/crates/pgt_workspace/src/features/code_actions.rs @@ -12,7 +12,7 @@ pub struct CodeActionsParams { pub skip: Vec, } -#[derive(Debug, serde::Serialize, serde::Deserialize)] +#[derive(Debug, serde::Serialize, serde::Deserialize, Default)] #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] pub struct CodeActionsResult { pub actions: Vec, @@ -57,7 +57,7 @@ pub struct ExecuteStatementParams { pub path: PgTPath, } -#[derive(Debug, serde::Serialize, serde::Deserialize)] +#[derive(Debug, serde::Serialize, serde::Deserialize, Default, PartialEq, Eq)] #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] pub struct ExecuteStatementResult { 
pub message: String, diff --git a/crates/pgt_workspace/src/features/completions.rs b/crates/pgt_workspace/src/features/completions.rs index 853421831..a41dd06eb 100644 --- a/crates/pgt_workspace/src/features/completions.rs +++ b/crates/pgt_workspace/src/features/completions.rs @@ -4,7 +4,7 @@ use pgt_completions::CompletionItem; use pgt_fs::PgTPath; use pgt_text_size::{TextRange, TextSize}; -use crate::workspace::{GetCompletionsFilter, GetCompletionsMapper, ParsedDocument, StatementId}; +use crate::workspace::{Document, GetCompletionsFilter, GetCompletionsMapper, StatementId}; #[derive(Debug, serde::Serialize, serde::Deserialize)] #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] @@ -30,7 +30,7 @@ impl IntoIterator for CompletionsResult { } pub(crate) fn get_statement_for_completions( - doc: &ParsedDocument, + doc: &Document, position: TextSize, ) -> Option<(StatementId, TextRange, String, Arc)> { let count = doc.count(); @@ -49,7 +49,7 @@ pub(crate) fn get_statement_for_completions( if count == 1 { eligible_statements.next() } else { - let mut prev_stmt = None; + let mut prev_stmt: Option<(StatementId, TextRange, String, Arc)> = None; for current_stmt in eligible_statements { /* @@ -57,10 +57,16 @@ pub(crate) fn get_statement_for_completions( * with the next one. * * select 1 |select 1; + * + * This is however ok if the current statement is a child of the previous one, + * such as in CREATE FUNCTION bodies. 
*/ - if prev_stmt.is_some_and(|_| current_stmt.1.contains(position)) { + if prev_stmt.is_some_and(|prev| { + current_stmt.1.contains(position) && !current_stmt.0.is_child_of(&prev.0) + }) { return None; } + prev_stmt = Some(current_stmt) } @@ -70,28 +76,23 @@ pub(crate) fn get_statement_for_completions( #[cfg(test)] mod tests { - use pgt_fs::PgTPath; use pgt_text_size::TextSize; - use crate::workspace::ParsedDocument; + use crate::workspace::Document; use super::get_statement_for_completions; - static CURSOR_POSITION: &str = "€"; + use pgt_test_utils::QueryWithCursorPosition; - fn get_doc_and_pos(sql: &str) -> (ParsedDocument, TextSize) { + fn get_doc_and_pos(sql: &str) -> (Document, TextSize) { let pos = sql - .find(CURSOR_POSITION) + .find(QueryWithCursorPosition::cursor_marker()) .expect("Please add cursor position to test sql"); let pos: u32 = pos.try_into().unwrap(); ( - ParsedDocument::new( - PgTPath::new("test.sql"), - sql.replace(CURSOR_POSITION, ""), - 5, - ), + Document::new(sql.replace(QueryWithCursorPosition::cursor_marker(), ""), 5), TextSize::new(pos), ) } @@ -106,7 +107,7 @@ mod tests { select 1; "#, - CURSOR_POSITION + QueryWithCursorPosition::cursor_marker() ); let (doc, position) = get_doc_and_pos(sql.as_str()); @@ -119,7 +120,7 @@ mod tests { #[test] fn does_not_break_when_no_statements_exist() { - let sql = CURSOR_POSITION.to_string(); + let sql = QueryWithCursorPosition::cursor_marker().to_string(); let (doc, position) = get_doc_and_pos(sql.as_str()); @@ -128,7 +129,10 @@ mod tests { #[test] fn does_not_return_overlapping_statements_if_too_close() { - let sql = format!("select * from {}select 1;", CURSOR_POSITION); + let sql = format!( + "select * from {}select 1;", + QueryWithCursorPosition::cursor_marker() + ); let (doc, position) = get_doc_and_pos(sql.as_str()); @@ -140,7 +144,10 @@ mod tests { #[test] fn is_fine_with_spaces() { - let sql = format!("select * from {} ;", CURSOR_POSITION); + let sql = format!( + "select * from {} ;", + 
QueryWithCursorPosition::cursor_marker() + ); let (doc, position) = get_doc_and_pos(sql.as_str()); @@ -152,7 +159,7 @@ mod tests { #[test] fn considers_offset() { - let sql = format!("select * from {}", CURSOR_POSITION); + let sql = format!("select * from {}", QueryWithCursorPosition::cursor_marker()); let (doc, position) = get_doc_and_pos(sql.as_str()); @@ -162,9 +169,36 @@ mod tests { assert_eq!(text, "select * from") } + #[test] + fn identifies_nested_stmts() { + let sql = format!( + r#" + create or replace function one() + returns integer + language sql + as $$ + select {} from cool; + $$; + "#, + QueryWithCursorPosition::cursor_marker() + ); + + let sql = sql.trim(); + + let (doc, position) = get_doc_and_pos(sql); + + let (_, _, text, _) = + get_statement_for_completions(&doc, position).expect("Expected Statement"); + + assert_eq!(text.trim(), "select from cool;") + } + #[test] fn does_not_consider_too_far_offset() { - let sql = format!("select * from {}", CURSOR_POSITION); + let sql = format!( + "select * from {}", + QueryWithCursorPosition::cursor_marker() + ); let (doc, position) = get_doc_and_pos(sql.as_str()); @@ -173,7 +207,10 @@ mod tests { #[test] fn does_not_consider_offset_if_statement_terminated_by_semi() { - let sql = format!("select * from users;{}", CURSOR_POSITION); + let sql = format!( + "select * from users;{}", + QueryWithCursorPosition::cursor_marker() + ); let (doc, position) = get_doc_and_pos(sql.as_str()); diff --git a/crates/pgt_workspace/src/features/diagnostics.rs b/crates/pgt_workspace/src/features/diagnostics.rs index ff60e142c..a697641ed 100644 --- a/crates/pgt_workspace/src/features/diagnostics.rs +++ b/crates/pgt_workspace/src/features/diagnostics.rs @@ -12,7 +12,7 @@ pub struct PullDiagnosticsParams { pub skip: Vec, } -#[derive(Debug, serde::Serialize, serde::Deserialize)] +#[derive(Debug, serde::Serialize, serde::Deserialize, Default)] #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] pub struct PullDiagnosticsResult 
{ pub diagnostics: Vec, diff --git a/crates/pgt_workspace/src/lib.rs b/crates/pgt_workspace/src/lib.rs index 99fe063f7..df8b0ba7b 100644 --- a/crates/pgt_workspace/src/lib.rs +++ b/crates/pgt_workspace/src/lib.rs @@ -14,6 +14,7 @@ pub mod workspace; #[cfg(feature = "schema")] pub mod workspace_types; +pub use crate::configuration::PartialConfigurationExt; pub use crate::diagnostics::{TransportError, WorkspaceError}; pub use crate::workspace::Workspace; diff --git a/crates/pgt_workspace/src/settings.rs b/crates/pgt_workspace/src/settings.rs index f9275aa90..40db2c1e6 100644 --- a/crates/pgt_workspace/src/settings.rs +++ b/crates/pgt_workspace/src/settings.rs @@ -8,6 +8,7 @@ use std::{ sync::{RwLock, RwLockReadGuard, RwLockWriteGuard}, time::Duration, }; +use tracing::trace; use ignore::gitignore::{Gitignore, GitignoreBuilder}; use pgt_configuration::{ @@ -17,65 +18,202 @@ use pgt_configuration::{ files::FilesConfiguration, migrations::{MigrationsConfiguration, PartialMigrationsConfiguration}, }; -use pgt_fs::FileSystem; +use pgt_fs::PgTPath; -use crate::{DynRef, WorkspaceError, matcher::Matcher}; +use crate::{ + WorkspaceError, + matcher::Matcher, + workspace::{ProjectKey, WorkspaceData}, +}; -/// Global settings for the entire workspace #[derive(Debug, Default)] -pub struct Settings { - /// Filesystem settings for the workspace - pub files: FilesSettings, +/// The information tracked for each project +pub struct ProjectData { + /// The root path of the project. This path should be **absolute**. + path: PgTPath, + /// The settings of the project, usually inferred from the configuration file e.g. `biome.json`. + settings: Settings, +} - /// Database settings for the workspace - pub db: DatabaseSettings, +#[derive(Debug, Default)] +/// Type that manages different projects inside the workspace. +pub struct WorkspaceSettings { + /// The data of the projects + data: WorkspaceData, + /// The ID of the current project. 
+ current_project: ProjectKey, +} - /// Linter settings applied to all files in the workspace - pub linter: LinterSettings, +impl WorkspaceSettings { + pub fn get_current_project_key(&self) -> ProjectKey { + self.current_project + } - /// Migrations settings - pub migrations: Option, -} + pub fn get_current_project_path(&self) -> Option<&PgTPath> { + trace!("Current key {:?}", self.current_project); + self.data + .get(self.current_project) + .as_ref() + .map(|d| &d.path) + } -#[derive(Debug)] -pub struct SettingsHandleMut<'a> { - inner: RwLockWriteGuard<'a, Settings>, + pub fn get_current_project_data_mut(&mut self) -> &mut ProjectData { + self.data + .get_mut(self.current_project) + .expect("Current project not configured") + } + + /// Retrieves the settings of the current workspace folder + pub fn get_current_settings(&self) -> Option<&Settings> { + trace!("Current key {:?}", self.current_project); + let data = self.data.get(self.current_project); + if let Some(data) = data { + Some(&data.settings) + } else { + None + } + } + + /// Retrieves a mutable reference of the settings of the current project + pub fn get_current_settings_mut(&mut self) -> &mut Settings { + &mut self + .data + .get_mut(self.current_project) + .expect("You must have at least one workspace.") + .settings + } + + /// Register the current project using its unique key + pub fn register_current_project(&mut self, key: ProjectKey) { + self.current_project = key; + } + + /// Insert a new project using its folder. Use [WorkspaceSettings::get_current_settings_mut] to retrieve + /// a mutable reference to its [Settings] and manipulate them. + pub fn insert_project(&mut self, workspace_path: impl Into) -> ProjectKey { + let path = PgTPath::new(workspace_path.into()); + trace!("Insert workspace folder: {:?}", path); + self.data.insert(ProjectData { + path, + settings: Settings::default(), + }) + } + + /// Remove a project using its folder. 
+ pub fn remove_project(&mut self, workspace_path: &Path) { + let keys_to_remove = { + let mut data = vec![]; + let iter = self.data.iter(); + + for (key, path_to_settings) in iter { + if path_to_settings.path.as_path() == workspace_path { + data.push(key) + } + } + + data + }; + + for key in keys_to_remove { + self.data.remove(key) + } + } + + /// Checks if the current path belongs to a registered project. + /// + /// If there's a match, and the match **isn't** the current project, it returns the new key. + pub fn path_belongs_to_current_workspace(&self, path: &PgTPath) -> Option { + if self.data.is_empty() { + return None; + } + trace!("Current key: {:?}", self.current_project); + let iter = self.data.iter(); + for (key, path_to_settings) in iter { + trace!( + "Workspace path {:?}, file path {:?}", + path_to_settings.path, path + ); + trace!("Iter key: {:?}", key); + if key == self.current_project { + continue; + } + if path.strip_prefix(path_to_settings.path.as_path()).is_ok() { + trace!("Update workspace to {:?}", key); + return Some(key); + } + } + None + } + + /// Checks if the current path belongs to a registered project. + /// + /// If there's a match, and the match **isn't** the current project, the function will mark the match as the current project. 
+ pub fn set_current_project(&mut self, new_key: ProjectKey) { + self.current_project = new_key; + } } -/// Handle object holding a temporary lock on the settings #[derive(Debug)] -pub struct SettingsHandle<'a> { - inner: RwLockReadGuard<'a, Settings>, +pub struct WorkspaceSettingsHandle<'a> { + inner: RwLockReadGuard<'a, WorkspaceSettings>, } -impl<'a> SettingsHandle<'a> { - pub(crate) fn new(settings: &'a RwLock) -> Self { +impl<'a> WorkspaceSettingsHandle<'a> { + pub(crate) fn new(settings: &'a RwLock) -> Self { Self { inner: settings.read().unwrap(), } } + + pub(crate) fn settings(&self) -> Option<&Settings> { + self.inner.get_current_settings() + } + + pub(crate) fn path(&self) -> Option<&PgTPath> { + self.inner.get_current_project_path() + } } -impl AsRef for SettingsHandle<'_> { - fn as_ref(&self) -> &Settings { +impl AsRef for WorkspaceSettingsHandle<'_> { + fn as_ref(&self) -> &WorkspaceSettings { &self.inner } } -impl<'a> SettingsHandleMut<'a> { - pub(crate) fn new(settings: &'a RwLock) -> Self { +pub struct WorkspaceSettingsHandleMut<'a> { + inner: RwLockWriteGuard<'a, WorkspaceSettings>, +} + +impl<'a> WorkspaceSettingsHandleMut<'a> { + pub(crate) fn new(settings: &'a RwLock) -> Self { Self { inner: settings.write().unwrap(), } } } -impl AsMut for SettingsHandleMut<'_> { - fn as_mut(&mut self) -> &mut Settings { +impl AsMut for WorkspaceSettingsHandleMut<'_> { + fn as_mut(&mut self) -> &mut WorkspaceSettings { &mut self.inner } } +/// Global settings for the entire workspace +#[derive(Debug, Default)] +pub struct Settings { + /// Filesystem settings for the workspace + pub files: FilesSettings, + + /// Database settings for the workspace + pub db: DatabaseSettings, + + /// Linter settings applied to all files in the workspace + pub linter: LinterSettings, + + /// Migrations settings + pub migrations: Option, +} + impl Settings { /// The [PartialConfiguration] is merged into the workspace #[tracing::instrument(level = "trace", skip(self), err)] @@ 
-137,12 +275,10 @@ impl Settings { &self, code: &Category, ) -> Option { - let rules = self.linter.rules.as_ref(); - if let Some(rules) = rules { - rules.get_severity_from_code(code) - } else { - None - } + self.linter + .rules + .as_ref() + .and_then(|r| r.get_severity_from_code(code)) } } @@ -397,59 +533,6 @@ impl Default for FilesSettings { } } -pub trait PartialConfigurationExt { - fn retrieve_gitignore_matches( - &self, - file_system: &DynRef<'_, dyn FileSystem>, - vcs_base_path: Option<&Path>, - ) -> Result<(Option, Vec), WorkspaceError>; -} - -impl PartialConfigurationExt for PartialConfiguration { - /// This function checks if the VCS integration is enabled, and if so, it will attempts to resolve the - /// VCS root directory and the `.gitignore` file. - /// - /// ## Returns - /// - /// A tuple with VCS root folder and the contents of the `.gitignore` file - fn retrieve_gitignore_matches( - &self, - file_system: &DynRef<'_, dyn FileSystem>, - vcs_base_path: Option<&Path>, - ) -> Result<(Option, Vec), WorkspaceError> { - let Some(vcs) = &self.vcs else { - return Ok((None, vec![])); - }; - if vcs.is_enabled() { - let vcs_base_path = match (vcs_base_path, &vcs.root) { - (Some(vcs_base_path), Some(root)) => vcs_base_path.join(root), - (None, Some(root)) => PathBuf::from(root), - (Some(vcs_base_path), None) => PathBuf::from(vcs_base_path), - (None, None) => return Err(WorkspaceError::vcs_disabled()), - }; - if let Some(client_kind) = &vcs.client_kind { - if !vcs.ignore_file_disabled() { - let result = file_system - .auto_search(&vcs_base_path, &[client_kind.ignore_file()], false) - .map_err(WorkspaceError::from)?; - - if let Some(result) = result { - return Ok(( - result.file_path.parent().map(PathBuf::from), - result - .content - .lines() - .map(String::from) - .collect::>(), - )); - } - } - } - } - Ok((None, vec![])) - } -} - #[cfg(test)] mod tests { use biome_deserialize::StringSet; diff --git a/crates/pgt_workspace/src/workspace.rs 
b/crates/pgt_workspace/src/workspace.rs index 873dd83e5..9206b39dc 100644 --- a/crates/pgt_workspace/src/workspace.rs +++ b/crates/pgt_workspace/src/workspace.rs @@ -4,8 +4,10 @@ pub use self::client::{TransportRequest, WorkspaceClient, WorkspaceTransport}; use pgt_analyse::RuleCategories; use pgt_configuration::{PartialConfiguration, RuleSelector}; use pgt_fs::PgTPath; -use pgt_text_size::TextRange; +#[cfg(feature = "schema")] +use schemars::{JsonSchema, SchemaGenerator, schema::Schema}; use serde::{Deserialize, Serialize}; +use slotmap::{DenseSlotMap, new_key_type}; use crate::{ WorkspaceError, @@ -22,7 +24,7 @@ mod client; mod server; pub use server::StatementId; -pub(crate) use server::parsed_document::*; +pub(crate) use server::document::*; #[derive(Debug, serde::Serialize, serde::Deserialize)] #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] @@ -43,21 +45,7 @@ pub struct CloseFileParams { pub struct ChangeFileParams { pub path: PgTPath, pub version: i32, - pub changes: Vec, -} - -#[derive(Debug, serde::Serialize, serde::Deserialize)] -#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] -pub struct ChangeParams { - /// The range of the file that changed. If `None`, the whole file changed. 
- pub range: Option, - pub text: String, -} - -impl ChangeParams { - pub fn overwrite(text: String) -> Self { - Self { range: None, text } - } + pub content: String, } #[derive(Debug, serde::Serialize, serde::Deserialize)] @@ -92,6 +80,21 @@ pub struct ServerInfo { pub version: Option, } +#[derive(Debug, serde::Serialize, serde::Deserialize)] +#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] +#[serde(rename_all = "camelCase")] +pub struct RegisterProjectFolderParams { + pub path: Option, + pub set_as_current_workspace: bool, +} + +#[derive(Debug, serde::Serialize, serde::Deserialize)] +#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] +#[serde(rename_all = "camelCase")] +pub struct UnregisterProjectFolderParams { + pub path: PgTPath, +} + pub trait Workspace: Send + Sync + RefUnwindSafe { /// Retrieves the list of diagnostics associated to a file fn pull_diagnostics( @@ -110,6 +113,18 @@ pub trait Workspace: Send + Sync + RefUnwindSafe { params: GetCompletionsParams, ) -> Result; + /// Register a possible workspace project folder. Returns the key of said project. Use this key when you want to switch to different projects. + fn register_project_folder( + &self, + params: RegisterProjectFolderParams, + ) -> Result; + + /// Unregister a workspace project folder. The settings that belong to that project are deleted. 
+ fn unregister_project_folder( + &self, + params: UnregisterProjectFolderParams, + ) -> Result<(), WorkspaceError>; + /// Update the global settings for this workspace fn update_settings(&self, params: UpdateSettingsParams) -> Result<(), WorkspaceError>; @@ -175,15 +190,11 @@ impl<'app, W: Workspace + ?Sized> FileGuard<'app, W> { Ok(Self { workspace, path }) } - pub fn change_file( - &self, - version: i32, - changes: Vec, - ) -> Result<(), WorkspaceError> { + pub fn change_file(&self, version: i32, content: String) -> Result<(), WorkspaceError> { self.workspace.change_file(ChangeFileParams { path: self.path.clone(), version, - changes, + content, }) } @@ -222,3 +233,76 @@ impl Drop for FileGuard<'_, W> { .ok(); } } + +new_key_type! { + pub struct ProjectKey; +} + +#[cfg(feature = "schema")] +impl JsonSchema for ProjectKey { + fn schema_name() -> String { + "ProjectKey".to_string() + } + + fn json_schema(generator: &mut SchemaGenerator) -> Schema { + ::json_schema(generator) + } +} + +#[derive(Debug, Default)] +pub struct WorkspaceData { + /// [DenseSlotMap] is the slowest type in insertion/removal, but the fastest in iteration + /// + /// Users wouldn't change workspace folders very often, + paths: DenseSlotMap, +} + +impl WorkspaceData { + /// Inserts an item + pub fn insert(&mut self, item: V) -> ProjectKey { + self.paths.insert(item) + } + + /// Removes an item + pub fn remove(&mut self, key: ProjectKey) { + self.paths.remove(key); + } + + /// Get a reference of the value + pub fn get(&self, key: ProjectKey) -> Option<&V> { + self.paths.get(key) + } + + /// Get a mutable reference of the value + pub fn get_mut(&mut self, key: ProjectKey) -> Option<&mut V> { + self.paths.get_mut(key) + } + + pub fn is_empty(&self) -> bool { + self.paths.is_empty() + } + + pub fn iter(&self) -> WorkspaceDataIterator<'_, V> { + WorkspaceDataIterator::new(self) + } +} + +pub struct WorkspaceDataIterator<'a, V> { + iterator: slotmap::dense::Iter<'a, ProjectKey, V>, +} + +impl<'a, V> 
WorkspaceDataIterator<'a, V> { + fn new(data: &'a WorkspaceData) -> Self { + Self { + iterator: data.paths.iter(), + } + } +} + +impl<'a, V> Iterator for WorkspaceDataIterator<'a, V> { + type Item = (ProjectKey, &'a V); + + fn next(&mut self) -> Option { + self.iterator.next() + } +} diff --git a/crates/pgt_workspace/src/workspace/client.rs b/crates/pgt_workspace/src/workspace/client.rs index d727fff6c..2bd215133 100644 --- a/crates/pgt_workspace/src/workspace/client.rs +++ b/crates/pgt_workspace/src/workspace/client.rs @@ -7,7 +7,10 @@ use std::{ sync::atomic::{AtomicU64, Ordering}, }; -use super::{CloseFileParams, GetFileContentParams, IsPathIgnoredParams, OpenFileParams}; +use super::{ + CloseFileParams, GetFileContentParams, IsPathIgnoredParams, OpenFileParams, ProjectKey, + RegisterProjectFolderParams, UnregisterProjectFolderParams, +}; pub struct WorkspaceClient { transport: T, @@ -103,6 +106,20 @@ where self.request("pgt/execute_statement", params) } + fn register_project_folder( + &self, + params: RegisterProjectFolderParams, + ) -> Result { + self.request("pgt/register_project_folder", params) + } + + fn unregister_project_folder( + &self, + params: UnregisterProjectFolderParams, + ) -> Result<(), WorkspaceError> { + self.request("pgt/unregister_project_folder", params) + } + fn open_file(&self, params: OpenFileParams) -> Result<(), WorkspaceError> { self.request("pgt/open_file", params) } diff --git a/crates/pgt_workspace/src/workspace/server.rs b/crates/pgt_workspace/src/workspace/server.rs index 82e79e108..49c306f2b 100644 --- a/crates/pgt_workspace/src/workspace/server.rs +++ b/crates/pgt_workspace/src/workspace/server.rs @@ -1,47 +1,50 @@ use std::{ + collections::HashMap, fs, panic::RefUnwindSafe, - path::Path, + path::{Path, PathBuf}, sync::{Arc, RwLock}, }; use analyser::AnalyserVisitorBuilder; use async_helper::run_async; -use dashmap::DashMap; -use db_connection::DbConnection; -use document::Document; -use futures::{StreamExt, stream}; -use 
parsed_document::{ - AsyncDiagnosticsMapper, CursorPositionFilter, DefaultMapper, ExecuteStatementMapper, - ParsedDocument, SyncDiagnosticsMapper, +use connection_manager::ConnectionManager; +use document::{ + CursorPositionFilter, DefaultMapper, Document, ExecuteStatementMapper, + TypecheckDiagnosticsMapper, }; +use futures::{StreamExt, stream}; +use pg_query::convert_to_positional_params; use pgt_analyse::{AnalyserOptions, AnalysisFilter}; -use pgt_analyser::{Analyser, AnalyserConfig, AnalyserContext}; +use pgt_analyser::{Analyser, AnalyserConfig, AnalyserParams}; use pgt_diagnostics::{ Diagnostic, DiagnosticExt, Error, Severity, serde::Diagnostic as SDiagnostic, }; use pgt_fs::{ConfigName, PgTPath}; use pgt_typecheck::{IdentifierType, TypecheckParams, TypedIdentifier}; +use pgt_workspace_macros::ignored_path; use schema_cache_manager::SchemaCacheManager; -use sqlx::Executor; -use tracing::info; +use sqlx::{Executor, PgPool}; +use tracing::{debug, info}; use crate::{ WorkspaceError, configuration::to_analyser_rules, features::{ code_actions::{ - self, CodeAction, CodeActionKind, CodeActionsResult, CommandAction, + CodeAction, CodeActionKind, CodeActionsParams, CodeActionsResult, CommandAction, CommandActionCategory, ExecuteStatementParams, ExecuteStatementResult, }, completions::{CompletionsResult, GetCompletionsParams, get_statement_for_completions}, diagnostics::{PullDiagnosticsParams, PullDiagnosticsResult}, }, - settings::{Settings, SettingsHandle, SettingsHandleMut}, + settings::{WorkspaceSettings, WorkspaceSettingsHandle, WorkspaceSettingsHandleMut}, + workspace::AnalyserDiagnosticsMapper, }; use super::{ - GetFileContentParams, IsPathIgnoredParams, OpenFileParams, ServerInfo, UpdateSettingsParams, + GetFileContentParams, IsPathIgnoredParams, OpenFileParams, ProjectKey, + RegisterProjectFolderParams, ServerInfo, UnregisterProjectFolderParams, UpdateSettingsParams, Workspace, }; @@ -50,11 +53,10 @@ pub use statement_identifier::StatementId; mod analyser; mod 
annotation; mod async_helper; -mod change; -mod db_connection; +mod connection_key; +mod connection_manager; pub(crate) mod document; mod migration; -pub(crate) mod parsed_document; mod pg_query; mod schema_cache_manager; mod sql_function; @@ -63,14 +65,14 @@ mod tree_sitter; pub(super) struct WorkspaceServer { /// global settings object for this workspace - settings: RwLock, + settings: RwLock, /// Stores the schema cache for this workspace schema_cache: SchemaCacheManager, - parsed_documents: DashMap, + documents: RwLock>, - connection: RwLock, + connection: ConnectionManager, } /// The `Workspace` object is long-lived, so we want it to be able to cross @@ -90,24 +92,61 @@ impl WorkspaceServer { pub(crate) fn new() -> Self { Self { settings: RwLock::default(), - parsed_documents: DashMap::default(), - schema_cache: SchemaCacheManager::default(), - connection: RwLock::default(), + documents: RwLock::new(HashMap::new()), + schema_cache: SchemaCacheManager::new(), + connection: ConnectionManager::new(), } } /// Provides a reference to the current settings - fn settings(&self) -> SettingsHandle { - SettingsHandle::new(&self.settings) + fn workspaces(&self) -> WorkspaceSettingsHandle { + WorkspaceSettingsHandle::new(&self.settings) + } + + fn workspaces_mut(&self) -> WorkspaceSettingsHandleMut { + WorkspaceSettingsHandleMut::new(&self.settings) } - fn settings_mut(&self) -> SettingsHandleMut { - SettingsHandleMut::new(&self.settings) + fn get_current_connection(&self) -> Option { + let settings = self.workspaces(); + let settings = settings.settings()?; + self.connection.get_pool(&settings.db) + } + + /// Register a new project in the current workspace + fn register_project(&self, path: PathBuf) -> ProjectKey { + let mut workspace = self.workspaces_mut(); + let workspace_mut = workspace.as_mut(); + workspace_mut.insert_project(path.clone()) + } + + /// Retrieves the current project path + fn get_current_project_path(&self) -> Option { + 
self.workspaces().path().cloned() + } + + /// Sets the current project of the current workspace + fn set_current_project(&self, project_key: ProjectKey) { + let mut workspace = self.workspaces_mut(); + let workspace_mut = workspace.as_mut(); + workspace_mut.set_current_project(project_key); + } + + /// Checks whether the current path belongs to the current project. + /// + /// If there's a match, and the match **isn't** the current project, it returns the new key. + fn path_belongs_to_current_workspace(&self, path: &PgTPath) -> Option { + let workspaces = self.workspaces(); + workspaces.as_ref().path_belongs_to_current_workspace(path) } fn is_ignored_by_migration_config(&self, path: &Path) -> bool { - let set = self.settings(); - set.as_ref() + let settings = self.workspaces(); + let settings = settings.settings(); + let Some(settings) = settings else { + return false; + }; + settings .migrations .as_ref() .and_then(|migration_settings| { @@ -131,8 +170,12 @@ impl WorkspaceServer { /// Check whether a file is ignored in the top-level config `files.ignore`/`files.include` fn is_ignored_by_top_level_config(&self, path: &Path) -> bool { - let set = self.settings(); - let settings = set.as_ref(); + let settings = self.workspaces(); + let settings = settings.settings(); + let Some(settings) = settings else { + return false; + }; + let is_included = settings.files.included_files.is_empty() || is_dir(path) || settings.files.included_files.matches_path(path); @@ -155,6 +198,48 @@ impl WorkspaceServer { } impl Workspace for WorkspaceServer { + fn register_project_folder( + &self, + params: RegisterProjectFolderParams, + ) -> Result { + let current_project_path = self.get_current_project_path(); + debug!( + "Compare the current project with the new one {:?} {:?} {:?}", + current_project_path, + params.path.as_ref(), + current_project_path.as_deref() != params.path.as_ref() + ); + + let is_new_path = match (current_project_path.as_deref(), params.path.as_ref()) { + 
(Some(current_project_path), Some(params_path)) => current_project_path != params_path, + (Some(_), None) => { + // If the current project is set, but no path is provided, we assume it's a new project + true + } + _ => true, + }; + + if is_new_path { + let path = params.path.unwrap_or_default(); + let key = self.register_project(path.clone()); + if params.set_as_current_workspace { + self.set_current_project(key); + } + Ok(key) + } else { + Ok(self.workspaces().as_ref().get_current_project_key()) + } + } + + fn unregister_project_folder( + &self, + params: UnregisterProjectFolderParams, + ) -> Result<(), WorkspaceError> { + let mut workspace = self.workspaces_mut(); + workspace.as_mut().remove_project(params.path.as_path()); + Ok(()) + } + /// Update the global settings for this workspace /// /// ## Panics @@ -162,43 +247,42 @@ impl Workspace for WorkspaceServer { /// by another thread having previously panicked while holding the lock #[tracing::instrument(level = "trace", skip(self), err)] fn update_settings(&self, params: UpdateSettingsParams) -> Result<(), WorkspaceError> { - tracing::info!("Updating settings in workspace"); - - self.settings_mut().as_mut().merge_with_configuration( - params.configuration, - params.workspace_directory, - params.vcs_base_path, - params.gitignore_matches.as_slice(), - )?; - - tracing::info!("Updated settings in workspace"); - tracing::debug!("Updated settings are {:#?}", self.settings()); - - self.connection - .write() - .unwrap() - .set_conn_settings(&self.settings().as_ref().db); - - tracing::info!("Updated Db connection settings"); + let mut workspace = self.workspaces_mut(); + + workspace + .as_mut() + .get_current_settings_mut() + .merge_with_configuration( + params.configuration, + params.workspace_directory, + params.vcs_base_path, + params.gitignore_matches.as_slice(), + )?; Ok(()) } /// Add a new file to the workspace + #[ignored_path(path=¶ms.path)] #[tracing::instrument(level = "info", skip_all, fields(path = 
params.path.as_path().as_os_str().to_str()), err)] fn open_file(&self, params: OpenFileParams) -> Result<(), WorkspaceError> { - self.parsed_documents + let mut documents = self.documents.write().unwrap(); + documents .entry(params.path.clone()) - .or_insert_with(|| { - ParsedDocument::new(params.path.clone(), params.content, params.version) - }); + .or_insert_with(|| Document::new(params.content, params.version)); + + if let Some(project_key) = self.path_belongs_to_current_workspace(¶ms.path) { + self.set_current_project(project_key); + } Ok(()) } /// Remove a file from the workspace + #[ignored_path(path=¶ms.path)] fn close_file(&self, params: super::CloseFileParams) -> Result<(), WorkspaceError> { - self.parsed_documents + let mut documents = self.documents.write().unwrap(); + documents .remove(¶ms.path) .ok_or_else(WorkspaceError::not_found)?; @@ -210,17 +294,20 @@ impl Workspace for WorkspaceServer { path = params.path.as_os_str().to_str(), version = params.version ), err)] + #[ignored_path(path=¶ms.path)] fn change_file(&self, params: super::ChangeFileParams) -> Result<(), WorkspaceError> { - let mut parser = - self.parsed_documents - .entry(params.path.clone()) - .or_insert(ParsedDocument::new( - params.path.clone(), - "".to_string(), - params.version, - )); + let mut documents = self.documents.write().unwrap(); - parser.apply_change(params); + match documents.entry(params.path.clone()) { + std::collections::hash_map::Entry::Occupied(mut entry) => { + entry + .get_mut() + .update_content(params.content, params.version); + } + std::collections::hash_map::Entry::Vacant(entry) => { + entry.insert(Document::new(params.content, params.version)); + } + } Ok(()) } @@ -229,9 +316,10 @@ impl Workspace for WorkspaceServer { None } + #[ignored_path(path=¶ms.path)] fn get_file_content(&self, params: GetFileContentParams) -> Result { - let document = self - .parsed_documents + let documents = self.documents.read().unwrap(); + let document = documents .get(¶ms.path) 
.ok_or(WorkspaceError::not_found())?; Ok(document.get_document_content().to_string()) @@ -241,24 +329,23 @@ impl Workspace for WorkspaceServer { Ok(self.is_ignored(params.pgt_path.as_path())) } + #[ignored_path(path=¶ms.path)] fn pull_code_actions( &self, - params: code_actions::CodeActionsParams, - ) -> Result { - let parser = self - .parsed_documents + params: CodeActionsParams, + ) -> Result { + let documents = self.documents.read().unwrap(); + let parser = documents .get(¶ms.path) .ok_or(WorkspaceError::not_found())?; - let settings = self - .settings - .read() - .expect("Unable to read settings for Code Actions"); + let settings = self.workspaces(); + let settings = settings.settings(); - let disabled_reason: Option = if settings.db.allow_statement_executions { - None - } else { - Some("Statement execution not allowed against database.".into()) + let disabled_reason = match settings { + Some(settings) if settings.db.allow_statement_executions => None, + Some(_) => Some("Statement execution is disabled in the settings.".into()), + None => Some("Statement execution not allowed against database.".into()), }; let actions = parser @@ -285,12 +372,13 @@ impl Workspace for WorkspaceServer { Ok(CodeActionsResult { actions }) } + #[ignored_path(path=¶ms.path)] fn execute_statement( &self, params: ExecuteStatementParams, ) -> Result { - let parser = self - .parsed_documents + let documents = self.documents.read().unwrap(); + let parser = documents .get(¶ms.path) .ok_or(WorkspaceError::not_found())?; @@ -310,15 +398,13 @@ impl Workspace for WorkspaceServer { }); }; - let conn = self.connection.read().unwrap(); - let pool = match conn.get_pool() { - Some(p) => p, - None => { - return Ok(ExecuteStatementResult { - message: "Not connected to database.".into(), - }); - } - }; + let pool = self.get_current_connection(); + if pool.is_none() { + return Ok(ExecuteStatementResult { + message: "No database connection available.".into(), + }); + } + let pool = pool.unwrap(); let 
result = run_async(async move { pool.execute(sqlx::query(&content)).await })??; @@ -330,62 +416,60 @@ impl Workspace for WorkspaceServer { }) } + #[ignored_path(path=¶ms.path)] fn pull_diagnostics( &self, params: PullDiagnosticsParams, ) -> Result { - let settings = self.settings(); + let settings = self.workspaces(); - // create analyser for this run - // first, collect enabled and disabled rules from the workspace settings - let (enabled_rules, disabled_rules) = AnalyserVisitorBuilder::new(settings.as_ref()) - .with_linter_rules(¶ms.only, ¶ms.skip) - .finish(); - // then, build a map that contains all options - let options = AnalyserOptions { - rules: to_analyser_rules(settings.as_ref()), - }; - // next, build the analysis filter which will be used to match rules - let filter = AnalysisFilter { - categories: params.categories, - enabled_rules: Some(enabled_rules.as_slice()), - disabled_rules: &disabled_rules, + let settings = match settings.settings() { + Some(settings) => settings, + None => { + // return an empty result if no settings are available + // we might want to return an error here in the future + return Ok(PullDiagnosticsResult { + diagnostics: Vec::new(), + errors: 0, + skipped_diagnostics: 0, + }); + } }; - // finally, create the analyser that will be used during this run - let analyser = Analyser::new(AnalyserConfig { - options: &options, - filter, - }); - let parser = self - .parsed_documents + let documents = self.documents.read().unwrap(); + let doc = documents .get(¶ms.path) .ok_or(WorkspaceError::not_found())?; - let mut diagnostics: Vec = parser.document_diagnostics().to_vec(); + /* + * The statements in the document might already have associated diagnostics, + * e.g. 
if they contain syntax errors that surfaced while parsing/splitting the statements + */ + let mut diagnostics: Vec = doc.document_diagnostics().to_vec(); - if let Some(pool) = self - .connection - .read() - .expect("DbConnection RwLock panicked") - .get_pool() - { + /* + * Type-checking against database connection + */ + if let Some(pool) = self.get_current_connection() { let path_clone = params.path.clone(); let schema_cache = self.schema_cache.load(pool.clone())?; - let schema_cache_arc = schema_cache.get_arc(); - let input = parser.iter(AsyncDiagnosticsMapper).collect::>(); - // sorry for the ugly code :( + let input = doc.iter(TypecheckDiagnosticsMapper).collect::>(); + + // Combined async context for both typecheck and plpgsql_check let async_results = run_async(async move { stream::iter(input) - .map(|(_id, range, content, ast, cst, sign)| { + .map(|(id, range, ast, cst, sign)| { let pool = pool.clone(); let path = path_clone.clone(); - let schema_cache = Arc::clone(&schema_cache_arc); + let schema_cache = Arc::clone(&schema_cache); async move { + let mut diagnostics = Vec::new(); + if let Some(ast) = ast { - pgt_typecheck::check_sql(TypecheckParams { + // Type checking + let typecheck_result = pgt_typecheck::check_sql(TypecheckParams { conn: &pool, - sql: &content, + sql: convert_to_positional_params(id.content()).as_str(), ast: &ast, tree: &cst, schema_cache: schema_cache.as_ref(), @@ -406,18 +490,38 @@ impl Workspace for WorkspaceServer { }) .unwrap_or_default(), }) + .await; + + if let Ok(Some(diag)) = typecheck_result { + let r = diag.location().span.map(|span| span + range.start()); + diagnostics.push( + diag.with_file_path(path.as_path().display().to_string()) + .with_file_span(r.unwrap_or(range)), + ); + } + + // plpgsql_check + let plpgsql_check_results = pgt_plpgsql_check::check_plpgsql( + pgt_plpgsql_check::PlPgSqlCheckParams { + conn: &pool, + sql: id.content(), + ast: &ast, + schema_cache: schema_cache.as_ref(), + }, + ) .await - .map(|d| { - 
d.map(|d| { - let r = d.location().span.map(|span| span + range.start()); + .unwrap_or_else(|_| vec![]); + for d in plpgsql_check_results { + let r = d.span.map(|span| span + range.start()); + diagnostics.push( d.with_file_path(path.as_path().display().to_string()) - .with_file_span(r.unwrap_or(range)) - }) - }) - } else { - Ok(None) + .with_file_span(r.unwrap_or(range)), + ); + } } + + Ok::, sqlx::Error>(diagnostics) } }) .buffer_unordered(10) @@ -426,56 +530,115 @@ impl Workspace for WorkspaceServer { })?; for result in async_results.into_iter() { - let result = result?; - if let Some(diag) = result { + let diagnostics_batch = result?; + for diag in diagnostics_batch { diagnostics.push(SDiagnostic::new(diag)); } } } - diagnostics.extend(parser.iter(SyncDiagnosticsMapper).flat_map( - |(_id, range, ast, diag)| { - let mut errors: Vec = vec![]; + /* + * Below, we'll apply our static linting rules against the statements, + * considering the user's settings + */ + let (enabled_rules, disabled_rules) = AnalyserVisitorBuilder::new(settings) + .with_linter_rules(¶ms.only, ¶ms.skip) + .finish(); - if let Some(diag) = diag { - errors.push(diag.into()); - } + let options = AnalyserOptions { + rules: to_analyser_rules(settings), + }; - if let Some(ast) = ast { - errors.extend( - analyser - .run(AnalyserContext { root: &ast }) - .into_iter() - .map(Error::from) - .collect::>(), - ); - } + let filter = AnalysisFilter { + categories: params.categories, + enabled_rules: Some(enabled_rules.as_slice()), + disabled_rules: &disabled_rules, + }; + + let analyser = Analyser::new(AnalyserConfig { + options: &options, + filter, + }); + + let path = params.path.as_path().display().to_string(); - errors - .into_iter() - .map(|d| { - let severity = d - .category() - .filter(|category| category.name().starts_with("lint/")) - .map_or_else( - || d.severity(), - |category| { - settings - .as_ref() - .get_severity_from_rule_code(category) - .unwrap_or(Severity::Warning) - }, - ); - - 
SDiagnostic::new( - d.with_file_path(params.path.as_path().display().to_string()) - .with_file_span(range) - .with_severity(severity), - ) + let schema_cache = self + .get_current_connection() + .and_then(|pool| self.schema_cache.load(pool.clone()).ok()); + + let mut analysable_stmts = vec![]; + for (stmt_root, diagnostic) in doc.iter(AnalyserDiagnosticsMapper) { + if let Some(node) = stmt_root { + analysable_stmts.push(node); + } + if let Some(diag) = diagnostic { + // ignore the syntax error if we already have more specialized diagnostics for the + // same statement. + // this is important for create function statements, where we might already have detailed + // diagnostics from plpgsql_check. + if diagnostics.iter().any(|d| { + d.location().span.is_some_and(|async_loc| { + diag.location() + .span + .is_some_and(|syntax_loc| syntax_loc.contains_range(async_loc)) }) - .collect::>() - }, - )); + }) { + continue; + } + + diagnostics.push(SDiagnostic::new( + diag.with_file_path(path.clone()) + .with_severity(Severity::Error), + )); + } + } + + diagnostics.extend( + analyser + .run(AnalyserParams { + stmts: analysable_stmts, + schema_cache: schema_cache.as_deref(), + }) + .into_iter() + .map(Error::from) + .map(|d| { + let severity = d + .category() + .map(|category| { + settings + .get_severity_from_rule_code(category) + .unwrap_or(Severity::Warning) + }) + .unwrap(); + + let span = d.location().span; + SDiagnostic::new( + d.with_file_path(path.clone()) + .with_file_span(span) + .with_severity(severity), + ) + }), + ); + + let suppressions = doc.suppressions(); + + let disabled_suppression_errors = + suppressions.get_disabled_diagnostic_suppressions_as_errors(&disabled_rules); + + let unused_suppression_errors = + suppressions.get_unused_suppressions_as_errors(&diagnostics); + + let suppression_errors: Vec = suppressions + .diagnostics + .iter() + .chain(disabled_suppression_errors.iter()) + .chain(unused_suppression_errors.iter()) + .cloned() + .map(Error::from) + 
.collect::>(); + + diagnostics.retain(|d| !suppressions.is_suppressed(d)); + diagnostics.extend(suppression_errors.into_iter().map(SDiagnostic::new)); let errors = diagnostics .iter() @@ -490,6 +653,7 @@ impl Workspace for WorkspaceServer { }) } + #[ignored_path(path=¶ms.path)] #[tracing::instrument(level = "debug", skip_all, fields( path = params.path.as_os_str().to_str(), position = params.position.to_string() @@ -498,27 +662,26 @@ impl Workspace for WorkspaceServer { &self, params: GetCompletionsParams, ) -> Result { - let parsed_doc = self - .parsed_documents + let documents = self.documents.read().unwrap(); + let parsed_doc = documents .get(¶ms.path) .ok_or(WorkspaceError::not_found())?; - let pool = match self.connection.read().unwrap().get_pool() { - Some(pool) => pool, - None => { - tracing::debug!("No connection to database. Skipping completions."); - return Ok(CompletionsResult::default()); - } - }; + let pool = self.get_current_connection(); + if pool.is_none() { + tracing::debug!("No database connection available. 
Skipping completions."); + return Ok(CompletionsResult::default()); + } + let pool = pool.unwrap(); let schema_cache = self.schema_cache.load(pool)?; - match get_statement_for_completions(&parsed_doc, params.position) { + match get_statement_for_completions(parsed_doc, params.position) { None => { tracing::debug!("No statement found."); Ok(CompletionsResult::default()) } - Some((id, range, content, cst)) => { + Some((_id, range, content, cst)) => { let position = params.position - range.start(); let items = pgt_completions::complete(pgt_completions::CompletionParams { @@ -528,12 +691,6 @@ impl Workspace for WorkspaceServer { text: content, }); - tracing::debug!( - "Found {} completion items for statement with id {}", - items.len(), - id.raw() - ); - Ok(CompletionsResult { items }) } } @@ -545,3 +702,7 @@ impl Workspace for WorkspaceServer { fn is_dir(path: &Path) -> bool { path.is_dir() || (path.is_symlink() && fs::read_link(path).is_ok_and(|path| path.is_dir())) } + +#[cfg(test)] +#[path = "server.tests.rs"] +mod tests; diff --git a/crates/pgt_workspace/src/workspace/server.tests.rs b/crates/pgt_workspace/src/workspace/server.tests.rs new file mode 100644 index 000000000..894d10426 --- /dev/null +++ b/crates/pgt_workspace/src/workspace/server.tests.rs @@ -0,0 +1,333 @@ +use std::sync::Arc; + +use biome_deserialize::{Merge, StringSet}; +use pgt_analyse::RuleCategories; +use pgt_configuration::{ + PartialConfiguration, database::PartialDatabaseConfiguration, files::PartialFilesConfiguration, +}; +use pgt_diagnostics::Diagnostic; +use pgt_fs::PgTPath; +use pgt_text_size::TextRange; +use sqlx::{Executor, PgPool}; + +use crate::{ + Workspace, WorkspaceError, + features::code_actions::ExecuteStatementResult, + workspace::{ + OpenFileParams, RegisterProjectFolderParams, StatementId, UpdateSettingsParams, + server::WorkspaceServer, + }, +}; + +fn get_test_workspace( + partial_config: Option, +) -> Result { + let workspace = WorkspaceServer::new(); + + 
workspace.register_project_folder(RegisterProjectFolderParams { + path: None, + set_as_current_workspace: true, + })?; + + workspace.update_settings(UpdateSettingsParams { + configuration: partial_config.unwrap_or(PartialConfiguration::init()), + gitignore_matches: vec![], + vcs_base_path: None, + workspace_directory: None, + })?; + + Ok(workspace) +} + +#[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] +async fn test_diagnostics(test_db: PgPool) { + let mut conf = PartialConfiguration::init(); + conf.merge_with(PartialConfiguration { + db: Some(PartialDatabaseConfiguration { + database: Some( + test_db + .connect_options() + .get_database() + .unwrap() + .to_string(), + ), + ..Default::default() + }), + ..Default::default() + }); + + let workspace = get_test_workspace(Some(conf)).expect("Unable to create test workspace"); + + let path = PgTPath::new("test.sql"); + let content = r#" + create table users ( + id serial primary key, + name text not null + ); + + drop table non_existing_table; + + select 1; + "#; + + workspace + .open_file(OpenFileParams { + path: path.clone(), + content: content.into(), + version: 1, + }) + .expect("Unable to open test file"); + + let diagnostics = workspace + .pull_diagnostics(crate::workspace::PullDiagnosticsParams { + path: path.clone(), + categories: RuleCategories::all(), + max_diagnostics: 100, + only: vec![], + skip: vec![], + }) + .expect("Unable to pull diagnostics") + .diagnostics; + + assert_eq!(diagnostics.len(), 1, "Expected one diagnostic"); + + let diagnostic = &diagnostics[0]; + + assert_eq!( + diagnostic.category().map(|c| c.name()), + Some("lint/safety/banDropTable") + ); + + assert_eq!( + diagnostic.location().span, + Some(TextRange::new(106.into(), 136.into())) + ); +} + +#[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] +async fn test_syntax_error(test_db: PgPool) { + let mut conf = PartialConfiguration::init(); + conf.merge_with(PartialConfiguration { + db: Some(PartialDatabaseConfiguration { + 
database: Some( + test_db + .connect_options() + .get_database() + .unwrap() + .to_string(), + ), + ..Default::default() + }), + ..Default::default() + }); + + let workspace = get_test_workspace(Some(conf)).expect("Unable to create test workspace"); + + let path = PgTPath::new("test.sql"); + let content = r#" + seect 1; + "#; + + workspace + .open_file(OpenFileParams { + path: path.clone(), + content: content.into(), + version: 1, + }) + .expect("Unable to open test file"); + + let diagnostics = workspace + .pull_diagnostics(crate::workspace::PullDiagnosticsParams { + path: path.clone(), + categories: RuleCategories::all(), + max_diagnostics: 100, + only: vec![], + skip: vec![], + }) + .expect("Unable to pull diagnostics") + .diagnostics; + + assert_eq!(diagnostics.len(), 1, "Expected one diagnostic"); + + let diagnostic = &diagnostics[0]; + + assert_eq!(diagnostic.category().map(|c| c.name()), Some("syntax")); + + assert_eq!( + diagnostic.location().span, + Some(TextRange::new(7.into(), 15.into())) + ); +} + +#[tokio::test] +async fn correctly_ignores_files() { + let mut conf = PartialConfiguration::init(); + conf.merge_with(PartialConfiguration { + files: Some(PartialFilesConfiguration { + ignore: Some(StringSet::from_iter(["test.sql".to_string()])), + ..Default::default() + }), + ..Default::default() + }); + + let workspace = get_test_workspace(Some(conf)).expect("Unable to create test workspace"); + + let path = PgTPath::new("test.sql"); + let content = r#" + seect 1; + "#; + + let diagnostics_result = workspace.pull_diagnostics(crate::workspace::PullDiagnosticsParams { + path: path.clone(), + categories: RuleCategories::all(), + max_diagnostics: 100, + only: vec![], + skip: vec![], + }); + + assert!( + diagnostics_result.is_ok_and(|res| res.diagnostics.is_empty() + && res.errors == 0 + && res.skipped_diagnostics == 0) + ); + + let close_file_result = + workspace.close_file(crate::workspace::CloseFileParams { path: path.clone() }); + + 
assert!(close_file_result.is_ok()); + + let execute_statement_result = + workspace.execute_statement(crate::workspace::ExecuteStatementParams { + path: path.clone(), + statement_id: StatementId::Root { + content: Arc::from(content), + }, + }); + + assert!(execute_statement_result.is_ok_and(|res| res == ExecuteStatementResult::default())); +} + +#[cfg(all(test, not(target_os = "windows")))] +#[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] +async fn test_dedupe_diagnostics(test_db: PgPool) { + let mut conf = PartialConfiguration::init(); + conf.merge_with(PartialConfiguration { + db: Some(PartialDatabaseConfiguration { + database: Some( + test_db + .connect_options() + .get_database() + .unwrap() + .to_string(), + ), + ..Default::default() + }), + ..Default::default() + }); + + let workspace = get_test_workspace(Some(conf)).expect("Unable to create test workspace"); + + let path = PgTPath::new("test.sql"); + + let setup_sql = "CREATE EXTENSION IF NOT EXISTS plpgsql_check;"; + test_db.execute(setup_sql).await.expect("setup sql failed"); + + let content = r#" + CREATE OR REPLACE FUNCTION public.f1() + RETURNS void + LANGUAGE plpgsql + AS $function$ + decare r text; + BEGIN + select '1' into into r; + END; + $function$; + "#; + + workspace + .open_file(OpenFileParams { + path: path.clone(), + content: content.into(), + version: 1, + }) + .expect("Unable to open test file"); + + let diagnostics = workspace + .pull_diagnostics(crate::workspace::PullDiagnosticsParams { + path: path.clone(), + categories: RuleCategories::all(), + max_diagnostics: 100, + only: vec![], + skip: vec![], + }) + .expect("Unable to pull diagnostics") + .diagnostics; + + assert_eq!(diagnostics.len(), 1, "Expected one diagnostic"); + + let diagnostic = &diagnostics[0]; + + assert_eq!( + diagnostic.category().map(|c| c.name()), + Some("plpgsql_check") + ); + + assert_eq!( + diagnostic.location().span, + Some(TextRange::new(115.into(), 210.into())) + ); +} + +#[sqlx::test(migrator = 
"pgt_test_utils::MIGRATIONS")] +async fn test_positional_params(test_db: PgPool) { + let mut conf = PartialConfiguration::init(); + conf.merge_with(PartialConfiguration { + db: Some(PartialDatabaseConfiguration { + database: Some( + test_db + .connect_options() + .get_database() + .unwrap() + .to_string(), + ), + ..Default::default() + }), + ..Default::default() + }); + + let workspace = get_test_workspace(Some(conf)).expect("Unable to create test workspace"); + + let path = PgTPath::new("test.sql"); + + let setup_sql = r" + create table users ( + id serial primary key, + name text not null, + email text not null + ); + "; + test_db.execute(setup_sql).await.expect("setup sql failed"); + + let content = r#"select * from users where id = @one and name = :two and email = :'three';"#; + + workspace + .open_file(OpenFileParams { + path: path.clone(), + content: content.into(), + version: 1, + }) + .expect("Unable to open test file"); + + let diagnostics = workspace + .pull_diagnostics(crate::workspace::PullDiagnosticsParams { + path: path.clone(), + categories: RuleCategories::all(), + max_diagnostics: 100, + only: vec![], + skip: vec![], + }) + .expect("Unable to pull diagnostics") + .diagnostics; + + assert_eq!(diagnostics.len(), 0, "Expected no diagnostic"); +} diff --git a/crates/pgt_workspace/src/workspace/server/analyser.rs b/crates/pgt_workspace/src/workspace/server/analyser.rs index d4b08ba19..86e3d0763 100644 --- a/crates/pgt_workspace/src/workspace/server/analyser.rs +++ b/crates/pgt_workspace/src/workspace/server/analyser.rs @@ -68,14 +68,24 @@ impl<'a, 'b> LintVisitor<'a, 'b> { fn finish(mut self) -> (FxHashSet>, FxHashSet>) { let has_only_filter = !self.only.is_empty(); + if !has_only_filter { let enabled_rules = self .settings .as_linter_rules() .map(|rules| rules.as_enabled_rules()) .unwrap_or_default(); + self.enabled_rules.extend(enabled_rules); + + let disabled_rules = self + .settings + .as_linter_rules() + .map(|rules| rules.as_disabled_rules()) + 
.unwrap_or_default(); + self.disabled_rules.extend(disabled_rules); } + (self.enabled_rules, self.disabled_rules) } @@ -127,3 +137,42 @@ impl RegistryVisitor for LintVisitor<'_, '_> { self.push_rule::() } } + +#[cfg(test)] +mod tests { + use pgt_analyse::RuleFilter; + use pgt_configuration::{RuleConfiguration, Rules, analyser::Safety}; + + use crate::{ + settings::{LinterSettings, Settings}, + workspace::server::analyser::AnalyserVisitorBuilder, + }; + + #[test] + fn recognizes_disabled_rules() { + let settings = Settings { + linter: LinterSettings { + rules: Some(Rules { + safety: Some(Safety { + ban_drop_column: Some(RuleConfiguration::Plain( + pgt_configuration::RulePlainConfiguration::Off, + )), + ..Default::default() + }), + ..Default::default() + }), + ..Default::default() + }, + ..Default::default() + }; + + let (_, disabled_rules) = AnalyserVisitorBuilder::new(&settings) + .with_linter_rules(&[], &[]) + .finish(); + + assert_eq!( + disabled_rules, + vec![RuleFilter::Rule("safety", "banDropColumn")] + ) + } +} diff --git a/crates/pgt_workspace/src/workspace/server/annotation.rs b/crates/pgt_workspace/src/workspace/server/annotation.rs index 321dd3ac0..0ff6cc0a0 100644 --- a/crates/pgt_workspace/src/workspace/server/annotation.rs +++ b/crates/pgt_workspace/src/workspace/server/annotation.rs @@ -1,58 +1,68 @@ -use std::sync::Arc; +use std::num::NonZeroUsize; +use std::sync::{Arc, Mutex}; -use dashmap::DashMap; -use pgt_lexer::{SyntaxKind, WHITESPACE_TOKENS}; +use lru::LruCache; +use pgt_lexer::SyntaxKind; use super::statement_identifier::StatementId; +const DEFAULT_CACHE_SIZE: usize = 1000; + #[derive(Debug, Clone, PartialEq, Eq)] pub struct StatementAnnotations { ends_with_semicolon: bool, } pub struct AnnotationStore { - db: DashMap>>, + db: Mutex>>, } +const WHITESPACE_TOKENS: [SyntaxKind; 6] = [ + SyntaxKind::SPACE, + SyntaxKind::TAB, + SyntaxKind::VERTICAL_TAB, + SyntaxKind::FORM_FEED, + SyntaxKind::LINE_ENDING, + SyntaxKind::EOF, +]; + impl 
AnnotationStore { pub fn new() -> AnnotationStore { - AnnotationStore { db: DashMap::new() } + AnnotationStore { + db: Mutex::new(LruCache::new( + NonZeroUsize::new(DEFAULT_CACHE_SIZE).unwrap(), + )), + } } #[allow(unused)] pub fn get_annotations( &self, - statement: &StatementId, + statement_id: &StatementId, content: &str, - ) -> Option> { - if let Some(existing) = self.db.get(statement).map(|x| x.clone()) { - return existing; + ) -> Arc { + let mut cache = self.db.lock().unwrap(); + + if let Some(existing) = cache.get(statement_id) { + return existing.clone(); } - // we swallow the error here because the lexing within the document would have already - // thrown and we wont even get here if that happened. - let annotations = pgt_lexer::lex(content).ok().map(|tokens| { - let ends_with_semicolon = tokens - .iter() - .rev() - .find(|token| !WHITESPACE_TOKENS.contains(&token.kind)) - .is_some_and(|token| token.kind == SyntaxKind::Ascii59); - - Arc::new(StatementAnnotations { - ends_with_semicolon, - }) - }); + let lexed = pgt_lexer::lex(content); - self.db.insert(statement.clone(), None); - annotations - } + let ends_with_semicolon = (0..lexed.len()) + // Iterate through tokens in reverse to find the last non-whitespace token + .filter(|t| !WHITESPACE_TOKENS.contains(&lexed.kind(*t))) + .next_back() + .map(|t| lexed.kind(t) == SyntaxKind::SEMICOLON) + .unwrap_or(false); - pub fn clear_statement(&self, id: &StatementId) { - self.db.remove(id); + let annotations = Arc::new(StatementAnnotations { + ends_with_semicolon, + }); - if let Some(child_id) = id.get_child_id() { - self.db.remove(&child_id); - } + cache.put(statement_id.clone(), annotations.clone()); + + annotations } } @@ -75,13 +85,12 @@ mod tests { ("SELECT * FROM foo\n", false), ]; - for (idx, (content, expected)) in test_cases.iter().enumerate() { - let statement_id = StatementId::Root(idx.into()); + for (content, expected) in test_cases.iter() { + let statement_id = StatementId::new(content); let 
annotations = store.get_annotations(&statement_id, content); - assert!(annotations.is_some()); - assert_eq!(annotations.unwrap().ends_with_semicolon, *expected); + assert_eq!(annotations.ends_with_semicolon, *expected); } } } diff --git a/crates/pgt_workspace/src/workspace/server/change.rs b/crates/pgt_workspace/src/workspace/server/change.rs deleted file mode 100644 index c8799922d..000000000 --- a/crates/pgt_workspace/src/workspace/server/change.rs +++ /dev/null @@ -1,1763 +0,0 @@ -use pgt_text_size::{TextLen, TextRange, TextSize}; -use std::ops::{Add, Sub}; - -use crate::workspace::{ChangeFileParams, ChangeParams}; - -use super::{Document, document, statement_identifier::StatementId}; - -#[derive(Debug, PartialEq, Eq)] -pub enum StatementChange { - Added(AddedStatement), - Deleted(StatementId), - Modified(ModifiedStatement), -} - -#[derive(Debug, PartialEq, Eq)] -pub struct AddedStatement { - pub stmt: StatementId, - pub text: String, -} - -#[derive(Debug, PartialEq, Eq)] -pub struct ModifiedStatement { - pub old_stmt: StatementId, - pub old_stmt_text: String, - - pub new_stmt: StatementId, - pub new_stmt_text: String, - - pub change_range: TextRange, - pub change_text: String, -} - -impl StatementChange { - #[allow(dead_code)] - pub fn statement(&self) -> &StatementId { - match self { - StatementChange::Added(stmt) => &stmt.stmt, - StatementChange::Deleted(stmt) => stmt, - StatementChange::Modified(changed) => &changed.new_stmt, - } - } -} - -/// Returns all relevant details about the change and its effects on the current state of the document. 
-struct Affected { - /// Full range of the change, including the range of all statements that intersect with the change - affected_range: TextRange, - /// All indices of affected statement positions - affected_indices: Vec, - /// The index of the first statement position before the change, if any - prev_index: Option, - /// The index of the first statement position after the change, if any - next_index: Option, - /// the full affected range includng the prev and next statement - full_affected_range: TextRange, -} - -impl Document { - /// Applies a file change to the document and returns the affected statements - pub fn apply_file_change(&mut self, change: &ChangeFileParams) -> Vec { - // cleanup all diagnostics with every change because we cannot guarantee that they are still valid - // this is because we know their ranges only by finding slices within the content which is - // very much not guaranteed to result in correct ranges - self.diagnostics.clear(); - - // when we recieive more than one change, we need to push back the changes based on the - // total range of the previous ones. This is because the ranges are always related to the original state. - let mut changes = Vec::new(); - - let mut offset: i64 = 0; - - for change in &change.changes { - let adjusted_change = if offset != 0 && change.range.is_some() { - &ChangeParams { - text: change.text.clone(), - range: change.range.map(|range| { - let start = u32::from(range.start()); - let end = u32::from(range.end()); - TextRange::new( - TextSize::from((start as i64 + offset).try_into().unwrap_or(0)), - TextSize::from((end as i64 + offset).try_into().unwrap_or(0)), - ) - }), - } - } else { - change - }; - - changes.extend(self.apply_change(adjusted_change)); - - offset += change.change_size(); - } - - self.version = change.version; - - changes - } - - /// Helper method to drain all positions and return them as deleted statements - fn drain_positions(&mut self) -> Vec { - self.positions - .drain(..) 
- .map(|(id, _)| StatementChange::Deleted(id)) - .collect() - } - - /// Applies a change to the document and returns the affected statements - /// - /// Will always assume its a full change and reparse the whole document - fn apply_full_change(&mut self, change: &ChangeParams) -> Vec { - let mut changes = Vec::new(); - - changes.extend(self.drain_positions()); - - self.content = change.apply_to_text(&self.content); - - let (ranges, diagnostics) = document::split_with_diagnostics(&self.content, None); - - self.diagnostics = diagnostics; - - // Do not add any statements if there is a fatal error - if self.has_fatal_error() { - return changes; - } - - changes.extend(ranges.into_iter().map(|range| { - let id = self.id_generator.next(); - let text = self.content[range].to_string(); - self.positions.push((id.clone(), range)); - - StatementChange::Added(AddedStatement { stmt: id, text }) - })); - - changes - } - - fn insert_statement(&mut self, range: TextRange) -> StatementId { - let pos = self - .positions - .binary_search_by(|(_, r)| r.start().cmp(&range.start())) - .unwrap_err(); - - let new_id = self.id_generator.next(); - self.positions.insert(pos, (new_id.clone(), range)); - - new_id - } - - /// Returns all relevant details about the change and its effects on the current state of the document. 
- /// - The affected range is the full range of the change, including the range of all statements that intersect with the change - /// - All indices of affected statement positions - /// - The index of the first statement position before the change, if any - /// - The index of the first statement position after the change, if any - /// - the full affected range includng the prev and next statement - fn get_affected( - &self, - change_range: TextRange, - content_size: TextSize, - diff_size: TextSize, - is_addition: bool, - ) -> Affected { - let mut start = change_range.start(); - let mut end = change_range.end().min(content_size); - - let is_trim = change_range.start() >= content_size; - - let mut affected_indices = Vec::new(); - let mut prev_index = None; - let mut next_index = None; - - for (index, (_, pos_range)) in self.positions.iter().enumerate() { - if pos_range.intersect(change_range).is_some() { - affected_indices.push(index); - start = start.min(pos_range.start()); - end = end.max(pos_range.end()); - } else if pos_range.end() <= change_range.start() { - prev_index = Some(index); - } else if pos_range.start() >= change_range.end() && next_index.is_none() { - next_index = Some(index); - break; - } - } - - if affected_indices.is_empty() && prev_index.is_none() { - // if there is no prev_index and no intersection -> use 0 - start = 0.into(); - } - - if affected_indices.is_empty() && next_index.is_none() { - // if there is no next_index and no intersection -> use content_size - end = content_size; - } - - let first_affected_stmt_start = prev_index - .map(|i| self.positions[i].1.start()) - .unwrap_or(start); - - let mut last_affected_stmt_end = next_index - .map(|i| self.positions[i].1.end()) - .unwrap_or_else(|| end); - - if is_addition { - end = end.add(diff_size); - last_affected_stmt_end = last_affected_stmt_end.add(diff_size); - } else if !is_trim { - end = end.sub(diff_size); - last_affected_stmt_end = last_affected_stmt_end.sub(diff_size) - }; - - 
Affected { - affected_range: { - let end = end.min(content_size); - TextRange::new(start.min(end), end) - }, - affected_indices, - prev_index, - next_index, - full_affected_range: TextRange::new( - first_affected_stmt_start, - last_affected_stmt_end - .min(content_size) - .max(first_affected_stmt_start), - ), - } - } - - fn move_ranges(&mut self, offset: TextSize, diff_size: TextSize, is_addition: bool) { - self.positions - .iter_mut() - .skip_while(|(_, r)| offset > r.start()) - .for_each(|(_, range)| { - let new_range = if is_addition { - range.add(diff_size) - } else { - range.sub(diff_size) - }; - - *range = new_range; - }); - } - - /// Applies a single change to the document and returns the affected statements - fn apply_change(&mut self, change: &ChangeParams) -> Vec { - // if range is none, we have a full change - if change.range.is_none() { - return self.apply_full_change(change); - } - - // i spent a relatively large amount of time thinking about how to handle range changes - // properly. there are quite a few edge cases to consider. I eventually skipped most of - // them, because the complexity is not worth the return for now. we might want to revisit - // this later though. - - let mut changed: Vec = Vec::with_capacity(self.positions.len()); - - let change_range = change.range.unwrap(); - let previous_content = self.content.clone(); - let new_content = change.apply_to_text(&self.content); - - // we first need to determine the affected range and all affected statements, as well as - // the index of the prev and the next statement, if any. 
The full affected range is the - // affected range expanded to the start of the previous statement and the end of the next - let Affected { - affected_range, - affected_indices, - prev_index, - next_index, - full_affected_range, - } = self.get_affected( - change_range, - new_content.text_len(), - change.diff_size(), - change.is_addition(), - ); - - // if within a statement, we can modify it if the change results in also a single statement - if affected_indices.len() == 1 { - let changed_content = get_affected(&new_content, affected_range); - - let (new_ranges, diags) = - document::split_with_diagnostics(changed_content, Some(affected_range.start())); - - self.diagnostics = diags; - - if self.has_fatal_error() { - // cleanup all positions if there is a fatal error - changed.extend(self.drain_positions()); - // still process text change - self.content = new_content; - return changed; - } - - if new_ranges.len() == 1 { - let affected_idx = affected_indices[0]; - let new_range = new_ranges[0].add(affected_range.start()); - let (old_id, old_range) = self.positions[affected_idx].clone(); - - // move all statements after the affected range - self.move_ranges(old_range.end(), change.diff_size(), change.is_addition()); - - let new_id = self.id_generator.next(); - self.positions[affected_idx] = (new_id.clone(), new_range); - - changed.push(StatementChange::Modified(ModifiedStatement { - old_stmt: old_id.clone(), - old_stmt_text: previous_content[old_range].to_string(), - - new_stmt: new_id, - new_stmt_text: changed_content[new_ranges[0]].to_string(), - // change must be relative to the statement - change_text: change.text.clone(), - // make sure we always have a valid range >= 0 - change_range: change_range - .checked_sub(old_range.start()) - .unwrap_or(change_range.sub(change_range.start())), - })); - - self.content = new_content; - - return changed; - } - } - - // in any other case, parse the full affected range - let changed_content = get_affected(&new_content, 
full_affected_range); - - let (new_ranges, diags) = - document::split_with_diagnostics(changed_content, Some(full_affected_range.start())); - - self.diagnostics = diags; - - if self.has_fatal_error() { - // cleanup all positions if there is a fatal error - changed.extend(self.drain_positions()); - // still process text change - self.content = new_content; - return changed; - } - - // delete and add new ones - if let Some(next_index) = next_index { - changed.push(StatementChange::Deleted( - self.positions[next_index].0.clone(), - )); - self.positions.remove(next_index); - } - for idx in affected_indices.iter().rev() { - changed.push(StatementChange::Deleted(self.positions[*idx].0.clone())); - self.positions.remove(*idx); - } - if let Some(prev_index) = prev_index { - changed.push(StatementChange::Deleted( - self.positions[prev_index].0.clone(), - )); - self.positions.remove(prev_index); - } - - new_ranges.iter().for_each(|range| { - let actual_range = range.add(full_affected_range.start()); - let new_id = self.insert_statement(actual_range); - changed.push(StatementChange::Added(AddedStatement { - stmt: new_id, - text: new_content[actual_range].to_string(), - })); - }); - - // move all statements after the afffected range - self.move_ranges( - full_affected_range.end(), - change.diff_size(), - change.is_addition(), - ); - - self.content = new_content; - - changed - } -} - -impl ChangeParams { - /// For lack of a better name, this returns the change in size of the text compared to the range - pub fn change_size(&self) -> i64 { - match self.range { - Some(range) => { - let range_length: usize = range.len().into(); - let text_length = self.text.chars().count(); - text_length as i64 - range_length as i64 - } - None => i64::try_from(self.text.chars().count()).unwrap(), - } - } - - pub fn diff_size(&self) -> TextSize { - match self.range { - Some(range) => { - let range_length: usize = range.len().into(); - let text_length = self.text.chars().count(); - let diff = 
(text_length as i64 - range_length as i64).abs(); - TextSize::from(u32::try_from(diff).unwrap()) - } - None => TextSize::from(u32::try_from(self.text.chars().count()).unwrap()), - } - } - - pub fn is_addition(&self) -> bool { - self.range.is_some() && self.text.len() > self.range.unwrap().len().into() - } - - pub fn is_deletion(&self) -> bool { - self.range.is_some() && self.text.len() < self.range.unwrap().len().into() - } - - pub fn apply_to_text(&self, text: &str) -> String { - if self.range.is_none() { - return self.text.clone(); - } - - let range = self.range.unwrap(); - let start = usize::from(range.start()); - let end = usize::from(range.end()); - - let mut new_text = String::new(); - new_text.push_str(&text[..start]); - new_text.push_str(&self.text); - if end < text.len() { - new_text.push_str(&text[end..]); - } - - new_text - } -} - -fn get_affected(content: &str, range: TextRange) -> &str { - let start_byte = content - .char_indices() - .nth(usize::from(range.start())) - .map(|(i, _)| i) - .unwrap_or(content.len()); - - let end_byte = content - .char_indices() - .nth(usize::from(range.end())) - .map(|(i, _)| i) - .unwrap_or(content.len()); - - &content[start_byte..end_byte] -} - -#[cfg(test)] -mod tests { - - use super::*; - use pgt_diagnostics::Diagnostic; - use pgt_text_size::TextRange; - - use crate::workspace::{ChangeFileParams, ChangeParams}; - - use pgt_fs::PgTPath; - - impl Document { - pub fn get_text(&self, idx: usize) -> String { - self.content[self.positions[idx].1.start().into()..self.positions[idx].1.end().into()] - .to_string() - } - } - - fn assert_document_integrity(d: &Document) { - let ranges = pgt_statement_splitter::split(&d.content) - .expect("Unexpected scan error") - .ranges; - - assert!( - ranges.len() == d.positions.len(), - "should have the correct amount of positions" - ); - - assert!( - ranges - .iter() - .all(|r| { d.positions.iter().any(|(_, stmt_range)| stmt_range == r) }), - "all ranges should be in positions" - ); - } - - 
#[test] - fn open_doc_with_scan_error() { - let input = "select id from users;\n\n\n\nselect 1443ddwwd33djwdkjw13331333333333;"; - - let d = Document::new(input.to_string(), 0); - - assert_eq!(d.positions.len(), 0); - assert!(d.has_fatal_error()); - } - - #[test] - fn comments_at_begin() { - let path = PgTPath::new("test.sql"); - let input = "\nselect id from users;\n"; - - let mut d = Document::new(input.to_string(), 0); - - let change1 = ChangeFileParams { - path: path.clone(), - version: 1, - changes: vec![ChangeParams { - text: "-".to_string(), - range: Some(TextRange::new(0.into(), 0.into())), - }], - }; - - let _changed1 = d.apply_file_change(&change1); - - assert_eq!(d.content, "-\nselect id from users;\n"); - assert_eq!(d.positions.len(), 2); - - let change2 = ChangeFileParams { - path: path.clone(), - version: 2, - changes: vec![ChangeParams { - text: "-".to_string(), - range: Some(TextRange::new(1.into(), 1.into())), - }], - }; - - let _changed2 = d.apply_file_change(&change2); - - assert_eq!(d.content, "--\nselect id from users;\n"); - assert_eq!(d.positions.len(), 1); - - let change3 = ChangeFileParams { - path: path.clone(), - version: 3, - changes: vec![ChangeParams { - text: " ".to_string(), - range: Some(TextRange::new(2.into(), 2.into())), - }], - }; - - let _changed3 = d.apply_file_change(&change3); - - assert_eq!(d.content, "-- \nselect id from users;\n"); - assert_eq!(d.positions.len(), 1); - - let change4 = ChangeFileParams { - path: path.clone(), - version: 3, - changes: vec![ChangeParams { - text: "t".to_string(), - range: Some(TextRange::new(3.into(), 3.into())), - }], - }; - - let _changed4 = d.apply_file_change(&change4); - - assert_eq!(d.content, "-- t\nselect id from users;\n"); - assert_eq!(d.positions.len(), 1); - - assert_document_integrity(&d); - } - - #[test] - fn typing_comments() { - let path = PgTPath::new("test.sql"); - let input = "select id from users;\n"; - - let mut d = Document::new(input.to_string(), 0); - - let change1 = 
ChangeFileParams { - path: path.clone(), - version: 1, - changes: vec![ChangeParams { - text: "-".to_string(), - range: Some(TextRange::new(22.into(), 23.into())), - }], - }; - - let _changed1 = d.apply_file_change(&change1); - - assert_eq!(d.content, "select id from users;\n-"); - assert_eq!(d.positions.len(), 2); - - let change2 = ChangeFileParams { - path: path.clone(), - version: 2, - changes: vec![ChangeParams { - text: "-".to_string(), - range: Some(TextRange::new(23.into(), 24.into())), - }], - }; - - let _changed2 = d.apply_file_change(&change2); - - assert_eq!(d.content, "select id from users;\n--"); - assert_eq!(d.positions.len(), 1); - - let change3 = ChangeFileParams { - path: path.clone(), - version: 3, - changes: vec![ChangeParams { - text: " ".to_string(), - range: Some(TextRange::new(24.into(), 25.into())), - }], - }; - - let _changed3 = d.apply_file_change(&change3); - - assert_eq!(d.content, "select id from users;\n-- "); - assert_eq!(d.positions.len(), 1); - - let change4 = ChangeFileParams { - path: path.clone(), - version: 3, - changes: vec![ChangeParams { - text: "t".to_string(), - range: Some(TextRange::new(25.into(), 26.into())), - }], - }; - - let _changed4 = d.apply_file_change(&change4); - - assert_eq!(d.content, "select id from users;\n-- t"); - assert_eq!(d.positions.len(), 1); - - assert_document_integrity(&d); - } - - #[test] - fn change_into_scan_error_within_statement() { - let path = PgTPath::new("test.sql"); - let input = "select id from users;\n\n\n\nselect 1;"; - - let mut d = Document::new(input.to_string(), 0); - - assert_eq!(d.positions.len(), 2); - assert!(!d.has_fatal_error()); - - let change = ChangeFileParams { - path: path.clone(), - version: 1, - changes: vec![ChangeParams { - text: "d".to_string(), - range: Some(TextRange::new(33.into(), 33.into())), - }], - }; - - let changed = d.apply_file_change(&change); - - assert_eq!(d.content, "select id from users;\n\n\n\nselect 1d;"); - assert!( - changed - .iter() - .all(|c| 
matches!(c, StatementChange::Deleted(_))), - "should delete all statements" - ); - assert!(d.positions.is_empty(), "should clear all positions"); - assert_eq!(d.diagnostics.len(), 1, "should return a scan error"); - assert_eq!( - d.diagnostics[0].location().span, - Some(TextRange::new(32.into(), 34.into())), - "should have correct span" - ); - assert!(d.has_fatal_error()); - } - - #[test] - fn change_into_scan_error_across_statements() { - let path = PgTPath::new("test.sql"); - let input = "select id from users;\n\n\n\nselect 1;"; - - let mut d = Document::new(input.to_string(), 0); - - assert_eq!(d.positions.len(), 2); - assert!(!d.has_fatal_error()); - - let change = ChangeFileParams { - path: path.clone(), - version: 1, - changes: vec![ChangeParams { - text: "1d".to_string(), - range: Some(TextRange::new(7.into(), 33.into())), - }], - }; - - let changed = d.apply_file_change(&change); - - assert_eq!(d.content, "select 1d;"); - assert!( - changed - .iter() - .all(|c| matches!(c, StatementChange::Deleted(_))), - "should delete all statements" - ); - assert!(d.positions.is_empty(), "should clear all positions"); - assert_eq!(d.diagnostics.len(), 1, "should return a scan error"); - assert_eq!( - d.diagnostics[0].location().span, - Some(TextRange::new(7.into(), 9.into())), - "should have correct span" - ); - assert!(d.has_fatal_error()); - } - - #[test] - fn change_from_invalid_to_invalid() { - let path = PgTPath::new("test.sql"); - let input = "select 1d;"; - - let mut d = Document::new(input.to_string(), 0); - - assert_eq!(d.positions.len(), 0); - assert!(d.has_fatal_error()); - assert_eq!(d.diagnostics.len(), 1); - - let change = ChangeFileParams { - path: path.clone(), - version: 1, - changes: vec![ChangeParams { - text: "2e".to_string(), - range: Some(TextRange::new(7.into(), 9.into())), - }], - }; - - let changed = d.apply_file_change(&change); - - assert_eq!(d.content, "select 2e;"); - assert!(changed.is_empty(), "should not emit any changes"); - 
assert!(d.positions.is_empty(), "should keep positions empty"); - assert_eq!(d.diagnostics.len(), 1, "should still have a scan error"); - assert_eq!( - d.diagnostics[0].location().span, - Some(TextRange::new(7.into(), 9.into())), - "should have updated span" - ); - assert!(d.has_fatal_error()); - } - - #[test] - fn change_from_invalid_to_valid() { - let path = PgTPath::new("test.sql"); - let input = "select 1d;"; - - let mut d = Document::new(input.to_string(), 0); - - assert_eq!(d.positions.len(), 0); - assert!(d.has_fatal_error()); - assert_eq!(d.diagnostics.len(), 1); - - let change = ChangeFileParams { - path: path.clone(), - version: 1, - changes: vec![ChangeParams { - text: "1".to_string(), - range: Some(TextRange::new(7.into(), 9.into())), - }], - }; - - let changed = d.apply_file_change(&change); - - assert_eq!(d.content, "select 1;"); - assert_eq!(changed.len(), 1, "should emit one change"); - assert!(matches!( - changed[0], - StatementChange::Added(AddedStatement { .. }) - )); - assert_eq!(d.positions.len(), 1, "should have one position"); - assert!(d.diagnostics.is_empty(), "should have no diagnostics"); - assert!(!d.has_fatal_error()); - } - - #[test] - fn within_statements() { - let path = PgTPath::new("test.sql"); - let input = "select id from users;\n\n\n\nselect * from contacts;"; - - let mut d = Document::new(input.to_string(), 0); - - assert_eq!(d.positions.len(), 2); - - let change = ChangeFileParams { - path: path.clone(), - version: 1, - changes: vec![ChangeParams { - text: "select 1;".to_string(), - range: Some(TextRange::new(23.into(), 23.into())), - }], - }; - - let changed = d.apply_file_change(&change); - - assert_eq!(changed.len(), 5); - assert_eq!( - changed - .iter() - .filter(|c| matches!(c, StatementChange::Deleted(_))) - .count(), - 2 - ); - assert_eq!( - changed - .iter() - .filter(|c| matches!(c, StatementChange::Added(_))) - .count(), - 3 - ); - - assert_document_integrity(&d); - } - - #[test] - fn within_statements_2() { - let 
path = PgTPath::new("test.sql"); - let input = "alter table deal alter column value drop not null;\n"; - let mut d = Document::new(input.to_string(), 0); - - assert_eq!(d.positions.len(), 1); - - let change1 = ChangeFileParams { - path: path.clone(), - version: 1, - changes: vec![ChangeParams { - text: " ".to_string(), - range: Some(TextRange::new(17.into(), 17.into())), - }], - }; - - let changed1 = d.apply_file_change(&change1); - assert_eq!(changed1.len(), 1); - assert_eq!( - d.content, - "alter table deal alter column value drop not null;\n" - ); - assert_document_integrity(&d); - - let change2 = ChangeFileParams { - path: path.clone(), - version: 2, - changes: vec![ChangeParams { - text: " ".to_string(), - range: Some(TextRange::new(18.into(), 18.into())), - }], - }; - - let changed2 = d.apply_file_change(&change2); - assert_eq!(changed2.len(), 1); - assert_eq!( - d.content, - "alter table deal alter column value drop not null;\n" - ); - assert_document_integrity(&d); - - let change3 = ChangeFileParams { - path: path.clone(), - version: 3, - changes: vec![ChangeParams { - text: " ".to_string(), - range: Some(TextRange::new(19.into(), 19.into())), - }], - }; - - let changed3 = d.apply_file_change(&change3); - assert_eq!(changed3.len(), 1); - assert_eq!( - d.content, - "alter table deal alter column value drop not null;\n" - ); - assert_document_integrity(&d); - - let change4 = ChangeFileParams { - path: path.clone(), - version: 4, - changes: vec![ChangeParams { - text: " ".to_string(), - range: Some(TextRange::new(20.into(), 20.into())), - }], - }; - - let changed4 = d.apply_file_change(&change4); - assert_eq!(changed4.len(), 1); - assert_eq!( - d.content, - "alter table deal alter column value drop not null;\n" - ); - assert_document_integrity(&d); - } - - #[test] - fn julians_sample() { - let path = PgTPath::new("test.sql"); - let input = "select\n *\nfrom\n test;\n\nselect\n\nalter table test\n\ndrop column id;"; - let mut d = 
Document::new(input.to_string(), 0); - - assert_eq!(d.positions.len(), 4); - - let change1 = ChangeFileParams { - path: path.clone(), - version: 1, - changes: vec![ChangeParams { - text: " ".to_string(), - range: Some(TextRange::new(31.into(), 31.into())), - }], - }; - - let changed1 = d.apply_file_change(&change1); - assert_eq!(changed1.len(), 1); - assert_eq!( - d.content, - "select\n *\nfrom\n test;\n\nselect \n\nalter table test\n\ndrop column id;" - ); - assert_document_integrity(&d); - - // problem: this creates a new statement - let change2 = ChangeFileParams { - path: path.clone(), - version: 2, - changes: vec![ChangeParams { - text: ";".to_string(), - range: Some(TextRange::new(32.into(), 32.into())), - }], - }; - - let changed2 = d.apply_file_change(&change2); - assert_eq!(changed2.len(), 4); - assert_eq!( - changed2 - .iter() - .filter(|c| matches!(c, StatementChange::Deleted(_))) - .count(), - 2 - ); - assert_eq!( - changed2 - .iter() - .filter(|c| matches!(c, StatementChange::Added(_))) - .count(), - 2 - ); - assert_document_integrity(&d); - - let change3 = ChangeFileParams { - path: path.clone(), - version: 3, - changes: vec![ChangeParams { - text: "".to_string(), - range: Some(TextRange::new(32.into(), 33.into())), - }], - }; - - let changed3 = d.apply_file_change(&change3); - assert_eq!(changed3.len(), 1); - assert!(matches!(&changed3[0], StatementChange::Modified(_))); - assert_eq!( - d.content, - "select\n *\nfrom\n test;\n\nselect \n\nalter table test\n\ndrop column id;" - ); - match &changed3[0] { - StatementChange::Modified(changed) => { - assert_eq!(changed.old_stmt_text, "select ;"); - assert_eq!(changed.new_stmt_text, "select"); - assert_eq!(changed.change_text, ""); - assert_eq!(changed.change_range, TextRange::new(7.into(), 8.into())); - } - _ => panic!("expected modified statement"), - } - assert_document_integrity(&d); - } - - #[test] - fn across_statements() { - let path = PgTPath::new("test.sql"); - let input = "select id from 
users;\nselect * from contacts;"; - - let mut d = Document::new(input.to_string(), 0); - - assert_eq!(d.positions.len(), 2); - - let change = ChangeFileParams { - path: path.clone(), - version: 1, - changes: vec![ChangeParams { - text: ",test from users;\nselect 1;".to_string(), - range: Some(TextRange::new(9.into(), 45.into())), - }], - }; - - let changed = d.apply_file_change(&change); - - assert_eq!(changed.len(), 4); - assert!(matches!(changed[0], StatementChange::Deleted(_))); - assert_eq!(changed[0].statement().raw(), 1); - assert!(matches!( - changed[1], - StatementChange::Deleted(StatementId::Root(_)) - )); - assert_eq!(changed[1].statement().raw(), 0); - assert!( - matches!(&changed[2], StatementChange::Added(AddedStatement { stmt: _, text }) if text == "select id,test from users;") - ); - assert!( - matches!(&changed[3], StatementChange::Added(AddedStatement { stmt: _, text }) if text == "select 1;") - ); - - assert_document_integrity(&d); - } - - #[test] - fn append_whitespace_to_statement() { - let path = PgTPath::new("test.sql"); - let input = "select id"; - - let mut d = Document::new(input.to_string(), 0); - - assert_eq!(d.positions.len(), 1); - - let change = ChangeFileParams { - path: path.clone(), - version: 1, - changes: vec![ChangeParams { - text: " ".to_string(), - range: Some(TextRange::new(9.into(), 10.into())), - }], - }; - - let changed = d.apply_file_change(&change); - - assert_eq!(changed.len(), 1); - - assert_document_integrity(&d); - } - - #[test] - fn apply_changes() { - let path = PgTPath::new("test.sql"); - let input = "select id from users;\nselect * from contacts;"; - - let mut d = Document::new(input.to_string(), 0); - - assert_eq!(d.positions.len(), 2); - - let change = ChangeFileParams { - path: path.clone(), - version: 1, - changes: vec![ChangeParams { - text: ",test from users\nselect 1;".to_string(), - range: Some(TextRange::new(9.into(), 45.into())), - }], - }; - - let changed = d.apply_file_change(&change); - - 
assert_eq!(changed.len(), 4); - - assert!(matches!( - changed[0], - StatementChange::Deleted(StatementId::Root(_)) - )); - assert_eq!(changed[0].statement().raw(), 1); - assert!(matches!( - changed[1], - StatementChange::Deleted(StatementId::Root(_)) - )); - assert_eq!(changed[1].statement().raw(), 0); - assert_eq!( - changed[2], - StatementChange::Added(AddedStatement { - stmt: StatementId::Root(2.into()), - text: "select id,test from users".to_string() - }) - ); - assert_eq!( - changed[3], - StatementChange::Added(AddedStatement { - stmt: StatementId::Root(3.into()), - text: "select 1;".to_string() - }) - ); - - assert_eq!("select id,test from users\nselect 1;", d.content); - - assert_document_integrity(&d); - } - - #[test] - fn removing_newline_at_the_beginning() { - let path = PgTPath::new("test.sql"); - let input = "\n"; - - let mut d = Document::new(input.to_string(), 1); - - assert_eq!(d.positions.len(), 0); - - let change = ChangeFileParams { - path: path.clone(), - version: 2, - changes: vec![ChangeParams { - text: "\nbegin;\n\nselect 1\n\nrollback;\n".to_string(), - range: Some(TextRange::new(0.into(), 1.into())), - }], - }; - - let changes = d.apply_file_change(&change); - - assert_eq!(changes.len(), 3); - - assert_document_integrity(&d); - - let change2 = ChangeFileParams { - path: path.clone(), - version: 3, - changes: vec![ChangeParams { - text: "".to_string(), - range: Some(TextRange::new(0.into(), 1.into())), - }], - }; - - let changes2 = d.apply_file_change(&change2); - - assert_eq!(changes2.len(), 1); - - assert_document_integrity(&d); - } - - #[test] - fn apply_changes_at_end_of_statement() { - let path = PgTPath::new("test.sql"); - let input = "select id from\nselect * from contacts;"; - - let mut d = Document::new(input.to_string(), 1); - - assert_eq!(d.positions.len(), 2); - - let change = ChangeFileParams { - path: path.clone(), - version: 2, - changes: vec![ChangeParams { - text: " contacts;".to_string(), - range: 
Some(TextRange::new(14.into(), 14.into())), - }], - }; - - let changes = d.apply_file_change(&change); - - assert_eq!(changes.len(), 1); - - assert!(matches!(changes[0], StatementChange::Modified(_))); - - assert_eq!( - "select id from contacts;\nselect * from contacts;", - d.content - ); - - assert_document_integrity(&d); - } - - #[test] - fn apply_changes_replacement() { - let path = PgTPath::new("test.sql"); - - let mut doc = Document::new("".to_string(), 0); - - let change = ChangeFileParams { - path: path.clone(), - version: 1, - changes: vec![ChangeParams { - text: "select 1;\nselect 2;".to_string(), - range: None, - }], - }; - - doc.apply_file_change(&change); - - assert_eq!(doc.get_text(0), "select 1;".to_string()); - assert_eq!(doc.get_text(1), "select 2;".to_string()); - assert_eq!( - doc.positions[0].1, - TextRange::new(TextSize::new(0), TextSize::new(9)) - ); - assert_eq!( - doc.positions[1].1, - TextRange::new(TextSize::new(10), TextSize::new(19)) - ); - - let change_2 = ChangeFileParams { - path: path.clone(), - version: 2, - changes: vec![ChangeParams { - text: "".to_string(), - range: Some(TextRange::new(7.into(), 8.into())), - }], - }; - - doc.apply_file_change(&change_2); - - assert_eq!(doc.content, "select ;\nselect 2;"); - assert_eq!(doc.positions.len(), 2); - assert_eq!(doc.get_text(0), "select ;".to_string()); - assert_eq!(doc.get_text(1), "select 2;".to_string()); - assert_eq!( - doc.positions[0].1, - TextRange::new(TextSize::new(0), TextSize::new(8)) - ); - assert_eq!( - doc.positions[1].1, - TextRange::new(TextSize::new(9), TextSize::new(18)) - ); - - let change_3 = ChangeFileParams { - path: path.clone(), - version: 3, - changes: vec![ChangeParams { - text: "!".to_string(), - range: Some(TextRange::new(7.into(), 7.into())), - }], - }; - - doc.apply_file_change(&change_3); - - assert_eq!(doc.content, "select !;\nselect 2;"); - assert_eq!(doc.positions.len(), 2); - assert_eq!( - doc.positions[0].1, - TextRange::new(TextSize::new(0), 
TextSize::new(9)) - ); - assert_eq!( - doc.positions[1].1, - TextRange::new(TextSize::new(10), TextSize::new(19)) - ); - - let change_4 = ChangeFileParams { - path: path.clone(), - version: 4, - changes: vec![ChangeParams { - text: "".to_string(), - range: Some(TextRange::new(7.into(), 8.into())), - }], - }; - - doc.apply_file_change(&change_4); - - assert_eq!(doc.content, "select ;\nselect 2;"); - assert_eq!(doc.positions.len(), 2); - assert_eq!( - doc.positions[0].1, - TextRange::new(TextSize::new(0), TextSize::new(8)) - ); - assert_eq!( - doc.positions[1].1, - TextRange::new(TextSize::new(9), TextSize::new(18)) - ); - - let change_5 = ChangeFileParams { - path: path.clone(), - version: 5, - changes: vec![ChangeParams { - text: "1".to_string(), - range: Some(TextRange::new(7.into(), 7.into())), - }], - }; - - doc.apply_file_change(&change_5); - - assert_eq!(doc.content, "select 1;\nselect 2;"); - assert_eq!(doc.positions.len(), 2); - assert_eq!( - doc.positions[0].1, - TextRange::new(TextSize::new(0), TextSize::new(9)) - ); - assert_eq!( - doc.positions[1].1, - TextRange::new(TextSize::new(10), TextSize::new(19)) - ); - - assert_document_integrity(&doc); - } - - #[test] - fn comment_at_begin() { - let path = PgTPath::new("test.sql"); - - let mut doc = Document::new( - "-- Add new schema named \"private\"\nCREATE SCHEMA \"private\";".to_string(), - 0, - ); - - let change = ChangeFileParams { - path: path.clone(), - version: 1, - changes: vec![ChangeParams { - text: "".to_string(), - range: Some(TextRange::new(0.into(), 1.into())), - }], - }; - - let changed = doc.apply_file_change(&change); - - assert_eq!( - doc.content, - "- Add new schema named \"private\"\nCREATE SCHEMA \"private\";" - ); - assert_eq!(changed.len(), 3); - assert!(matches!(&changed[0], StatementChange::Deleted(_))); - assert!(matches!( - changed[1], - StatementChange::Added(AddedStatement { .. }) - )); - assert!(matches!( - changed[2], - StatementChange::Added(AddedStatement { .. 
}) - )); - - let change_2 = ChangeFileParams { - path: path.clone(), - version: 2, - changes: vec![ChangeParams { - text: "-".to_string(), - range: Some(TextRange::new(0.into(), 0.into())), - }], - }; - - let changed_2 = doc.apply_file_change(&change_2); - - assert_eq!( - doc.content, - "-- Add new schema named \"private\"\nCREATE SCHEMA \"private\";" - ); - - assert_eq!(changed_2.len(), 3); - assert!(matches!( - changed_2[0], - StatementChange::Deleted(StatementId::Root(_)) - )); - assert!(matches!( - changed_2[1], - StatementChange::Deleted(StatementId::Root(_)) - )); - assert!(matches!( - changed_2[2], - StatementChange::Added(AddedStatement { .. }) - )); - - assert_document_integrity(&doc); - } - - #[test] - fn apply_changes_within_statement() { - let input = "select id from users;\nselect * from contacts;"; - let path = PgTPath::new("test.sql"); - - let mut doc = Document::new(input.to_string(), 0); - - assert_eq!(doc.positions.len(), 2); - - let stmt_1_range = doc.positions[0].clone(); - let stmt_2_range = doc.positions[1].clone(); - - let update_text = ",test"; - - let update_range = TextRange::new(9.into(), 10.into()); - - let update_text_len = u32::try_from(update_text.chars().count()).unwrap(); - let update_addition = update_text_len - u32::from(update_range.len()); - - let change = ChangeFileParams { - path: path.clone(), - version: 1, - changes: vec![ChangeParams { - text: update_text.to_string(), - range: Some(update_range), - }], - }; - - doc.apply_file_change(&change); - - assert_eq!( - "select id,test from users;\nselect * from contacts;", - doc.content - ); - assert_eq!(doc.positions.len(), 2); - assert_eq!(doc.positions[0].1.start(), stmt_1_range.1.start()); - assert_eq!( - u32::from(doc.positions[0].1.end()), - u32::from(stmt_1_range.1.end()) + update_addition - ); - assert_eq!( - u32::from(doc.positions[1].1.start()), - u32::from(stmt_2_range.1.start()) + update_addition - ); - assert_eq!( - u32::from(doc.positions[1].1.end()), - 
u32::from(stmt_2_range.1.end()) + update_addition - ); - - assert_document_integrity(&doc); - } - - #[test] - fn remove_outside_of_content() { - let path = PgTPath::new("test.sql"); - let input = "select id from contacts;\n\nselect * from contacts;"; - - let mut d = Document::new(input.to_string(), 1); - - assert_eq!(d.positions.len(), 2); - - let change1 = ChangeFileParams { - path: path.clone(), - version: 2, - changes: vec![ChangeParams { - text: "\n".to_string(), - range: Some(TextRange::new(49.into(), 49.into())), - }], - }; - - d.apply_file_change(&change1); - - assert_eq!( - d.content, - "select id from contacts;\n\nselect * from contacts;\n" - ); - - let change2 = ChangeFileParams { - path: path.clone(), - version: 3, - changes: vec![ChangeParams { - text: "\n".to_string(), - range: Some(TextRange::new(50.into(), 50.into())), - }], - }; - - d.apply_file_change(&change2); - - assert_eq!( - d.content, - "select id from contacts;\n\nselect * from contacts;\n\n" - ); - - let change5 = ChangeFileParams { - path: path.clone(), - version: 6, - changes: vec![ChangeParams { - text: "".to_string(), - range: Some(TextRange::new(51.into(), 52.into())), - }], - }; - - let changes = d.apply_file_change(&change5); - - assert!(matches!( - changes[0], - StatementChange::Deleted(StatementId::Root(_)) - )); - - assert!(matches!( - changes[1], - StatementChange::Added(AddedStatement { .. 
}) - )); - - assert_eq!(changes.len(), 2); - - assert_eq!( - d.content, - "select id from contacts;\n\nselect * from contacts;\n\n" - ); - - assert_document_integrity(&d); - } - - #[test] - fn remove_trailing_whitespace() { - let path = PgTPath::new("test.sql"); - - let mut doc = Document::new("select * from ".to_string(), 0); - - let change = ChangeFileParams { - path: path.clone(), - version: 1, - changes: vec![ChangeParams { - text: "".to_string(), - range: Some(TextRange::new(13.into(), 14.into())), - }], - }; - - let changed = doc.apply_file_change(&change); - - assert_eq!(doc.content, "select * from"); - - assert_eq!(changed.len(), 1); - - match &changed[0] { - StatementChange::Modified(stmt) => { - let ModifiedStatement { - change_range, - change_text, - new_stmt_text, - old_stmt_text, - .. - } = stmt; - - assert_eq!(change_range, &TextRange::new(13.into(), 14.into())); - assert_eq!(change_text, ""); - assert_eq!(new_stmt_text, "select * from"); - - // the whitespace was not considered - // to be a part of the statement - assert_eq!(old_stmt_text, "select * from"); - } - - _ => unreachable!("Did not yield a modified statement."), - } - - assert_document_integrity(&doc); - } - - #[test] - fn remove_trailing_whitespace_and_last_char() { - let path = PgTPath::new("test.sql"); - - let mut doc = Document::new("select * from ".to_string(), 0); - - let change = ChangeFileParams { - path: path.clone(), - version: 1, - changes: vec![ChangeParams { - text: "".to_string(), - range: Some(TextRange::new(12.into(), 14.into())), - }], - }; - - let changed = doc.apply_file_change(&change); - - assert_eq!(doc.content, "select * fro"); - - assert_eq!(changed.len(), 1); - - match &changed[0] { - StatementChange::Modified(stmt) => { - let ModifiedStatement { - change_range, - change_text, - new_stmt_text, - old_stmt_text, - .. 
- } = stmt; - - assert_eq!(change_range, &TextRange::new(12.into(), 14.into())); - assert_eq!(change_text, ""); - assert_eq!(new_stmt_text, "select * fro"); - - // the whitespace was not considered - // to be a part of the statement - assert_eq!(old_stmt_text, "select * from"); - } - - _ => unreachable!("Did not yield a modified statement."), - } - - assert_document_integrity(&doc); - } - - #[test] - fn multiple_deletions_at_once() { - let path = PgTPath::new("test.sql"); - - let mut doc = Document::new("\n\n\n\nALTER TABLE ONLY \"public\".\"sendout\"\n ADD CONSTRAINT \"sendout_organisation_id_fkey\" FOREIGN -KEY (\"organisation_id\") REFERENCES \"public\".\"organisation\"(\"id\") ON UPDATE RESTRICT ON DELETE CASCADE;\n".to_string(), 0); - - let change = ChangeFileParams { - path: path.clone(), - version: 1, - changes: vec![ - ChangeParams { - range: Some(TextRange::new(31.into(), 38.into())), - text: "te".to_string(), - }, - ChangeParams { - range: Some(TextRange::new(60.into(), 67.into())), - text: "te".to_string(), - }, - ], - }; - - let changed = doc.apply_file_change(&change); - - assert_eq!(doc.content, "\n\n\n\nALTER TABLE ONLY \"public\".\"te\"\n ADD CONSTRAINT \"te_organisation_id_fkey\" FOREIGN -KEY (\"organisation_id\") REFERENCES \"public\".\"organisation\"(\"id\") ON UPDATE RESTRICT ON DELETE CASCADE;\n"); - - assert_eq!(changed.len(), 2); - - assert_document_integrity(&doc); - } - - #[test] - fn multiple_additions_at_once() { - let path = PgTPath::new("test.sql"); - - let mut doc = Document::new("\n\n\n\nALTER TABLE ONLY \"public\".\"sendout\"\n ADD CONSTRAINT \"sendout_organisation_id_fkey\" FOREIGN -KEY (\"organisation_id\") REFERENCES \"public\".\"organisation\"(\"id\") ON UPDATE RESTRICT ON DELETE CASCADE;\n".to_string(), 0); - - let change = ChangeFileParams { - path: path.clone(), - version: 1, - changes: vec![ - ChangeParams { - range: Some(TextRange::new(31.into(), 38.into())), - text: "omni_channel_message".to_string(), - }, - ChangeParams { 
- range: Some(TextRange::new(60.into(), 67.into())), - text: "omni_channel_message".to_string(), - }, - ], - }; - - let changed = doc.apply_file_change(&change); - - assert_eq!(doc.content, "\n\n\n\nALTER TABLE ONLY \"public\".\"omni_channel_message\"\n ADD CONSTRAINT \"omni_channel_message_organisation_id_fkey\" FOREIGN -KEY (\"organisation_id\") REFERENCES \"public\".\"organisation\"(\"id\") ON UPDATE RESTRICT ON DELETE CASCADE;\n"); - - assert_eq!(changed.len(), 2); - - assert_document_integrity(&doc); - } - - #[test] - fn remove_inbetween_whitespace() { - let path = PgTPath::new("test.sql"); - - let mut doc = Document::new("select * from users".to_string(), 0); - - let change = ChangeFileParams { - path: path.clone(), - version: 1, - changes: vec![ChangeParams { - text: "".to_string(), - range: Some(TextRange::new(9.into(), 11.into())), - }], - }; - - let changed = doc.apply_file_change(&change); - - assert_eq!(doc.content, "select * from users"); - - assert_eq!(changed.len(), 1); - - match &changed[0] { - StatementChange::Modified(stmt) => { - let ModifiedStatement { - change_range, - change_text, - new_stmt_text, - old_stmt_text, - .. 
- } = stmt; - - assert_eq!(change_range, &TextRange::new(9.into(), 11.into())); - assert_eq!(change_text, ""); - assert_eq!(old_stmt_text, "select * from users"); - assert_eq!(new_stmt_text, "select * from users"); - } - - _ => unreachable!("Did not yield a modified statement."), - } - - assert_document_integrity(&doc); - } - - #[test] - fn test_comments_only() { - let path = PgTPath::new("test.sql"); - let initial_content = "-- atlas:import async_trigger/setup.sql\n-- atlas:import public/setup.sql\n-- atlas:import private/setup.sql\n-- atlas:import api/setup.sql\n-- atlas:import async_trigger/index.sql\n-- atlas:import public/enums/index.sql\n-- atlas:import public/types/index.sql\n-- atlas:import private/enums/index.sql\n-- atlas:import private/functions/index.sql\n-- atlas:import public/tables/index.sql\n-- atlas:import public/index.sql\n-- atlas:import private/index.sql\n-- atlas:import api/index.sql\n\n\n\n"; - - // Create a new document - let mut doc = Document::new(initial_content.to_string(), 0); - - // First change: Delete some text at line 2, character 24-29 - let change1 = ChangeFileParams { - path: path.clone(), - version: 3, - changes: vec![ChangeParams { - text: "".to_string(), - range: Some(TextRange::new( - // Calculate the correct position based on the content - // Line 2, character 24 - 98.into(), - // Line 2, character 29 - 103.into(), - )), - }], - }; - - let _changes1 = doc.apply_file_change(&change1); - - // Second change: Add 't' at line 2, character 24 - let change2 = ChangeFileParams { - path: path.clone(), - version: 4, - changes: vec![ChangeParams { - text: "t".to_string(), - range: Some(TextRange::new(98.into(), 98.into())), - }], - }; - - let _changes2 = doc.apply_file_change(&change2); - - assert_eq!( - doc.positions.len(), - 0, - "Document should have no statement after adding 't'" - ); - - // Third change: Add 'e' at line 2, character 25 - let change3 = ChangeFileParams { - path: path.clone(), - version: 5, - changes: 
vec![ChangeParams { - text: "e".to_string(), - range: Some(TextRange::new(99.into(), 99.into())), - }], - }; - - let _changes3 = doc.apply_file_change(&change3); - assert_eq!( - doc.positions.len(), - 0, - "Document should still have no statement" - ); - - // Fourth change: Add 's' at line 2, character 26 - let change4 = ChangeFileParams { - path: path.clone(), - version: 6, - changes: vec![ChangeParams { - text: "s".to_string(), - range: Some(TextRange::new(100.into(), 100.into())), - }], - }; - - let _changes4 = doc.apply_file_change(&change4); - assert_eq!( - doc.positions.len(), - 0, - "Document should still have no statement" - ); - - // Fifth change: Add 't' at line 2, character 27 - let change5 = ChangeFileParams { - path: path.clone(), - version: 7, - changes: vec![ChangeParams { - text: "t".to_string(), - range: Some(TextRange::new(101.into(), 101.into())), - }], - }; - - let _changes5 = doc.apply_file_change(&change5); - assert_eq!( - doc.positions.len(), - 0, - "Document should still have no statement" - ); - - assert_document_integrity(&doc); - } -} diff --git a/crates/pgt_workspace/src/workspace/server/connection_key.rs b/crates/pgt_workspace/src/workspace/server/connection_key.rs new file mode 100644 index 000000000..abdd80258 --- /dev/null +++ b/crates/pgt_workspace/src/workspace/server/connection_key.rs @@ -0,0 +1,44 @@ +use sqlx::PgPool; + +use crate::settings::DatabaseSettings; + +/// A unique identifier for database connection settings +#[derive(Clone, PartialEq, Eq, Hash)] +pub(crate) struct ConnectionKey { + pub host: String, + pub port: u16, + pub username: String, + pub database: String, +} + +impl From<&DatabaseSettings> for ConnectionKey { + fn from(settings: &DatabaseSettings) -> Self { + Self { + host: settings.host.clone(), + port: settings.port, + username: settings.username.clone(), + database: settings.database.clone(), + } + } +} + +impl From<&PgPool> for ConnectionKey { + fn from(pool: &PgPool) -> Self { + let conn = 
pool.connect_options(); + + match conn.get_database() { + None => Self { + host: conn.get_host().to_string(), + port: conn.get_port(), + username: conn.get_username().to_string(), + database: String::new(), + }, + Some(db) => Self { + host: conn.get_host().to_string(), + port: conn.get_port(), + username: conn.get_username().to_string(), + database: db.to_string(), + }, + } + } +} diff --git a/crates/pgt_workspace/src/workspace/server/connection_manager.rs b/crates/pgt_workspace/src/workspace/server/connection_manager.rs new file mode 100644 index 000000000..145b6fa0c --- /dev/null +++ b/crates/pgt_workspace/src/workspace/server/connection_manager.rs @@ -0,0 +1,99 @@ +use std::collections::HashMap; +use std::sync::RwLock; +use std::time::{Duration, Instant}; + +use sqlx::{PgPool, Postgres, pool::PoolOptions, postgres::PgConnectOptions}; + +use crate::settings::DatabaseSettings; + +use super::connection_key::ConnectionKey; + +/// Cached connection pool with last access time +struct CachedPool { + pool: PgPool, + last_accessed: Instant, + idle_timeout: Duration, +} + +#[derive(Default)] +pub struct ConnectionManager { + pools: RwLock>, +} + +impl ConnectionManager { + pub fn new() -> Self { + Self { + pools: RwLock::new(HashMap::new()), + } + } + + /// Get a connection pool for the given database settings. + /// If a pool already exists for these settings, it will be returned. + /// If not, a new pool will be created if connections are enabled. + /// Will also clean up idle connections that haven't been accessed for a while. 
+ pub(crate) fn get_pool(&self, settings: &DatabaseSettings) -> Option { + let key = ConnectionKey::from(settings); + + if !settings.enable_connection { + tracing::info!("Database connection disabled."); + return None; + } + + { + if let Ok(pools) = self.pools.read() { + if let Some(cached_pool) = pools.get(&key) { + return Some(cached_pool.pool.clone()); + } + } + } + + let mut pools = self.pools.write().unwrap(); + + // Double-check after acquiring write lock + if let Some(cached_pool) = pools.get_mut(&key) { + cached_pool.last_accessed = Instant::now(); + return Some(cached_pool.pool.clone()); + } + + // Clean up idle connections before creating new ones to avoid unbounded growth + let now = Instant::now(); + pools.retain(|k, cached_pool| { + let idle_duration = now.duration_since(cached_pool.last_accessed); + if idle_duration > cached_pool.idle_timeout && k != &key { + tracing::debug!( + "Removing idle database connection (idle for {:?})", + idle_duration + ); + false + } else { + true + } + }); + + // Create a new pool + let config = PgConnectOptions::new() + .host(&settings.host) + .port(settings.port) + .username(&settings.username) + .password(&settings.password) + .database(&settings.database); + + let timeout = settings.conn_timeout_secs; + + let pool = PoolOptions::::new() + .acquire_timeout(timeout) + .acquire_slow_threshold(Duration::from_secs(2)) + .connect_lazy_with(config); + + let cached_pool = CachedPool { + pool: pool.clone(), + last_accessed: Instant::now(), + // TODO: add this to the db settings, for now default to five minutes + idle_timeout: Duration::from_secs(60 * 5), + }; + + pools.insert(key, cached_pool); + + Some(pool) + } +} diff --git a/crates/pgt_workspace/src/workspace/server/db_connection.rs b/crates/pgt_workspace/src/workspace/server/db_connection.rs deleted file mode 100644 index d002c0a27..000000000 --- a/crates/pgt_workspace/src/workspace/server/db_connection.rs +++ /dev/null @@ -1,40 +0,0 @@ -use std::time::Duration; - -use 
sqlx::{PgPool, Postgres, pool::PoolOptions, postgres::PgConnectOptions}; - -use crate::settings::DatabaseSettings; - -#[derive(Default)] -pub struct DbConnection { - pool: Option, -} - -impl DbConnection { - /// There might be no pool available if the user decides to skip db checks. - pub(crate) fn get_pool(&self) -> Option { - self.pool.clone() - } - - pub(crate) fn set_conn_settings(&mut self, settings: &DatabaseSettings) { - if !settings.enable_connection { - tracing::info!("Database connection disabled."); - return; - } - - let config = PgConnectOptions::new() - .host(&settings.host) - .port(settings.port) - .username(&settings.username) - .password(&settings.password) - .database(&settings.database); - - let timeout = settings.conn_timeout_secs; - - let pool = PoolOptions::::new() - .acquire_timeout(timeout) - .acquire_slow_threshold(Duration::from_secs(2)) - .connect_lazy_with(config); - - self.pool = Some(pool); - } -} diff --git a/crates/pgt_workspace/src/workspace/server/document.rs b/crates/pgt_workspace/src/workspace/server/document.rs index ed0ca40f0..c8cdc1f1d 100644 --- a/crates/pgt_workspace/src/workspace/server/document.rs +++ b/crates/pgt_workspace/src/workspace/server/document.rs @@ -1,57 +1,341 @@ -use pgt_diagnostics::{Diagnostic, DiagnosticExt, Severity, serde::Diagnostic as SDiagnostic}; -use pgt_text_size::{TextRange, TextSize}; - -use super::statement_identifier::{StatementId, StatementIdGenerator}; - -type StatementPos = (StatementId, TextRange); +use std::sync::Arc; -pub(crate) struct Document { - pub(crate) content: String, - pub(crate) version: i32, +use pgt_analyser::AnalysableStatement; +use pgt_diagnostics::{Diagnostic, DiagnosticExt, serde::Diagnostic as SDiagnostic}; +use pgt_query_ext::diagnostics::SyntaxDiagnostic; +use pgt_suppressions::Suppressions; +use pgt_text_size::{TextRange, TextSize}; - pub(super) diagnostics: Vec, - /// List of statements sorted by range.start() - pub(super) positions: Vec, +use super::{ + 
annotation::AnnotationStore, + pg_query::PgQueryStore, + sql_function::{SQLFunctionSignature, get_sql_fn_body, get_sql_fn_signature}, + statement_identifier::StatementId, + tree_sitter::TreeSitterStore, +}; - pub(super) id_generator: StatementIdGenerator, +pub struct Document { + content: String, + version: i32, + ranges: Vec, + diagnostics: Vec, + ast_db: PgQueryStore, + cst_db: TreeSitterStore, + #[allow(dead_code)] + annotation_db: AnnotationStore, + suppressions: Suppressions, } impl Document { - pub(crate) fn new(content: String, version: i32) -> Self { - let mut id_generator = StatementIdGenerator::new(); + pub fn new(content: String, version: i32) -> Document { + let cst_db = TreeSitterStore::new(); + let ast_db = PgQueryStore::new(); + let annotation_db = AnnotationStore::new(); + let suppressions = Suppressions::from(content.as_str()); let (ranges, diagnostics) = split_with_diagnostics(&content, None); - Self { - positions: ranges - .into_iter() - .map(|range| (id_generator.next(), range)) - .collect(), + Document { + ranges, + diagnostics, content, version, - diagnostics, - id_generator, + ast_db, + cst_db, + annotation_db, + suppressions, } } - pub fn statement_content(&self, id: &StatementId) -> Option<&str> { - self.positions - .iter() - .find(|(statement_id, _)| statement_id == id) - .map(|(_, range)| &self.content[*range]) + pub fn update_content(&mut self, content: String, version: i32) { + self.content = content; + self.version = version; + + let (ranges, diagnostics) = split_with_diagnostics(&self.content, None); + + self.ranges = ranges; + self.diagnostics = diagnostics; + self.suppressions = Suppressions::from(self.content.as_str()); + } + + pub fn suppressions(&self) -> &Suppressions { + &self.suppressions } - /// Returns true if there is at least one fatal error in the diagnostics - /// - /// A fatal error is a scan error that prevents the document from being used - pub(super) fn has_fatal_error(&self) -> bool { - self.diagnostics - .iter() - 
.any(|d| d.severity() == Severity::Fatal) + pub fn get_document_content(&self) -> &str { + &self.content } - pub fn iter(&self) -> StatementIterator<'_> { - StatementIterator::new(self) + pub fn document_diagnostics(&self) -> &Vec { + &self.diagnostics + } + + pub fn find<'a, M>(&'a self, id: StatementId, mapper: M) -> Option + where + M: StatementMapper<'a>, + { + self.iter_with_filter(mapper, IdFilter::new(id)).next() + } + + pub fn iter<'a, M>(&'a self, mapper: M) -> ParseIterator<'a, M, NoFilter> + where + M: StatementMapper<'a>, + { + self.iter_with_filter(mapper, NoFilter) + } + + pub fn iter_with_filter<'a, M, F>(&'a self, mapper: M, filter: F) -> ParseIterator<'a, M, F> + where + M: StatementMapper<'a>, + F: StatementFilter<'a>, + { + ParseIterator::new(self, mapper, filter) + } + + #[allow(dead_code)] + pub fn count(&self) -> usize { + self.iter(DefaultMapper).count() + } +} + +pub trait StatementMapper<'a> { + type Output; + + fn map(&self, parsed: &'a Document, id: StatementId, range: TextRange) -> Self::Output; +} + +pub trait StatementFilter<'a> { + fn predicate(&self, id: &StatementId, range: &TextRange, content: &str) -> bool; +} + +pub struct ParseIterator<'a, M, F> { + parser: &'a Document, + mapper: M, + filter: F, + ranges: std::slice::Iter<'a, TextRange>, + pending_sub_statements: Vec<(StatementId, TextRange, String)>, +} + +impl<'a, M, F> ParseIterator<'a, M, F> { + pub fn new(parser: &'a Document, mapper: M, filter: F) -> Self { + Self { + parser, + mapper, + filter, + ranges: parser.ranges.iter(), + pending_sub_statements: Vec::new(), + } + } +} + +impl<'a, M, F> Iterator for ParseIterator<'a, M, F> +where + M: StatementMapper<'a>, + F: StatementFilter<'a>, +{ + type Item = M::Output; + + fn next(&mut self) -> Option { + // First check if we have any pending sub-statements to process + if let Some((id, range, content)) = self.pending_sub_statements.pop() { + if self.filter.predicate(&id, &range, content.as_str()) { + return 
Some(self.mapper.map(self.parser, id, range)); + } + // If the sub-statement doesn't pass the filter, continue to the next item + return self.next(); + } + + // Process the next top-level statement + let next_range = self.ranges.next(); + + if let Some(range) = next_range { + // If we should include sub-statements and this statement has an AST + + let content = &self.parser.content[*range]; + let root_id = StatementId::new(content); + + if let Ok(ast) = self.parser.ast_db.get_or_cache_ast(&root_id).as_ref() { + // Check if this is a SQL function definition with a body + if let Some(sub_statement) = get_sql_fn_body(ast, content) { + // Add sub-statements to our pending queue + self.pending_sub_statements.push(( + root_id.create_child(&sub_statement.body), + // adjust range to document + sub_statement.range + range.start(), + sub_statement.body.clone(), + )); + } + } + + // Return the current statement if it passes the filter + if self.filter.predicate(&root_id, range, content) { + return Some(self.mapper.map(self.parser, root_id, *range)); + } + + // If the current statement doesn't pass the filter, try the next one + return self.next(); + } + + None + } +} + +pub struct DefaultMapper; +impl<'a> StatementMapper<'a> for DefaultMapper { + type Output = (StatementId, TextRange, String); + + fn map(&self, _parser: &'a Document, id: StatementId, range: TextRange) -> Self::Output { + (id.clone(), range, id.content().to_string()) + } +} + +pub struct ExecuteStatementMapper; +impl<'a> StatementMapper<'a> for ExecuteStatementMapper { + type Output = (StatementId, TextRange, String, Option); + + fn map(&self, parser: &'a Document, id: StatementId, range: TextRange) -> Self::Output { + let ast_result = parser.ast_db.get_or_cache_ast(&id); + let ast_option = match &*ast_result { + Ok(node) => Some(node.clone()), + Err(_) => None, + }; + + (id.clone(), range, id.content().to_string(), ast_option) + } +} + +pub struct TypecheckDiagnosticsMapper; +impl<'a> StatementMapper<'a> for 
TypecheckDiagnosticsMapper { + type Output = ( + StatementId, + TextRange, + Option, + Arc, + Option, + ); + + fn map(&self, parser: &'a Document, id: StatementId, range: TextRange) -> Self::Output { + let ast_result = parser.ast_db.get_or_cache_ast(&id); + + let ast_option = match &*ast_result { + Ok(node) => Some(node.clone()), + Err(_) => None, + }; + + let cst_result = parser.cst_db.get_or_cache_tree(&id); + + let sql_fn_sig = id.parent().and_then(|root| { + let ast_option = parser.ast_db.get_or_cache_ast(&root).as_ref().clone().ok(); + + let ast_option = ast_option.as_ref()?; + + get_sql_fn_signature(ast_option) + }); + + (id.clone(), range, ast_option, cst_result, sql_fn_sig) + } +} + +pub struct AnalyserDiagnosticsMapper; +impl<'a> StatementMapper<'a> for AnalyserDiagnosticsMapper { + type Output = (Option, Option); + + fn map(&self, parser: &'a Document, id: StatementId, range: TextRange) -> Self::Output { + let maybe_node = parser.ast_db.get_or_cache_ast(&id); + + let (ast_option, diagnostics) = match &*maybe_node { + Ok(node) => { + let plpgsql_result = parser.ast_db.get_or_cache_plpgsql_parse(&id); + if let Some(Err(diag)) = plpgsql_result { + // offset the pgpsql diagnostic from the parent statement start + let span = diag.location().span.map(|sp| sp + range.start()); + (Some(node.clone()), Some(diag.span(span.unwrap_or(range)))) + } else { + (Some(node.clone()), None) + } + } + Err(diag) => (None, Some(diag.clone().span(range))), + }; + + ( + ast_option.map(|root| AnalysableStatement { range, root }), + diagnostics, + ) + } +} + +pub struct GetCompletionsMapper; +impl<'a> StatementMapper<'a> for GetCompletionsMapper { + type Output = (StatementId, TextRange, String, Arc); + + fn map(&self, parser: &'a Document, id: StatementId, range: TextRange) -> Self::Output { + let tree = parser.cst_db.get_or_cache_tree(&id); + (id.clone(), range, id.content().to_string(), tree) + } +} + +/* + * We allow an offset of two for the statement: + * + * select * from | 
<-- we want to suggest items for the next token. + * + * However, if the current statement is terminated by a semicolon, we don't apply any + * offset. + * + * select * from users; | <-- no autocompletions here. + */ +pub struct GetCompletionsFilter { + pub cursor_position: TextSize, +} +impl StatementFilter<'_> for GetCompletionsFilter { + fn predicate(&self, _id: &StatementId, range: &TextRange, content: &str) -> bool { + let is_terminated_by_semi = content.chars().last().is_some_and(|c| c == ';'); + + let measuring_range = if is_terminated_by_semi { + *range + } else { + range.checked_expand_end(2.into()).unwrap_or(*range) + }; + measuring_range.contains(self.cursor_position) + } +} + +pub struct NoFilter; +impl StatementFilter<'_> for NoFilter { + fn predicate(&self, _id: &StatementId, _range: &TextRange, _content: &str) -> bool { + true + } +} + +pub struct CursorPositionFilter { + pos: TextSize, +} + +impl CursorPositionFilter { + pub fn new(pos: TextSize) -> Self { + Self { pos } + } +} + +impl StatementFilter<'_> for CursorPositionFilter { + fn predicate(&self, _id: &StatementId, range: &TextRange, _content: &str) -> bool { + range.contains(self.pos) + } +} + +pub struct IdFilter { + id: StatementId, +} + +impl IdFilter { + pub fn new(id: StatementId) -> Self { + Self { id } + } +} + +impl StatementFilter<'_> for IdFilter { + fn predicate(&self, id: &StatementId, _range: &TextRange, _content: &str) -> bool { + *id == self.id } } @@ -62,57 +346,310 @@ pub(crate) fn split_with_diagnostics( offset: Option, ) -> (Vec, Vec) { let o = offset.unwrap_or_else(|| 0.into()); - match pgt_statement_splitter::split(content) { - Ok(parse) => ( - parse.ranges, - parse - .errors - .into_iter() - .map(|err| { - SDiagnostic::new( - err.clone() - .with_file_span(err.location().span.map(|r| r + o)), - ) - }) - .collect(), - ), - Err(errs) => ( - vec![], - errs.into_iter() - .map(|err| { - SDiagnostic::new( - err.clone() - .with_file_span(err.location().span.map(|r| r + o)), - ) 
- }) - .collect(), - ), - } -} - -pub struct StatementIterator<'a> { - document: &'a Document, - positions: std::slice::Iter<'a, StatementPos>, -} - -impl<'a> StatementIterator<'a> { - pub fn new(document: &'a Document) -> Self { - Self { - document, - positions: document.positions.iter(), - } - } + let result = pgt_statement_splitter::split(content); + + ( + result.ranges, + result + .errors + .into_iter() + .map(|err| { + SDiagnostic::new( + err.clone() + .with_file_span(err.location().span.map(|r| r + o)), + ) + }) + .collect(), + ) } -impl<'a> Iterator for StatementIterator<'a> { - type Item = (StatementId, TextRange, &'a str); +#[cfg(test)] +mod tests { + use super::*; - fn next(&mut self) -> Option { - self.positions.next().map(|(id, range)| { - let range = *range; - let doc = self.document; - let id = id.clone(); - (id, range, &doc.content[range]) - }) + #[test] + fn sql_function_body() { + let input = "CREATE FUNCTION add(test0 integer, test1 integer) RETURNS integer + AS 'select $1 + $2;' + LANGUAGE SQL + IMMUTABLE + RETURNS NULL ON NULL INPUT;"; + + let d = Document::new(input.to_string(), 1); + + let stmts = d.iter(DefaultMapper).collect::>(); + + assert_eq!(stmts.len(), 2); + assert_eq!(stmts[1].2, "select $1 + $2;"); + } + + #[test] + fn test_sync_diagnostics_mapper_plpgsql_syntax_error() { + let input = " +CREATE FUNCTION test_func() + RETURNS void + LANGUAGE plpgsql + AS $$ +BEGIN + -- syntax error: missing semicolon and typo + DECLAR x integer + x := 10; +END; +$$;"; + + let d = Document::new(input.to_string(), 1); + let results = d.iter(AnalyserDiagnosticsMapper).collect::>(); + + assert_eq!(results.len(), 1); + let (ast, diagnostic) = &results[0]; + + // Should have parsed the CREATE FUNCTION statement + assert!(ast.is_some()); + + // Should have a PL/pgSQL syntax error + assert!(diagnostic.is_some()); + assert_eq!( + format!("{:?}", diagnostic.as_ref().unwrap().message), + "Invalid statement: syntax error at or near \"DECLAR\"" + ); + } + + 
#[test] + fn test_sync_diagnostics_mapper_plpgsql_valid() { + let input = " +CREATE FUNCTION valid_func() + RETURNS integer + LANGUAGE plpgsql + AS $$ +DECLARE + x integer := 5; +BEGIN + RETURN x * 2; +END; +$$;"; + + let d = Document::new(input.to_string(), 1); + let results = d.iter(AnalyserDiagnosticsMapper).collect::>(); + + assert_eq!(results.len(), 1); + let (ast, diagnostic) = &results[0]; + + // Should have parsed the CREATE FUNCTION statement + assert!(ast.is_some()); + + // Should NOT have any PL/pgSQL syntax errors + assert!(diagnostic.is_none()); + } + + #[test] + fn test_sync_diagnostics_mapper_plpgsql_caching() { + let input = " +CREATE FUNCTION cached_func() + RETURNS void + LANGUAGE plpgsql + AS $$ +BEGIN + RAISE NOTICE 'Testing cache'; +END; +$$;"; + + let d = Document::new(input.to_string(), 1); + + let results1 = d.iter(AnalyserDiagnosticsMapper).collect::>(); + assert_eq!(results1.len(), 1); + assert!(results1[0].0.is_some()); + assert!(results1[0].1.is_none()); + + let results2 = d.iter(AnalyserDiagnosticsMapper).collect::>(); + assert_eq!(results2.len(), 1); + assert!(results2[0].0.is_some()); + assert!(results2[0].1.is_none()); + } + + #[test] + fn test_default_mapper() { + let input = "SELECT 1; INSERT INTO users VALUES (1);"; + let d = Document::new(input.to_string(), 1); + + let results = d.iter(DefaultMapper).collect::>(); + assert_eq!(results.len(), 2); + + assert_eq!(results[0].2, "SELECT 1;"); + assert_eq!(results[1].2, "INSERT INTO users VALUES (1);"); + + assert_eq!(results[0].1.start(), 0.into()); + assert_eq!(results[0].1.end(), 9.into()); + assert_eq!(results[1].1.start(), 10.into()); + assert_eq!(results[1].1.end(), 39.into()); + } + + #[test] + fn test_execute_statement_mapper() { + let input = "SELECT 1; INVALID SYNTAX HERE;"; + let d = Document::new(input.to_string(), 1); + + let results = d.iter(ExecuteStatementMapper).collect::>(); + assert_eq!(results.len(), 2); + + // First statement should parse successfully + 
assert_eq!(results[0].2, "SELECT 1;"); + assert!(results[0].3.is_some()); + + // Second statement should fail to parse + assert_eq!(results[1].2, "INVALID SYNTAX HERE;"); + assert!(results[1].3.is_none()); + } + + #[test] + fn test_async_diagnostics_mapper() { + let input = " +CREATE FUNCTION test_fn() RETURNS integer AS $$ +BEGIN + RETURN 42; +END; +$$ LANGUAGE plpgsql;"; + + let d = Document::new(input.to_string(), 1); + let results = d.iter(TypecheckDiagnosticsMapper).collect::>(); + + assert_eq!(results.len(), 1); + let (_id, _range, ast, cst, sql_fn_sig) = &results[0]; + + // Should have both AST and CST + assert!(ast.is_some()); + assert_eq!(cst.root_node().kind(), "program"); + + // Should not have SQL function signature for top-level statement + assert!(sql_fn_sig.is_none()); + } + + #[test] + fn test_async_diagnostics_mapper_with_sql_function_body() { + let input = + "CREATE FUNCTION add(a int, b int) RETURNS int AS 'SELECT $1 + $2;' LANGUAGE sql;"; + let d = Document::new(input.to_string(), 1); + + let results = d.iter(TypecheckDiagnosticsMapper).collect::>(); + assert_eq!(results.len(), 2); + + // Check the function body + let (_id, _range, ast, _cst, sql_fn_sig) = &results[1]; + assert_eq!(_id.content(), "SELECT $1 + $2;"); + assert!(ast.is_some()); + assert!(sql_fn_sig.is_some()); + + let sig = sql_fn_sig.as_ref().unwrap(); + assert_eq!(sig.name, "add"); + assert_eq!(sig.args.len(), 2); + assert_eq!(sig.args[0].name, Some("a".to_string())); + assert_eq!(sig.args[1].name, Some("b".to_string())); + } + + #[test] + fn test_get_completions_mapper() { + let input = "SELECT * FROM users;"; + let d = Document::new(input.to_string(), 1); + + let results = d.iter(GetCompletionsMapper).collect::>(); + assert_eq!(results.len(), 1); + + let (_id, _range, content, tree) = &results[0]; + assert_eq!(content, "SELECT * FROM users;"); + assert_eq!(tree.root_node().kind(), "program"); + } + + #[test] + fn test_get_completions_filter() { + let input = "SELECT * FROM 
users; INSERT INTO"; + let d = Document::new(input.to_string(), 1); + + // Test cursor at end of first statement (terminated with semicolon) + let filter1 = GetCompletionsFilter { + cursor_position: 20.into(), + }; + let results1 = d + .iter_with_filter(DefaultMapper, filter1) + .collect::>(); + assert_eq!(results1.len(), 0); // No completions after semicolon + + // Test cursor at end of second statement (not terminated) + let filter2 = GetCompletionsFilter { + cursor_position: 32.into(), + }; + let results2 = d + .iter_with_filter(DefaultMapper, filter2) + .collect::>(); + assert_eq!(results2.len(), 1); + assert_eq!(results2[0].2, "INSERT INTO"); + } + + #[test] + fn test_cursor_position_filter() { + let input = "SELECT 1; INSERT INTO users VALUES (1);"; + let d = Document::new(input.to_string(), 1); + + // Cursor in first statement + let filter1 = CursorPositionFilter::new(5.into()); + let results1 = d + .iter_with_filter(DefaultMapper, filter1) + .collect::>(); + assert_eq!(results1.len(), 1); + assert_eq!(results1[0].2, "SELECT 1;"); + + // Cursor in second statement + let filter2 = CursorPositionFilter::new(25.into()); + let results2 = d + .iter_with_filter(DefaultMapper, filter2) + .collect::>(); + assert_eq!(results2.len(), 1); + assert_eq!(results2[0].2, "INSERT INTO users VALUES (1);"); + } + + #[test] + fn test_id_filter() { + let input = "SELECT 1; SELECT 2;"; + let d = Document::new(input.to_string(), 1); + + // Get all statements first to get their IDs + let all_results = d.iter(DefaultMapper).collect::>(); + assert_eq!(all_results.len(), 2); + + // Filter by first statement ID + let filter = IdFilter::new(all_results[0].0.clone()); + let results = d + .iter_with_filter(DefaultMapper, filter) + .collect::>(); + assert_eq!(results.len(), 1); + assert_eq!(results[0].2, "SELECT 1;"); + } + + #[test] + fn test_no_filter() { + let input = "SELECT 1; SELECT 2; SELECT 3;"; + let d = Document::new(input.to_string(), 1); + + let results = d + 
.iter_with_filter(DefaultMapper, NoFilter) + .collect::>(); + assert_eq!(results.len(), 3); + } + + #[test] + fn test_find_method() { + let input = "SELECT 1; SELECT 2;"; + let d = Document::new(input.to_string(), 1); + + // Get all statements to get their IDs + let all_results = d.iter(DefaultMapper).collect::>(); + + // Find specific statement + let result = d.find(all_results[1].0.clone(), DefaultMapper); + assert!(result.is_some()); + assert_eq!(result.unwrap().2, "SELECT 2;"); + + // Try to find non-existent statement + let fake_id = StatementId::new("SELECT 3;"); + let result = d.find(fake_id, DefaultMapper); + assert!(result.is_none()); } } diff --git a/crates/pgt_workspace/src/workspace/server/parsed_document.rs b/crates/pgt_workspace/src/workspace/server/parsed_document.rs deleted file mode 100644 index 2b81faba9..000000000 --- a/crates/pgt_workspace/src/workspace/server/parsed_document.rs +++ /dev/null @@ -1,442 +0,0 @@ -use std::sync::Arc; - -use pgt_diagnostics::serde::Diagnostic as SDiagnostic; -use pgt_fs::PgTPath; -use pgt_query_ext::diagnostics::SyntaxDiagnostic; -use pgt_text_size::{TextRange, TextSize}; - -use crate::workspace::ChangeFileParams; - -use super::{ - annotation::AnnotationStore, - change::StatementChange, - document::{Document, StatementIterator}, - pg_query::PgQueryStore, - sql_function::{SQLFunctionSignature, get_sql_fn_body, get_sql_fn_signature}, - statement_identifier::StatementId, - tree_sitter::TreeSitterStore, -}; - -pub struct ParsedDocument { - #[allow(dead_code)] - path: PgTPath, - - doc: Document, - ast_db: PgQueryStore, - cst_db: TreeSitterStore, - annotation_db: AnnotationStore, -} - -impl ParsedDocument { - pub fn new(path: PgTPath, content: String, version: i32) -> ParsedDocument { - let doc = Document::new(content, version); - - let cst_db = TreeSitterStore::new(); - let ast_db = PgQueryStore::new(); - let annotation_db = AnnotationStore::new(); - - doc.iter().for_each(|(stmt, _, content)| { - 
cst_db.add_statement(&stmt, content); - }); - - ParsedDocument { - path, - doc, - ast_db, - cst_db, - annotation_db, - } - } - - /// Applies a change to the document and updates the CST and AST databases accordingly. - /// - /// Note that only tree-sitter cares about statement modifications vs remove + add. - /// Hence, we just clear the AST for the old statements and lazily load them when requested. - /// - /// * `params`: ChangeFileParams - The parameters for the change to be applied. - pub fn apply_change(&mut self, params: ChangeFileParams) { - for c in &self.doc.apply_file_change(¶ms) { - match c { - StatementChange::Added(added) => { - tracing::debug!( - "Adding statement: id:{:?}, text:{:?}", - added.stmt, - added.text - ); - self.cst_db.add_statement(&added.stmt, &added.text); - } - StatementChange::Deleted(s) => { - tracing::debug!("Deleting statement: id {:?}", s,); - self.cst_db.remove_statement(s); - self.ast_db.clear_statement(s); - self.annotation_db.clear_statement(s); - } - StatementChange::Modified(s) => { - tracing::debug!( - "Modifying statement with id {:?} (new id {:?}). 
Range {:?}, Changed from '{:?}' to '{:?}', changed text: {:?}", - s.old_stmt, - s.new_stmt, - s.change_range, - s.old_stmt_text, - s.new_stmt_text, - s.change_text - ); - - self.cst_db.modify_statement(s); - self.ast_db.clear_statement(&s.old_stmt); - self.annotation_db.clear_statement(&s.old_stmt); - } - } - } - } - - pub fn get_document_content(&self) -> &str { - &self.doc.content - } - - pub fn document_diagnostics(&self) -> &Vec { - &self.doc.diagnostics - } - - pub fn find<'a, M>(&'a self, id: StatementId, mapper: M) -> Option - where - M: StatementMapper<'a>, - { - self.iter_with_filter(mapper, IdFilter::new(id)).next() - } - - pub fn iter<'a, M>(&'a self, mapper: M) -> ParseIterator<'a, M, NoFilter> - where - M: StatementMapper<'a>, - { - self.iter_with_filter(mapper, NoFilter) - } - - pub fn iter_with_filter<'a, M, F>(&'a self, mapper: M, filter: F) -> ParseIterator<'a, M, F> - where - M: StatementMapper<'a>, - F: StatementFilter<'a>, - { - ParseIterator::new(self, mapper, filter) - } - - #[allow(dead_code)] - pub fn count(&self) -> usize { - self.iter(DefaultMapper).count() - } -} - -pub trait StatementMapper<'a> { - type Output; - - fn map( - &self, - parsed: &'a ParsedDocument, - id: StatementId, - range: TextRange, - content: &str, - ) -> Self::Output; -} - -pub trait StatementFilter<'a> { - fn predicate(&self, id: &StatementId, range: &TextRange, content: &str) -> bool; -} - -pub struct ParseIterator<'a, M, F> { - parser: &'a ParsedDocument, - statements: StatementIterator<'a>, - mapper: M, - filter: F, - pending_sub_statements: Vec<(StatementId, TextRange, String)>, -} - -impl<'a, M, F> ParseIterator<'a, M, F> { - pub fn new(parser: &'a ParsedDocument, mapper: M, filter: F) -> Self { - Self { - parser, - statements: parser.doc.iter(), - mapper, - filter, - pending_sub_statements: Vec::new(), - } - } -} - -impl<'a, M, F> Iterator for ParseIterator<'a, M, F> -where - M: StatementMapper<'a>, - F: StatementFilter<'a>, -{ - type Item = M::Output; - - fn 
next(&mut self) -> Option { - // First check if we have any pending sub-statements to process - if let Some((id, range, content)) = self.pending_sub_statements.pop() { - if self.filter.predicate(&id, &range, content.as_str()) { - return Some(self.mapper.map(self.parser, id, range, &content)); - } - // If the sub-statement doesn't pass the filter, continue to the next item - return self.next(); - } - - // Process the next top-level statement - let next_statement = self.statements.next(); - - if let Some((root_id, range, content)) = next_statement { - // If we should include sub-statements and this statement has an AST - let content_owned = content.to_string(); - if let Ok(ast) = self - .parser - .ast_db - .get_or_cache_ast(&root_id, &content_owned) - .as_ref() - { - // Check if this is a SQL function definition with a body - if let Some(sub_statement) = get_sql_fn_body(ast, &content_owned) { - // Add sub-statements to our pending queue - self.pending_sub_statements.push(( - root_id.create_child(), - // adjust range to document - sub_statement.range + range.start(), - sub_statement.body.clone(), - )); - } - } - - // Return the current statement if it passes the filter - if self.filter.predicate(&root_id, &range, content) { - return Some(self.mapper.map(self.parser, root_id, range, content)); - } - - // If the current statement doesn't pass the filter, try the next one - return self.next(); - } - - None - } -} - -pub struct DefaultMapper; -impl<'a> StatementMapper<'a> for DefaultMapper { - type Output = (StatementId, TextRange, String); - - fn map( - &self, - _parser: &'a ParsedDocument, - id: StatementId, - range: TextRange, - content: &str, - ) -> Self::Output { - (id, range, content.to_string()) - } -} - -pub struct ExecuteStatementMapper; -impl<'a> StatementMapper<'a> for ExecuteStatementMapper { - type Output = ( - StatementId, - TextRange, - String, - Option, - ); - - fn map( - &self, - parser: &'a ParsedDocument, - id: StatementId, - range: TextRange, - 
content: &str, - ) -> Self::Output { - let ast_result = parser.ast_db.get_or_cache_ast(&id, content); - let ast_option = match &*ast_result { - Ok(node) => Some(node.clone()), - Err(_) => None, - }; - - (id, range, content.to_string(), ast_option) - } -} - -pub struct AsyncDiagnosticsMapper; -impl<'a> StatementMapper<'a> for AsyncDiagnosticsMapper { - type Output = ( - StatementId, - TextRange, - String, - Option, - Arc, - Option, - ); - - fn map( - &self, - parser: &'a ParsedDocument, - id: StatementId, - range: TextRange, - content: &str, - ) -> Self::Output { - let content_owned = content.to_string(); - let ast_result = parser.ast_db.get_or_cache_ast(&id, &content_owned); - - let ast_option = match &*ast_result { - Ok(node) => Some(node.clone()), - Err(_) => None, - }; - - let cst_result = parser.cst_db.get_or_cache_tree(&id, &content_owned); - - let sql_fn_sig = id - .parent() - .and_then(|root| { - let c = parser.doc.statement_content(&root)?; - Some((root, c)) - }) - .and_then(|(root, c)| { - let ast_option = parser - .ast_db - .get_or_cache_ast(&root, c) - .as_ref() - .clone() - .ok(); - - let ast_option = ast_option.as_ref()?; - - get_sql_fn_signature(ast_option) - }); - - (id, range, content_owned, ast_option, cst_result, sql_fn_sig) - } -} - -pub struct SyncDiagnosticsMapper; -impl<'a> StatementMapper<'a> for SyncDiagnosticsMapper { - type Output = ( - StatementId, - TextRange, - Option, - Option, - ); - - fn map( - &self, - parser: &'a ParsedDocument, - id: StatementId, - range: TextRange, - content: &str, - ) -> Self::Output { - let ast_result = parser.ast_db.get_or_cache_ast(&id, content); - - let (ast_option, diagnostics) = match &*ast_result { - Ok(node) => (Some(node.clone()), None), - Err(diag) => (None, Some(diag.clone())), - }; - - (id, range, ast_option, diagnostics) - } -} - -pub struct GetCompletionsMapper; -impl<'a> StatementMapper<'a> for GetCompletionsMapper { - type Output = (StatementId, TextRange, String, Arc); - - fn map( - &self, - 
parser: &'a ParsedDocument, - id: StatementId, - range: TextRange, - content: &str, - ) -> Self::Output { - let tree = parser.cst_db.get_or_cache_tree(&id, content); - (id, range, content.into(), tree) - } -} - -/* - * We allow an offset of two for the statement: - * - * select * from | <-- we want to suggest items for the next token. - * - * However, if the current statement is terminated by a semicolon, we don't apply any - * offset. - * - * select * from users; | <-- no autocompletions here. - */ -pub struct GetCompletionsFilter { - pub cursor_position: TextSize, -} -impl StatementFilter<'_> for GetCompletionsFilter { - fn predicate(&self, _id: &StatementId, range: &TextRange, content: &str) -> bool { - let is_terminated_by_semi = content.chars().last().is_some_and(|c| c == ';'); - - let measuring_range = if is_terminated_by_semi { - *range - } else { - range.checked_expand_end(2.into()).unwrap_or(*range) - }; - measuring_range.contains(self.cursor_position) - } -} - -pub struct NoFilter; -impl StatementFilter<'_> for NoFilter { - fn predicate(&self, _id: &StatementId, _range: &TextRange, _content: &str) -> bool { - true - } -} - -pub struct CursorPositionFilter { - pos: TextSize, -} - -impl CursorPositionFilter { - pub fn new(pos: TextSize) -> Self { - Self { pos } - } -} - -impl StatementFilter<'_> for CursorPositionFilter { - fn predicate(&self, _id: &StatementId, range: &TextRange, _content: &str) -> bool { - range.contains(self.pos) - } -} - -pub struct IdFilter { - id: StatementId, -} - -impl IdFilter { - pub fn new(id: StatementId) -> Self { - Self { id } - } -} - -impl StatementFilter<'_> for IdFilter { - fn predicate(&self, id: &StatementId, _range: &TextRange, _content: &str) -> bool { - *id == self.id - } -} - -#[cfg(test)] -mod tests { - use super::*; - - use pgt_fs::PgTPath; - - #[test] - fn sql_function_body() { - let input = "CREATE FUNCTION add(test0 integer, test1 integer) RETURNS integer - AS 'select $1 + $2;' - LANGUAGE SQL - IMMUTABLE - 
RETURNS NULL ON NULL INPUT;"; - - let path = PgTPath::new("test.sql"); - - let d = ParsedDocument::new(path, input.to_string(), 0); - - let stmts = d.iter(DefaultMapper).collect::>(); - - assert_eq!(stmts.len(), 2); - assert_eq!(stmts[1].2, "select $1 + $2;"); - } -} diff --git a/crates/pgt_workspace/src/workspace/server/pg_query.rs b/crates/pgt_workspace/src/workspace/server/pg_query.rs index e5c0cac8a..bd9ffdfce 100644 --- a/crates/pgt_workspace/src/workspace/server/pg_query.rs +++ b/crates/pgt_workspace/src/workspace/server/pg_query.rs @@ -1,38 +1,308 @@ -use std::sync::Arc; +use std::collections::HashMap; +use std::num::NonZeroUsize; +use std::sync::{Arc, Mutex}; -use dashmap::DashMap; +use lru::LruCache; use pgt_query_ext::diagnostics::*; +use pgt_text_size::TextRange; +use pgt_tokenizer::tokenize; use super::statement_identifier::StatementId; +const DEFAULT_CACHE_SIZE: usize = 1000; + pub struct PgQueryStore { - db: DashMap>>, + ast_db: Mutex>>>, + plpgsql_db: Mutex>>, } impl PgQueryStore { pub fn new() -> PgQueryStore { - PgQueryStore { db: DashMap::new() } + PgQueryStore { + ast_db: Mutex::new(LruCache::new( + NonZeroUsize::new(DEFAULT_CACHE_SIZE).unwrap(), + )), + plpgsql_db: Mutex::new(LruCache::new( + NonZeroUsize::new(DEFAULT_CACHE_SIZE).unwrap(), + )), + } } pub fn get_or_cache_ast( &self, statement: &StatementId, - content: &str, - ) -> Arc> { - if let Some(existing) = self.db.get(statement).map(|x| x.clone()) { - return existing; + ) -> Arc> { + let mut cache = self.ast_db.lock().unwrap(); + + if let Some(existing) = cache.get(statement) { + return existing.clone(); } - let r = Arc::new(pgt_query_ext::parse(content).map_err(SyntaxDiagnostic::from)); - self.db.insert(statement.clone(), r.clone()); + let r = Arc::new( + pgt_query::parse(&convert_to_positional_params(statement.content())) + .map_err(SyntaxDiagnostic::from) + .and_then(|ast| { + ast.into_root().ok_or_else(|| { + SyntaxDiagnostic::new("No root node found in parse result", None) + }) + }), 
+ ); + cache.put(statement.clone(), r.clone()); r } - pub fn clear_statement(&self, id: &StatementId) { - self.db.remove(id); + pub fn get_or_cache_plpgsql_parse( + &self, + statement: &StatementId, + ) -> Option> { + let ast = self.get_or_cache_ast(statement); + + let create_fn = match ast.as_ref() { + Ok(pgt_query::NodeEnum::CreateFunctionStmt(node)) => node, + _ => return None, + }; + + let language = pgt_query_ext::utils::find_option_value(create_fn, "language")?; + + if language != "plpgsql" { + return None; + } + + let mut cache = self.plpgsql_db.lock().unwrap(); + + if let Some(existing) = cache.get(statement) { + return Some(existing.clone()); + } + + let sql_body = pgt_query_ext::utils::find_option_value(create_fn, "as")?; + + let start = statement.content().find(&sql_body)?; + let end = start + sql_body.len(); + + let range = TextRange::new(start.try_into().unwrap(), end.try_into().unwrap()); + + let r = pgt_query::parse_plpgsql(statement.content()) + .map_err(|err| SyntaxDiagnostic::new(err.to_string(), Some(range))); + cache.put(statement.clone(), r.clone()); + + Some(r) + } +} + +/// Converts named parameters in a SQL query string to positional parameters. +/// +/// This function scans the input SQL string for named parameters (e.g., `@param`, `:param`, `:'param'`) +/// and replaces them with positional parameters (e.g., `$1`, `$2`, etc.). +/// +/// It maintains the original spacing of the named parameters in the output string. +/// +/// Useful for preparing SQL queries for parsing or execution where named paramters are not supported. 
+pub fn convert_to_positional_params(text: &str) -> String { + let mut result = String::with_capacity(text.len()); + let mut param_mapping: HashMap<&str, usize> = HashMap::new(); + let mut param_index = 1; + let mut position = 0; + + for token in tokenize(text) { + let token_len = token.len as usize; + let token_text = &text[position..position + token_len]; + + if matches!(token.kind, pgt_tokenizer::TokenKind::NamedParam { .. }) { + let idx = match param_mapping.get(token_text) { + Some(&index) => index, + None => { + let index = param_index; + param_mapping.insert(token_text, index); + param_index += 1; + index + } + }; - if let Some(child_id) = id.get_child_id() { - self.db.remove(&child_id); + let replacement = format!("${}", idx); + let original_len = token_text.len(); + let replacement_len = replacement.len(); + + result.push_str(&replacement); + + // maintain original spacing + if replacement_len < original_len { + result.push_str(&" ".repeat(original_len - replacement_len)); + } + } else { + result.push_str(token_text); } + + position += token_len; + } + + result +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_convert_to_positional_params() { + let input = "select * from users where id = @one and name = :two and email = :'three';"; + let result = convert_to_positional_params(input); + assert_eq!( + result, + "select * from users where id = $1 and name = $2 and email = $3 ;" + ); + } + + #[test] + fn test_convert_to_positional_params_with_duplicates() { + let input = "select * from users where first_name = @one and starts_with(email, @one) and created_at > @two;"; + let result = convert_to_positional_params(input); + assert_eq!( + result, + "select * from users where first_name = $1 and starts_with(email, $1 ) and created_at > $2 ;" + ); + } + + #[test] + fn test_plpgsql_syntax_error() { + let input = " +create function test_organisation_id () + returns setof text + language plpgsql + security invoker + as $$ + -- syntax error here + 
delare + v_organisation_id uuid; +begin + return next is(private.organisation_id(), v_organisation_id, 'should return organisation_id of token'); +end +$$; +"; + + let store = PgQueryStore::new(); + + let res = store.get_or_cache_plpgsql_parse(&StatementId::new(input)); + + assert!(matches!(res, Some(Err(_)))); + } + + #[test] + fn test_plpgsql_valid() { + let input = " +CREATE FUNCTION test_function() + RETURNS integer + LANGUAGE plpgsql + AS $$ +DECLARE + counter integer := 0; +BEGIN + counter := counter + 1; + RETURN counter; +END; +$$; +"; + + let store = PgQueryStore::new(); + + let res = store.get_or_cache_plpgsql_parse(&StatementId::new(input)); + + assert!(matches!(res, Some(Ok(_)))); + } + + #[test] + fn test_non_plpgsql_function() { + let input = " +CREATE FUNCTION add_numbers(a integer, b integer) + RETURNS integer + LANGUAGE sql + AS $$ + SELECT a + b; + $$; +"; + + let store = PgQueryStore::new(); + + let res = store.get_or_cache_plpgsql_parse(&StatementId::new(input)); + + assert!(res.is_none()); + } + + #[test] + fn test_non_function_statement() { + let input = "SELECT * FROM users WHERE id = 1;"; + + let store = PgQueryStore::new(); + + let res = store.get_or_cache_plpgsql_parse(&StatementId::new(input)); + + assert!(res.is_none()); + } + + #[test] + fn test_cache_behavior() { + let input = " +CREATE FUNCTION cached_function() + RETURNS void + LANGUAGE plpgsql + AS $$ +BEGIN + RAISE NOTICE 'Hello from cache test'; +END; +$$; +"; + + let store = PgQueryStore::new(); + let statement_id = StatementId::new(input); + + // First call should parse + let res1 = store.get_or_cache_plpgsql_parse(&statement_id); + assert!(matches!(res1, Some(Ok(_)))); + + // Second call should return cached result + let res2 = store.get_or_cache_plpgsql_parse(&statement_id); + assert!(matches!(res2, Some(Ok(_)))); + } + + #[test] + fn test_plpgsql_with_complex_body() { + let input = " +CREATE FUNCTION complex_function(p_id integer) + RETURNS TABLE(id integer, name text, status 
boolean) + LANGUAGE plpgsql + AS $$ +DECLARE + v_count integer; + v_status boolean := true; +BEGIN + SELECT COUNT(*) INTO v_count FROM users WHERE user_id = p_id; + + IF v_count > 0 THEN + RETURN QUERY + SELECT u.id, u.name, v_status + FROM users u + WHERE u.user_id = p_id; + ELSE + RAISE EXCEPTION 'User not found'; + END IF; +END; +$$; +"; + + let store = PgQueryStore::new(); + + let res = store.get_or_cache_plpgsql_parse(&StatementId::new(input)); + + assert!(matches!(res, Some(Ok(_)))); + } + + #[test] + fn test_invalid_ast() { + let input = "CREATE FUNCTION invalid syntax here"; + + let store = PgQueryStore::new(); + + let res = store.get_or_cache_plpgsql_parse(&StatementId::new(input)); + + assert!(res.is_none()); } } diff --git a/crates/pgt_workspace/src/workspace/server/schema_cache_manager.rs b/crates/pgt_workspace/src/workspace/server/schema_cache_manager.rs index 03cd6ded1..007ebb782 100644 --- a/crates/pgt_workspace/src/workspace/server/schema_cache_manager.rs +++ b/crates/pgt_workspace/src/workspace/server/schema_cache_manager.rs @@ -1,97 +1,50 @@ -use std::sync::{Arc, RwLock, RwLockReadGuard}; +use std::collections::HashMap; +use std::sync::{Arc, RwLock}; use pgt_schema_cache::SchemaCache; use sqlx::PgPool; use crate::WorkspaceError; -use super::async_helper::run_async; +use super::{async_helper::run_async, connection_key::ConnectionKey}; -pub(crate) struct SchemaCacheHandle<'a> { - inner: RwLockReadGuard<'a, SchemaCacheManagerInner>, +#[derive(Default)] +pub struct SchemaCacheManager { + schemas: RwLock>>, } -impl<'a> SchemaCacheHandle<'a> { - pub(crate) fn new(cache: &'a RwLock) -> Self { +impl SchemaCacheManager { + pub fn new() -> Self { Self { - inner: cache.read().unwrap(), + schemas: RwLock::new(HashMap::new()), } } - pub(crate) fn wrap(inner: RwLockReadGuard<'a, SchemaCacheManagerInner>) -> Self { - Self { inner } - } - - pub fn get_arc(&self) -> Arc { - Arc::clone(&self.inner.cache) - } -} - -impl AsRef for SchemaCacheHandle<'_> { - fn 
as_ref(&self) -> &SchemaCache { - &self.inner.cache - } -} - -#[derive(Default)] -pub(crate) struct SchemaCacheManagerInner { - cache: Arc, - conn_str: String, -} - -#[derive(Default)] -pub struct SchemaCacheManager { - inner: RwLock, -} - -impl SchemaCacheManager { - pub fn load(&self, pool: PgPool) -> Result { - let new_conn_str = pool_to_conn_str(&pool); + pub fn load(&self, pool: PgPool) -> Result, WorkspaceError> { + let key: ConnectionKey = (&pool).into(); - { - // return early if the connection string is the same - let inner = self.inner.read().unwrap(); - if new_conn_str == inner.conn_str { - tracing::info!("Same connection string, no updates."); - return Ok(SchemaCacheHandle::wrap(inner)); + // Try read lock first for cache hit + if let Ok(schemas) = self.schemas.read() { + if let Some(cache) = schemas.get(&key) { + return Ok(Arc::clone(cache)); } } - let maybe_refreshed = run_async(async move { SchemaCache::load(&pool).await })?; - let refreshed = maybe_refreshed?; - - { - // write lock must be dropped before we return the reference below, hence the block - let mut inner = self.inner.write().unwrap(); + // Cache miss - need write lock to insert + let mut schemas = self.schemas.write().unwrap(); - // Double-check that we still need to refresh (another thread might have done it) - if new_conn_str != inner.conn_str { - inner.cache = Arc::new(refreshed); - inner.conn_str = new_conn_str; - tracing::info!("Refreshed connection."); - } + // Double-check after acquiring write lock + if let Some(cache) = schemas.get(&key) { + return Ok(Arc::clone(cache)); } - Ok(SchemaCacheHandle::new(&self.inner)) - } -} - -fn pool_to_conn_str(pool: &PgPool) -> String { - let conn = pool.connect_options(); + // Load schema cache + let pool_clone = pool.clone(); + let schema_cache = Arc::new(run_async( + async move { SchemaCache::load(&pool_clone).await }, + )??); - match conn.get_database() { - None => format!( - "postgres://{}:@{}:{}", - conn.get_username(), - conn.get_host(), - 
conn.get_port() - ), - Some(db) => format!( - "postgres://{}:@{}:{}/{}", - conn.get_username(), - conn.get_host(), - conn.get_port(), - db - ), + schemas.insert(key, schema_cache.clone()); + Ok(schema_cache) } } diff --git a/crates/pgt_workspace/src/workspace/server/sql_function.rs b/crates/pgt_workspace/src/workspace/server/sql_function.rs index bc2c6c3b9..4a1463b71 100644 --- a/crates/pgt_workspace/src/workspace/server/sql_function.rs +++ b/crates/pgt_workspace/src/workspace/server/sql_function.rs @@ -28,30 +28,30 @@ pub struct SQLFunctionBody { } /// Extracts the function signature from a SQL function definition -pub fn get_sql_fn_signature(ast: &pgt_query_ext::NodeEnum) -> Option { +pub fn get_sql_fn_signature(ast: &pgt_query::NodeEnum) -> Option { let create_fn = match ast { - pgt_query_ext::NodeEnum::CreateFunctionStmt(cf) => cf, + pgt_query::NodeEnum::CreateFunctionStmt(cf) => cf, _ => return None, }; // Extract language from function options - let language = find_option_value(create_fn, "language")?; + let language = pgt_query_ext::utils::find_option_value(create_fn, "language")?; // Only process SQL functions if language != "sql" { return None; } - let fn_name = parse_name(&create_fn.funcname)?; + let fn_name = pgt_query_ext::utils::parse_name(&create_fn.funcname)?; // we return None if anything is not expected let mut fn_args = Vec::new(); for arg in &create_fn.parameters { - if let Some(pgt_query_ext::NodeEnum::FunctionParameter(node)) = &arg.node { + if let Some(pgt_query::NodeEnum::FunctionParameter(node)) = &arg.node { let arg_name = (!node.name.is_empty()).then_some(node.name.clone()); let arg_type = node.arg_type.as_ref()?; - let type_name = parse_name(&arg_type.names)?; + let type_name = pgt_query_ext::utils::parse_name(&arg_type.names)?; fn_args.push(SQLFunctionArg { name: arg_name, type_: ArgType { @@ -77,14 +77,14 @@ pub fn get_sql_fn_signature(ast: &pgt_query_ext::NodeEnum) -> Option Option { +pub fn get_sql_fn_body(ast: &pgt_query::NodeEnum, 
content: &str) -> Option { let create_fn = match ast { - pgt_query_ext::NodeEnum::CreateFunctionStmt(cf) => cf, + pgt_query::NodeEnum::CreateFunctionStmt(cf) => cf, _ => return None, }; // Extract language from function options - let language = find_option_value(create_fn, "language")?; + let language = pgt_query_ext::utils::find_option_value(create_fn, "language")?; // Only process SQL functions if language != "sql" { @@ -92,7 +92,7 @@ pub fn get_sql_fn_body(ast: &pgt_query_ext::NodeEnum, content: &str) -> Option Option Option { - create_fn - .options - .iter() - .filter_map(|opt_wrapper| opt_wrapper.node.as_ref()) - .find_map(|opt| { - if let pgt_query_ext::NodeEnum::DefElem(def_elem) = opt { - if def_elem.defname == option_name { - def_elem - .arg - .iter() - .filter_map(|arg_wrapper| arg_wrapper.node.as_ref()) - .find_map(|arg| { - if let pgt_query_ext::NodeEnum::String(s) = arg { - Some(s.sval.clone()) - } else if let pgt_query_ext::NodeEnum::List(l) = arg { - l.items.iter().find_map(|item_wrapper| { - if let Some(pgt_query_ext::NodeEnum::String(s)) = - item_wrapper.node.as_ref() - { - Some(s.sval.clone()) - } else { - None - } - }) - } else { - None - } - }) - } else { - None - } - } else { - None - } - }) -} - -fn parse_name(nodes: &[pgt_query_ext::protobuf::Node]) -> Option<(Option, String)> { - let names = nodes - .iter() - .map(|n| match &n.node { - Some(pgt_query_ext::NodeEnum::String(s)) => Some(s.sval.clone()), - _ => None, - }) - .collect::>(); - - match names.as_slice() { - [Some(schema), Some(name)] => Some((Some(schema.clone()), name.clone())), - [Some(name)] => Some((None, name.clone())), - _ => None, - } -} - #[cfg(test)] mod tests { use super::*; @@ -176,7 +118,7 @@ mod tests { IMMUTABLE RETURNS NULL ON NULL INPUT;"; - let ast = pgt_query_ext::parse(input).unwrap(); + let ast = pgt_query::parse(input).unwrap().into_root().unwrap(); let sig = get_sql_fn_signature(&ast); @@ -202,7 +144,7 @@ mod tests { IMMUTABLE RETURNS NULL ON NULL INPUT;"; - let 
ast = pgt_query_ext::parse(input).unwrap(); + let ast = pgt_query::parse(input).unwrap().into_root().unwrap(); let sig = get_sql_fn_signature(&ast); diff --git a/crates/pgt_workspace/src/workspace/server/statement_identifier.rs b/crates/pgt_workspace/src/workspace/server/statement_identifier.rs index 7c7d76f0f..592596902 100644 --- a/crates/pgt_workspace/src/workspace/server/statement_identifier.rs +++ b/crates/pgt_workspace/src/workspace/server/statement_identifier.rs @@ -1,24 +1,6 @@ -use serde::{Deserialize, Serialize}; - -#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)] -#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] -pub struct RootId { - inner: usize, -} - -#[cfg(test)] -impl From for usize { - fn from(val: RootId) -> Self { - val.inner - } -} +use std::sync::Arc; -#[cfg(test)] -impl From for RootId { - fn from(inner: usize) -> Self { - RootId { inner } - } -} +use serde::{Deserialize, Serialize}; #[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)] #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] @@ -35,80 +17,82 @@ impl From for RootId { /// ``` /// /// For now, we only support SQL functions – no complex, nested statements. -/// -/// An SQL function only ever has ONE child, that's why the inner `RootId` of a `Root` -/// is the same as the one of its `Child`. 
pub enum StatementId { - Root(RootId), - // StatementId is the same as the root id since we can only have a single sql function body per Root - Child(RootId), + Root { + content: Arc, + }, + Child { + content: Arc, // child's actual content + parent_content: Arc, // parent's content for lookups + }, } +// this is only here for strum to work on the code actions enum impl Default for StatementId { fn default() -> Self { - StatementId::Root(RootId { inner: 0 }) + StatementId::Root { content: "".into() } } } impl StatementId { - pub fn raw(&self) -> usize { - match self { - StatementId::Root(s) => s.inner, - StatementId::Child(s) => s.inner, + pub fn new(statement: &str) -> Self { + StatementId::Root { + content: statement.into(), } } - pub fn is_root(&self) -> bool { - matches!(self, StatementId::Root(_)) + /// Use this if you need to create a matching `StatementId::Child` for `Root`. + /// You cannot create a `Child` of a `Child`. + /// Note: This method requires the child content to be provided. + pub fn create_child(&self, child_content: &str) -> StatementId { + match self { + StatementId::Root { content } => StatementId::Child { + content: child_content.into(), + parent_content: content.clone(), + }, + StatementId::Child { .. } => panic!("Cannot create child from a child statement id"), + } } - pub fn is_child(&self) -> bool { - matches!(self, StatementId::Child(_)) + pub fn content(&self) -> &str { + match self { + StatementId::Root { content } => content, + StatementId::Child { content, .. } => content, + } } - pub fn parent(&self) -> Option { + /// Returns the parent content if this is a child statement + pub fn parent_content(&self) -> Option<&str> { match self { - StatementId::Root(_) => None, - StatementId::Child(id) => Some(StatementId::Root(id.clone())), + StatementId::Root { .. } => None, + StatementId::Child { parent_content, .. 
} => Some(parent_content), } } -} - -/// Helper struct to generate unique statement ids -pub struct StatementIdGenerator { - next_id: usize, -} -impl StatementIdGenerator { - pub fn new() -> Self { - Self { next_id: 0 } + pub fn is_root(&self) -> bool { + matches!(self, StatementId::Root { .. }) } - pub fn next(&mut self) -> StatementId { - let id = self.next_id; - self.next_id += 1; - StatementId::Root(RootId { inner: id }) + pub fn is_child(&self) -> bool { + matches!(self, StatementId::Child { .. }) } -} -impl StatementId { - /// Use this to get the matching `StatementId::Child` for - /// a `StatementId::Root`. - /// If the `StatementId` was already a `Child`, this will return `None`. - /// It is not guaranteed that the `Root` actually has a `Child` statement in the workspace. - pub fn get_child_id(&self) -> Option { + pub fn is_child_of(&self, maybe_parent: &StatementId) -> bool { match self { - StatementId::Root(id) => Some(StatementId::Child(RootId { inner: id.inner })), - StatementId::Child(_) => None, + StatementId::Root { .. } => false, + StatementId::Child { parent_content, .. } => match maybe_parent { + StatementId::Root { content } => parent_content == content, + StatementId::Child { .. } => false, + }, } } - /// Use this if you need to create a matching `StatementId::Child` for `Root`. - /// You cannot create a `Child` of a `Child`. - pub fn create_child(&self) -> StatementId { + pub fn parent(&self) -> Option { match self { - StatementId::Root(id) => StatementId::Child(RootId { inner: id.inner }), - StatementId::Child(_) => panic!("Cannot create child from a child statement id"), + StatementId::Root { .. } => None, + StatementId::Child { parent_content, .. 
} => Some(StatementId::Root { + content: parent_content.clone(), + }), } } } diff --git a/crates/pgt_workspace/src/workspace/server/tree_sitter.rs b/crates/pgt_workspace/src/workspace/server/tree_sitter.rs index a89325356..71411d277 100644 --- a/crates/pgt_workspace/src/workspace/server/tree_sitter.rs +++ b/crates/pgt_workspace/src/workspace/server/tree_sitter.rs @@ -1,12 +1,14 @@ +use std::num::NonZeroUsize; use std::sync::{Arc, Mutex}; -use dashmap::DashMap; -use tree_sitter::InputEdit; +use lru::LruCache; -use super::{change::ModifiedStatement, statement_identifier::StatementId}; +use super::statement_identifier::StatementId; + +const DEFAULT_CACHE_SIZE: usize = 1000; pub struct TreeSitterStore { - db: DashMap>, + db: Mutex>>, parser: Mutex, } @@ -18,144 +20,37 @@ impl TreeSitterStore { .expect("Error loading sql language"); TreeSitterStore { - db: DashMap::new(), + db: Mutex::new(LruCache::new( + NonZeroUsize::new(DEFAULT_CACHE_SIZE).unwrap(), + )), parser: Mutex::new(parser), } } - pub fn get_or_cache_tree( - &self, - statement: &StatementId, - content: &str, - ) -> Arc { - if let Some(existing) = self.db.get(statement).map(|x| x.clone()) { - return existing; - } - - let mut parser = self.parser.lock().expect("Failed to lock parser"); - let tree = Arc::new(parser.parse(content, None).unwrap()); - self.db.insert(statement.clone(), tree.clone()); - - tree - } - - pub fn add_statement(&self, statement: &StatementId, content: &str) { - let mut parser = self.parser.lock().expect("Failed to lock parser"); - let tree = parser.parse(content, None).unwrap(); - self.db.insert(statement.clone(), Arc::new(tree)); - } - - pub fn remove_statement(&self, id: &StatementId) { - self.db.remove(id); - - if let Some(child_id) = id.get_child_id() { - self.db.remove(&child_id); - } - } - - pub fn modify_statement(&self, change: &ModifiedStatement) { - let old = self.db.remove(&change.old_stmt); - - if old.is_none() { - self.add_statement(&change.new_stmt, &change.change_text); - 
return; + pub fn get_or_cache_tree(&self, statement: &StatementId) -> Arc { + // First check cache + { + let mut cache = self.db.lock().unwrap(); + if let Some(existing) = cache.get(statement) { + return existing.clone(); + } } - // we clone the three for now, lets see if that is sufficient or if we need to mutate the - // original tree instead but that will require some kind of locking - let mut tree = old.unwrap().1.as_ref().clone(); - - let edit = edit_from_change( - change.old_stmt_text.as_str(), - usize::from(change.change_range.start()), - usize::from(change.change_range.end()), - change.change_text.as_str(), - ); - - tree.edit(&edit); - - let mut parser = self.parser.lock().expect("Failed to lock parser"); - // todo handle error - self.db.insert( - change.new_stmt.clone(), - Arc::new(parser.parse(&change.new_stmt_text, Some(&tree)).unwrap()), - ); - } -} - -// Converts character positions and replacement text into a tree-sitter InputEdit -pub(crate) fn edit_from_change( - text: &str, - start_char: usize, - end_char: usize, - replacement_text: &str, -) -> InputEdit { - let mut start_byte = 0; - let mut end_byte = 0; - let mut chars_counted = 0; - - let mut line = 0; - let mut current_line_char_start = 0; // Track start of the current line in characters - let mut column_start = 0; - let mut column_end = 0; - - // Find the byte positions corresponding to the character positions - for (idx, c) in text.char_indices() { - if chars_counted == start_char { - start_byte = idx; - column_start = chars_counted - current_line_char_start; - } - if chars_counted == end_char { - end_byte = idx; - column_end = chars_counted - current_line_char_start; - break; // Found both start and end - } - if c == '\n' { - line += 1; - current_line_char_start = chars_counted + 1; // Next character starts a new line + // Cache miss - parse outside of cache lock to avoid deadlock + let mut parser = self.parser.lock().unwrap(); + let tree = Arc::new(parser.parse(statement.content(), 
None).unwrap()); + drop(parser); + + // Insert into cache + { + let mut cache = self.db.lock().unwrap(); + // Double-check in case another thread inserted while we were parsing + if let Some(existing) = cache.get(statement) { + return existing.clone(); + } + cache.put(statement.clone(), tree.clone()); } - chars_counted += 1; - } - - // Handle case where end_char is at the end of the text - if end_char == chars_counted && end_byte == 0 { - end_byte = text.len(); - column_end = chars_counted - current_line_char_start; - } - - let start_point = tree_sitter::Point::new(line, column_start); - let old_end_point = tree_sitter::Point::new(line, column_end); - // Calculate the new end byte after the edit - let new_end_byte = start_byte + replacement_text.len(); - - // Calculate the new end position - let new_lines = replacement_text.matches('\n').count(); - let last_line_length = if new_lines > 0 { - replacement_text - .split('\n') - .next_back() - .unwrap_or("") - .chars() - .count() - } else { - replacement_text.chars().count() - }; - - let new_end_position = if new_lines > 0 { - // If there are new lines, the row is offset by the number of new lines, and the column is the length of the last line - tree_sitter::Point::new(start_point.row + new_lines, last_line_length) - } else { - // If there are no new lines, the row remains the same, and the column is offset by the length of the insertion - tree_sitter::Point::new(start_point.row, start_point.column + last_line_length) - }; - - InputEdit { - start_byte, - old_end_byte: end_byte, - new_end_byte, - start_position: start_point, - old_end_position: old_end_point, - new_end_position, + tree } } diff --git a/crates/pgt_workspace/src/workspace_types.rs b/crates/pgt_workspace/src/workspace_types.rs index 02215e790..b902fad66 100644 --- a/crates/pgt_workspace/src/workspace_types.rs +++ b/crates/pgt_workspace/src/workspace_types.rs @@ -457,9 +457,10 @@ macro_rules! 
workspace_method { } /// Returns a list of signature for all the methods in the [Workspace] trait -pub fn methods() -> [WorkspaceMethod; 8] { +pub fn methods() -> [WorkspaceMethod; 9] { [ workspace_method!(is_path_ignored), + workspace_method!(register_project_folder), workspace_method!(get_file_content), workspace_method!(pull_diagnostics), workspace_method!(get_completions), diff --git a/crates/pgt_query_ext_codegen/Cargo.toml b/crates/pgt_workspace_macros/Cargo.toml similarity index 65% rename from crates/pgt_query_ext_codegen/Cargo.toml rename to crates/pgt_workspace_macros/Cargo.toml index c3a0f20d3..c192db04b 100644 --- a/crates/pgt_query_ext_codegen/Cargo.toml +++ b/crates/pgt_workspace_macros/Cargo.toml @@ -6,17 +6,14 @@ edition.workspace = true homepage.workspace = true keywords.workspace = true license.workspace = true -name = "pgt_query_ext_codegen" +name = "pgt_workspace_macros" repository.workspace = true version = "0.0.0" - -[dependencies] -proc-macro2.workspace = true -quote.workspace = true - -pgt_query_proto_parser.workspace = true - [lib] -doctest = false proc-macro = true + +[dependencies] +proc-macro2 = { version = "1.0.95" } +quote = { workspace = true } +syn = { workspace = true } diff --git a/crates/pgt_workspace_macros/src/lib.rs b/crates/pgt_workspace_macros/src/lib.rs new file mode 100644 index 000000000..d46f484da --- /dev/null +++ b/crates/pgt_workspace_macros/src/lib.rs @@ -0,0 +1,123 @@ +use std::ops::Deref; + +use proc_macro::TokenStream; +use quote::quote; +use syn::{TypePath, TypeTuple, parse_macro_input}; + +struct IgnoredPath { + path: syn::Expr, +} + +impl syn::parse::Parse for IgnoredPath { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + let arg_name: syn::Ident = input.parse()?; + + if arg_name != "path" { + return Err(syn::Error::new_spanned( + arg_name, + "Expected 'path' argument.", + )); + } + + let _: syn::Token!(=) = input.parse()?; + let path: syn::Expr = input.parse()?; + + Ok(Self { path }) + } +} + 
+#[proc_macro_attribute] +/// You can use this on a workspace server function to return a default if the specified path +/// is ignored by the user's settings. +/// +/// This will work for any function where &self is in scope and that returns `Result`, `Result<(), E>`, or `T`, where `T: Default`. +/// `path` needs to point at a `&PgTPath`. +/// +/// ### Usage +/// +/// ```ignore +/// impl WorkspaceServer { +/// #[ignore_path(path=¶ms.path)] +/// fn foo(&self, params: FooParams) -> Result { +/// ... codeblock +/// } +/// } +/// +/// // …expands to… +/// +/// impl WorkspaceServer { +/// fn foo(&self, params: FooParams) -> Result { +/// if self.is_ignored(¶ms.path) { +/// return Ok(FooResult::default()); +/// } +/// ... codeblock +/// } +/// } +/// ``` +pub fn ignored_path(args: TokenStream, input: TokenStream) -> TokenStream { + let ignored_path = parse_macro_input!(args as IgnoredPath); + let input_fn = parse_macro_input!(input as syn::ItemFn); + + let macro_specified_path = ignored_path.path; + + let vis = &input_fn.vis; + let sig = &input_fn.sig; + let block = &input_fn.block; + let attrs = &input_fn.attrs; + + // handles cases `fn foo() -> Result` and `fn foo() -> Result<(), E>` + // T needs to implement default + if let syn::ReturnType::Type(_, ty) = &sig.output { + if let syn::Type::Path(TypePath { path, .. }) = ty.deref() { + if let Some(seg) = path.segments.last() { + if seg.ident == "Result" { + if let syn::PathArguments::AngleBracketed(type_args) = &seg.arguments { + if let Some(syn::GenericArgument::Type(t)) = type_args.args.first() { + if let syn::Type::Tuple(TypeTuple { elems, .. }) = t { + // case: Result<(), E> + if elems.is_empty() { + return TokenStream::from(quote! { + #(#attrs)* + #vis #sig { + if self.is_ignored(#macro_specified_path) { + return Ok(()); + }; + #block + } + }); + } + } + if let syn::Type::Path(TypePath { path, .. }) = t { + if let Some(seg) = path.segments.first() { + let ident = &seg.ident; + return TokenStream::from(quote! 
{ + #(#attrs)* + #vis #sig { + if self.is_ignored(#macro_specified_path) { + return Ok(#ident::default()); + }; + #block + } + }); + } + } + }; + }; + }; + }; + }; + }; + + // case fn foo() -> T {} + // handles all other T's + // T needs to implement Default + TokenStream::from(quote! { + #(#attrs)* + #vis #sig { + if self.is_ignored(#macro_specified_path) { + return Default::default(); + } + #block + } + }) +} diff --git a/docker-compose.yml b/docker-compose.yml index fede1d664..468d4cc30 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,7 +1,7 @@ services: db: # postgres://postgres:postgres@127.0.0.1:5432/postgres - image: postgres + build: . restart: always environment: - POSTGRES_USER=postgres diff --git a/docs/codegen/Cargo.toml b/docs/codegen/Cargo.toml index 96092a7a3..bf650ac9b 100644 --- a/docs/codegen/Cargo.toml +++ b/docs/codegen/Cargo.toml @@ -26,6 +26,7 @@ pgt_cli = { workspace = true } pgt_analyse = { workspace = true } pgt_analyser = { workspace = true } pgt_diagnostics = { workspace = true } +pgt_query = { workspace = true } pgt_query_ext = { workspace = true } pgt_workspace = { workspace = true } pgt_statement_splitter = { workspace = true } diff --git a/docs/codegen/src/rules_docs.rs b/docs/codegen/src/rules_docs.rs index 92f0dc426..67626237d 100644 --- a/docs/codegen/src/rules_docs.rs +++ b/docs/codegen/src/rules_docs.rs @@ -1,7 +1,7 @@ use anyhow::{Result, bail}; use biome_string_case::Case; use pgt_analyse::{AnalyserOptions, AnalysisFilter, RuleFilter, RuleMetadata}; -use pgt_analyser::{Analyser, AnalyserConfig}; +use pgt_analyser::{AnalysableStatement, Analyser, AnalyserConfig}; use pgt_console::StdDisplay; use pgt_diagnostics::{Diagnostic, DiagnosticExt, PrintDiagnostic}; use pgt_query_ext::diagnostics::SyntaxDiagnostic; @@ -442,24 +442,32 @@ fn print_diagnostics( }); // split and parse each statement - let stmts = pgt_statement_splitter::split(code).expect("unexpected parse error"); - for stmt in stmts.ranges { - match 
pgt_query_ext::parse(&code[stmt]) { + let stmts = pgt_statement_splitter::split(code); + for stmt_range in stmts.ranges { + match pgt_query::parse(&code[stmt_range]) { Ok(ast) => { - for rule_diag in analyser.run(pgt_analyser::AnalyserContext { root: &ast }) { - let diag = pgt_diagnostics::serde::Diagnostic::new(rule_diag); - - let category = diag.category().expect("linter diagnostic has no code"); - let severity = settings.get_severity_from_rule_code(category).expect( + if let Some(root) = ast.into_root() { + for rule_diag in analyser.run(pgt_analyser::AnalyserParams { + schema_cache: None, + stmts: vec![AnalysableStatement { + range: stmt_range, + root, + }], + }) { + let diag = pgt_diagnostics::serde::Diagnostic::new(rule_diag); + + let category = diag.category().expect("linter diagnostic has no code"); + let severity = settings.get_severity_from_rule_code(category).expect( "If you see this error, it means you need to run cargo codegen-configuration", ); - let error = diag - .with_severity(severity) - .with_file_path(&file_path) - .with_file_source_code(code); + let error = diag + .with_severity(severity) + .with_file_path(&file_path) + .with_file_source_code(code); - write_diagnostic(code, error)?; + write_diagnostic(code, error)?; + } } } Err(e) => { diff --git a/docs/codegen/src/rules_index.rs b/docs/codegen/src/rules_index.rs index 655e4bdb2..99d22580e 100644 --- a/docs/codegen/src/rules_index.rs +++ b/docs/codegen/src/rules_index.rs @@ -69,7 +69,7 @@ fn generate_group( write!( content, - "| [{rule_name}](/rules/{dashed_rule}) | {summary} | {properties} |" + "| [{rule_name}](./{dashed_rule}) | {summary} | {properties} |" )?; writeln!(content)?; diff --git a/docs/codegen/src/rules_sources.rs b/docs/codegen/src/rules_sources.rs index b8fac23d8..5b21ea933 100644 --- a/docs/codegen/src/rules_sources.rs +++ b/docs/codegen/src/rules_sources.rs @@ -48,12 +48,12 @@ pub fn generate_rule_sources(docs_dir: &Path) -> anyhow::Result<()> { for (rule_name, metadata) in 
rules { let kebab_rule_name = Case::Kebab.convert(rule_name); if metadata.sources.is_empty() { - exclusive_rules.insert((rule_name.to_string(), format!("./rules/{kebab_rule_name}"))); + exclusive_rules.insert((rule_name.to_string(), format!("../rules/{kebab_rule_name}"))); } else { for source in metadata.sources { let source_set = SourceSet { rule_name: rule_name.to_string(), - link: format!("./rules/{kebab_rule_name}"), + link: format!("../rules/{kebab_rule_name}"), source_link: source.to_rule_url(), source_rule_name: source.as_rule_name().to_string(), }; diff --git a/docs/codegen/src/schema.rs b/docs/codegen/src/schema.rs index acfc42f1b..3e430517d 100644 --- a/docs/codegen/src/schema.rs +++ b/docs/codegen/src/schema.rs @@ -1,4 +1,4 @@ -use pgt_configuration::{PartialConfiguration, VERSION}; +use pgt_configuration::PartialConfiguration; use schemars::{ schema::{RootSchema, Schema, SchemaObject}, schema_for, @@ -10,25 +10,10 @@ use std::{fs, path::Path}; /// /// * `docs_dir`: Path to the docs directory. 
pub fn generate_schema(docs_dir: &Path) -> anyhow::Result<()> { - let schemas_dir = docs_dir.join("schemas"); - let latest_schema_dir = schemas_dir.join("latest"); - let latest_schema_path = latest_schema_dir.join("schema.json"); - - let version_schema_dir = schemas_dir.join(VERSION); - let version_schema_path = version_schema_dir.join("schema.json"); - - if !latest_schema_dir.exists() { - fs::create_dir_all(&latest_schema_dir)?; - } - - if !version_schema_dir.exists() { - fs::create_dir_all(&version_schema_dir)?; - } - + let schema_path = docs_dir.join("schema.json"); let schema_content = get_configuration_schema_content()?; - fs::write(latest_schema_path, &schema_content)?; - fs::write(version_schema_path, &schema_content)?; + fs::write(schema_path, &schema_content)?; Ok(()) } diff --git a/docs/index.md b/docs/index.md index 144903857..328e7c770 100644 --- a/docs/index.md +++ b/docs/index.md @@ -90,7 +90,7 @@ You’ll now have a `postgrestools.jsonc` file in your directory: ```json { - "$schema": "https://pgtools.dev/schemas/0.0.0/schema.json", + "$schema": "https://pgtools.dev/latest/schema.json", "vcs": { "enabled": false, "clientKind": "git", @@ -121,6 +121,8 @@ You’ll now have a `postgrestools.jsonc` file in your directory: Make sure to edit the database connection settings to connect to your local development database. To see all options, run `postgrestools --help`. +You can use your current `postgrestools` version instead of "latest" in the `$schema` URL, e.g. `https://pgtools.dev/0.8.1/schema.json`. + ## Usage You can use Postgres Tools via the command line or a using a code editor that supports an LSP. 
@@ -155,7 +157,7 @@ postgrestools start Then, every command needs to add the `--use-server` options, e.g.: ```sh -echo "select 1" | biome check --use-server --stdin-file-path=dummy.sql +echo "select 1" | postgrestools check --use-server --stdin-file-path=dummy.sql ``` #### Daemon logs diff --git a/docs/rule_sources.md b/docs/rule_sources.md index b5c1f49f7..679448cdc 100644 --- a/docs/rule_sources.md +++ b/docs/rule_sources.md @@ -3,7 +3,9 @@ ### Squawk | Squawk Rule Name | Rule Name | | ---- | ---- | -| [adding-required-field](https://squawkhq.com/docs/adding-required-field) |[addingRequiredField](./rules/adding-required-field) | -| [ban-drop-column](https://squawkhq.com/docs/ban-drop-column) |[banDropColumn](./rules/ban-drop-column) | -| [ban-drop-not-null](https://squawkhq.com/docs/ban-drop-not-null) |[banDropNotNull](./rules/ban-drop-not-null) | -| [ban-drop-table](https://squawkhq.com/docs/ban-drop-table) |[banDropTable](./rules/ban-drop-table) | +| [adding-required-field](https://squawkhq.com/docs/adding-required-field) |[addingRequiredField](../rules/adding-required-field) | +| [ban-drop-column](https://squawkhq.com/docs/ban-drop-column) |[banDropColumn](../rules/ban-drop-column) | +| [ban-drop-database](https://squawkhq.com/docs/ban-drop-database) |[banDropDatabase](../rules/ban-drop-database) | +| [ban-drop-not-null](https://squawkhq.com/docs/ban-drop-not-null) |[banDropNotNull](../rules/ban-drop-not-null) | +| [ban-drop-table](https://squawkhq.com/docs/ban-drop-table) |[banDropTable](../rules/ban-drop-table) | +| [ban-truncate-cascade](https://squawkhq.com/docs/ban-truncate-cascade) |[banTruncateCascade](../rules/ban-truncate-cascade) | diff --git a/docs/rule_suppressions.md b/docs/rule_suppressions.md new file mode 100644 index 000000000..af5890e72 --- /dev/null +++ b/docs/rule_suppressions.md @@ -0,0 +1,94 @@ +# Rule Suppressions + +You can suppress specific diagnostics or rules in your code using suppression comments. 
This is useful when you want to ignore a particular rule for an entire file, a line or a block of code. + +## How to Suppress a Rule + +To suppress a rule, add a comment above the line causing the diagnostic with the following format: + +```sql +-- pgt-ignore lint/safety/banDropTable +drop table users; +``` + +You can suppress single rules, groups of rules, or entire categories. The format of the rule to suppress is: + +`category(/group(/specific-rule))` + +Where group and specific rule are optional. + +So, to suppress the `lint/safety/banDropTable` diagnostic, all of these would work: + +```sql +-- pgt-ignore lint +-- pgt-ignore lint/safety +-- pgt-ignore lint/safety/banDropTable +``` + +You can also add an explanation to the suppression by adding a `:` and the explanation text: + +```sql +-- pgt-ignore lint/safety/banDropTable: My startup never had any users. +drop table users; +``` + +### Suppressing Rules for Block of Code + +You can suppress rules for blocks of code. + +```sql +create table users ( + -- ... +); + +-- pgt-ignore-start typecheck: The `users` table will be created with this migration. +alter table users drop constraint users_pkey; + +alter table users add primary key (user_id); +-- pgt-ignore-end typecheck +``` + +Every `pgt-ignore-start` needs a `pgt-ignore-end` suppression comment, and the suppressed rules must match exactly. + +This _won't_ work, because the start tag suppresses a different diagnostic: + +```sql +-- pgt-ignore-start lint/safety/banDropColumn +-- pgt-ignore-end lint/safety +``` + +Nesting is allowed, so this works fine: + +```sql +-- pgt-ignore-start typecheck: outer +-- pgt-ignore-start lint/safety: inner +-- pgt-ignore-end lint/safety: inner +-- pgt-ignore-end typecheck: outer +``` + +### Suppressing Rules for Entire Files + +Instead of repeating the same suppression on multiple lines, you can suppress for an entire file. 
+ +```sql +-- pgt-ignore-all lint/safety/banDropTable + +drop table tasks; +drop table projects; +drop table users; +``` + +## Suppressing Multiple Rules + +You can suppress multiple rules by adding multiple suppression comments above a statement: + +```sql +-- pgt-ignore lint/safety/banDropColumn +-- pgt-ignore typecheck +alter table tasks drop column created_at; +``` + +## Notes + +- Trying to suppress diagnostics that have already been disabled in your [configuration file](/#configuration) will show a warning. +- Trying to suppress diagnostics that don't haven't been raised will also show a warning. diff --git a/docs/rules.md b/docs/rules.md index 1f674af65..d74b67e88 100644 --- a/docs/rules.md +++ b/docs/rules.md @@ -12,10 +12,12 @@ Rules that detect potential safety issues in your code. | Rule name | Description | Properties | | --- | --- | --- | -| [addingRequiredField](/rules/adding-required-field) | Adding a new column that is NOT NULL and has no default value to an existing table effectively makes it required. | | -| [banDropColumn](/rules/ban-drop-column) | Dropping a column may break existing clients. | ✅ | -| [banDropNotNull](/rules/ban-drop-not-null) | Dropping a NOT NULL constraint may break existing clients. | ✅ | -| [banDropTable](/rules/ban-drop-table) | Dropping a table may break existing clients. | ✅ | +| [addingRequiredField](./adding-required-field) | Adding a new column that is NOT NULL and has no default value to an existing table effectively makes it required. | | +| [banDropColumn](./ban-drop-column) | Dropping a column may break existing clients. | ✅ | +| [banDropDatabase](./ban-drop-database) | Dropping a database may break existing clients (and everything else, really). | | +| [banDropNotNull](./ban-drop-not-null) | Dropping a NOT NULL constraint may break existing clients. | ✅ | +| [banDropTable](./ban-drop-table) | Dropping a table may break existing clients. 
| ✅ | +| [banTruncateCascade](./ban-truncate-cascade) | Using `TRUNCATE`'s `CASCADE` option will truncate any tables that are also foreign-keyed to the specified tables. | | [//]: # (END RULES_INDEX) diff --git a/docs/rules/ban-drop-column.md b/docs/rules/ban-drop-column.md index 49a0d054b..28b3a4b5e 100644 --- a/docs/rules/ban-drop-column.md +++ b/docs/rules/ban-drop-column.md @@ -25,9 +25,13 @@ alter table test drop column id; ``` ```sh -code-block.sql lint/safety/banDropColumn ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ +code-block.sql:1:1 lint/safety/banDropColumn ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ - × Dropping a column may break existing clients. + ! Dropping a column may break existing clients. + + > 1 │ alter table test drop column id; + │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + 2 │ i You can leave the column as nullable or delete the column once queries no longer select or modify the column. diff --git a/docs/rules/ban-drop-database.md b/docs/rules/ban-drop-database.md new file mode 100644 index 000000000..8bbcd3961 --- /dev/null +++ b/docs/rules/ban-drop-database.md @@ -0,0 +1,28 @@ +# banDropDatabase +**Diagnostic Category: `lint/safety/banDropDatabase`** + +**Since**: `vnext` + + +**Sources**: +- Inspired from: squawk/ban-drop-database + +## Description +Dropping a database may break existing clients (and everything else, really). + +Make sure that you really want to drop it. 
+ +## How to configure +```json + +{ + "linter": { + "rules": { + "safety": { + "banDropDatabase": "error" + } + } + } +} + +``` diff --git a/docs/rules/ban-drop-not-null.md b/docs/rules/ban-drop-not-null.md index ccf49f956..56ec33c74 100644 --- a/docs/rules/ban-drop-not-null.md +++ b/docs/rules/ban-drop-not-null.md @@ -25,9 +25,13 @@ alter table users alter column email drop not null; ``` ```sh -code-block.sql lint/safety/banDropNotNull ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ +code-block.sql:1:1 lint/safety/banDropNotNull ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ - × Dropping a NOT NULL constraint may break existing clients. + ! Dropping a NOT NULL constraint may break existing clients. + + > 1 │ alter table users alter column email drop not null; + │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + 2 │ i Consider using a marker value that represents NULL. Alternatively, create a new table allowing NULL values, copy the data from the old table, and create a view that filters NULL values. diff --git a/docs/rules/ban-drop-table.md b/docs/rules/ban-drop-table.md index f2f341561..8aeb6e2c6 100644 --- a/docs/rules/ban-drop-table.md +++ b/docs/rules/ban-drop-table.md @@ -26,9 +26,13 @@ drop table some_table; ``` ```sh -code-block.sql lint/safety/banDropTable ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ +code-block.sql:1:1 lint/safety/banDropTable ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ - × Dropping a table may break existing clients. + ! Dropping a table may break existing clients. + + > 1 │ drop table some_table; + │ ^^^^^^^^^^^^^^^^^^^^^^ + 2 │ i Update your application code to no longer read or write the table, and only then delete the table. Be sure to create a backup. 
diff --git a/docs/rules/ban-truncate-cascade.md b/docs/rules/ban-truncate-cascade.md new file mode 100644 index 000000000..1a4502d2d --- /dev/null +++ b/docs/rules/ban-truncate-cascade.md @@ -0,0 +1,40 @@ +# banTruncateCascade +**Diagnostic Category: `lint/safety/banTruncateCascade`** + +**Since**: `vnext` + + +**Sources**: +- Inspired from: squawk/ban-truncate-cascade + +## Description +Using `TRUNCATE`'s `CASCADE` option will truncate any tables that are also foreign-keyed to the specified tables. + +So if you had tables with foreign-keys like: + +`a <- b <- c` + +and ran: + +`truncate a cascade;` + +You'd end up with a, b, & c all being truncated! + +Instead, you can manually specify the tables you want. + +`truncate a, b;` + +## How to configure +```json + +{ + "linter": { + "rules": { + "safety": { + "banTruncateCascade": "error" + } + } + } +} + +``` diff --git a/docs/schemas/latest/schema.json b/docs/schema.json similarity index 92% rename from docs/schemas/latest/schema.json rename to docs/schema.json index faba3b5ca..1c56618ed 100644 --- a/docs/schemas/latest/schema.json +++ b/docs/schema.json @@ -22,6 +22,17 @@ } ] }, + "extends": { + "description": "A list of paths to other JSON files, used to extends the current configuration.", + "anyOf": [ + { + "$ref": "#/definitions/StringSet" + }, + { + "type": "null" + } + ] + }, "files": { "description": "The configuration of the filesystem", "anyOf": [ @@ -338,6 +349,17 @@ } ] }, + "banDropDatabase": { + "description": "Dropping a database may break existing clients (and everything else, really).", + "anyOf": [ + { + "$ref": "#/definitions/RuleConfiguration" + }, + { + "type": "null" + } + ] + }, "banDropNotNull": { "description": "Dropping a NOT NULL constraint may break existing clients.", "anyOf": [ @@ -360,6 +382,17 @@ } ] }, + "banTruncateCascade": { + "description": "Using TRUNCATE's CASCADE option will truncate any tables that are also foreign-keyed to the specified tables.", + "anyOf": [ + { + "$ref": 
"#/definitions/RuleConfiguration" + }, + { + "type": "null" + } + ] + }, "recommended": { "description": "It enables the recommended rules for this group", "type": [ diff --git a/docs/schemas/0.0.0/schema.json b/docs/schemas/0.0.0/schema.json deleted file mode 100644 index faba3b5ca..000000000 --- a/docs/schemas/0.0.0/schema.json +++ /dev/null @@ -1,438 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "Configuration", - "description": "The configuration that is contained inside the configuration file.", - "type": "object", - "properties": { - "$schema": { - "description": "A field for the [JSON schema](https://json-schema.org/) specification", - "type": [ - "string", - "null" - ] - }, - "db": { - "description": "The configuration of the database connection", - "anyOf": [ - { - "$ref": "#/definitions/DatabaseConfiguration" - }, - { - "type": "null" - } - ] - }, - "files": { - "description": "The configuration of the filesystem", - "anyOf": [ - { - "$ref": "#/definitions/FilesConfiguration" - }, - { - "type": "null" - } - ] - }, - "linter": { - "description": "The configuration for the linter", - "anyOf": [ - { - "$ref": "#/definitions/LinterConfiguration" - }, - { - "type": "null" - } - ] - }, - "migrations": { - "description": "Configure migrations", - "anyOf": [ - { - "$ref": "#/definitions/MigrationsConfiguration" - }, - { - "type": "null" - } - ] - }, - "vcs": { - "description": "The configuration of the VCS integration", - "anyOf": [ - { - "$ref": "#/definitions/VcsConfiguration" - }, - { - "type": "null" - } - ] - } - }, - "additionalProperties": false, - "definitions": { - "DatabaseConfiguration": { - "description": "The configuration of the database connection.", - "type": "object", - "properties": { - "allowStatementExecutionsAgainst": { - "anyOf": [ - { - "$ref": "#/definitions/StringSet" - }, - { - "type": "null" - } - ] - }, - "connTimeoutSecs": { - "description": "The connection timeout in seconds.", - "type": [ - "integer", 
- "null" - ], - "format": "uint16", - "minimum": 0.0 - }, - "database": { - "description": "The name of the database.", - "type": [ - "string", - "null" - ] - }, - "host": { - "description": "The host of the database. Required if you want database-related features. All else falls back to sensible defaults.", - "type": [ - "string", - "null" - ] - }, - "password": { - "description": "The password to connect to the database.", - "type": [ - "string", - "null" - ] - }, - "port": { - "description": "The port of the database.", - "type": [ - "integer", - "null" - ], - "format": "uint16", - "minimum": 0.0 - }, - "username": { - "description": "The username to connect to the database.", - "type": [ - "string", - "null" - ] - } - }, - "additionalProperties": false - }, - "FilesConfiguration": { - "description": "The configuration of the filesystem", - "type": "object", - "properties": { - "ignore": { - "description": "A list of Unix shell style patterns. Will ignore files/folders that will match these patterns.", - "anyOf": [ - { - "$ref": "#/definitions/StringSet" - }, - { - "type": "null" - } - ] - }, - "include": { - "description": "A list of Unix shell style patterns. Will handle only those files/folders that will match these patterns.", - "anyOf": [ - { - "$ref": "#/definitions/StringSet" - }, - { - "type": "null" - } - ] - }, - "maxSize": { - "description": "The maximum allowed size for source code files in bytes. Files above this limit will be ignored for performance reasons. Defaults to 1 MiB", - "type": [ - "integer", - "null" - ], - "format": "uint64", - "minimum": 1.0 - } - }, - "additionalProperties": false - }, - "LinterConfiguration": { - "type": "object", - "properties": { - "enabled": { - "description": "if `false`, it disables the feature and the linter won't be executed. `true` by default", - "type": [ - "boolean", - "null" - ] - }, - "ignore": { - "description": "A list of Unix shell style patterns. 
The formatter will ignore files/folders that will match these patterns.", - "anyOf": [ - { - "$ref": "#/definitions/StringSet" - }, - { - "type": "null" - } - ] - }, - "include": { - "description": "A list of Unix shell style patterns. The formatter will include files/folders that will match these patterns.", - "anyOf": [ - { - "$ref": "#/definitions/StringSet" - }, - { - "type": "null" - } - ] - }, - "rules": { - "description": "List of rules", - "anyOf": [ - { - "$ref": "#/definitions/Rules" - }, - { - "type": "null" - } - ] - } - }, - "additionalProperties": false - }, - "MigrationsConfiguration": { - "description": "The configuration of the filesystem", - "type": "object", - "properties": { - "after": { - "description": "Ignore any migrations before this timestamp", - "type": [ - "integer", - "null" - ], - "format": "uint64", - "minimum": 0.0 - }, - "migrationsDir": { - "description": "The directory where the migration files are stored", - "type": [ - "string", - "null" - ] - } - }, - "additionalProperties": false - }, - "RuleConfiguration": { - "anyOf": [ - { - "$ref": "#/definitions/RulePlainConfiguration" - }, - { - "$ref": "#/definitions/RuleWithNoOptions" - } - ] - }, - "RulePlainConfiguration": { - "type": "string", - "enum": [ - "warn", - "error", - "info", - "off" - ] - }, - "RuleWithNoOptions": { - "type": "object", - "required": [ - "level" - ], - "properties": { - "level": { - "description": "The severity of the emitted diagnostics by the rule", - "allOf": [ - { - "$ref": "#/definitions/RulePlainConfiguration" - } - ] - } - }, - "additionalProperties": false - }, - "Rules": { - "type": "object", - "properties": { - "all": { - "description": "It enables ALL rules. The rules that belong to `nursery` won't be enabled.", - "type": [ - "boolean", - "null" - ] - }, - "recommended": { - "description": "It enables the lint rules recommended by Postgres Tools. 
`true` by default.", - "type": [ - "boolean", - "null" - ] - }, - "safety": { - "anyOf": [ - { - "$ref": "#/definitions/Safety" - }, - { - "type": "null" - } - ] - } - }, - "additionalProperties": false - }, - "Safety": { - "description": "A list of rules that belong to this group", - "type": "object", - "properties": { - "addingRequiredField": { - "description": "Adding a new column that is NOT NULL and has no default value to an existing table effectively makes it required.", - "anyOf": [ - { - "$ref": "#/definitions/RuleConfiguration" - }, - { - "type": "null" - } - ] - }, - "all": { - "description": "It enables ALL rules for this group.", - "type": [ - "boolean", - "null" - ] - }, - "banDropColumn": { - "description": "Dropping a column may break existing clients.", - "anyOf": [ - { - "$ref": "#/definitions/RuleConfiguration" - }, - { - "type": "null" - } - ] - }, - "banDropNotNull": { - "description": "Dropping a NOT NULL constraint may break existing clients.", - "anyOf": [ - { - "$ref": "#/definitions/RuleConfiguration" - }, - { - "type": "null" - } - ] - }, - "banDropTable": { - "description": "Dropping a table may break existing clients.", - "anyOf": [ - { - "$ref": "#/definitions/RuleConfiguration" - }, - { - "type": "null" - } - ] - }, - "recommended": { - "description": "It enables the recommended rules for this group", - "type": [ - "boolean", - "null" - ] - } - }, - "additionalProperties": false - }, - "StringSet": { - "type": "array", - "items": { - "type": "string" - }, - "uniqueItems": true - }, - "VcsClientKind": { - "oneOf": [ - { - "description": "Integration with the git client as VCS", - "type": "string", - "enum": [ - "git" - ] - } - ] - }, - "VcsConfiguration": { - "description": "Set of properties to integrate with a VCS software.", - "type": "object", - "properties": { - "clientKind": { - "description": "The kind of client.", - "anyOf": [ - { - "$ref": "#/definitions/VcsClientKind" - }, - { - "type": "null" - } - ] - }, - "defaultBranch": 
{ - "description": "The main branch of the project", - "type": [ - "string", - "null" - ] - }, - "enabled": { - "description": "Whether we should integrate itself with the VCS client", - "type": [ - "boolean", - "null" - ] - }, - "root": { - "description": "The folder where we should check for VCS files. By default, we will use the same folder where `postgrestools.jsonc` was found.\n\nIf we can't find the configuration, it will attempt to use the current working directory. If no current working directory can't be found, we won't use the VCS integration, and a diagnostic will be emitted", - "type": [ - "string", - "null" - ] - }, - "useIgnoreFile": { - "description": "Whether we should use the VCS ignore file. When [true], we will ignore the files specified in the ignore file.", - "type": [ - "boolean", - "null" - ] - } - }, - "additionalProperties": false - } - } -} \ No newline at end of file diff --git a/flake.lock b/flake.lock new file mode 100644 index 000000000..e8bb45768 --- /dev/null +++ b/flake.lock @@ -0,0 +1,82 @@ +{ + "nodes": { + "flake-utils": { + "inputs": { + "systems": "systems" + }, + "locked": { + "lastModified": 1731533236, + "narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=", + "owner": "numtide", + "repo": "flake-utils", + "rev": "11707dc2f618dd54ca8739b309ec4fc024de578b", + "type": "github" + }, + "original": { + "owner": "numtide", + "repo": "flake-utils", + "type": "github" + } + }, + "nixpkgs": { + "locked": { + "lastModified": 1751271578, + "narHash": "sha256-P/SQmKDu06x8yv7i0s8bvnnuJYkxVGBWLWHaU+tt4YY=", + "owner": "NixOS", + "repo": "nixpkgs", + "rev": "3016b4b15d13f3089db8a41ef937b13a9e33a8df", + "type": "github" + }, + "original": { + "owner": "NixOS", + "ref": "nixos-unstable", + "repo": "nixpkgs", + "type": "github" + } + }, + "root": { + "inputs": { + "flake-utils": "flake-utils", + "nixpkgs": "nixpkgs", + "rust-overlay": "rust-overlay" + } + }, + "rust-overlay": { + "inputs": { + "nixpkgs": [ + "nixpkgs" + ] + }, + 
"locked": { + "lastModified": 1751510438, + "narHash": "sha256-m8PjOoyyCR4nhqtHEBP1tB/jF+gJYYguSZmUmVTEAQE=", + "owner": "oxalica", + "repo": "rust-overlay", + "rev": "7f415261f298656f8164bd636c0dc05af4e95b6b", + "type": "github" + }, + "original": { + "owner": "oxalica", + "repo": "rust-overlay", + "type": "github" + } + }, + "systems": { + "locked": { + "lastModified": 1681028828, + "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", + "owner": "nix-systems", + "repo": "default", + "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", + "type": "github" + }, + "original": { + "owner": "nix-systems", + "repo": "default", + "type": "github" + } + } + }, + "root": "root", + "version": 7 +} diff --git a/flake.nix b/flake.nix new file mode 100644 index 000000000..0dac4b195 --- /dev/null +++ b/flake.nix @@ -0,0 +1,92 @@ +{ + description = "PostgreSQL Language Server Development Environment"; + + inputs = { + nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable"; + flake-utils.url = "github:numtide/flake-utils"; + rust-overlay = { + url = "github:oxalica/rust-overlay"; + inputs.nixpkgs.follows = "nixpkgs"; + }; + }; + + outputs = { self, nixpkgs, flake-utils, rust-overlay }: + flake-utils.lib.eachDefaultSystem (system: + let + overlays = [ (import rust-overlay) ]; + pkgs = import nixpkgs { + inherit system overlays; + }; + + # Read rust-toolchain.toml to get the exact Rust version + rustToolchain = pkgs.rust-bin.fromRustupToolchainFile ./rust-toolchain.toml; + + # Development dependencies + buildInputs = with pkgs; [ + # Rust toolchain + rustToolchain + + # Node.js ecosystem + bun + nodejs_20 + + # Python for additional tooling + python3 + python3Packages.pip + + # System dependencies + pkg-config + openssl + + # Build tools + just + git + + # LSP and development tools + rust-analyzer + + # Additional tools that might be needed + cmake + gcc + libiconv + ]; + + # Environment variables + env = { + RUST_SRC_PATH = "${rustToolchain}/lib/rustlib/src/rust/library"; 
+ PKG_CONFIG_PATH = "${pkgs.openssl.dev}/lib/pkgconfig"; + }; + + in + { + devShells.default = pkgs.mkShell { + inherit buildInputs; + + shellHook = '' + echo "PostgreSQL Language Server Development Environment" + echo "Available tools:" + echo " • Rust $(rustc --version)" + echo " • Node.js $(node --version)" + echo " • Bun $(bun --version)" + echo " • Just $(just --version)" + echo "" + echo "Development Commands:" + echo " • just --list # Show tasks" + echo " • cargo check # Check Rust" + echo " • bun install # Install deps" + echo "" + echo "Use Docker for database:" + echo " • docker-compose up -d" + echo "" + + # Set environment variables + ${pkgs.lib.concatStringsSep "\n" + (pkgs.lib.mapAttrsToList (name: value: "export ${name}=\"${value}\"") env)} + ''; + }; + + # Formatter for nix files + formatter = pkgs.nixfmt-rfc-style; + } + ); +} \ No newline at end of file diff --git a/justfile b/justfile index a55207aeb..7e53a8a62 100644 --- a/justfile +++ b/justfile @@ -6,6 +6,7 @@ alias r := ready alias l := lint alias t := test alias rg := reset-git +alias qm := quick-modify # Installs the tools needed to develop install-tools: @@ -30,8 +31,8 @@ gen-lint: just format # Creates a new lint rule in the given path, with the given name. Name has to be camel case. Group should be lowercase. 
-new-lintrule group rulename: - cargo run -p xtask_codegen -- new-lintrule --category=lint --name={{rulename}} --group={{group}} +new-lintrule group rulename severity="error": + cargo run -p xtask_codegen -- new-lintrule --category=lint --name={{rulename}} --group={{group}} --severity={{severity}} just gen-lint # Format Rust, JS and TOML files @@ -132,10 +133,22 @@ merge-main: git fetch origin main:main git merge main +quick-create branch commit: + git checkout -b {{branch}} + git add -A + git commit -m "{{commit}}" + git push + gh pr create --fill + +quick-modify: + just format + git add -A + git commit -m "progress" + git push # Make sure to set your PGT_LOG_PATH in your shell profile. # You can use the PGT_LOG_LEVEL to set your log level. # We recommend to install `bunyan` (npm i -g bunyan) and pipe the output through there for color-coding: # just show-logs | bunyan show-logs: - tail -f $(ls $PGT_LOG_PATH/server.log.* | sort -t- -k2,2 -k3,3 -k4,4 | tail -n 1) \ No newline at end of file + tail -f $(ls $PGT_LOG_PATH/server.log.* | sort -t- -k2,2 -k3,3 -k4,4 | tail -n 1) diff --git a/libpg_query b/libpg_query deleted file mode 160000 index 1c1a32ed2..000000000 --- a/libpg_query +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 1c1a32ed2f4c7799830d50bf4cb159222aafec48 diff --git a/mkdocs.yml b/mkdocs.yml index 3597e08c3..572642c60 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -6,34 +6,27 @@ repo_name: supabase-community/postgres-language-server repo_url: https://github.com/supabase-community/postgres-language-server theme: - name: 'readthedocs' - features: - - navigation.expand - palette: - primary: grey - accent: red + name: "readthedocs" + features: + - navigation.expand + palette: + primary: grey + accent: red nav: - Introduction: index.md - Guides: - - Linting Migrations: checking_migrations.md - - Troubleshooting: troubleshooting.md + - Linting Migrations: checking_migrations.md + - Troubleshooting: troubleshooting.md - Reference: - - Rules: rules.md - - Rule 
Sources: rule_sources.md - - CLI: cli_reference.md - - Environment Variables: env_variables.md + - Rules: rules.md + - Rule Sources: rule_sources.md + - Rule Suppressions: rule_suppressions.md + - CLI: cli_reference.md + - Environment Variables: env_variables.md plugins: - gh-admonitions + - mike markdown_extensions: - admonition - # - pymdownx.highlight: - # anchor_linenums: true - # line_spans: __span - # pygments_lang_class: true - # - pymdownx.inlinehilite - # - pymdownx.snippets - # - pymdownx.superfences - # - pymdownx.tabbed: - # alternate_style: true diff --git a/packages/@postgrestools/backend-jsonrpc/src/workspace.ts b/packages/@postgrestools/backend-jsonrpc/src/workspace.ts index a35dad811..971f07ec9 100644 --- a/packages/@postgrestools/backend-jsonrpc/src/workspace.ts +++ b/packages/@postgrestools/backend-jsonrpc/src/workspace.ts @@ -19,6 +19,11 @@ export type FileKind = FileKind2[]; * The priority of the file */ export type FileKind2 = "Config" | "Ignore" | "Inspectable" | "Handleable"; +export interface RegisterProjectFolderParams { + path?: string; + setAsCurrentWorkspace: boolean; +} +export type ProjectKey = string; export interface GetFileContentParams { path: PgTPath; } @@ -60,8 +65,10 @@ export interface Advices { export type Category = | "lint/safety/addingRequiredField" | "lint/safety/banDropColumn" + | "lint/safety/banDropDatabase" | "lint/safety/banDropNotNull" | "lint/safety/banDropTable" + | "lint/safety/banTruncateCascade" | "stdin" | "check" | "configuration" @@ -185,6 +192,7 @@ export interface CompletionsResult { export interface CompletionItem { completion_text?: CompletionText; description: string; + detail?: string; kind: CompletionItemKind; label: string; preselected: boolean; @@ -199,13 +207,20 @@ export interface CompletionItem { label: "users", description: "Schema: auth", completion_text: "auth.users". 
*/ export interface CompletionText { + is_snippet: boolean; /** * A `range` is required because some editors replace the current token, others naively insert the text. Having a range where start == end makes it an insertion. */ range: TextRange; text: string; } -export type CompletionItemKind = "table" | "function" | "column" | "schema"; +export type CompletionItemKind = + | "table" + | "function" + | "column" + | "schema" + | "policy" + | "role"; export interface UpdateSettingsParams { configuration: PartialConfiguration; gitignore_matches: string[]; @@ -224,6 +239,10 @@ export interface PartialConfiguration { * The configuration of the database connection */ db?: PartialDatabaseConfiguration; + /** + * A list of paths to other JSON files, used to extends the current configuration. + */ + extends?: StringSet; /** * The configuration of the filesystem */ @@ -271,6 +290,7 @@ export interface PartialDatabaseConfiguration { */ username?: string; } +export type StringSet = string[]; /** * The configuration of the filesystem */ @@ -346,7 +366,6 @@ If we can't find the configuration, it will attempt to use the current working d */ useIgnoreFile?: boolean; } -export type StringSet = string[]; export interface Rules { /** * It enables ALL rules. The rules that belong to `nursery` won't be enabled. @@ -375,6 +394,10 @@ export interface Safety { * Dropping a column may break existing clients. */ banDropColumn?: RuleConfiguration_for_Null; + /** + * Dropping a database may break existing clients (and everything else, really). + */ + banDropDatabase?: RuleConfiguration_for_Null; /** * Dropping a NOT NULL constraint may break existing clients. */ @@ -383,6 +406,10 @@ export interface Safety { * Dropping a table may break existing clients. */ banDropTable?: RuleConfiguration_for_Null; + /** + * Using TRUNCATE's CASCADE option will truncate any tables that are also foreign-keyed to the specified tables. 
+ */ + banTruncateCascade?: RuleConfiguration_for_Null; /** * It enables the recommended rules for this group */ @@ -408,23 +435,19 @@ export interface OpenFileParams { version: number; } export interface ChangeFileParams { - changes: ChangeParams[]; + content: string; path: PgTPath; version: number; } -export interface ChangeParams { - /** - * The range of the file that changed. If `None`, the whole file changed. - */ - range?: TextRange; - text: string; -} export interface CloseFileParams { path: PgTPath; } export type Configuration = PartialConfiguration; export interface Workspace { isPathIgnored(params: IsPathIgnoredParams): Promise; + registerProjectFolder( + params: RegisterProjectFolderParams, + ): Promise; getFileContent(params: GetFileContentParams): Promise; pullDiagnostics( params: PullDiagnosticsParams, @@ -441,6 +464,9 @@ export function createWorkspace(transport: Transport): Workspace { isPathIgnored(params) { return transport.request("pgt/is_path_ignored", params); }, + registerProjectFolder(params) { + return transport.request("pgt/register_project_folder", params); + }, getFileContent(params) { return transport.request("pgt/get_file_content", params); }, diff --git a/packages/@postgrestools/backend-jsonrpc/tests/workspace.test.mjs b/packages/@postgrestools/backend-jsonrpc/tests/workspace.test.mjs index c83d5e44c..c35904c41 100644 --- a/packages/@postgrestools/backend-jsonrpc/tests/workspace.test.mjs +++ b/packages/@postgrestools/backend-jsonrpc/tests/workspace.test.mjs @@ -2,7 +2,7 @@ import { resolve } from "node:path"; import { fileURLToPath } from "node:url"; import { describe, expect, it } from "vitest"; -import { createWorkspaceWithBinary } from "../dist"; +import { createWorkspaceWithBinary } from "../src"; describe("Workspace API", () => { it("should process remote requests", async () => { @@ -14,6 +14,9 @@ describe("Workspace API", () => { ); const workspace = await createWorkspaceWithBinary(command); + workspace.registerProjectFolder({ + 
setAsCurrentWorkspace: true, + }); await workspace.openFile({ path: { path: "test.sql", diff --git a/postgrestools.jsonc b/postgrestools.jsonc index 325c7861f..47d08c729 100644 --- a/postgrestools.jsonc +++ b/postgrestools.jsonc @@ -1,5 +1,5 @@ { - "$schema": "./docs/schemas/latest/schema.json", + "$schema": "./docs/schema.json", "vcs": { "enabled": false, "clientKind": "git", diff --git a/pyproject.toml b/pyproject.toml index 73ee0fa82..41317471b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -5,6 +5,7 @@ description = "A collection of language tools and a Language Server Protocol (LS readme = "README.md" requires-python = ">=3.13" dependencies = [ + "mike>=2.1.3", "mkdocs>=1.6.1", "mkdocs-github-admonitions-plugin>=0.0.3", ] diff --git a/test.sql b/test.sql index 88b7310da..b21a2e8dc 100644 --- a/test.sql +++ b/test.sql @@ -9,3 +9,26 @@ from unknown_users; sel 1; + + + +create function test_organisation_id () + returns setof text + language plpgsql + security invoker + as $$ + declre + v_organisation_id uuid; +begin + return next is(private.organisation_id(), v_organisation_id, 'should return organisation_id of token'); +end +$$; + + +create function f1() +returns void as $$ +declare b constant int; +begin + call p1(10, b); +end; +$$ language plpgsql; diff --git a/uv.lock b/uv.lock index 8ec36cbc8..0f36c0844 100644 --- a/uv.lock +++ b/uv.lock @@ -1,5 +1,5 @@ version = 1 -revision = 1 +revision = 2 requires-python = ">=3.13" [[package]] @@ -9,18 +9,18 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b9/2e/0090cbf739cee7d23781ad4b89a9894a41538e4fcf4c31dcdd705b78eb8b/click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a", size = 226593 } +sdist = { url = 
"https://files.pythonhosted.org/packages/b9/2e/0090cbf739cee7d23781ad4b89a9894a41538e4fcf4c31dcdd705b78eb8b/click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a", size = 226593, upload-time = "2024-12-21T18:38:44.339Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7e/d4/7ebdbd03970677812aac39c869717059dbb71a4cfc033ca6e5221787892c/click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2", size = 98188 }, + { url = "https://files.pythonhosted.org/packages/7e/d4/7ebdbd03970677812aac39c869717059dbb71a4cfc033ca6e5221787892c/click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2", size = 98188, upload-time = "2024-12-21T18:38:41.666Z" }, ] [[package]] name = "colorama" version = "0.4.6" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697 } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 }, + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = 
"2022-10-25T02:36:20.889Z" }, ] [[package]] @@ -30,9 +30,30 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "python-dateutil" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d9/29/d40217cbe2f6b1359e00c6c307bb3fc876ba74068cbab3dde77f03ca0dc4/ghp-import-2.1.0.tar.gz", hash = "sha256:9c535c4c61193c2df8871222567d7fd7e5014d835f97dc7b7439069e2413d343", size = 10943 } +sdist = { url = "https://files.pythonhosted.org/packages/d9/29/d40217cbe2f6b1359e00c6c307bb3fc876ba74068cbab3dde77f03ca0dc4/ghp-import-2.1.0.tar.gz", hash = "sha256:9c535c4c61193c2df8871222567d7fd7e5014d835f97dc7b7439069e2413d343", size = 10943, upload-time = "2022-05-02T15:47:16.11Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f7/ec/67fbef5d497f86283db54c22eec6f6140243aae73265799baaaa19cd17fb/ghp_import-2.1.0-py3-none-any.whl", hash = "sha256:8337dd7b50877f163d4c0289bc1f1c7f127550241988d568c1db512c4324a619", size = 11034 }, + { url = "https://files.pythonhosted.org/packages/f7/ec/67fbef5d497f86283db54c22eec6f6140243aae73265799baaaa19cd17fb/ghp_import-2.1.0-py3-none-any.whl", hash = "sha256:8337dd7b50877f163d4c0289bc1f1c7f127550241988d568c1db512c4324a619", size = 11034, upload-time = "2022-05-02T15:47:14.552Z" }, +] + +[[package]] +name = "importlib-metadata" +version = "8.7.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "zipp" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/76/66/650a33bd90f786193e4de4b3ad86ea60b53c89b669a5c7be931fac31cdb0/importlib_metadata-8.7.0.tar.gz", hash = "sha256:d13b81ad223b890aa16c5471f2ac3056cf76c5f10f82d6f9292f0b415f389000", size = 56641, upload-time = "2025-04-27T15:29:01.736Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/b0/36bd937216ec521246249be3bf9855081de4c5e06a0c9b4219dbeda50373/importlib_metadata-8.7.0-py3-none-any.whl", hash = "sha256:e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd", size = 27656, upload-time = 
"2025-04-27T15:29:00.214Z" }, +] + +[[package]] +name = "importlib-resources" +version = "6.5.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/cf/8c/f834fbf984f691b4f7ff60f50b514cc3de5cc08abfc3295564dd89c5e2e7/importlib_resources-6.5.2.tar.gz", hash = "sha256:185f87adef5bcc288449d98fb4fba07cea78bc036455dd44c5fc4a2fe78fed2c", size = 44693, upload-time = "2025-01-03T18:51:56.698Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a4/ed/1f1afb2e9e7f38a545d628f864d562a5ae64fe6f7a10e28ffb9b185b4e89/importlib_resources-6.5.2-py3-none-any.whl", hash = "sha256:789cfdc3ed28c78b67a06acb8126751ced69a3d5f79c095a98298cd8a760ccec", size = 37461, upload-time = "2025-01-03T18:51:54.306Z" }, ] [[package]] @@ -42,55 +63,74 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "markupsafe" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/af/92/b3130cbbf5591acf9ade8708c365f3238046ac7cb8ccba6e81abccb0ccff/jinja2-3.1.5.tar.gz", hash = "sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb", size = 244674 } +sdist = { url = "https://files.pythonhosted.org/packages/af/92/b3130cbbf5591acf9ade8708c365f3238046ac7cb8ccba6e81abccb0ccff/jinja2-3.1.5.tar.gz", hash = "sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb", size = 244674, upload-time = "2024-12-21T18:30:22.828Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/bd/0f/2ba5fbcd631e3e88689309dbe978c5769e883e4b84ebfe7da30b43275c5a/jinja2-3.1.5-py3-none-any.whl", hash = "sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb", size = 134596 }, + { url = "https://files.pythonhosted.org/packages/bd/0f/2ba5fbcd631e3e88689309dbe978c5769e883e4b84ebfe7da30b43275c5a/jinja2-3.1.5-py3-none-any.whl", hash = "sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb", size = 134596, upload-time = "2024-12-21T18:30:19.133Z" }, ] [[package]] name = 
"markdown" version = "3.7" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/54/28/3af612670f82f4c056911fbbbb42760255801b3068c48de792d354ff4472/markdown-3.7.tar.gz", hash = "sha256:2ae2471477cfd02dbbf038d5d9bc226d40def84b4fe2986e49b59b6b472bbed2", size = 357086 } +sdist = { url = "https://files.pythonhosted.org/packages/54/28/3af612670f82f4c056911fbbbb42760255801b3068c48de792d354ff4472/markdown-3.7.tar.gz", hash = "sha256:2ae2471477cfd02dbbf038d5d9bc226d40def84b4fe2986e49b59b6b472bbed2", size = 357086, upload-time = "2024-08-16T15:55:17.812Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3f/08/83871f3c50fc983b88547c196d11cf8c3340e37c32d2e9d6152abe2c61f7/Markdown-3.7-py3-none-any.whl", hash = "sha256:7eb6df5690b81a1d7942992c97fad2938e956e79df20cbc6186e9c3a77b1c803", size = 106349 }, + { url = "https://files.pythonhosted.org/packages/3f/08/83871f3c50fc983b88547c196d11cf8c3340e37c32d2e9d6152abe2c61f7/Markdown-3.7-py3-none-any.whl", hash = "sha256:7eb6df5690b81a1d7942992c97fad2938e956e79df20cbc6186e9c3a77b1c803", size = 106349, upload-time = "2024-08-16T15:55:16.176Z" }, ] [[package]] name = "markupsafe" version = "3.0.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b2/97/5d42485e71dfc078108a86d6de8fa46db44a1a9295e89c5d6d4a06e23a62/markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", size = 20537 } +sdist = { url = "https://files.pythonhosted.org/packages/b2/97/5d42485e71dfc078108a86d6de8fa46db44a1a9295e89c5d6d4a06e23a62/markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", size = 20537, upload-time = "2024-10-18T15:21:54.129Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/83/0e/67eb10a7ecc77a0c2bbe2b0235765b98d164d81600746914bebada795e97/MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = 
"sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd", size = 14274 }, - { url = "https://files.pythonhosted.org/packages/2b/6d/9409f3684d3335375d04e5f05744dfe7e9f120062c9857df4ab490a1031a/MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430", size = 12352 }, - { url = "https://files.pythonhosted.org/packages/d2/f5/6eadfcd3885ea85fe2a7c128315cc1bb7241e1987443d78c8fe712d03091/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094", size = 24122 }, - { url = "https://files.pythonhosted.org/packages/0c/91/96cf928db8236f1bfab6ce15ad070dfdd02ed88261c2afafd4b43575e9e9/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396", size = 23085 }, - { url = "https://files.pythonhosted.org/packages/c2/cf/c9d56af24d56ea04daae7ac0940232d31d5a8354f2b457c6d856b2057d69/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79", size = 22978 }, - { url = "https://files.pythonhosted.org/packages/2a/9f/8619835cd6a711d6272d62abb78c033bda638fdc54c4e7f4272cf1c0962b/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a", size = 24208 }, - { url = "https://files.pythonhosted.org/packages/f9/bf/176950a1792b2cd2102b8ffeb5133e1ed984547b75db47c25a67d3359f77/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca", size = 23357 }, - { url = "https://files.pythonhosted.org/packages/ce/4f/9a02c1d335caabe5c4efb90e1b6e8ee944aa245c1aaaab8e8a618987d816/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c", size = 23344 }, - { url = "https://files.pythonhosted.org/packages/ee/55/c271b57db36f748f0e04a759ace9f8f759ccf22b4960c270c78a394f58be/MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1", size = 15101 }, - { url = "https://files.pythonhosted.org/packages/29/88/07df22d2dd4df40aba9f3e402e6dc1b8ee86297dddbad4872bd5e7b0094f/MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f", size = 15603 }, - { url = "https://files.pythonhosted.org/packages/62/6a/8b89d24db2d32d433dffcd6a8779159da109842434f1dd2f6e71f32f738c/MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c", size = 14510 }, - { url = "https://files.pythonhosted.org/packages/7a/06/a10f955f70a2e5a9bf78d11a161029d278eeacbd35ef806c3fd17b13060d/MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb", size = 12486 }, - { url = "https://files.pythonhosted.org/packages/34/cf/65d4a571869a1a9078198ca28f39fba5fbb910f952f9dbc5220afff9f5e6/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c", size = 25480 }, - { url = "https://files.pythonhosted.org/packages/0c/e3/90e9651924c430b885468b56b3d597cabf6d72be4b24a0acd1fa0e12af67/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d", size = 23914 }, - { url = "https://files.pythonhosted.org/packages/66/8c/6c7cf61f95d63bb866db39085150df1f2a5bd3335298f14a66b48e92659c/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe", size = 23796 }, - { url = "https://files.pythonhosted.org/packages/bb/35/cbe9238ec3f47ac9a7c8b3df7a808e7cb50fe149dc7039f5f454b3fba218/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5", size = 25473 }, - { url = "https://files.pythonhosted.org/packages/e6/32/7621a4382488aa283cc05e8984a9c219abad3bca087be9ec77e89939ded9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a", size = 24114 }, - { url = "https://files.pythonhosted.org/packages/0d/80/0985960e4b89922cb5a0bac0ed39c5b96cbc1a536a99f30e8c220a996ed9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9", size = 24098 }, - { url = "https://files.pythonhosted.org/packages/82/78/fedb03c7d5380df2427038ec8d973587e90561b2d90cd472ce9254cf348b/MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6", size = 15208 }, - { url = "https://files.pythonhosted.org/packages/4f/65/6079a46068dfceaeabb5dcad6d674f5f5c61a6fa5673746f42a9f4c233b3/MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f", size = 15739 }, + { url = "https://files.pythonhosted.org/packages/83/0e/67eb10a7ecc77a0c2bbe2b0235765b98d164d81600746914bebada795e97/MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd", size = 14274, upload-time = "2024-10-18T15:21:24.577Z" }, + { url = "https://files.pythonhosted.org/packages/2b/6d/9409f3684d3335375d04e5f05744dfe7e9f120062c9857df4ab490a1031a/MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430", 
size = 12352, upload-time = "2024-10-18T15:21:25.382Z" }, + { url = "https://files.pythonhosted.org/packages/d2/f5/6eadfcd3885ea85fe2a7c128315cc1bb7241e1987443d78c8fe712d03091/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094", size = 24122, upload-time = "2024-10-18T15:21:26.199Z" }, + { url = "https://files.pythonhosted.org/packages/0c/91/96cf928db8236f1bfab6ce15ad070dfdd02ed88261c2afafd4b43575e9e9/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396", size = 23085, upload-time = "2024-10-18T15:21:27.029Z" }, + { url = "https://files.pythonhosted.org/packages/c2/cf/c9d56af24d56ea04daae7ac0940232d31d5a8354f2b457c6d856b2057d69/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79", size = 22978, upload-time = "2024-10-18T15:21:27.846Z" }, + { url = "https://files.pythonhosted.org/packages/2a/9f/8619835cd6a711d6272d62abb78c033bda638fdc54c4e7f4272cf1c0962b/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a", size = 24208, upload-time = "2024-10-18T15:21:28.744Z" }, + { url = "https://files.pythonhosted.org/packages/f9/bf/176950a1792b2cd2102b8ffeb5133e1ed984547b75db47c25a67d3359f77/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca", size = 23357, upload-time = "2024-10-18T15:21:29.545Z" }, + { url = "https://files.pythonhosted.org/packages/ce/4f/9a02c1d335caabe5c4efb90e1b6e8ee944aa245c1aaaab8e8a618987d816/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c", size = 23344, 
upload-time = "2024-10-18T15:21:30.366Z" }, + { url = "https://files.pythonhosted.org/packages/ee/55/c271b57db36f748f0e04a759ace9f8f759ccf22b4960c270c78a394f58be/MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1", size = 15101, upload-time = "2024-10-18T15:21:31.207Z" }, + { url = "https://files.pythonhosted.org/packages/29/88/07df22d2dd4df40aba9f3e402e6dc1b8ee86297dddbad4872bd5e7b0094f/MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f", size = 15603, upload-time = "2024-10-18T15:21:32.032Z" }, + { url = "https://files.pythonhosted.org/packages/62/6a/8b89d24db2d32d433dffcd6a8779159da109842434f1dd2f6e71f32f738c/MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c", size = 14510, upload-time = "2024-10-18T15:21:33.625Z" }, + { url = "https://files.pythonhosted.org/packages/7a/06/a10f955f70a2e5a9bf78d11a161029d278eeacbd35ef806c3fd17b13060d/MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb", size = 12486, upload-time = "2024-10-18T15:21:34.611Z" }, + { url = "https://files.pythonhosted.org/packages/34/cf/65d4a571869a1a9078198ca28f39fba5fbb910f952f9dbc5220afff9f5e6/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c", size = 25480, upload-time = "2024-10-18T15:21:35.398Z" }, + { url = "https://files.pythonhosted.org/packages/0c/e3/90e9651924c430b885468b56b3d597cabf6d72be4b24a0acd1fa0e12af67/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d", size = 23914, upload-time = "2024-10-18T15:21:36.231Z" }, + { url = 
"https://files.pythonhosted.org/packages/66/8c/6c7cf61f95d63bb866db39085150df1f2a5bd3335298f14a66b48e92659c/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe", size = 23796, upload-time = "2024-10-18T15:21:37.073Z" }, + { url = "https://files.pythonhosted.org/packages/bb/35/cbe9238ec3f47ac9a7c8b3df7a808e7cb50fe149dc7039f5f454b3fba218/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5", size = 25473, upload-time = "2024-10-18T15:21:37.932Z" }, + { url = "https://files.pythonhosted.org/packages/e6/32/7621a4382488aa283cc05e8984a9c219abad3bca087be9ec77e89939ded9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a", size = 24114, upload-time = "2024-10-18T15:21:39.799Z" }, + { url = "https://files.pythonhosted.org/packages/0d/80/0985960e4b89922cb5a0bac0ed39c5b96cbc1a536a99f30e8c220a996ed9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9", size = 24098, upload-time = "2024-10-18T15:21:40.813Z" }, + { url = "https://files.pythonhosted.org/packages/82/78/fedb03c7d5380df2427038ec8d973587e90561b2d90cd472ce9254cf348b/MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6", size = 15208, upload-time = "2024-10-18T15:21:41.814Z" }, + { url = "https://files.pythonhosted.org/packages/4f/65/6079a46068dfceaeabb5dcad6d674f5f5c61a6fa5673746f42a9f4c233b3/MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f", size = 15739, upload-time = "2024-10-18T15:21:42.784Z" }, ] [[package]] name = "mergedeep" version = "1.3.4" source = { registry = 
"https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/3a/41/580bb4006e3ed0361b8151a01d324fb03f420815446c7def45d02f74c270/mergedeep-1.3.4.tar.gz", hash = "sha256:0096d52e9dad9939c3d975a774666af186eda617e6ca84df4c94dec30004f2a8", size = 4661 } +sdist = { url = "https://files.pythonhosted.org/packages/3a/41/580bb4006e3ed0361b8151a01d324fb03f420815446c7def45d02f74c270/mergedeep-1.3.4.tar.gz", hash = "sha256:0096d52e9dad9939c3d975a774666af186eda617e6ca84df4c94dec30004f2a8", size = 4661, upload-time = "2021-02-05T18:55:30.623Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2c/19/04f9b178c2d8a15b076c8b5140708fa6ffc5601fb6f1e975537072df5b2a/mergedeep-1.3.4-py3-none-any.whl", hash = "sha256:70775750742b25c0d8f36c55aed03d24c3384d17c951b3175d898bd778ef0307", size = 6354 }, + { url = "https://files.pythonhosted.org/packages/2c/19/04f9b178c2d8a15b076c8b5140708fa6ffc5601fb6f1e975537072df5b2a/mergedeep-1.3.4-py3-none-any.whl", hash = "sha256:70775750742b25c0d8f36c55aed03d24c3384d17c951b3175d898bd778ef0307", size = 6354, upload-time = "2021-02-05T18:55:29.583Z" }, +] + +[[package]] +name = "mike" +version = "2.1.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "importlib-metadata" }, + { name = "importlib-resources" }, + { name = "jinja2" }, + { name = "mkdocs" }, + { name = "pyparsing" }, + { name = "pyyaml" }, + { name = "pyyaml-env-tag" }, + { name = "verspec" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ab/f7/2933f1a1fb0e0f077d5d6a92c6c7f8a54e6128241f116dff4df8b6050bbf/mike-2.1.3.tar.gz", hash = "sha256:abd79b8ea483fb0275b7972825d3082e5ae67a41820f8d8a0dc7a3f49944e810", size = 38119, upload-time = "2024-08-13T05:02:14.167Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fd/1a/31b7cd6e4e7a02df4e076162e9783620777592bea9e4bb036389389af99d/mike-2.1.3-py3-none-any.whl", hash = "sha256:d90c64077e84f06272437b464735130d380703a76a5738b152932884c60c062a", size = 33754, 
upload-time = "2024-08-13T05:02:12.515Z" }, ] [[package]] @@ -112,9 +152,9 @@ dependencies = [ { name = "pyyaml-env-tag" }, { name = "watchdog" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/bc/c6/bbd4f061bd16b378247f12953ffcb04786a618ce5e904b8c5a01a0309061/mkdocs-1.6.1.tar.gz", hash = "sha256:7b432f01d928c084353ab39c57282f29f92136665bdd6abf7c1ec8d822ef86f2", size = 3889159 } +sdist = { url = "https://files.pythonhosted.org/packages/bc/c6/bbd4f061bd16b378247f12953ffcb04786a618ce5e904b8c5a01a0309061/mkdocs-1.6.1.tar.gz", hash = "sha256:7b432f01d928c084353ab39c57282f29f92136665bdd6abf7c1ec8d822ef86f2", size = 3889159, upload-time = "2024-08-30T12:24:06.899Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/22/5b/dbc6a8cddc9cfa9c4971d59fb12bb8d42e161b7e7f8cc89e49137c5b279c/mkdocs-1.6.1-py3-none-any.whl", hash = "sha256:db91759624d1647f3f34aa0c3f327dd2601beae39a366d6e064c03468d35c20e", size = 3864451 }, + { url = "https://files.pythonhosted.org/packages/22/5b/dbc6a8cddc9cfa9c4971d59fb12bb8d42e161b7e7f8cc89e49137c5b279c/mkdocs-1.6.1-py3-none-any.whl", hash = "sha256:db91759624d1647f3f34aa0c3f327dd2601beae39a366d6e064c03468d35c20e", size = 3864451, upload-time = "2024-08-30T12:24:05.054Z" }, ] [[package]] @@ -126,9 +166,9 @@ dependencies = [ { name = "platformdirs" }, { name = "pyyaml" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/98/f5/ed29cd50067784976f25ed0ed6fcd3c2ce9eb90650aa3b2796ddf7b6870b/mkdocs_get_deps-0.2.0.tar.gz", hash = "sha256:162b3d129c7fad9b19abfdcb9c1458a651628e4b1dea628ac68790fb3061c60c", size = 10239 } +sdist = { url = "https://files.pythonhosted.org/packages/98/f5/ed29cd50067784976f25ed0ed6fcd3c2ce9eb90650aa3b2796ddf7b6870b/mkdocs_get_deps-0.2.0.tar.gz", hash = "sha256:162b3d129c7fad9b19abfdcb9c1458a651628e4b1dea628ac68790fb3061c60c", size = 10239, upload-time = "2023-11-20T17:51:09.981Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/9f/d4/029f984e8d3f3b6b726bd33cafc473b75e9e44c0f7e80a5b29abc466bdea/mkdocs_get_deps-0.2.0-py3-none-any.whl", hash = "sha256:2bf11d0b133e77a0dd036abeeb06dec8775e46efa526dc70667d8863eefc6134", size = 9521 }, + { url = "https://files.pythonhosted.org/packages/9f/d4/029f984e8d3f3b6b726bd33cafc473b75e9e44c0f7e80a5b29abc466bdea/mkdocs_get_deps-0.2.0-py3-none-any.whl", hash = "sha256:2bf11d0b133e77a0dd036abeeb06dec8775e46efa526dc70667d8863eefc6134", size = 9521, upload-time = "2023-11-20T17:51:08.587Z" }, ] [[package]] @@ -138,36 +178,36 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "mkdocs" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/65/13/a2b2b81604481569982fdaf51f0746f320df303efbd13d7b74fbf7b2c3a4/mkdocs_github_admonitions_plugin-0.0.3.tar.gz", hash = "sha256:4fd3ca88157c18c5f0cc4420c1a7f73ed1ed3f1886f41d6ce869932e90f38c48", size = 3998 } +sdist = { url = "https://files.pythonhosted.org/packages/65/13/a2b2b81604481569982fdaf51f0746f320df303efbd13d7b74fbf7b2c3a4/mkdocs_github_admonitions_plugin-0.0.3.tar.gz", hash = "sha256:4fd3ca88157c18c5f0cc4420c1a7f73ed1ed3f1886f41d6ce869932e90f38c48", size = 3998, upload-time = "2024-10-23T19:30:39.095Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d2/87/650f2cbd07f142034d84357ac651586748032546287ba70e90244180b92c/mkdocs_github_admonitions_plugin-0.0.3-py3-none-any.whl", hash = "sha256:cb06f56e5b51e5d7b22fcbb4ab632079e3082b7f37bdbeb20cc9fd8a7c5e1657", size = 5043 }, + { url = "https://files.pythonhosted.org/packages/d2/87/650f2cbd07f142034d84357ac651586748032546287ba70e90244180b92c/mkdocs_github_admonitions_plugin-0.0.3-py3-none-any.whl", hash = "sha256:cb06f56e5b51e5d7b22fcbb4ab632079e3082b7f37bdbeb20cc9fd8a7c5e1657", size = 5043, upload-time = "2024-10-23T19:30:38.195Z" }, ] [[package]] name = "packaging" version = "24.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/d0/63/68dbb6eb2de9cb10ee4c9c14a0148804425e13c4fb20d61cce69f53106da/packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f", size = 163950 } +sdist = { url = "https://files.pythonhosted.org/packages/d0/63/68dbb6eb2de9cb10ee4c9c14a0148804425e13c4fb20d61cce69f53106da/packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f", size = 163950, upload-time = "2024-11-08T09:47:47.202Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/88/ef/eb23f262cca3c0c4eb7ab1933c3b1f03d021f2c48f54763065b6f0e321be/packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759", size = 65451 }, + { url = "https://files.pythonhosted.org/packages/88/ef/eb23f262cca3c0c4eb7ab1933c3b1f03d021f2c48f54763065b6f0e321be/packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759", size = 65451, upload-time = "2024-11-08T09:47:44.722Z" }, ] [[package]] name = "pathspec" version = "0.12.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043 } +sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043, upload-time = "2023-12-10T22:30:45Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191 }, + { url = 
"https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191, upload-time = "2023-12-10T22:30:43.14Z" }, ] [[package]] name = "platformdirs" version = "4.3.6" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/13/fc/128cc9cb8f03208bdbf93d3aa862e16d376844a14f9a0ce5cf4507372de4/platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907", size = 21302 } +sdist = { url = "https://files.pythonhosted.org/packages/13/fc/128cc9cb8f03208bdbf93d3aa862e16d376844a14f9a0ce5cf4507372de4/platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907", size = 21302, upload-time = "2024-09-17T19:06:50.688Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3c/a6/bc1012356d8ece4d66dd75c4b9fc6c1f6650ddd5991e421177d9f8f671be/platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb", size = 18439 }, + { url = "https://files.pythonhosted.org/packages/3c/a6/bc1012356d8ece4d66dd75c4b9fc6c1f6650ddd5991e421177d9f8f671be/platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb", size = 18439, upload-time = "2024-09-17T19:06:49.212Z" }, ] [[package]] @@ -175,16 +215,27 @@ name = "postgrestools" version = "0.1.0" source = { virtual = "." 
} dependencies = [ + { name = "mike" }, { name = "mkdocs" }, { name = "mkdocs-github-admonitions-plugin" }, ] [package.metadata] requires-dist = [ + { name = "mike", specifier = ">=2.1.3" }, { name = "mkdocs", specifier = ">=1.6.1" }, { name = "mkdocs-github-admonitions-plugin", specifier = ">=0.0.3" }, ] +[[package]] +name = "pyparsing" +version = "3.2.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/bb/22/f1129e69d94ffff626bdb5c835506b3a5b4f3d070f17ea295e12c2c6f60f/pyparsing-3.2.3.tar.gz", hash = "sha256:b9c13f1ab8b3b542f72e28f634bad4de758ab3ce4546e4301970ad6fa77c38be", size = 1088608, upload-time = "2025-03-25T05:01:28.114Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/05/e7/df2285f3d08fee213f2d041540fa4fc9ca6c2d44cf36d3a035bf2a8d2bcc/pyparsing-3.2.3-py3-none-any.whl", hash = "sha256:a749938e02d6fd0b59b356ca504a24982314bb090c383e3cf201c95ef7e2bfcf", size = 111120, upload-time = "2025-03-25T05:01:24.908Z" }, +] + [[package]] name = "python-dateutil" version = "2.9.0.post0" @@ -192,26 +243,26 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "six" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432 } +sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432, upload-time = "2024-03-01T18:36:20.211Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = 
"sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892 }, + { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" }, ] [[package]] name = "pyyaml" version = "6.0.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631 } +sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631, upload-time = "2024-08-06T20:33:50.674Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ef/e3/3af305b830494fa85d95f6d95ef7fa73f2ee1cc8ef5b495c7c3269fb835f/PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba", size = 181309 }, - { url = "https://files.pythonhosted.org/packages/45/9f/3b1c20a0b7a3200524eb0076cc027a970d320bd3a6592873c85c92a08731/PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1", size = 171679 }, - { url = "https://files.pythonhosted.org/packages/7c/9a/337322f27005c33bcb656c655fa78325b730324c78620e8328ae28b64d0c/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133", size = 733428 }, - { url = 
"https://files.pythonhosted.org/packages/a3/69/864fbe19e6c18ea3cc196cbe5d392175b4cf3d5d0ac1403ec3f2d237ebb5/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484", size = 763361 }, - { url = "https://files.pythonhosted.org/packages/04/24/b7721e4845c2f162d26f50521b825fb061bc0a5afcf9a386840f23ea19fa/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5", size = 759523 }, - { url = "https://files.pythonhosted.org/packages/2b/b2/e3234f59ba06559c6ff63c4e10baea10e5e7df868092bf9ab40e5b9c56b6/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc", size = 726660 }, - { url = "https://files.pythonhosted.org/packages/fe/0f/25911a9f080464c59fab9027482f822b86bf0608957a5fcc6eaac85aa515/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652", size = 751597 }, - { url = "https://files.pythonhosted.org/packages/14/0d/e2c3b43bbce3cf6bd97c840b46088a3031085179e596d4929729d8d68270/PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183", size = 140527 }, - { url = "https://files.pythonhosted.org/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446 }, + { url = "https://files.pythonhosted.org/packages/ef/e3/3af305b830494fa85d95f6d95ef7fa73f2ee1cc8ef5b495c7c3269fb835f/PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba", size = 181309, upload-time = "2024-08-06T20:32:43.4Z" }, + { url = 
"https://files.pythonhosted.org/packages/45/9f/3b1c20a0b7a3200524eb0076cc027a970d320bd3a6592873c85c92a08731/PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1", size = 171679, upload-time = "2024-08-06T20:32:44.801Z" }, + { url = "https://files.pythonhosted.org/packages/7c/9a/337322f27005c33bcb656c655fa78325b730324c78620e8328ae28b64d0c/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133", size = 733428, upload-time = "2024-08-06T20:32:46.432Z" }, + { url = "https://files.pythonhosted.org/packages/a3/69/864fbe19e6c18ea3cc196cbe5d392175b4cf3d5d0ac1403ec3f2d237ebb5/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484", size = 763361, upload-time = "2024-08-06T20:32:51.188Z" }, + { url = "https://files.pythonhosted.org/packages/04/24/b7721e4845c2f162d26f50521b825fb061bc0a5afcf9a386840f23ea19fa/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5", size = 759523, upload-time = "2024-08-06T20:32:53.019Z" }, + { url = "https://files.pythonhosted.org/packages/2b/b2/e3234f59ba06559c6ff63c4e10baea10e5e7df868092bf9ab40e5b9c56b6/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc", size = 726660, upload-time = "2024-08-06T20:32:54.708Z" }, + { url = "https://files.pythonhosted.org/packages/fe/0f/25911a9f080464c59fab9027482f822b86bf0608957a5fcc6eaac85aa515/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652", size = 751597, upload-time = "2024-08-06T20:32:56.985Z" }, + { url = 
"https://files.pythonhosted.org/packages/14/0d/e2c3b43bbce3cf6bd97c840b46088a3031085179e596d4929729d8d68270/PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183", size = 140527, upload-time = "2024-08-06T20:33:03.001Z" }, + { url = "https://files.pythonhosted.org/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446, upload-time = "2024-08-06T20:33:04.33Z" }, ] [[package]] @@ -221,37 +272,55 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pyyaml" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/fb/8e/da1c6c58f751b70f8ceb1eb25bc25d524e8f14fe16edcce3f4e3ba08629c/pyyaml_env_tag-0.1.tar.gz", hash = "sha256:70092675bda14fdec33b31ba77e7543de9ddc88f2e5b99160396572d11525bdb", size = 5631 } +sdist = { url = "https://files.pythonhosted.org/packages/fb/8e/da1c6c58f751b70f8ceb1eb25bc25d524e8f14fe16edcce3f4e3ba08629c/pyyaml_env_tag-0.1.tar.gz", hash = "sha256:70092675bda14fdec33b31ba77e7543de9ddc88f2e5b99160396572d11525bdb", size = 5631, upload-time = "2020-11-12T02:38:26.239Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/5a/66/bbb1dd374f5c870f59c5bb1db0e18cbe7fa739415a24cbd95b2d1f5ae0c4/pyyaml_env_tag-0.1-py3-none-any.whl", hash = "sha256:af31106dec8a4d68c60207c1886031cbf839b68aa7abccdb19868200532c2069", size = 3911 }, + { url = "https://files.pythonhosted.org/packages/5a/66/bbb1dd374f5c870f59c5bb1db0e18cbe7fa739415a24cbd95b2d1f5ae0c4/pyyaml_env_tag-0.1-py3-none-any.whl", hash = "sha256:af31106dec8a4d68c60207c1886031cbf839b68aa7abccdb19868200532c2069", size = 3911, upload-time = "2020-11-12T02:38:24.638Z" }, ] [[package]] name = "six" version = "1.17.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031 } +sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload-time = "2024-12-04T17:35:28.174Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050 }, + { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" }, +] + +[[package]] +name = "verspec" +version = "0.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e7/44/8126f9f0c44319b2efc65feaad589cadef4d77ece200ae3c9133d58464d0/verspec-0.1.0.tar.gz", hash = "sha256:c4504ca697b2056cdb4bfa7121461f5a0e81809255b41c03dda4ba823637c01e", size = 27123, upload-time = "2020-11-30T02:24:09.646Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a4/ce/3b6fee91c85626eaf769d617f1be9d2e15c1cca027bbdeb2e0d751469355/verspec-0.1.0-py3-none-any.whl", hash = "sha256:741877d5633cc9464c45a469ae2a31e801e6dbbaa85b9675d481cda100f11c31", size = 19640, upload-time = "2020-11-30T02:24:08.387Z" }, ] [[package]] name = "watchdog" version = "6.0.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/db/7d/7f3d619e951c88ed75c6037b246ddcf2d322812ee8ea189be89511721d54/watchdog-6.0.0.tar.gz", hash = "sha256:9ddf7c82fda3ae8e24decda1338ede66e1c99883db93711d8fb941eaa2d8c282", size = 131220 } +sdist = { url = "https://files.pythonhosted.org/packages/db/7d/7f3d619e951c88ed75c6037b246ddcf2d322812ee8ea189be89511721d54/watchdog-6.0.0.tar.gz", hash = "sha256:9ddf7c82fda3ae8e24decda1338ede66e1c99883db93711d8fb941eaa2d8c282", size = 131220, upload-time = "2024-11-01T14:07:13.037Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/68/98/b0345cabdce2041a01293ba483333582891a3bd5769b08eceb0d406056ef/watchdog-6.0.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:490ab2ef84f11129844c23fb14ecf30ef3d8a6abafd3754a6f75ca1e6654136c", size = 96480, upload-time = "2024-11-01T14:06:42.952Z" }, + { url = "https://files.pythonhosted.org/packages/85/83/cdf13902c626b28eedef7ec4f10745c52aad8a8fe7eb04ed7b1f111ca20e/watchdog-6.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:76aae96b00ae814b181bb25b1b98076d5fc84e8a53cd8885a318b42b6d3a5134", size = 88451, upload-time = "2024-11-01T14:06:45.084Z" }, + { url = "https://files.pythonhosted.org/packages/fe/c4/225c87bae08c8b9ec99030cd48ae9c4eca050a59bf5c2255853e18c87b50/watchdog-6.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a175f755fc2279e0b7312c0035d52e27211a5bc39719dd529625b1930917345b", size = 89057, upload-time = "2024-11-01T14:06:47.324Z" }, + { url = "https://files.pythonhosted.org/packages/a9/c7/ca4bf3e518cb57a686b2feb4f55a1892fd9a3dd13f470fca14e00f80ea36/watchdog-6.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:7607498efa04a3542ae3e05e64da8202e58159aa1fa4acddf7678d34a35d4f13", size = 79079, upload-time = "2024-11-01T14:06:59.472Z" }, + { url = "https://files.pythonhosted.org/packages/5c/51/d46dc9332f9a647593c947b4b88e2381c8dfc0942d15b8edc0310fa4abb1/watchdog-6.0.0-py3-none-manylinux2014_armv7l.whl", hash = 
"sha256:9041567ee8953024c83343288ccc458fd0a2d811d6a0fd68c4c22609e3490379", size = 79078, upload-time = "2024-11-01T14:07:01.431Z" }, + { url = "https://files.pythonhosted.org/packages/d4/57/04edbf5e169cd318d5f07b4766fee38e825d64b6913ca157ca32d1a42267/watchdog-6.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:82dc3e3143c7e38ec49d61af98d6558288c415eac98486a5c581726e0737c00e", size = 79076, upload-time = "2024-11-01T14:07:02.568Z" }, + { url = "https://files.pythonhosted.org/packages/ab/cc/da8422b300e13cb187d2203f20b9253e91058aaf7db65b74142013478e66/watchdog-6.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:212ac9b8bf1161dc91bd09c048048a95ca3a4c4f5e5d4a7d1b1a7d5752a7f96f", size = 79077, upload-time = "2024-11-01T14:07:03.893Z" }, + { url = "https://files.pythonhosted.org/packages/2c/3b/b8964e04ae1a025c44ba8e4291f86e97fac443bca31de8bd98d3263d2fcf/watchdog-6.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:e3df4cbb9a450c6d49318f6d14f4bbc80d763fa587ba46ec86f99f9e6876bb26", size = 79078, upload-time = "2024-11-01T14:07:05.189Z" }, + { url = "https://files.pythonhosted.org/packages/62/ae/a696eb424bedff7407801c257d4b1afda455fe40821a2be430e173660e81/watchdog-6.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:2cce7cfc2008eb51feb6aab51251fd79b85d9894e98ba847408f662b3395ca3c", size = 79077, upload-time = "2024-11-01T14:07:06.376Z" }, + { url = "https://files.pythonhosted.org/packages/b5/e8/dbf020b4d98251a9860752a094d09a65e1b436ad181faf929983f697048f/watchdog-6.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:20ffe5b202af80ab4266dcd3e91aae72bf2da48c0d33bdb15c66658e685e94e2", size = 79078, upload-time = "2024-11-01T14:07:07.547Z" }, + { url = "https://files.pythonhosted.org/packages/07/f6/d0e5b343768e8bcb4cda79f0f2f55051bf26177ecd5651f84c07567461cf/watchdog-6.0.0-py3-none-win32.whl", hash = "sha256:07df1fdd701c5d4c8e55ef6cf55b8f0120fe1aef7ef39a1c6fc6bc2e606d517a", size = 79065, upload-time = "2024-11-01T14:07:09.525Z" }, + { url = 
"https://files.pythonhosted.org/packages/db/d9/c495884c6e548fce18a8f40568ff120bc3a4b7b99813081c8ac0c936fa64/watchdog-6.0.0-py3-none-win_amd64.whl", hash = "sha256:cbafb470cf848d93b5d013e2ecb245d4aa1c8fd0504e863ccefa32445359d680", size = 79070, upload-time = "2024-11-01T14:07:10.686Z" }, + { url = "https://files.pythonhosted.org/packages/33/e8/e40370e6d74ddba47f002a32919d91310d6074130fe4e17dabcafc15cbf1/watchdog-6.0.0-py3-none-win_ia64.whl", hash = "sha256:a1914259fa9e1454315171103c6a30961236f508b9b623eae470268bbcc6a22f", size = 79067, upload-time = "2024-11-01T14:07:11.845Z" }, +] + +[[package]] +name = "zipp" +version = "3.22.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/12/b6/7b3d16792fdf94f146bed92be90b4eb4563569eca91513c8609aebf0c167/zipp-3.22.0.tar.gz", hash = "sha256:dd2f28c3ce4bc67507bfd3781d21b7bb2be31103b51a4553ad7d90b84e57ace5", size = 25257, upload-time = "2025-05-26T14:46:32.217Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/68/98/b0345cabdce2041a01293ba483333582891a3bd5769b08eceb0d406056ef/watchdog-6.0.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:490ab2ef84f11129844c23fb14ecf30ef3d8a6abafd3754a6f75ca1e6654136c", size = 96480 }, - { url = "https://files.pythonhosted.org/packages/85/83/cdf13902c626b28eedef7ec4f10745c52aad8a8fe7eb04ed7b1f111ca20e/watchdog-6.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:76aae96b00ae814b181bb25b1b98076d5fc84e8a53cd8885a318b42b6d3a5134", size = 88451 }, - { url = "https://files.pythonhosted.org/packages/fe/c4/225c87bae08c8b9ec99030cd48ae9c4eca050a59bf5c2255853e18c87b50/watchdog-6.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a175f755fc2279e0b7312c0035d52e27211a5bc39719dd529625b1930917345b", size = 89057 }, - { url = "https://files.pythonhosted.org/packages/a9/c7/ca4bf3e518cb57a686b2feb4f55a1892fd9a3dd13f470fca14e00f80ea36/watchdog-6.0.0-py3-none-manylinux2014_aarch64.whl", hash = 
"sha256:7607498efa04a3542ae3e05e64da8202e58159aa1fa4acddf7678d34a35d4f13", size = 79079 }, - { url = "https://files.pythonhosted.org/packages/5c/51/d46dc9332f9a647593c947b4b88e2381c8dfc0942d15b8edc0310fa4abb1/watchdog-6.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:9041567ee8953024c83343288ccc458fd0a2d811d6a0fd68c4c22609e3490379", size = 79078 }, - { url = "https://files.pythonhosted.org/packages/d4/57/04edbf5e169cd318d5f07b4766fee38e825d64b6913ca157ca32d1a42267/watchdog-6.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:82dc3e3143c7e38ec49d61af98d6558288c415eac98486a5c581726e0737c00e", size = 79076 }, - { url = "https://files.pythonhosted.org/packages/ab/cc/da8422b300e13cb187d2203f20b9253e91058aaf7db65b74142013478e66/watchdog-6.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:212ac9b8bf1161dc91bd09c048048a95ca3a4c4f5e5d4a7d1b1a7d5752a7f96f", size = 79077 }, - { url = "https://files.pythonhosted.org/packages/2c/3b/b8964e04ae1a025c44ba8e4291f86e97fac443bca31de8bd98d3263d2fcf/watchdog-6.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:e3df4cbb9a450c6d49318f6d14f4bbc80d763fa587ba46ec86f99f9e6876bb26", size = 79078 }, - { url = "https://files.pythonhosted.org/packages/62/ae/a696eb424bedff7407801c257d4b1afda455fe40821a2be430e173660e81/watchdog-6.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:2cce7cfc2008eb51feb6aab51251fd79b85d9894e98ba847408f662b3395ca3c", size = 79077 }, - { url = "https://files.pythonhosted.org/packages/b5/e8/dbf020b4d98251a9860752a094d09a65e1b436ad181faf929983f697048f/watchdog-6.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:20ffe5b202af80ab4266dcd3e91aae72bf2da48c0d33bdb15c66658e685e94e2", size = 79078 }, - { url = "https://files.pythonhosted.org/packages/07/f6/d0e5b343768e8bcb4cda79f0f2f55051bf26177ecd5651f84c07567461cf/watchdog-6.0.0-py3-none-win32.whl", hash = "sha256:07df1fdd701c5d4c8e55ef6cf55b8f0120fe1aef7ef39a1c6fc6bc2e606d517a", size = 79065 }, - { url = 
"https://files.pythonhosted.org/packages/db/d9/c495884c6e548fce18a8f40568ff120bc3a4b7b99813081c8ac0c936fa64/watchdog-6.0.0-py3-none-win_amd64.whl", hash = "sha256:cbafb470cf848d93b5d013e2ecb245d4aa1c8fd0504e863ccefa32445359d680", size = 79070 }, - { url = "https://files.pythonhosted.org/packages/33/e8/e40370e6d74ddba47f002a32919d91310d6074130fe4e17dabcafc15cbf1/watchdog-6.0.0-py3-none-win_ia64.whl", hash = "sha256:a1914259fa9e1454315171103c6a30961236f508b9b623eae470268bbcc6a22f", size = 79067 }, + { url = "https://files.pythonhosted.org/packages/ad/da/f64669af4cae46f17b90798a827519ce3737d31dbafad65d391e49643dc4/zipp-3.22.0-py3-none-any.whl", hash = "sha256:fe208f65f2aca48b81f9e6fd8cf7b8b32c26375266b009b413d45306b6148343", size = 9796, upload-time = "2025-05-26T14:46:30.775Z" }, ] diff --git a/xtask/codegen/Cargo.toml b/xtask/codegen/Cargo.toml index b5497b2cb..758a3212d 100644 --- a/xtask/codegen/Cargo.toml +++ b/xtask/codegen/Cargo.toml @@ -14,6 +14,7 @@ biome_string_case = { workspace = true } bpaf = { workspace = true, features = ["derive"] } pgt_analyse = { workspace = true } pgt_analyser = { workspace = true } +pgt_diagnostics = { workspace = true } pgt_workspace = { workspace = true, features = ["schema"] } proc-macro2 = { workspace = true, features = ["span-locations"] } pulldown-cmark = { version = "0.12.2" } diff --git a/xtask/codegen/src/generate_configuration.rs b/xtask/codegen/src/generate_configuration.rs index 91ae304c6..661f44b51 100644 --- a/xtask/codegen/src/generate_configuration.rs +++ b/xtask/codegen/src/generate_configuration.rs @@ -1,6 +1,7 @@ use crate::{to_capitalized, update}; use biome_string_case::Case; use pgt_analyse::{GroupCategory, RegistryVisitor, Rule, RuleCategory, RuleGroup, RuleMetadata}; +use pgt_diagnostics::Severity; use proc_macro2::{Ident, Literal, Span, TokenStream}; use pulldown_cmark::{Event, Parser, Tag, TagEnd}; use quote::quote; @@ -60,6 +61,8 @@ fn generate_for_groups( let mut group_idents = 
Vec::with_capacity(groups.len()); let mut group_strings = Vec::with_capacity(groups.len()); let mut group_as_default_rules = Vec::with_capacity(groups.len()); + let mut group_as_disabled_rules = Vec::with_capacity(groups.len()); + for (group, rules) in groups { let group_pascal_ident = quote::format_ident!("{}", &Case::Pascal.convert(group)); let group_ident = quote::format_ident!("{}", group); @@ -94,6 +97,12 @@ fn generate_for_groups( } }); + group_as_disabled_rules.push(quote! { + if let Some(group) = self.#group_ident.as_ref() { + disabled_rules.extend(&group.get_disabled_rules()); + } + }); + group_pascal_idents.push(group_pascal_ident); group_idents.push(group_ident); group_strings.push(Literal::string(group)); @@ -135,10 +144,9 @@ fn generate_for_groups( /// Given a category coming from [Diagnostic](pgt_diagnostics::Diagnostic), this function returns /// the [Severity](pgt_diagnostics::Severity) associated to the rule, if the configuration changed it. - /// If the severity is off or not set, then the function returns the default severity of the rule: - /// [Severity::Error] for recommended rules and [Severity::Warning] for other rules. - /// - /// If not, the function returns [None]. + /// If the severity is off or not set, then the function returns the default severity of the rule, + /// which is configured at the rule definition. + /// The function can return `None` if the rule is not properly configured. 
pub fn get_severity_from_code(&self, category: &Category) -> Option { let mut split_code = category.name().split('/'); @@ -155,13 +163,10 @@ fn generate_for_groups( .as_ref() .and_then(|group| group.get_rule_configuration(rule_name)) .filter(|(level, _)| !matches!(level, RulePlainConfiguration::Off)) - .map_or_else(|| { - if #group_pascal_idents::is_recommended_rule(rule_name) { - Severity::Error - } else { - Severity::Warning - } - }, |(level, _)| level.into()), + .map_or_else( + || #group_pascal_idents::severity(rule_name), + |(level, _)| level.into() + ), )* }; Some(severity) @@ -249,6 +254,13 @@ fn generate_for_groups( #( #group_as_default_rules )* enabled_rules } + + /// It returns the disabled rules by configuration. + pub fn as_disabled_rules(&self) -> FxHashSet> { + let mut disabled_rules = FxHashSet::default(); + #( #group_as_disabled_rules )* + disabled_rules + } } #( #struct_groups )* @@ -361,6 +373,13 @@ fn generate_for_groups( enabled_rules.difference(&disabled_rules).copied().collect() } + + /// It returns the disabled rules by configuration. + pub fn as_disabled_rules(&self) -> FxHashSet> { + let mut disabled_rules = FxHashSet::default(); + #( #group_as_disabled_rules )* + disabled_rules + } } #( #struct_groups )* @@ -453,7 +472,6 @@ fn generate_group_struct( rules: &BTreeMap<&'static str, RuleMetadata>, kind: RuleCategory, ) -> TokenStream { - let mut lines_recommended_rule = Vec::new(); let mut lines_recommended_rule_as_filter = Vec::new(); let mut lines_all_rule_as_filter = Vec::new(); let mut lines_rule = Vec::new(); @@ -461,6 +479,7 @@ fn generate_group_struct( let mut rule_enabled_check_line = Vec::new(); let mut rule_disabled_check_line = Vec::new(); let mut get_rule_configuration_line = Vec::new(); + let mut get_severity_lines = Vec::new(); for (index, (rule, metadata)) in rules.iter().enumerate() { let summary = { @@ -522,10 +541,6 @@ fn generate_group_struct( lines_recommended_rule_as_filter.push(quote! 
{ RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[#rule_position]) }); - - lines_recommended_rule.push(quote! { - #rule - }); } lines_all_rule_as_filter.push(quote! { RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[#rule_position]) @@ -567,6 +582,18 @@ fn generate_group_struct( get_rule_configuration_line.push(quote! { #rule => self.#rule_identifier.as_ref().map(|conf| (conf.level(), conf.get_options())) }); + + let severity = match metadata.severity { + Severity::Hint => quote! { Severity::Hint }, + Severity::Information => quote! { Severity::Information }, + Severity::Warning => quote! { Severity::Warning }, + Severity::Error => quote! { Severity::Error }, + Severity::Fatal => quote! { Severity::Fatal }, + }; + + get_severity_lines.push(quote! { + #rule => #severity + }) } let group_pascal_ident = Ident::new(&to_capitalized(group), Span::call_site()); @@ -648,10 +675,6 @@ fn generate_group_struct( #( #lines_rule ),* ]; - const RECOMMENDED_RULES: &'static [&'static str] = &[ - #( #lines_recommended_rule ),* - ]; - const RECOMMENDED_RULES_AS_FILTERS: &'static [RuleFilter<'static>] = &[ #( #lines_recommended_rule_as_filter ),* ]; @@ -695,11 +718,6 @@ fn generate_group_struct( Some(Self::GROUP_RULES[Self::GROUP_RULES.binary_search(&rule_name).ok()?]) } - /// Checks if, given a rule name, it is marked as recommended - pub(crate) fn is_recommended_rule(rule_name: &str) -> bool { - Self::RECOMMENDED_RULES.contains(&rule_name) - } - pub(crate) fn recommended_rules_as_filters() -> &'static [RuleFilter<'static>] { Self::RECOMMENDED_RULES_AS_FILTERS } @@ -725,6 +743,13 @@ fn generate_group_struct( } } + pub(crate) fn severity(rule_name: &str) -> Severity { + match rule_name { + #( #get_severity_lines ),*, + _ => unreachable!() + } + } + #get_configuration_function } } diff --git a/xtask/codegen/src/generate_new_analyser_rule.rs b/xtask/codegen/src/generate_new_analyser_rule.rs index 6fecdff77..4c4bcc696 100644 --- a/xtask/codegen/src/generate_new_analyser_rule.rs 
+++ b/xtask/codegen/src/generate_new_analyser_rule.rs @@ -1,5 +1,6 @@ use biome_string_case::Case; use bpaf::Bpaf; +use pgt_diagnostics::Severity; use std::str::FromStr; use xtask::project_root; @@ -24,15 +25,27 @@ fn generate_rule_template( category: &Category, rule_name_upper_camel: &str, rule_name_lower_camel: &str, + severity: Severity, ) -> String { let macro_name = match category { Category::Lint => "declare_lint_rule", }; + + let severity_code = match severity { + Severity::Hint => "Severity::Hint", + Severity::Information => "Severity::Information", + Severity::Warning => "Severity::Warning", + Severity::Error => "Severity::Error", + Severity::Fatal => "Severity::Fatal", + }; + format!( r#"use pgt_analyse::{{ - context::RuleContext, {macro_name}, Rule, RuleDiagnostic + AnalysedFileContext, context::RuleContext, {macro_name}, Rule, RuleDiagnostic, }}; use pgt_console::markup; +use pgt_diagnostics::Severity; +use pgt_schema_cache::SchemaCache; {macro_name}! {{ /// Succinct description of the rule. 
@@ -58,6 +71,7 @@ use pgt_console::markup; pub {rule_name_upper_camel} {{ version: "next", name: "{rule_name_lower_camel}", + severity: {severity_code}, recommended: false, }} }} @@ -65,7 +79,11 @@ use pgt_console::markup; impl Rule for {rule_name_upper_camel} {{ type Options = (); - fn run(ctx: &RuleContext) -> Vec {{ + fn run( + ctx: &RuleContext + _file_context: &AnalysedFileContext, + _schema_cache: Option<&SchemaCache>, + ) -> Vec {{ Vec::new() }} }} @@ -77,7 +95,12 @@ fn gen_sql(category_name: &str) -> String { format!("-- expect_only_{category_name}\n-- select 1;") } -pub fn generate_new_analyser_rule(category: Category, rule_name: &str, group: &str) { +pub fn generate_new_analyser_rule( + category: Category, + rule_name: &str, + group: &str, + severity: Severity, +) { let rule_name_camel = Case::Camel.convert(rule_name); let crate_folder = project_root().join("crates/pgt_analyser"); let rule_folder = match &category { @@ -92,6 +115,7 @@ pub fn generate_new_analyser_rule(category: Category, rule_name: &str, group: &s &category, Case::Pascal.convert(rule_name).as_str(), rule_name_camel.as_str(), + severity, ); let file_name = format!( "{}/{}.rs", @@ -108,7 +132,7 @@ pub fn generate_new_analyser_rule(category: Category, rule_name: &str, group: &s // We sort rules to reduce conflicts between contributions made in parallel. 
let rule_line = match category { Category::Lint => format!( - r#" "lint/{group}/{rule_name_camel}": "https://pgtools.dev/linter/rules/{kebab_case_rule}","# + r#" "lint/{group}/{rule_name_camel}": "https://pgtools.dev/latest/rules/{kebab_case_rule}","# ), }; let lint_start = match category { diff --git a/xtask/codegen/src/lib.rs b/xtask/codegen/src/lib.rs index 61ae5e4f3..dc6f81a08 100644 --- a/xtask/codegen/src/lib.rs +++ b/xtask/codegen/src/lib.rs @@ -13,6 +13,7 @@ pub use self::generate_crate::generate_crate; pub use self::generate_new_analyser_rule::generate_new_analyser_rule; use bpaf::Bpaf; use generate_new_analyser_rule::Category; +use pgt_diagnostics::Severity; use std::path::Path; use xtask::{glue::fs2, Mode, Result}; @@ -84,5 +85,9 @@ pub enum TaskCommand { /// Group of the rule #[bpaf(long("group"))] group: String, + + /// Severity of the rule + #[bpaf(long("severity"), fallback(Severity::Error))] + severity: Severity, }, } diff --git a/xtask/codegen/src/main.rs b/xtask/codegen/src/main.rs index 8e0e6cd89..4ff33c21b 100644 --- a/xtask/codegen/src/main.rs +++ b/xtask/codegen/src/main.rs @@ -21,8 +21,9 @@ fn main() -> Result<()> { name, category, group, + severity, } => { - generate_new_analyser_rule(category, &name, &group); + generate_new_analyser_rule(category, &name, &group, severity); } TaskCommand::Configuration => { generate_rules_configuration(Overwrite)?; diff --git a/xtask/rules_check/Cargo.toml b/xtask/rules_check/Cargo.toml index 3f0198d1c..abd02a05b 100644 --- a/xtask/rules_check/Cargo.toml +++ b/xtask/rules_check/Cargo.toml @@ -11,6 +11,7 @@ pgt_analyse = { workspace = true } pgt_analyser = { workspace = true } pgt_console = { workspace = true } pgt_diagnostics = { workspace = true } +pgt_query = { workspace = true } pgt_query_ext = { workspace = true } pgt_statement_splitter = { workspace = true } pgt_workspace = { workspace = true } diff --git a/xtask/rules_check/src/lib.rs b/xtask/rules_check/src/lib.rs index 68a6d6504..dfdd24ba5 100644 --- 
a/xtask/rules_check/src/lib.rs +++ b/xtask/rules_check/src/lib.rs @@ -7,7 +7,7 @@ use pgt_analyse::{ AnalyserOptions, AnalysisFilter, GroupCategory, RegistryVisitor, Rule, RuleCategory, RuleFilter, RuleGroup, RuleMetadata, }; -use pgt_analyser::{Analyser, AnalyserConfig}; +use pgt_analyser::{AnalysableStatement, Analyser, AnalyserConfig}; use pgt_console::{markup, Console}; use pgt_diagnostics::{Diagnostic, DiagnosticExt, PrintDiagnostic}; use pgt_query_ext::diagnostics::SyntaxDiagnostic; @@ -126,52 +126,55 @@ fn assert_lint( filter, }); - // split and parse each statement - match pgt_statement_splitter::split(code) { - Ok(stmts) => { - for stmt in stmts.ranges { - match pgt_query_ext::parse(&code[stmt]) { - Ok(ast) => { - for rule_diag in analyser.run(pgt_analyser::AnalyserContext { root: &ast }) - { - let diag = pgt_diagnostics::serde::Diagnostic::new(rule_diag); - - let category = diag.category().expect("linter diagnostic has no code"); - let severity = settings.get_severity_from_rule_code(category).expect( + let result = pgt_statement_splitter::split(code); + for stmt_range in result.ranges { + match pgt_query::parse(&code[stmt_range]) { + Ok(ast) => { + if let Some(root) = ast.into_root() { + for rule_diag in analyser.run(pgt_analyser::AnalyserParams { + schema_cache: None, + stmts: vec![AnalysableStatement { + range: stmt_range, + root, + }], + }) { + let diag = pgt_diagnostics::serde::Diagnostic::new(rule_diag); + + let category = diag.category().expect("linter diagnostic has no code"); + let severity = settings.get_severity_from_rule_code(category).expect( "If you see this error, it means you need to run cargo codegen-configuration", ); - let error = diag - .with_severity(severity) - .with_file_path(&file_path) - .with_file_source_code(code); - - write_diagnostic(code, error)?; - } - } - Err(e) => { - let error = SyntaxDiagnostic::from(e) + let error = diag + .with_severity(severity) .with_file_path(&file_path) .with_file_source_code(code); + 
write_diagnostic(code, error)?; } - }; + } } - } - Err(errs) => { - // Print all diagnostics to help the user - let mut console = pgt_console::EnvConsole::default(); - for err in errs { - console.println( - pgt_console::LogLevel::Error, - markup! { - {PrintDiagnostic::verbose(&err)} - }, - ); + Err(e) => { + let error = SyntaxDiagnostic::from(e) + .with_file_path(&file_path) + .with_file_source_code(code); + write_diagnostic(code, error)?; } - bail!("Analysis of '{group}/{rule}' on the following code block returned a scan diagnostic.\n\n{code}"); + }; + } + if !result.errors.is_empty() { + // Print all diagnostics to help the user + let mut console = pgt_console::EnvConsole::default(); + for err in result.errors { + console.println( + pgt_console::LogLevel::Error, + markup! { + {PrintDiagnostic::verbose(&err)} + }, + ); } - }; + bail!("Analysis of '{group}/{rule}' on the following code block returned a scan diagnostic.\n\n{code}"); + } Ok(()) }